repo_name stringlengths 6 101 | path stringlengths 4 300 | text stringlengths 7 1.31M |
|---|---|---|
Lambda-School-Labs/betterreads-frontend | src/components/search/styles/SearchStyle.js | import styled from 'styled-components';
// Layout wrapper for the search page.
// Restyles antd's BackTop affordance with a translucent green, and on
// viewports >= 1120px pins the content to a centred fixed-width flex row.
const SearchContainer = styled.div`
  .ant-back-top-content {
    background-color: rgba(84, 120, 98, 0.75);
  }
  @media (min-width: 1120px) {
    width: 1120px;
    margin: 0 auto;
    display: flex;
    justify-content: space-between;
  }
`;
export default SearchContainer;
|
deora-earth/Habitat | src/token/deployment/2-TokenTurner.js | <filename>src/token/deployment/2-TokenTurner.js
// Deployment step 2: deploy the TokenTurner contract, selecting the
// mainnet or Ropsten artifact based on the configured network.
import { Artifacts, deploy, wallet, network } from './lib.js';

const { TokenTurnerMainnet, TokenTurnerRopsten } = Artifacts;
const target = network === 'mainnet' ? TokenTurnerMainnet : TokenTurnerRopsten;
const tokenTurner = await deploy(target, wallet);

// Seeding the turner with HBT happens elsewhere; kept for reference:
//const initialSupply = 2_000_000n * (10n**10n);
//await (await hbt.transfer(tokenTurner.address, initialSupply)).wait();
|
labs15-career-endorsement-tracker/frontend | src/components/lib/Loaders/fullPageLoader.js | import React from "react"
import "./index.scss"
import Loader from "react-loader-spinner"
import "react-loader-spinner/dist/loader/css/react-spinner-loader.css"
// Full-page spinner shown while the app is loading.
// Renders a centred green "Triangle" spinner from react-loader-spinner;
// the centring/overlay layout comes from .fullpage-loader in index.scss.
const FullPageLoader = () => {
  return (
    <div className="fullpage-loader">
      <Loader
        type="Triangle"
        color="#29AD44"
        height={100}
        width={100}
        // timeout={8000} //8 secs
      />
    </div>
  )
}
export default FullPageLoader
|
Wonjuny0804/JavaScript | SecretCode/UI/case3_InfiniteScroll/solution/2.other's_1/s2_js_debounce_trottle/util.js | <reponame>Wonjuny0804/JavaScript
// Random delay: a multiple of 250 ms between 250 and 1500 ms inclusive.
const getRandomSeconds = () => 250 * (Math.round(Math.random() * 5) + 1);
/**
 * Build a Promise executor that resolves with `func(...args)` after a
 * random delay (250-1500 ms, see getRandomSeconds).
 */
export const randomTimer = (func, ...args) => (resolve) => {
  const run = () => resolve(func(...args));
  setTimeout(run, getRandomSeconds());
};
/**
 * Return a debounced version of `func`: each call restarts a `delay` ms
 * timer, so only the last call in a burst actually runs (a closure over
 * the pending timer id keeps the state between calls).
 */
export const debounce = (func, delay) => {
  // Timer id of the not-yet-fired task, if any.
  let pending = null;

  return (...args) => {
    // A task is already scheduled: drop it in favour of this call.
    if (pending !== null) {
      clearTimeout(pending);
    }
    // Re-schedule `func` to run `delay` ms from now with the latest args.
    pending = setTimeout(func, delay, ...args);
  };
};
// Simulate a network fetch: resolves with `method(args)` after a random
// delay supplied by randomTimer.
export const dummyFetcher = (method, args) =>
  new Promise(randomTimer(method, args));
/**
* `debounce`๋ ํด๋ก์ (closure)์ ์๋ฆฌ๋ฅผ ์ดํดํ๊ณ , `setTimeout`๋ฅผ ์๊ณ ์์ผ๋ฉด ๊ฐ๋จํ ๊ตฌํํ ์ ์์ต๋๋ค.
*
* @see https://developer.mozilla.org/ko/docs/Web/JavaScript/Guide/Closures Closure
* @see https://developer.mozilla.org/ko/docs/Web/API/WindowTimers/setTimeout SetTimeout
*/ |
LJLintermittent/leetcode | src/main/java/com/learn/leetcode/designpattern/decorator/BatterCakeDecorator.java | package com.learn.leetcode.designpattern.decorator;
/**
* Description:
* date: 2021/9/11 19:38
* Package: com.learn.leetcode.designpattern.decorator
*
* @author ๆไฝณไน
* @email <EMAIL>
*/
/**
 * Abstract decorator for {@link BatterCake}: wraps another batter cake and
 * forwards {@code getMsg()}/{@code getPrice()} to it, letting concrete
 * subclasses layer extra toppings on top via {@link #doSomething()}.
 */
@SuppressWarnings("all")
public abstract class BatterCakeDecorator extends BatterCake {

    /** The wrapped component every call is delegated to. */
    private BatterCake delegate;

    public BatterCakeDecorator(BatterCake batterCake) {
        this.delegate = batterCake;
    }

    /** Hook for concrete decorators to add their own behaviour. */
    protected abstract void doSomething();

    @Override
    protected String getMsg() {
        return delegate.getMsg();
    }

    @Override
    protected int getPrice() {
        return delegate.getPrice();
    }
}
|
mahaplatform/mahaplatform.com | src/apps/forms/serializers/response_serializer.js | <filename>src/apps/forms/serializers/response_serializer.js
import { expandData } from '@apps/forms/services/responses'
// Serialize one form-response model (plus its eagerly-loaded relations)
// into the JSON shape returned by the API.  The related contact,
// enrollment and payment are serialized by the helpers below; the raw
// answer data is expanded against the form's field definitions.
const ResponseSerializer = async (req, result) => ({
  id: result.get('id'),
  contact: contact(result.related('contact')),
  data: await data(req, result.related('form'), result.get('data')),
  enrollment: enrollment(result.related('enrollment')),
  referer: result.get('referer'),
  ipaddress: result.get('ipaddress'),
  duration: result.get('duration'),
  is_known: result.get('is_known'),
  revenue: result.get('revenue'),
  invoice_id: result.get('invoice_id'),
  payment: payment(result.related('payment')),
  created_at: result.get('created_at'),
  updated_at: result.get('updated_at')
})
/**
 * Serialize the related contact model; null when the response has no
 * associated contact.  `photo` resolves to the photo asset's path when
 * one is loaded.
 */
const contact = (contact) => {
  if (!contact) return null;
  const photo = contact.related('photo');
  return {
    id: contact.get('id'),
    display_name: contact.get('display_name'),
    initials: contact.get('initials'),
    email: contact.get('email'),
    photo: photo ? photo.get('path') : null,
  };
};
/**
 * Serialize the related payment model; null when the response carries no
 * payment.
 */
const payment = (payment) => {
  if (!payment) return null;
  const keys = ['id', 'method', 'reference', 'amount'];
  return keys.reduce((out, key) => ({ ...out, [key]: payment.get(key) }), {});
};
/**
 * Serialize the related workflow enrollment; null when none exists.
 */
const enrollment = (enrollment) =>
  enrollment
    ? { id: enrollment.get('id'), workflow_id: enrollment.get('workflow_id') }
    : null;
/**
 * Expand raw response data against the form's field definitions.
 * Returns {} when there is no data, and the raw data unchanged when the
 * form record has no id (nothing to expand against).
 */
const data = async (req, form, data) => {
  if (!data) return {};
  if (!form.id) return data;
  const fields = form.get('config').fields;
  return await expandData(req, { fields, data });
};
export default ResponseSerializer
|
anshika581/competitive-programming-1 | src/contest/noi/NOI_2014_Enchanted_Forest_2.cc | #include <bits/stdc++.h>
using namespace std;
#define mp make_pair
#define pb push_back
typedef pair<int, int> pi;
int n, m;                                  // vertex count, edge count
vector<vector<pair<int, pi>>> adj(50000);  // adj[u] = list of {v, {A-weight, B-weight}}
int minB[50000];                           // per pass: minimal achievable max-B to reach each node
int main() {
    scanf("%d%d", &n, &m);
    set<int> uniA;  // distinct A-weights: the only useful thresholds
    for (int i = 0; i < m; i++) {
        int a, b, c, d;
        scanf("%d%d%d%d", &a, &b, &c, &d);
        a--, b--;  // convert to 0-based vertices
        uniA.insert(c);
        adj[a].pb(mp(b, mp(c, d)));
        adj[b].pb(mp(a, mp(c, d)));
    }
    int res = 1 << 20;  // sentinel "infinity"
    bool flag[n];       // in-queue marker for the SPFA-style relaxation
    // For each candidate A-threshold `a`, compute minB[v] = minimal
    // possible maximum B-weight over a path 0 -> v that uses only edges
    // with A-weight <= a, then combine: answer = min over a of
    // (a + minB[n-1]).
    for (int a : uniA) {
        for (int i = 0; i < n; i++) {
            minB[i] = 1 << 20;
        }
        memset(flag, false, sizeof flag);
        minB[0] = 0;
        queue<int> q;
        q.push(0);
        flag[0] = true;
        while (!q.empty()) {
            int curr = q.front();
            flag[curr] = false;
            q.pop();
            for (pair<int, pi> next : adj[curr]) {
                // Edge unusable under the current A-threshold.
                if (next.second.first > a)
                    continue;
                // Relax: path max-B through curr improves next's best.
                if (max(minB[curr], next.second.second) < minB[next.first]) {
                    minB[next.first] = max(minB[curr], next.second.second);
                    if (!flag[next.first]) {
                        q.push(next.first);
                        flag[next.first] = true;
                    }
                }
            }
        }
        res = min(res, a + minB[n - 1]);
    }
    if (res == 1 << 20)
        printf("%d\n", -1);  // target unreachable under every threshold
    else
        printf("%d\n", res);
}
|
aleasoluciones/infrabbitmq3 | infrabbitmq/pika_client_wrapper.py | from functools import wraps
from pika import (
URLParameters,
)
from pika.spec import (
BasicProperties,
)
from pika import exceptions as pika_exceptions
from infrabbitmq.exceptions import ClientWrapperError
class PikaClientWrapper:
    """Thin synchronous wrapper around pika's BlockingConnection/Channel.

    Every public operation translates pika/AMQP-level failures into a
    single ClientWrapperError so callers only deal with one exception
    type.  The pika module itself is injected for testability.
    """

    # Heartbeat appended to broker URIs that do not already set one;
    # 0 disables AMQP heartbeats entirely.
    DEFAULT_HEARTBEAT = 0

    def __init__(self, pika_library):
        self._connection = None
        self._channel = None
        self._pika_library = pika_library

    # Decorator (applied below as a plain function at class scope): wraps
    # a method so pika-level errors surface as ClientWrapperError.
    def raise_client_wrapper_error(func):
        @wraps(func)
        def wrapper(self, *args, **kwargs):
            try:
                return func(self, *args, **kwargs)
            except (pika_exceptions.AMQPError, pika_exceptions.ChannelError, pika_exceptions.ReentrancyError) as exc:
                raise ClientWrapperError(exc)
            except ValueError as exc:
                # raised if consumer-creation parameters don't match those of the
                # existing queue consumer generator, if any. NEW in pika 0.10.0
                raise ClientWrapperError(exc)
        return wrapper

    @raise_client_wrapper_error
    def connect(self, broker_uri):
        # Open a blocking connection and channel, with prefetch 1 across
        # the channel and publisher confirms enabled.
        broker_uri_with_heartbeat = self._build_broker_uri_with_heartbeat(broker_uri)
        self._connection = self._pika_library.BlockingConnection(URLParameters(broker_uri_with_heartbeat))
        self._channel = self._connection.channel()
        self._channel.basic_qos(prefetch_size=0, prefetch_count=1, global_qos=True)
        self._channel.confirm_delivery()

    def _build_broker_uri_with_heartbeat(self, broker_uri):
        # Append the default heartbeat unless the URI already carries one.
        # NOTE(review): this is a substring test, so a URI containing the
        # text 'heartbeat' anywhere (not just as a query param) is left
        # untouched -- confirm that is acceptable.
        heartbeat_param = 'heartbeat'
        existing_query_params = '?'
        if heartbeat_param not in broker_uri:
            if existing_query_params in broker_uri:
                return f"{broker_uri}&{heartbeat_param}={self.DEFAULT_HEARTBEAT}"
            return f"{broker_uri}?{heartbeat_param}={self.DEFAULT_HEARTBEAT}"
        return broker_uri

    @raise_client_wrapper_error
    def disconnect(self):
        # Close the channel first, then the connection.
        self._channel.close()
        self._connection.close()

    @raise_client_wrapper_error
    def exchange_declare(self, exchange, exchange_type, **kwargs):
        # Declare an exchange; optional AMQP flags come in via kwargs and
        # default to non-durable, non-exclusive behaviour.
        self._channel.exchange_declare(exchange=exchange,
                                       exchange_type=exchange_type,
                                       passive=kwargs.get('passive', False),
                                       durable=kwargs.get('durable', False),
                                       auto_delete=kwargs.get('auto_delete', False),
                                       internal=kwargs.get('internal', False),
                                       arguments=kwargs.get('arguments', {}))

    @raise_client_wrapper_error
    def exchange_delete(self, exchange):
        self._channel.exchange_delete(exchange=exchange)

    @raise_client_wrapper_error
    def queue_declare(self, queue_name, auto_delete=True, exclusive=False, durable=False, arguments=None):
        self._channel.queue_declare(queue_name,
                                    durable=durable,
                                    exclusive=exclusive,
                                    auto_delete=auto_delete,
                                    arguments=arguments)

    @raise_client_wrapper_error
    def queue_bind(self, queue_name, exchange, routing_key=''):
        self._channel.queue_bind(queue=queue_name, exchange=exchange, routing_key=routing_key)

    @raise_client_wrapper_error
    def queue_unbind(self, queue_name, exchange, routing_key=''):
        self._channel.queue_unbind(queue=queue_name, exchange=exchange, routing_key=routing_key)

    @raise_client_wrapper_error
    def queue_purge(self, queue_name):
        # Drop all messages currently sitting in the queue.
        self._channel.queue_purge(queue=queue_name)

    @raise_client_wrapper_error
    def queue_delete(self, queue_name):
        self._channel.queue_delete(queue=queue_name)

    @raise_client_wrapper_error
    def basic_publish(self, exchange, routing_key, body, **kwargs):
        # Publish one message; per-message properties are derived from the
        # optional 'headers' kwarg (expiration / delayed-exchange delay).
        headers = kwargs.get('headers', {})
        properties = self._build_properties_for_basic_publish(headers)
        self._channel.basic_publish(exchange=exchange, routing_key=routing_key, body=body, properties=properties, mandatory=False)

    def _build_properties_for_basic_publish(self, headers):
        # 'expiration' becomes a per-message TTL; 'x-delay' is passed
        # through as-is for the delayed-message exchange plugin.
        if 'expiration' in headers.keys():
            return BasicProperties(expiration=headers['expiration'])
        elif 'x-delay' in headers.keys():
            return BasicProperties(headers=headers)
        return BasicProperties()

    @raise_client_wrapper_error
    def consume_one_message(self, queue_name, timeout_in_seconds=1):
        # Consume and ack a single message; returns {} or {'body': ...}.
        # NOTE(review): on an empty queue pika's consume() keeps yielding
        # (None, None, None) placeholders after each inactivity timeout,
        # so this loop appears to wait indefinitely rather than give up
        # after timeout_in_seconds -- confirm the intended semantics.
        message_body = {}
        for method_frame, properties, body in self._channel.consume(queue_name, inactivity_timeout=timeout_in_seconds):
            if body and method_frame:
                self._channel.basic_ack(method_frame.delivery_tag)
                message_body['body'] = body
                self._channel.cancel()
                break
        return message_body
|
Kirishikesan/haiku | src/add-ons/kernel/drivers/misc/kdl.c | <filename>src/add-ons/kernel/drivers/misc/kdl.c<gh_stars>1000+
/*
* Copyright (c) 2009 <NAME>, <<EMAIL>>.
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without restriction,
* including without limitation the rights to use, copy, modify,
* merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*/
#include <KernelExport.h>
#include <Drivers.h>
#include <Errors.h>
int32 api_version = B_CUR_DRIVER_API_VERSION;

/* Bit 0 is set while the device node is open; enforces a single opener. */
static int32 sOpenMask;

/* Driver lifecycle hooks: nothing to set up, they only trace. */
status_t
init_hardware(void)
{
    dprintf("kdl: init_hardware\n");
    return B_OK;
}

status_t
uninit_hardware(void)
{
    dprintf("kdl: uninit_hardware\n");
    return B_OK;
}

status_t
init_driver(void)
{
    dprintf("kdl: init_driver\n");
    return B_OK;
}

void
uninit_driver(void)
{
    dprintf("kdl: uninit_driver\n");
}
/* Open: allow only one client at a time via bit 0 of sOpenMask. */
static status_t
driver_open(const char *name, uint32 flags, void** _cookie)
{
    dprintf("kdl: open\n");
    // TODO: check for proper credentials! (root only ?)
    if (atomic_or(&sOpenMask, 1)) {
        dprintf("kdl: open, BUSY!\n");
        return B_BUSY;
    }
    dprintf("kdl: open, success\n");
    return B_OK;
}

/* Close: nothing to tear down (the open bit is cleared in free). */
static status_t
driver_close(void* cookie)
{
    dprintf("kdl: close enter\n");
    dprintf("kdl: close leave\n");
    return B_OK;
}

/* Free: release the single-open slot. */
static status_t
driver_free(void* cookie)
{
    dprintf("kdl: free\n");
    atomic_and(&sOpenMask, ~1);
    return B_OK;
}
/* Reading the device is the trigger: deliberately panic so the system
 * drops into the kernel debugger (KDL).  Never returns any data. */
static status_t
driver_read(void* cookie, off_t position, void *buf, size_t* num_bytes)
{
    dprintf("kdl: read\n");
    panic("requested from kdl driver.");
    *num_bytes = 0; // nothing to read
    return B_ERROR;
}

/* Writes are accepted and silently discarded. */
static status_t
driver_write(void* cookie, off_t position, const void* buffer, size_t* num_bytes)
{
    dprintf("kdl: write\n");
    *num_bytes = 1; // pretend 1 byte was written
    return B_OK;
}

/* No ioctls are supported. */
static status_t
driver_control(void *cookie, uint32 op, void *arg, size_t len)
{
    dprintf("kdl: control\n");
    return B_ERROR;
}
/* Export the single device node this driver publishes: /dev/misc/kdl. */
const char**
publish_devices(void)
{
    static const char *names[] = {"misc/kdl", NULL};
    dprintf("kdl: publish_devices\n");
    return names;
}

/* Hand out the hook table; the same hooks serve the only device. */
device_hooks*
find_device(const char* name)
{
    static device_hooks hooks = {
        driver_open,
        driver_close,
        driver_free,
        driver_control,
        driver_read,
        driver_write,
    };
    dprintf("kdl: find_device\n");
    return &hooks;
}
|
nickchen-mitac/fork | src/avashell/win32/dyndlg.py | <reponame>nickchen-mitac/fork
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import win32ui
import win32con
def MakeDlgTemplate():
    """Build the dialog-template list for win32ui.CreateDialogIndirect.

    Layout: a caption label, the current warehouse name, a prompt, an
    ActiveX list control for picking the new warehouse, and the
    OK/Cancel/Help buttons down the right-hand edge.
    """
    style = (win32con.DS_MODALFRAME | win32con.WS_POPUP
             | win32con.WS_VISIBLE | win32con.WS_CAPTION
             | win32con.WS_SYSMENU | win32con.DS_SETFONT)
    child = win32con.WS_CHILD | win32con.WS_VISIBLE
    tab = win32con.WS_TABSTOP | child
    return [
        ["Select Warehouse", (0, 0, 177, 93), style, None, (8, "MS Sans Serif")],
        [130, "Current Warehouse:", -1, (7, 7, 69, 9), child | win32con.SS_LEFT],
        [130, "ASTORIA", 128, (16, 17, 99, 7), child | win32con.SS_LEFT],
        [130, "New &Warehouse:", -1, (7, 29, 69, 9), child | win32con.SS_LEFT],
        # An ActiveX control stands in for the plain LISTBOX the template
        # originally used for the warehouse list:
        # [131, None, 130, (5, 40, 110, 48), tab | win32con.LBS_NOTIFY
        #  | win32con.LBS_SORT | win32con.LBS_NOINTEGRALHEIGHT
        #  | win32con.WS_VSCROLL | win32con.WS_BORDER]
        ["{8E27C92B-1264-101C-8A2F-040224009C02}", None, 131, (5, 40, 110, 48),
         win32con.WS_TABSTOP],
        [128, "OK", win32con.IDOK, (124, 5, 50, 14),
         tab | win32con.BS_DEFPUSHBUTTON],
        [128, "Cancel", win32con.IDCANCEL, (124, 22, 50, 14),
         win32con.BS_PUSHBUTTON | tab],
        [128, "&Help", 100, (124, 74, 50, 14),
         win32con.BS_PUSHBUTTON | tab],
    ]
def test1():
    # Show the dialog modally; returns when the user dismisses it.
    win32ui.CreateDialogIndirect( MakeDlgTemplate() ).DoModal()
def testall():
    # Run every demo in this module (currently just the one dialog).
    test1()
if __name__=='__main__':
testall() |
bradchesney79/illacceptanything | linux/drivers/crypto/atmel-aes.c | <filename>linux/drivers/crypto/atmel-aes.c
/*
* Cryptographic API.
*
* Support for ATMEL AES HW acceleration.
*
* Copyright (c) 2012 Eukrรฉa Electromatique - ATMEL
* Author: <NAME> <<EMAIL>>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as published
* by the Free Software Foundation.
*
* Some ideas are from omap-aes.c driver.
*/
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/err.h>
#include <linux/clk.h>
#include <linux/io.h>
#include <linux/hw_random.h>
#include <linux/platform_device.h>
#include <linux/device.h>
#include <linux/init.h>
#include <linux/errno.h>
#include <linux/interrupt.h>
#include <linux/irq.h>
#include <linux/scatterlist.h>
#include <linux/dma-mapping.h>
#include <linux/of_device.h>
#include <linux/delay.h>
#include <linux/crypto.h>
#include <linux/cryptohash.h>
#include <crypto/scatterwalk.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/hash.h>
#include <crypto/internal/hash.h>
#include <linux/platform_data/crypto-atmel.h>
#include <dt-bindings/dma/at91.h>
#include "atmel-aes-regs.h"
/* CFB sub-block sizes in bytes (CFB128 uses the full 16-byte AES block). */
#define CFB8_BLOCK_SIZE 1
#define CFB16_BLOCK_SIZE 2
#define CFB32_BLOCK_SIZE 4
#define CFB64_BLOCK_SIZE 8

/* AES flags */
/* The low bits select the cipher mode and come from the request context. */
#define AES_FLAGS_MODE_MASK 0x03ff
#define AES_FLAGS_ENCRYPT BIT(0)
#define AES_FLAGS_CBC BIT(1)
#define AES_FLAGS_CFB BIT(2)
#define AES_FLAGS_CFB8 BIT(3)
#define AES_FLAGS_CFB16 BIT(4)
#define AES_FLAGS_CFB32 BIT(5)
#define AES_FLAGS_CFB64 BIT(6)
#define AES_FLAGS_CFB128 BIT(7)
#define AES_FLAGS_OFB BIT(8)
#define AES_FLAGS_CTR BIT(9)
/* Device-state bits: kept outside MODE_MASK so they survive mode updates. */
#define AES_FLAGS_INIT BIT(16)
#define AES_FLAGS_DMA BIT(17)
#define AES_FLAGS_BUSY BIT(18)
#define AES_FLAGS_FAST BIT(19)

#define ATMEL_AES_QUEUE_LENGTH 50
/* Requests larger than this go through DMA; smaller ones use PIO. */
#define ATMEL_AES_DMA_THRESHOLD 16

/* Hardware capabilities derived from the IP version. */
struct atmel_aes_caps {
    bool has_dualbuff;
    bool has_cfb64;
    u32 max_burst_size;
};

struct atmel_aes_dev;

/* Per-tfm context: key material and the device this tfm is bound to. */
struct atmel_aes_ctx {
    struct atmel_aes_dev *dd;
    int keylen;
    u32 key[AES_KEYSIZE_256 / sizeof(u32)];
    u16 block_size;
};

/* Per-request context: the mode flags requested by the caller. */
struct atmel_aes_reqctx {
    unsigned long mode;
};

/* One DMA channel plus its slave configuration. */
struct atmel_aes_dma {
    struct dma_chan *chan;
    struct dma_slave_config dma_conf;
};

/* Per-device state: register mapping, clock, request queue, tasklets,
 * scatterlist walk state and the CPU bounce buffers with their DMA
 * mappings. */
struct atmel_aes_dev {
    struct list_head list;
    unsigned long phys_base;
    void __iomem *io_base;
    struct atmel_aes_ctx *ctx;
    struct device *dev;
    struct clk *iclk;
    int irq;
    unsigned long flags;
    int err;
    spinlock_t lock;
    struct crypto_queue queue;
    struct tasklet_struct done_task;
    struct tasklet_struct queue_task;
    struct ablkcipher_request *req;
    size_t total;
    struct scatterlist *in_sg;
    unsigned int nb_in_sg;
    size_t in_offset;
    struct scatterlist *out_sg;
    unsigned int nb_out_sg;
    size_t out_offset;
    size_t bufcnt;
    size_t buflen;
    size_t dma_size;
    void *buf_in;
    int dma_in;
    dma_addr_t dma_addr_in;
    struct atmel_aes_dma dma_lch_in;
    void *buf_out;
    int dma_out;
    dma_addr_t dma_addr_out;
    struct atmel_aes_dma dma_lch_out;
    struct atmel_aes_caps caps;
    u32 hw_version;
};

/* Global registry of probed devices. */
struct atmel_aes_drv {
    struct list_head dev_list;
    spinlock_t lock;
};

static struct atmel_aes_drv atmel_aes = {
    .dev_list = LIST_HEAD_INIT(atmel_aes.dev_list),
    .lock = __SPIN_LOCK_UNLOCKED(atmel_aes.lock),
};
/*
 * Count how many scatterlist entries are needed to cover the request
 * payload (req->nbytes).  Returns the entry count (0 for an empty
 * request).  Cleaned up: the original initialised 'total' twice.
 */
static int atmel_aes_sg_length(struct ablkcipher_request *req,
            struct scatterlist *sg)
{
    unsigned int total = req->nbytes;
    unsigned int len;
    struct scatterlist *sg_list = sg;
    int sg_nb = 0;

    while (total) {
        len = min(sg_list->length, total);
        sg_nb++;
        total -= len;

        sg_list = sg_next(sg_list);
        if (!sg_list)
            total = 0;  /* list ended early: stop counting */
    }

    return sg_nb;
}
/*
 * Copy up to min(buflen, total) bytes between the scatterlist and a
 * linear buffer ('out' selects the direction), advancing *sg / *offset
 * across entries as they are consumed.  Returns the byte count copied.
 */
static int atmel_aes_sg_copy(struct scatterlist **sg, size_t *offset,
            void *buf, size_t buflen, size_t total, int out)
{
    unsigned int count, off = 0;

    while (buflen && total) {
        /* Clamp to what remains in the current entry and in buf. */
        count = min((*sg)->length - *offset, total);
        count = min(count, buflen);

        if (!count)
            return off;

        scatterwalk_map_and_copy(buf + off, *sg, *offset, count, out);

        off += count;
        buflen -= count;
        *offset += count;
        total -= count;

        /* Entry exhausted: step to the next one (or stop at list end). */
        if (*offset == (*sg)->length) {
            *sg = sg_next(*sg);
            if (*sg)
                *offset = 0;
            else
                total = 0;
        }
    }

    return off;
}
/* Register accessors: relaxed MMIO reads/writes relative to io_base. */
static inline u32 atmel_aes_read(struct atmel_aes_dev *dd, u32 offset)
{
    return readl_relaxed(dd->io_base + offset);
}

static inline void atmel_aes_write(struct atmel_aes_dev *dd,
                    u32 offset, u32 value)
{
    writel_relaxed(value, dd->io_base + offset);
}

/* Read 'count' consecutive 32-bit registers starting at 'offset'. */
static void atmel_aes_read_n(struct atmel_aes_dev *dd, u32 offset,
                    u32 *value, int count)
{
    for (; count--; value++, offset += 4)
        *value = atmel_aes_read(dd, offset);
}

/* Write 'count' consecutive 32-bit registers starting at 'offset'. */
static void atmel_aes_write_n(struct atmel_aes_dev *dd, u32 offset,
                    u32 *value, int count)
{
    for (; count--; value++, offset += 4)
        atmel_aes_write(dd, offset, *value);
}
/*
 * Bind the tfm context to a device: reuse the one already cached in ctx,
 * otherwise pick the first probed device from the global list.  Returns
 * NULL when no device has been probed.
 */
static struct atmel_aes_dev *atmel_aes_find_dev(struct atmel_aes_ctx *ctx)
{
    struct atmel_aes_dev *aes_dd = NULL;
    struct atmel_aes_dev *tmp;

    spin_lock_bh(&atmel_aes.lock);
    if (!ctx->dd) {
        list_for_each_entry(tmp, &atmel_aes.dev_list, list) {
            aes_dd = tmp;
            break;  /* first entry wins */
        }
        ctx->dd = aes_dd;
    } else {
        aes_dd = ctx->dd;
    }

    spin_unlock_bh(&atmel_aes.lock);

    return aes_dd;
}
/*
 * Enable the peripheral clock and, on first use, soft-reset the engine
 * and program the clock-key field of the mode register.  Always returns 0.
 */
static int atmel_aes_hw_init(struct atmel_aes_dev *dd)
{
    /* NOTE(review): the clk_prepare_enable() return value is ignored --
     * confirm a failure here cannot happen / does not matter. */
    clk_prepare_enable(dd->iclk);

    if (!(dd->flags & AES_FLAGS_INIT)) {
        atmel_aes_write(dd, AES_CR, AES_CR_SWRST);  /* software reset */
        atmel_aes_write(dd, AES_MR, 0xE << AES_MR_CKEY_OFFSET);
        dd->flags |= AES_FLAGS_INIT;
        dd->err = 0;
    }

    return 0;
}
/* Low 12 bits of the hardware version register. */
static inline unsigned int atmel_aes_get_version(struct atmel_aes_dev *dd)
{
    return atmel_aes_read(dd, AES_HW_VERSION) & 0x00000fff;
}

/* Probe-time helper: power up briefly to latch and log the IP version. */
static void atmel_aes_hw_version_init(struct atmel_aes_dev *dd)
{
    atmel_aes_hw_init(dd);

    dd->hw_version = atmel_aes_get_version(dd);

    dev_info(dd->dev,
            "version: 0x%x\n", dd->hw_version);

    clk_disable_unprepare(dd->iclk);
}

/* Complete the current request: drop the clock, clear BUSY, notify the
 * caller with the final status. */
static void atmel_aes_finish_req(struct atmel_aes_dev *dd, int err)
{
    struct ablkcipher_request *req = dd->req;

    clk_disable_unprepare(dd->iclk);
    dd->flags &= ~AES_FLAGS_BUSY;

    req->base.complete(&req->base, err);
}

/* DMA completion callback (output channel): defer to the done tasklet. */
static void atmel_aes_dma_callback(void *data)
{
    struct atmel_aes_dev *dd = data;

    /* dma_lch_out - completed */
    tasklet_schedule(&dd->done_task);
}
/*
 * Program both DMA channels for one transfer of 'length' bytes:
 * memory -> AES input registers, and AES output registers -> memory.
 * Bus width and burst size are adapted to the CFB sub-block size.
 * Returns 0, or -EINVAL when a descriptor could not be prepared.
 */
static int atmel_aes_crypt_dma(struct atmel_aes_dev *dd,
        dma_addr_t dma_addr_in, dma_addr_t dma_addr_out, int length)
{
    struct scatterlist sg[2];
    struct dma_async_tx_descriptor *in_desc, *out_desc;

    dd->dma_size = length;

    if (!(dd->flags & AES_FLAGS_FAST)) {
        /* Bounce buffer was filled by the CPU: flush it for the device. */
        dma_sync_single_for_device(dd->dev, dma_addr_in, length,
                    DMA_TO_DEVICE);
    }

    /* Narrow the register-side bus width for sub-word CFB modes. */
    if (dd->flags & AES_FLAGS_CFB8) {
        dd->dma_lch_in.dma_conf.dst_addr_width =
            DMA_SLAVE_BUSWIDTH_1_BYTE;
        dd->dma_lch_out.dma_conf.src_addr_width =
            DMA_SLAVE_BUSWIDTH_1_BYTE;
    } else if (dd->flags & AES_FLAGS_CFB16) {
        dd->dma_lch_in.dma_conf.dst_addr_width =
            DMA_SLAVE_BUSWIDTH_2_BYTES;
        dd->dma_lch_out.dma_conf.src_addr_width =
            DMA_SLAVE_BUSWIDTH_2_BYTES;
    } else {
        dd->dma_lch_in.dma_conf.dst_addr_width =
            DMA_SLAVE_BUSWIDTH_4_BYTES;
        dd->dma_lch_out.dma_conf.src_addr_width =
            DMA_SLAVE_BUSWIDTH_4_BYTES;
    }

    /* Sub-128-bit CFB modes cannot use bursts. */
    if (dd->flags & (AES_FLAGS_CFB8 | AES_FLAGS_CFB16 |
            AES_FLAGS_CFB32 | AES_FLAGS_CFB64)) {
        dd->dma_lch_in.dma_conf.src_maxburst = 1;
        dd->dma_lch_in.dma_conf.dst_maxburst = 1;
        dd->dma_lch_out.dma_conf.src_maxburst = 1;
        dd->dma_lch_out.dma_conf.dst_maxburst = 1;
    } else {
        dd->dma_lch_in.dma_conf.src_maxburst = dd->caps.max_burst_size;
        dd->dma_lch_in.dma_conf.dst_maxburst = dd->caps.max_burst_size;
        dd->dma_lch_out.dma_conf.src_maxburst = dd->caps.max_burst_size;
        dd->dma_lch_out.dma_conf.dst_maxburst = dd->caps.max_burst_size;
    }

    dmaengine_slave_config(dd->dma_lch_in.chan, &dd->dma_lch_in.dma_conf);
    dmaengine_slave_config(dd->dma_lch_out.chan, &dd->dma_lch_out.dma_conf);

    dd->flags |= AES_FLAGS_DMA;

    /* Build single-entry scatterlists around the two DMA addresses. */
    sg_init_table(&sg[0], 1);
    sg_dma_address(&sg[0]) = dma_addr_in;
    sg_dma_len(&sg[0]) = length;

    sg_init_table(&sg[1], 1);
    sg_dma_address(&sg[1]) = dma_addr_out;
    sg_dma_len(&sg[1]) = length;

    in_desc = dmaengine_prep_slave_sg(dd->dma_lch_in.chan, &sg[0],
                1, DMA_MEM_TO_DEV,
                DMA_PREP_INTERRUPT  |  DMA_CTRL_ACK);
    if (!in_desc)
        return -EINVAL;

    out_desc = dmaengine_prep_slave_sg(dd->dma_lch_out.chan, &sg[1],
                1, DMA_DEV_TO_MEM,
                DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
    if (!out_desc)
        return -EINVAL;

    /* Completion is signalled on the output channel only. */
    out_desc->callback = atmel_aes_dma_callback;
    out_desc->callback_param = dd;

    dmaengine_submit(out_desc);
    dma_async_issue_pending(dd->dma_lch_out.chan);

    dmaengine_submit(in_desc);
    dma_async_issue_pending(dd->dma_lch_in.chan);

    return 0;
}
/*
 * PIO path for small requests: copy the whole input into the bounce
 * buffer, enable the data-ready interrupt and push the first words into
 * the input registers; the interrupt handler drains the rest.
 * Returns 0 or -EINVAL on an empty/degenerate scatterlist.
 */
static int atmel_aes_crypt_cpu_start(struct atmel_aes_dev *dd)
{
    dd->flags &= ~AES_FLAGS_DMA;

    /* use cache buffers */
    dd->nb_in_sg = atmel_aes_sg_length(dd->req, dd->in_sg);
    if (!dd->nb_in_sg)
        return -EINVAL;

    dd->nb_out_sg = atmel_aes_sg_length(dd->req, dd->out_sg);
    if (!dd->nb_out_sg)
        return -EINVAL;

    dd->bufcnt = sg_copy_to_buffer(dd->in_sg, dd->nb_in_sg,
                    dd->buf_in, dd->total);
    if (!dd->bufcnt)
        return -EINVAL;

    dd->total -= dd->bufcnt;

    atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
    atmel_aes_write_n(dd, AES_IDATAR(0), (u32 *) dd->buf_in,
                dd->bufcnt >> 2);  /* byte count -> 32-bit word count */

    return 0;
}
/*
 * Start the DMA phase of the current request.
 *
 * "Fast" path: when both scatterlists start word-aligned, cover whole
 * blocks and have matching lengths, map them directly for DMA.
 * Otherwise fall back to the pre-mapped bounce buffers and copy the
 * input through the CPU.  Returns 0 or a negative errno.
 *
 * Fix: the error path used to unmap out_sg with DMA_TO_DEVICE although
 * it was mapped with DMA_FROM_DEVICE; the DMA API requires the unmap
 * direction to match the map direction.
 */
static int atmel_aes_crypt_dma_start(struct atmel_aes_dev *dd)
{
    int err, fast = 0, in, out;
    size_t count;
    dma_addr_t addr_in, addr_out;

    if ((!dd->in_offset) && (!dd->out_offset)) {
        /* check for alignment */
        in = IS_ALIGNED((u32)dd->in_sg->offset, sizeof(u32)) &&
            IS_ALIGNED(dd->in_sg->length, dd->ctx->block_size);
        out = IS_ALIGNED((u32)dd->out_sg->offset, sizeof(u32)) &&
            IS_ALIGNED(dd->out_sg->length, dd->ctx->block_size);
        fast = in && out;

        if (sg_dma_len(dd->in_sg) != sg_dma_len(dd->out_sg))
            fast = 0;
    }

    if (fast) {
        count = min(dd->total, sg_dma_len(dd->in_sg));
        count = min(count, sg_dma_len(dd->out_sg));

        err = dma_map_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE);
        if (!err) {
            dev_err(dd->dev, "dma_map_sg() error\n");
            return -EINVAL;
        }

        err = dma_map_sg(dd->dev, dd->out_sg, 1,
                DMA_FROM_DEVICE);
        if (!err) {
            dev_err(dd->dev, "dma_map_sg() error\n");
            dma_unmap_sg(dd->dev, dd->in_sg, 1,
                DMA_TO_DEVICE);
            return -EINVAL;
        }

        addr_in = sg_dma_address(dd->in_sg);
        addr_out = sg_dma_address(dd->out_sg);

        dd->flags |= AES_FLAGS_FAST;
    } else {
        /* use cache buffers */
        count = atmel_aes_sg_copy(&dd->in_sg, &dd->in_offset,
                dd->buf_in, dd->buflen, dd->total, 0);

        addr_in = dd->dma_addr_in;
        addr_out = dd->dma_addr_out;

        dd->flags &= ~AES_FLAGS_FAST;
    }

    dd->total -= count;

    err = atmel_aes_crypt_dma(dd, addr_in, addr_out, count);

    if (err && (dd->flags & AES_FLAGS_FAST)) {
        dma_unmap_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE);
        /* must match the DMA_FROM_DEVICE mapping above */
        dma_unmap_sg(dd->dev, dd->out_sg, 1, DMA_FROM_DEVICE);
    }

    return err;
}
/*
 * Program the AES engine for the current request: key size, operating
 * mode (ECB/CBC/CFBx/OFB/CTR), direction, start mode (DMA vs auto), the
 * key, and -- for chaining modes -- the caller-supplied IV/counter.
 * Returns 0 or the error from hardware init.
 */
static int atmel_aes_write_ctrl(struct atmel_aes_dev *dd)
{
    int err;
    u32 valcr = 0, valmr = 0;

    err = atmel_aes_hw_init(dd);

    if (err)
        return err;

    /* MR register must be set before IV registers */
    if (dd->ctx->keylen == AES_KEYSIZE_128)
        valmr |= AES_MR_KEYSIZE_128;
    else if (dd->ctx->keylen == AES_KEYSIZE_192)
        valmr |= AES_MR_KEYSIZE_192;
    else
        valmr |= AES_MR_KEYSIZE_256;

    if (dd->flags & AES_FLAGS_CBC) {
        valmr |= AES_MR_OPMOD_CBC;
    } else if (dd->flags & AES_FLAGS_CFB) {
        valmr |= AES_MR_OPMOD_CFB;
        if (dd->flags & AES_FLAGS_CFB8)
            valmr |= AES_MR_CFBS_8b;
        else if (dd->flags & AES_FLAGS_CFB16)
            valmr |= AES_MR_CFBS_16b;
        else if (dd->flags & AES_FLAGS_CFB32)
            valmr |= AES_MR_CFBS_32b;
        else if (dd->flags & AES_FLAGS_CFB64)
            valmr |= AES_MR_CFBS_64b;
        else if (dd->flags & AES_FLAGS_CFB128)
            valmr |= AES_MR_CFBS_128b;
    } else if (dd->flags & AES_FLAGS_OFB) {
        valmr |= AES_MR_OPMOD_OFB;
    } else if (dd->flags & AES_FLAGS_CTR) {
        valmr |= AES_MR_OPMOD_CTR;
    } else {
        valmr |= AES_MR_OPMOD_ECB;
    }

    if (dd->flags & AES_FLAGS_ENCRYPT)
        valmr |= AES_MR_CYPHER_ENC;

    if (dd->total > ATMEL_AES_DMA_THRESHOLD) {
        /* DMA path: the engine starts on IDATAR0 writes from the DMA. */
        valmr |= AES_MR_SMOD_IDATAR0;
        if (dd->caps.has_dualbuff)
            valmr |= AES_MR_DUALBUFF;
    } else {
        valmr |= AES_MR_SMOD_AUTO;
    }

    atmel_aes_write(dd, AES_CR, valcr);
    atmel_aes_write(dd, AES_MR, valmr);

    atmel_aes_write_n(dd, AES_KEYWR(0), dd->ctx->key,
                        dd->ctx->keylen >> 2);

    /* All chaining modes need the IV/counter loaded before starting. */
    if (((dd->flags & AES_FLAGS_CBC) || (dd->flags & AES_FLAGS_CFB) ||
       (dd->flags & AES_FLAGS_OFB) || (dd->flags & AES_FLAGS_CTR)) &&
       dd->req->info) {
        atmel_aes_write_n(dd, AES_IVR(0), dd->req->info, 4);
    }

    return 0;
}
/*
 * Enqueue 'req' (may be NULL) and, when the engine is idle, dequeue the
 * next request and start processing it (DMA or PIO path).  Called from
 * user context and from the queue tasklet.  Returns the enqueue status.
 */
static int atmel_aes_handle_queue(struct atmel_aes_dev *dd,
            struct ablkcipher_request *req)
{
    struct crypto_async_request *async_req, *backlog;
    struct atmel_aes_ctx *ctx;
    struct atmel_aes_reqctx *rctx;
    unsigned long flags;
    int err, ret = 0;

    spin_lock_irqsave(&dd->lock, flags);
    if (req)
        ret = ablkcipher_enqueue_request(&dd->queue, req);
    if (dd->flags & AES_FLAGS_BUSY) {
        /* Engine already working: the request stays queued. */
        spin_unlock_irqrestore(&dd->lock, flags);
        return ret;
    }
    backlog = crypto_get_backlog(&dd->queue);
    async_req = crypto_dequeue_request(&dd->queue);
    if (async_req)
        dd->flags |= AES_FLAGS_BUSY;
    spin_unlock_irqrestore(&dd->lock, flags);

    if (!async_req)
        return ret;

    if (backlog)
        backlog->complete(backlog, -EINPROGRESS);

    req = ablkcipher_request_cast(async_req);

    /* assign new request to device */
    dd->req = req;
    dd->total = req->nbytes;
    dd->in_offset = 0;
    dd->in_sg = req->src;
    dd->out_offset = 0;
    dd->out_sg = req->dst;

    rctx = ablkcipher_request_ctx(req);
    ctx = crypto_ablkcipher_ctx(crypto_ablkcipher_reqtfm(req));
    rctx->mode &= AES_FLAGS_MODE_MASK;
    /* Keep the device-state bits, replace only the mode bits. */
    dd->flags = (dd->flags & ~AES_FLAGS_MODE_MASK) | rctx->mode;
    dd->ctx = ctx;
    ctx->dd = dd;

    err = atmel_aes_write_ctrl(dd);
    if (!err) {
        if (dd->total > ATMEL_AES_DMA_THRESHOLD)
            err = atmel_aes_crypt_dma_start(dd);
        else
            err = atmel_aes_crypt_cpu_start(dd);
    }
    if (err) {
        /* aes_task will not finish it, so do it here */
        atmel_aes_finish_req(dd, err);
        tasklet_schedule(&dd->queue_task);
    }

    return ret;
}
/*
 * Finish the DMA phase: unmap the fast-path scatterlists, or sync the
 * bounce buffer and copy it back into the caller's output scatterlist.
 * Returns 0 on success, -EINVAL when no DMA ran or the copy-back came
 * up short.
 *
 * Fix: 'count' is a size_t, so the error message must use %zu (%u is a
 * format mismatch on 64-bit builds).
 */
static int atmel_aes_crypt_dma_stop(struct atmel_aes_dev *dd)
{
    int err = -EINVAL;
    size_t count;

    if (dd->flags & AES_FLAGS_DMA) {
        err = 0;
        if (dd->flags & AES_FLAGS_FAST) {
            dma_unmap_sg(dd->dev, dd->out_sg, 1, DMA_FROM_DEVICE);
            dma_unmap_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE);
        } else {
            dma_sync_single_for_device(dd->dev, dd->dma_addr_out,
                dd->dma_size, DMA_FROM_DEVICE);

            /* copy data */
            count = atmel_aes_sg_copy(&dd->out_sg, &dd->out_offset,
                dd->buf_out, dd->buflen, dd->dma_size, 1);
            if (count != dd->dma_size) {
                err = -EINVAL;
                pr_err("not all data converted: %zu\n", count);
            }
        }
    }

    return err;
}
static int atmel_aes_buff_init(struct atmel_aes_dev *dd)
{
int err = -ENOMEM;
dd->buf_in = (void *)__get_free_pages(GFP_KERNEL, 0);
dd->buf_out = (void *)__get_free_pages(GFP_KERNEL, 0);
dd->buflen = PAGE_SIZE;
dd->buflen &= ~(AES_BLOCK_SIZE - 1);
if (!dd->buf_in || !dd->buf_out) {
dev_err(dd->dev, "unable to alloc pages.\n");
goto err_alloc;
}
/* MAP here */
dd->dma_addr_in = dma_map_single(dd->dev, dd->buf_in,
dd->buflen, DMA_TO_DEVICE);
if (dma_mapping_error(dd->dev, dd->dma_addr_in)) {
dev_err(dd->dev, "dma %d bytes error\n", dd->buflen);
err = -EINVAL;
goto err_map_in;
}
dd->dma_addr_out = dma_map_single(dd->dev, dd->buf_out,
dd->buflen, DMA_FROM_DEVICE);
if (dma_mapping_error(dd->dev, dd->dma_addr_out)) {
dev_err(dd->dev, "dma %d bytes error\n", dd->buflen);
err = -EINVAL;
goto err_map_out;
}
return 0;
err_map_out:
dma_unmap_single(dd->dev, dd->dma_addr_in, dd->buflen,
DMA_TO_DEVICE);
err_map_in:
err_alloc:
free_page((unsigned long)dd->buf_out);
free_page((unsigned long)dd->buf_in);
if (err)
pr_err("error: %d\n", err);
return err;
}
/* Undo atmel_aes_buff_init(): unmap and free both bounce buffers. */
static void atmel_aes_buff_cleanup(struct atmel_aes_dev *dd)
{
    dma_unmap_single(dd->dev, dd->dma_addr_out, dd->buflen,
            DMA_FROM_DEVICE);
    dma_unmap_single(dd->dev, dd->dma_addr_in, dd->buflen,
            DMA_TO_DEVICE);
    free_page((unsigned long)dd->buf_out);
    free_page((unsigned long)dd->buf_in);
}
/*
 * Common entry point for all ablkcipher .encrypt/.decrypt callbacks.
 * Validates that the request length is an exact multiple of the mode's
 * (sub-)block size, records the block size and mode, then queues the
 * request on a device.  Returns the queueing status or a negative errno.
 */
static int atmel_aes_crypt(struct ablkcipher_request *req, unsigned long mode)
{
    struct atmel_aes_ctx *ctx = crypto_ablkcipher_ctx(
            crypto_ablkcipher_reqtfm(req));
    struct atmel_aes_reqctx *rctx = ablkcipher_request_ctx(req);
    struct atmel_aes_dev *dd;

    if (mode & AES_FLAGS_CFB8) {
        if (!IS_ALIGNED(req->nbytes, CFB8_BLOCK_SIZE)) {
            pr_err("request size is not exact amount of CFB8 blocks\n");
            return -EINVAL;
        }
        ctx->block_size = CFB8_BLOCK_SIZE;
    } else if (mode & AES_FLAGS_CFB16) {
        if (!IS_ALIGNED(req->nbytes, CFB16_BLOCK_SIZE)) {
            pr_err("request size is not exact amount of CFB16 blocks\n");
            return -EINVAL;
        }
        ctx->block_size = CFB16_BLOCK_SIZE;
    } else if (mode & AES_FLAGS_CFB32) {
        if (!IS_ALIGNED(req->nbytes, CFB32_BLOCK_SIZE)) {
            pr_err("request size is not exact amount of CFB32 blocks\n");
            return -EINVAL;
        }
        ctx->block_size = CFB32_BLOCK_SIZE;
    } else if (mode & AES_FLAGS_CFB64) {
        if (!IS_ALIGNED(req->nbytes, CFB64_BLOCK_SIZE)) {
            pr_err("request size is not exact amount of CFB64 blocks\n");
            return -EINVAL;
        }
        ctx->block_size = CFB64_BLOCK_SIZE;
    } else {
        /* ECB/CBC/OFB/CTR/CFB128 all operate on full AES blocks. */
        if (!IS_ALIGNED(req->nbytes, AES_BLOCK_SIZE)) {
            pr_err("request size is not exact amount of AES blocks\n");
            return -EINVAL;
        }
        ctx->block_size = AES_BLOCK_SIZE;
    }

    dd = atmel_aes_find_dev(ctx);
    if (!dd)
        return -ENODEV;

    rctx->mode = mode;

    return atmel_aes_handle_queue(dd, req);
}
/*
 * dma_request_slave_channel_compat() filter: claim the channel whose DMA
 * controller matches the one named in the platform slave data, stashing
 * the slave config in chan->private.
 */
static bool atmel_aes_filter(struct dma_chan *chan, void *slave)
{
    struct at_dma_slave *sl = slave;

    if (!sl || sl->dma_dev != chan->device->dev)
        return false;

    chan->private = sl;
    return true;
}
/*
 * atmel_aes_dma_init() - acquire and configure the two slave DMA channels.
 * @dd:    driver state
 * @pdata: platform data carrying the at_dma_slave descriptors
 *
 * The "in" channel feeds plaintext/ciphertext into the AES input FIFO
 * (memory -> device, AES_IDATAR); the "out" channel drains results from
 * the output FIFO (device -> memory, AES_ODATAR).  Burst sizes come from
 * the detected hardware capabilities.
 *
 * NOTE(review): the "in" channel is requested with pdata's rxdata paired
 * with the DT name "tx", and vice versa for "out" -- presumably matching
 * how the platform data is laid out; confirm against the board files
 * before changing.
 *
 * Returns 0 on success, -ENOMEM when either channel cannot be obtained.
 */
static int atmel_aes_dma_init(struct atmel_aes_dev *dd,
                struct crypto_platform_data *pdata)
{
    int err = -ENOMEM;
    dma_cap_mask_t mask;

    dma_cap_zero(mask);
    dma_cap_set(DMA_SLAVE, mask);

    /* Try to grab 2 DMA channels */
    dd->dma_lch_in.chan = dma_request_slave_channel_compat(mask,
            atmel_aes_filter, &pdata->dma_slave->rxdata, dd->dev, "tx");
    if (!dd->dma_lch_in.chan)
        goto err_dma_in;

    dd->dma_lch_in.dma_conf.direction = DMA_MEM_TO_DEV;
    dd->dma_lch_in.dma_conf.dst_addr = dd->phys_base +
        AES_IDATAR(0);
    dd->dma_lch_in.dma_conf.src_maxburst = dd->caps.max_burst_size;
    dd->dma_lch_in.dma_conf.src_addr_width =
        DMA_SLAVE_BUSWIDTH_4_BYTES;
    dd->dma_lch_in.dma_conf.dst_maxburst = dd->caps.max_burst_size;
    dd->dma_lch_in.dma_conf.dst_addr_width =
        DMA_SLAVE_BUSWIDTH_4_BYTES;
    dd->dma_lch_in.dma_conf.device_fc = false;

    dd->dma_lch_out.chan = dma_request_slave_channel_compat(mask,
            atmel_aes_filter, &pdata->dma_slave->txdata, dd->dev, "rx");
    if (!dd->dma_lch_out.chan)
        goto err_dma_out;

    dd->dma_lch_out.dma_conf.direction = DMA_DEV_TO_MEM;
    dd->dma_lch_out.dma_conf.src_addr = dd->phys_base +
        AES_ODATAR(0);
    dd->dma_lch_out.dma_conf.src_maxburst = dd->caps.max_burst_size;
    dd->dma_lch_out.dma_conf.src_addr_width =
        DMA_SLAVE_BUSWIDTH_4_BYTES;
    dd->dma_lch_out.dma_conf.dst_maxburst = dd->caps.max_burst_size;
    dd->dma_lch_out.dma_conf.dst_addr_width =
        DMA_SLAVE_BUSWIDTH_4_BYTES;
    dd->dma_lch_out.dma_conf.device_fc = false;

    return 0;

err_dma_out:
    dma_release_channel(dd->dma_lch_in.chan);
err_dma_in:
    dev_warn(dd->dev, "no DMA channel available\n");
    return err;
}
/* Release both slave DMA channels acquired by atmel_aes_dma_init(). */
static void atmel_aes_dma_cleanup(struct atmel_aes_dev *dd)
{
    dma_release_channel(dd->dma_lch_in.chan);
    dma_release_channel(dd->dma_lch_out.chan);
}
/*
 * atmel_aes_setkey() - store the AES key in the transform context.
 *
 * Only the three standard AES key sizes are accepted; anything else sets
 * CRYPTO_TFM_RES_BAD_KEY_LEN on the transform and fails with -EINVAL.
 * The key itself is programmed into the hardware later, per request.
 */
static int atmel_aes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
               unsigned int keylen)
{
    struct atmel_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);

    switch (keylen) {
    case AES_KEYSIZE_128:
    case AES_KEYSIZE_192:
    case AES_KEYSIZE_256:
        break;
    default:
        crypto_ablkcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
        return -EINVAL;
    }

    memcpy(ctx->key, key, keylen);
    ctx->keylen = keylen;
    return 0;
}
/*
 * Per-mode ablkcipher entry points.
 *
 * Each is a thin wrapper that tags the request with its direction
 * (AES_FLAGS_ENCRYPT for encryption, absent for decryption) and the
 * cipher-mode flags, then defers all validation and queueing to
 * atmel_aes_crypt().
 */
static int atmel_aes_ecb_encrypt(struct ablkcipher_request *req)
{
    return atmel_aes_crypt(req,
        AES_FLAGS_ENCRYPT);
}

static int atmel_aes_ecb_decrypt(struct ablkcipher_request *req)
{
    return atmel_aes_crypt(req,
        0);
}

static int atmel_aes_cbc_encrypt(struct ablkcipher_request *req)
{
    return atmel_aes_crypt(req,
        AES_FLAGS_ENCRYPT | AES_FLAGS_CBC);
}

static int atmel_aes_cbc_decrypt(struct ablkcipher_request *req)
{
    return atmel_aes_crypt(req,
        AES_FLAGS_CBC);
}

static int atmel_aes_ofb_encrypt(struct ablkcipher_request *req)
{
    return atmel_aes_crypt(req,
        AES_FLAGS_ENCRYPT | AES_FLAGS_OFB);
}

static int atmel_aes_ofb_decrypt(struct ablkcipher_request *req)
{
    return atmel_aes_crypt(req,
        AES_FLAGS_OFB);
}

/* CFB128 is the default CFB segment size ("cfb(aes)"). */
static int atmel_aes_cfb_encrypt(struct ablkcipher_request *req)
{
    return atmel_aes_crypt(req,
        AES_FLAGS_ENCRYPT | AES_FLAGS_CFB | AES_FLAGS_CFB128);
}

static int atmel_aes_cfb_decrypt(struct ablkcipher_request *req)
{
    return atmel_aes_crypt(req,
        AES_FLAGS_CFB | AES_FLAGS_CFB128);
}

static int atmel_aes_cfb64_encrypt(struct ablkcipher_request *req)
{
    return atmel_aes_crypt(req,
        AES_FLAGS_ENCRYPT | AES_FLAGS_CFB | AES_FLAGS_CFB64);
}

static int atmel_aes_cfb64_decrypt(struct ablkcipher_request *req)
{
    return atmel_aes_crypt(req,
        AES_FLAGS_CFB | AES_FLAGS_CFB64);
}

static int atmel_aes_cfb32_encrypt(struct ablkcipher_request *req)
{
    return atmel_aes_crypt(req,
        AES_FLAGS_ENCRYPT | AES_FLAGS_CFB | AES_FLAGS_CFB32);
}

static int atmel_aes_cfb32_decrypt(struct ablkcipher_request *req)
{
    return atmel_aes_crypt(req,
        AES_FLAGS_CFB | AES_FLAGS_CFB32);
}

static int atmel_aes_cfb16_encrypt(struct ablkcipher_request *req)
{
    return atmel_aes_crypt(req,
        AES_FLAGS_ENCRYPT | AES_FLAGS_CFB | AES_FLAGS_CFB16);
}

static int atmel_aes_cfb16_decrypt(struct ablkcipher_request *req)
{
    return atmel_aes_crypt(req,
        AES_FLAGS_CFB | AES_FLAGS_CFB16);
}

static int atmel_aes_cfb8_encrypt(struct ablkcipher_request *req)
{
    return atmel_aes_crypt(req,
        AES_FLAGS_ENCRYPT | AES_FLAGS_CFB | AES_FLAGS_CFB8);
}

static int atmel_aes_cfb8_decrypt(struct ablkcipher_request *req)
{
    return atmel_aes_crypt(req,
        AES_FLAGS_CFB | AES_FLAGS_CFB8);
}

static int atmel_aes_ctr_encrypt(struct ablkcipher_request *req)
{
    return atmel_aes_crypt(req,
        AES_FLAGS_ENCRYPT | AES_FLAGS_CTR);
}

static int atmel_aes_ctr_decrypt(struct ablkcipher_request *req)
{
    return atmel_aes_crypt(req,
        AES_FLAGS_CTR);
}
/*
 * atmel_aes_cra_init() - transform constructor.
 * Reserves room for a per-request atmel_aes_reqctx in every request.
 */
static int atmel_aes_cra_init(struct crypto_tfm *tfm)
{
    tfm->crt_ablkcipher.reqsize = sizeof(struct atmel_aes_reqctx);
    return 0;
}
/* Transform destructor: nothing to release, kept for the cra_exit slot. */
static void atmel_aes_cra_exit(struct crypto_tfm *tfm)
{
}
/*
 * Algorithms unconditionally registered for every hardware revision.
 * All are async ablkciphers backed by this driver; per-mode differences
 * are the block size (the small-segment CFB variants) and the matching
 * alignment mask.  cfb64(aes) lives separately in aes_cfb64_alg because
 * it is only registered when the hardware advertises it.
 */
static struct crypto_alg aes_algs[] = {
/* ECB: no IV. */
{
    .cra_name        = "ecb(aes)",
    .cra_driver_name    = "atmel-ecb-aes",
    .cra_priority        = 100,
    .cra_flags        = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
    .cra_blocksize        = AES_BLOCK_SIZE,
    .cra_ctxsize        = sizeof(struct atmel_aes_ctx),
    .cra_alignmask        = 0xf,
    .cra_type        = &crypto_ablkcipher_type,
    .cra_module        = THIS_MODULE,
    .cra_init        = atmel_aes_cra_init,
    .cra_exit        = atmel_aes_cra_exit,
    .cra_u.ablkcipher = {
        .min_keysize    = AES_MIN_KEY_SIZE,
        .max_keysize    = AES_MAX_KEY_SIZE,
        .setkey        = atmel_aes_setkey,
        .encrypt    = atmel_aes_ecb_encrypt,
        .decrypt    = atmel_aes_ecb_decrypt,
    }
},
/* CBC. */
{
    .cra_name        = "cbc(aes)",
    .cra_driver_name    = "atmel-cbc-aes",
    .cra_priority        = 100,
    .cra_flags        = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
    .cra_blocksize        = AES_BLOCK_SIZE,
    .cra_ctxsize        = sizeof(struct atmel_aes_ctx),
    .cra_alignmask        = 0xf,
    .cra_type        = &crypto_ablkcipher_type,
    .cra_module        = THIS_MODULE,
    .cra_init        = atmel_aes_cra_init,
    .cra_exit        = atmel_aes_cra_exit,
    .cra_u.ablkcipher = {
        .min_keysize    = AES_MIN_KEY_SIZE,
        .max_keysize    = AES_MAX_KEY_SIZE,
        .ivsize        = AES_BLOCK_SIZE,
        .setkey        = atmel_aes_setkey,
        .encrypt    = atmel_aes_cbc_encrypt,
        .decrypt    = atmel_aes_cbc_decrypt,
    }
},
/* OFB. */
{
    .cra_name        = "ofb(aes)",
    .cra_driver_name    = "atmel-ofb-aes",
    .cra_priority        = 100,
    .cra_flags        = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
    .cra_blocksize        = AES_BLOCK_SIZE,
    .cra_ctxsize        = sizeof(struct atmel_aes_ctx),
    .cra_alignmask        = 0xf,
    .cra_type        = &crypto_ablkcipher_type,
    .cra_module        = THIS_MODULE,
    .cra_init        = atmel_aes_cra_init,
    .cra_exit        = atmel_aes_cra_exit,
    .cra_u.ablkcipher = {
        .min_keysize    = AES_MIN_KEY_SIZE,
        .max_keysize    = AES_MAX_KEY_SIZE,
        .ivsize        = AES_BLOCK_SIZE,
        .setkey        = atmel_aes_setkey,
        .encrypt    = atmel_aes_ofb_encrypt,
        .decrypt    = atmel_aes_ofb_decrypt,
    }
},
/* CFB with the default 128-bit segment size. */
{
    .cra_name        = "cfb(aes)",
    .cra_driver_name    = "atmel-cfb-aes",
    .cra_priority        = 100,
    .cra_flags        = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
    .cra_blocksize        = AES_BLOCK_SIZE,
    .cra_ctxsize        = sizeof(struct atmel_aes_ctx),
    .cra_alignmask        = 0xf,
    .cra_type        = &crypto_ablkcipher_type,
    .cra_module        = THIS_MODULE,
    .cra_init        = atmel_aes_cra_init,
    .cra_exit        = atmel_aes_cra_exit,
    .cra_u.ablkcipher = {
        .min_keysize    = AES_MIN_KEY_SIZE,
        .max_keysize    = AES_MAX_KEY_SIZE,
        .ivsize        = AES_BLOCK_SIZE,
        .setkey        = atmel_aes_setkey,
        .encrypt    = atmel_aes_cfb_encrypt,
        .decrypt    = atmel_aes_cfb_decrypt,
    }
},
/* CFB32: 4-byte segments, hence the 0x3 alignment mask. */
{
    .cra_name        = "cfb32(aes)",
    .cra_driver_name    = "atmel-cfb32-aes",
    .cra_priority        = 100,
    .cra_flags        = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
    .cra_blocksize        = CFB32_BLOCK_SIZE,
    .cra_ctxsize        = sizeof(struct atmel_aes_ctx),
    .cra_alignmask        = 0x3,
    .cra_type        = &crypto_ablkcipher_type,
    .cra_module        = THIS_MODULE,
    .cra_init        = atmel_aes_cra_init,
    .cra_exit        = atmel_aes_cra_exit,
    .cra_u.ablkcipher = {
        .min_keysize    = AES_MIN_KEY_SIZE,
        .max_keysize    = AES_MAX_KEY_SIZE,
        .ivsize        = AES_BLOCK_SIZE,
        .setkey        = atmel_aes_setkey,
        .encrypt    = atmel_aes_cfb32_encrypt,
        .decrypt    = atmel_aes_cfb32_decrypt,
    }
},
/* CFB16: 2-byte segments. */
{
    .cra_name        = "cfb16(aes)",
    .cra_driver_name    = "atmel-cfb16-aes",
    .cra_priority        = 100,
    .cra_flags        = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
    .cra_blocksize        = CFB16_BLOCK_SIZE,
    .cra_ctxsize        = sizeof(struct atmel_aes_ctx),
    .cra_alignmask        = 0x1,
    .cra_type        = &crypto_ablkcipher_type,
    .cra_module        = THIS_MODULE,
    .cra_init        = atmel_aes_cra_init,
    .cra_exit        = atmel_aes_cra_exit,
    .cra_u.ablkcipher = {
        .min_keysize    = AES_MIN_KEY_SIZE,
        .max_keysize    = AES_MAX_KEY_SIZE,
        .ivsize        = AES_BLOCK_SIZE,
        .setkey        = atmel_aes_setkey,
        .encrypt    = atmel_aes_cfb16_encrypt,
        .decrypt    = atmel_aes_cfb16_decrypt,
    }
},
/* CFB8: single-byte segments, no alignment requirement. */
{
    .cra_name        = "cfb8(aes)",
    .cra_driver_name    = "atmel-cfb8-aes",
    .cra_priority        = 100,
    .cra_flags        = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
    .cra_blocksize        = CFB8_BLOCK_SIZE,
    .cra_ctxsize        = sizeof(struct atmel_aes_ctx),
    .cra_alignmask        = 0x0,
    .cra_type        = &crypto_ablkcipher_type,
    .cra_module        = THIS_MODULE,
    .cra_init        = atmel_aes_cra_init,
    .cra_exit        = atmel_aes_cra_exit,
    .cra_u.ablkcipher = {
        .min_keysize    = AES_MIN_KEY_SIZE,
        .max_keysize    = AES_MAX_KEY_SIZE,
        .ivsize        = AES_BLOCK_SIZE,
        .setkey        = atmel_aes_setkey,
        .encrypt    = atmel_aes_cfb8_encrypt,
        .decrypt    = atmel_aes_cfb8_decrypt,
    }
},
/* CTR. */
{
    .cra_name        = "ctr(aes)",
    .cra_driver_name    = "atmel-ctr-aes",
    .cra_priority        = 100,
    .cra_flags        = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
    .cra_blocksize        = AES_BLOCK_SIZE,
    .cra_ctxsize        = sizeof(struct atmel_aes_ctx),
    .cra_alignmask        = 0xf,
    .cra_type        = &crypto_ablkcipher_type,
    .cra_module        = THIS_MODULE,
    .cra_init        = atmel_aes_cra_init,
    .cra_exit        = atmel_aes_cra_exit,
    .cra_u.ablkcipher = {
        .min_keysize    = AES_MIN_KEY_SIZE,
        .max_keysize    = AES_MAX_KEY_SIZE,
        .ivsize        = AES_BLOCK_SIZE,
        .setkey        = atmel_aes_setkey,
        .encrypt    = atmel_aes_ctr_encrypt,
        .decrypt    = atmel_aes_ctr_decrypt,
    }
},
};
/*
 * cfb64(aes): registered separately, only when atmel_aes_get_cap()
 * reports caps.has_cfb64 for the detected hardware revision.
 */
static struct crypto_alg aes_cfb64_alg = {
    .cra_name        = "cfb64(aes)",
    .cra_driver_name    = "atmel-cfb64-aes",
    .cra_priority        = 100,
    .cra_flags        = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
    .cra_blocksize        = CFB64_BLOCK_SIZE,
    .cra_ctxsize        = sizeof(struct atmel_aes_ctx),
    .cra_alignmask        = 0x7,
    .cra_type        = &crypto_ablkcipher_type,
    .cra_module        = THIS_MODULE,
    .cra_init        = atmel_aes_cra_init,
    .cra_exit        = atmel_aes_cra_exit,
    .cra_u.ablkcipher = {
        .min_keysize    = AES_MIN_KEY_SIZE,
        .max_keysize    = AES_MAX_KEY_SIZE,
        .ivsize        = AES_BLOCK_SIZE,
        .setkey        = atmel_aes_setkey,
        .encrypt    = atmel_aes_cfb64_encrypt,
        .decrypt    = atmel_aes_cfb64_decrypt,
    }
};
/* Tasklet: dequeue and start the next pending request, if any. */
static void atmel_aes_queue_task(unsigned long data)
{
    struct atmel_aes_dev *dd = (struct atmel_aes_dev *)data;

    atmel_aes_handle_queue(dd, NULL);
}
/*
 * atmel_aes_done_task() - bottom half run after an AES completion IRQ.
 *
 * CPU (PIO) path: drain dd->bufcnt bytes from the output FIFO into the
 * bounce buffer and copy them back to the caller's scatterlist.
 *
 * DMA path: tear down the finished transfer; if data remains and the
 * fast (direct-sg) path is active, advance both scatterlists, then start
 * the next DMA chunk and return early -- completion then happens on a
 * later interrupt.  Otherwise finish the request and kick the queue.
 */
static void atmel_aes_done_task(unsigned long data)
{
    struct atmel_aes_dev *dd = (struct atmel_aes_dev *) data;
    int err;

    if (!(dd->flags & AES_FLAGS_DMA)) {
        /* PIO completion: results are still in the device FIFO. */
        atmel_aes_read_n(dd, AES_ODATAR(0), (u32 *) dd->buf_out,
                dd->bufcnt >> 2);
        if (sg_copy_from_buffer(dd->out_sg, dd->nb_out_sg,
            dd->buf_out, dd->bufcnt))
            err = 0;
        else
            err = -EINVAL;
        goto cpu_end;
    }

    err = atmel_aes_crypt_dma_stop(dd);
    /* An error recorded during the DMA callback takes precedence. */
    err = dd->err ? : err;
    if (dd->total && !err) {
        if (dd->flags & AES_FLAGS_FAST) {
            dd->in_sg = sg_next(dd->in_sg);
            dd->out_sg = sg_next(dd->out_sg);
            if (!dd->in_sg || !dd->out_sg)
                err = -EINVAL;
        }
        if (!err)
            err = atmel_aes_crypt_dma_start(dd);
        if (!err)
            return; /* DMA started. Not finishing. */
    }
cpu_end:
    atmel_aes_finish_req(dd, err);
    atmel_aes_handle_queue(dd, NULL);
}
/*
 * atmel_aes_irq() - top-half interrupt handler.
 *
 * Reads the status register, masks (IDR) every interrupt source that
 * fired, and defers the real work to the done tasklet -- but only when a
 * request is actually in flight (AES_FLAGS_BUSY); a spurious completion
 * is logged instead.  Returns IRQ_NONE when none of our enabled sources
 * fired (the line is shared, see request_irq with IRQF_SHARED in probe).
 */
static irqreturn_t atmel_aes_irq(int irq, void *dev_id)
{
    struct atmel_aes_dev *aes_dd = dev_id;
    u32 reg;

    reg = atmel_aes_read(aes_dd, AES_ISR);
    if (reg & atmel_aes_read(aes_dd, AES_IMR)) {
        atmel_aes_write(aes_dd, AES_IDR, reg);
        if (AES_FLAGS_BUSY & aes_dd->flags)
            tasklet_schedule(&aes_dd->done_task);
        else
            dev_warn(aes_dd->dev, "AES interrupt when no active requests.\n");
        return IRQ_HANDLED;
    }
    return IRQ_NONE;
}
/*
 * Unregister everything atmel_aes_register_algs() registered, including
 * the optional cfb64 algorithm when this hardware supports it.
 */
static void atmel_aes_unregister_algs(struct atmel_aes_dev *dd)
{
    int i;

    for (i = 0; i < ARRAY_SIZE(aes_algs); i++)
        crypto_unregister_alg(&aes_algs[i]);
    if (dd->caps.has_cfb64)
        crypto_unregister_alg(&aes_cfb64_alg);
}
/*
 * atmel_aes_register_algs() - register all supported algorithms.
 *
 * Registers the common aes_algs[] table, then cfb64 when the hardware
 * advertises it.  On failure, unwinds: if cfb64 registration failed, i is
 * forced to ARRAY_SIZE(aes_algs) so the cleanup loop unregisters every
 * entry of the table; if the table itself failed at index i, only the
 * already-registered entries [0, i) are unregistered.
 */
static int atmel_aes_register_algs(struct atmel_aes_dev *dd)
{
    int err, i, j;

    for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
        err = crypto_register_alg(&aes_algs[i]);
        if (err)
            goto err_aes_algs;
    }

    if (dd->caps.has_cfb64) {
        err = crypto_register_alg(&aes_cfb64_alg);
        if (err)
            goto err_aes_cfb64_alg;
    }

    return 0;

err_aes_cfb64_alg:
    i = ARRAY_SIZE(aes_algs);
err_aes_algs:
    for (j = 0; j < i; j++)
        crypto_unregister_alg(&aes_algs[j]);

    return err;
}
/*
 * atmel_aes_get_cap() - derive feature capabilities from the IP revision.
 *
 * Defaults to the minimum feature set, then upgrades based on the major
 * hardware version read earlier into dd->hw_version.  Unknown revisions
 * keep the minimum capabilities with a warning.
 */
static void atmel_aes_get_cap(struct atmel_aes_dev *dd)
{
    dd->caps.has_dualbuff = 0;
    dd->caps.has_cfb64 = 0;
    dd->caps.max_burst_size = 1;

    /* keep only major version number */
    switch (dd->hw_version & 0xff0) {
    case 0x130:
        dd->caps.has_dualbuff = 1;
        dd->caps.has_cfb64 = 1;
        dd->caps.max_burst_size = 4;
        break;
    case 0x120:
        break;
    default:
        dev_warn(dd->dev,
            "Unmanaged aes version, set minimum capabilities\n");
        break;
    }
}
#if defined(CONFIG_OF)
/* Device-tree match table (CONFIG_OF builds only). */
static const struct of_device_id atmel_aes_dt_ids[] = {
    { .compatible = "atmel,at91sam9g46-aes" },
    { /* sentinel */ }
};
MODULE_DEVICE_TABLE(of, atmel_aes_dt_ids);
/*
 * atmel_aes_of_init() - build platform data for a device-tree probe.
 *
 * Allocates an empty crypto_platform_data (with an at_dma_slave) via
 * devm, so a populated-but-unconfigured dma_slave exists for the DMA
 * channel lookup.  Returns ERR_PTR(-EINVAL) without a DT node, or
 * ERR_PTR(-ENOMEM) on allocation failure.
 */
static struct crypto_platform_data *atmel_aes_of_init(struct platform_device *pdev)
{
    struct device_node *np = pdev->dev.of_node;
    struct crypto_platform_data *pdata;

    if (!np) {
        dev_err(&pdev->dev, "device node not found\n");
        return ERR_PTR(-EINVAL);
    }

    pdata = devm_kzalloc(&pdev->dev, sizeof(*pdata), GFP_KERNEL);
    if (!pdata) {
        dev_err(&pdev->dev, "could not allocate memory for pdata\n");
        return ERR_PTR(-ENOMEM);
    }

    pdata->dma_slave = devm_kzalloc(&pdev->dev,
                    sizeof(*(pdata->dma_slave)),
                    GFP_KERNEL);
    if (!pdata->dma_slave) {
        dev_err(&pdev->dev, "could not allocate memory for dma_slave\n");
        /* devm would free this at detach anyway; freed early here. */
        devm_kfree(&pdev->dev, pdata);
        return ERR_PTR(-ENOMEM);
    }

    return pdata;
}
#else
/* Non-OF builds: DT probing is unsupported, always fail. */
static inline struct crypto_platform_data *atmel_aes_of_init(struct platform_device *pdev)
{
    return ERR_PTR(-EINVAL);
}
#endif
/*
 * atmel_aes_probe() - bring up one AES hardware instance.
 *
 * Setup order: platform data (board file or DT) -> device struct and
 * tasklets -> MMIO resource -> IRQ -> clock -> ioremap -> hardware
 * version / capabilities -> bounce buffers -> DMA channels -> add to the
 * global device list -> register crypto algorithms.
 *
 * The error labels unwind in strict reverse order of the setup steps;
 * note that res_err and aes_irq_err fall through to the tasklet teardown,
 * and that the tasklets are killed even on early failures because they
 * are initialized before any resource acquisition.
 */
static int atmel_aes_probe(struct platform_device *pdev)
{
    struct atmel_aes_dev *aes_dd;
    struct crypto_platform_data *pdata;
    struct device *dev = &pdev->dev;
    struct resource *aes_res;
    unsigned long aes_phys_size;
    int err;

    pdata = pdev->dev.platform_data;
    if (!pdata) {
        /* No board-file data: fall back to device tree. */
        pdata = atmel_aes_of_init(pdev);
        if (IS_ERR(pdata)) {
            err = PTR_ERR(pdata);
            goto aes_dd_err;
        }
    }

    if (!pdata->dma_slave) {
        err = -ENXIO;
        goto aes_dd_err;
    }

    aes_dd = kzalloc(sizeof(struct atmel_aes_dev), GFP_KERNEL);
    if (aes_dd == NULL) {
        dev_err(dev, "unable to alloc data struct.\n");
        err = -ENOMEM;
        goto aes_dd_err;
    }

    aes_dd->dev = dev;

    platform_set_drvdata(pdev, aes_dd);

    INIT_LIST_HEAD(&aes_dd->list);

    tasklet_init(&aes_dd->done_task, atmel_aes_done_task,
                    (unsigned long)aes_dd);
    tasklet_init(&aes_dd->queue_task, atmel_aes_queue_task,
                    (unsigned long)aes_dd);

    crypto_init_queue(&aes_dd->queue, ATMEL_AES_QUEUE_LENGTH);

    aes_dd->irq = -1;

    /* Get the base address */
    aes_res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
    if (!aes_res) {
        dev_err(dev, "no MEM resource info\n");
        err = -ENODEV;
        goto res_err;
    }
    aes_dd->phys_base = aes_res->start;
    aes_phys_size = resource_size(aes_res);

    /* Get the IRQ */
    aes_dd->irq = platform_get_irq(pdev,  0);
    if (aes_dd->irq < 0) {
        dev_err(dev, "no IRQ resource info\n");
        err = aes_dd->irq;
        goto aes_irq_err;
    }

    err = request_irq(aes_dd->irq, atmel_aes_irq, IRQF_SHARED, "atmel-aes",
            aes_dd);
    if (err) {
        dev_err(dev, "unable to request aes irq.\n");
        goto aes_irq_err;
    }

    /* Initializing the clock */
    aes_dd->iclk = clk_get(&pdev->dev, "aes_clk");
    if (IS_ERR(aes_dd->iclk)) {
        dev_err(dev, "clock intialization failed.\n");
        err = PTR_ERR(aes_dd->iclk);
        goto clk_err;
    }

    aes_dd->io_base = ioremap(aes_dd->phys_base, aes_phys_size);
    if (!aes_dd->io_base) {
        dev_err(dev, "can't ioremap\n");
        err = -ENOMEM;
        goto aes_io_err;
    }

    atmel_aes_hw_version_init(aes_dd);

    atmel_aes_get_cap(aes_dd);

    err = atmel_aes_buff_init(aes_dd);
    if (err)
        goto err_aes_buff;

    err = atmel_aes_dma_init(aes_dd, pdata);
    if (err)
        goto err_aes_dma;

    spin_lock(&atmel_aes.lock);
    list_add_tail(&aes_dd->list, &atmel_aes.dev_list);
    spin_unlock(&atmel_aes.lock);

    err = atmel_aes_register_algs(aes_dd);
    if (err)
        goto err_algs;

    dev_info(dev, "Atmel AES - Using %s, %s for DMA transfers\n",
            dma_chan_name(aes_dd->dma_lch_in.chan),
            dma_chan_name(aes_dd->dma_lch_out.chan));

    return 0;

err_algs:
    spin_lock(&atmel_aes.lock);
    list_del(&aes_dd->list);
    spin_unlock(&atmel_aes.lock);
    atmel_aes_dma_cleanup(aes_dd);
err_aes_dma:
    atmel_aes_buff_cleanup(aes_dd);
err_aes_buff:
    iounmap(aes_dd->io_base);
aes_io_err:
    clk_put(aes_dd->iclk);
clk_err:
    free_irq(aes_dd->irq, aes_dd);
aes_irq_err:
res_err:
    tasklet_kill(&aes_dd->done_task);
    tasklet_kill(&aes_dd->queue_task);
    kfree(aes_dd);
    aes_dd = NULL;
aes_dd_err:
    dev_err(dev, "initialization failed.\n");

    return err;
}
/*
 * atmel_aes_remove() - tear down one AES hardware instance.
 *
 * Reverse of probe: drop the device from the global list, unregister the
 * crypto algorithms, kill the tasklets, release DMA channels, unmap MMIO,
 * drop the clock reference, free the IRQ, and free the device struct.
 *
 * Bug fix: aes_dd was declared as a function-local *static* pointer,
 * turning it into hidden shared state across all invocations (and all
 * device instances) for no benefit; it is now an ordinary automatic
 * variable.  The dead "aes_dd = NULL" after kfree() (a store to a local
 * about to go out of scope) is dropped for the same reason.
 */
static int atmel_aes_remove(struct platform_device *pdev)
{
    struct atmel_aes_dev *aes_dd;

    aes_dd = platform_get_drvdata(pdev);
    if (!aes_dd)
        return -ENODEV;

    spin_lock(&atmel_aes.lock);
    list_del(&aes_dd->list);
    spin_unlock(&atmel_aes.lock);

    atmel_aes_unregister_algs(aes_dd);

    tasklet_kill(&aes_dd->done_task);
    tasklet_kill(&aes_dd->queue_task);

    atmel_aes_dma_cleanup(aes_dd);

    iounmap(aes_dd->io_base);

    clk_put(aes_dd->iclk);

    if (aes_dd->irq > 0)
        free_irq(aes_dd->irq, aes_dd);

    kfree(aes_dd);

    return 0;
}
/* Platform driver glue; of_match_ptr() compiles to NULL without CONFIG_OF. */
static struct platform_driver atmel_aes_driver = {
    .probe        = atmel_aes_probe,
    .remove        = atmel_aes_remove,
    .driver        = {
        .name    = "atmel_aes",
        .of_match_table = of_match_ptr(atmel_aes_dt_ids),
    },
};

module_platform_driver(atmel_aes_driver);

MODULE_DESCRIPTION("Atmel AES hw acceleration support.");
MODULE_LICENSE("GPL v2");
MODULE_AUTHOR("<NAME> - Eukrรฉa Electromatique");
|
cfsandoval/chartjs_con_ios | test/tabris/util-fonts.test.js | import {expect} from '../test';
import {fontStringToObject, fontObjectToString} from '../../src/tabris/util-fonts';
// Unit tests for the font-string <-> font-object converters.
describe('util-fonts', function() {

  describe('fontStringToObject', function() {

    // Parse immediately -- used for asserting on the parsed result.
    let parse = function(str) {
      return fontStringToObject(str);
    };

    // Defer parsing behind a thunk -- used for `.to.throw()` assertions.
    let parsing = function(str) {
      return function() {
        return fontStringToObject(str);
      };
    };

    it('parses valid sizes', function() {
      expect(parse('12px').size).to.equal(12);
      expect(parse('12px 20px').size).to.equal(12);
      expect(parse('8px ').size).to.equal(8);
      expect(parse(' 18px').size).to.equal(18);
      expect(parse(' 50px  ').size).to.equal(50);
      expect(parse('12px').size).to.equal(12);
      expect(parse('italic 12px').size).to.equal(12);
      expect(parse('bold italic 12px').size).to.equal(12);
      expect(parse('12px Arial, Fantasy').size).to.equal(12);
      expect(parse("12px 'Times New Roman', Arial").size).to.equal(12);
      expect(parse('12px "Times New Roman", Arial').size).to.equal(12);
    });

    it('throws error for strings without valid size', function() {
      // Only a `<number>px` token (non-negative) counts as a size.
      expect(parsing('12pxf')).to.throw();
      expect(parsing('12p x')).to.throw();
      expect(parsing('-1px')).to.throw();
      expect(parsing('foo13px')).to.throw();
      expect(parsing('8 px ')).to.throw();
      expect(parsing(' 18pt')).to.throw();
      expect(parsing(' px ')).to.throw();
      expect(parsing('23')).to.throw();
    });

    it('parses valid styles', function() {
      // Style tokens are only recognized *before* the size.
      expect(parse('italic 12px').style).to.equal('italic');
      expect(parse('bold italic 12px').style).to.equal('italic');
      expect(parse('italic bold 12px').style).to.equal('italic');
      expect(parse('italic bold 12px Arial, Times').style).to.equal('italic');
      expect(parse('normal normal 12px').style).to.equal('normal');
      expect(parse('bold normal 12px').style).to.equal('normal');
      expect(parse('normal 12px').style).to.equal('normal');
      expect(parse('12px').style).to.equal('normal');
      expect(parse('12px italic').style).to.equal('normal');
    });

    it('parses valid weight', function() {
      // Like style, weight tokens must precede the size.
      expect(parse('bold 12px').weight).to.equal('bold');
      expect(parse('black 12px').weight).to.equal('black');
      expect(parse('light italic 12px').weight).to.equal('light');
      expect(parse(' italic  thin 12px').weight).to.equal('thin');
      expect(parse(' italic medium 12px Arial, Times').weight).to.equal('medium');
      expect(parse('normal normal 12px').weight).to.equal('normal');
      expect(parse('italic normal 12px').weight).to.equal('normal');
      expect(parse('normal 12px').weight).to.equal('normal');
      expect(parse('12px').weight).to.equal('normal');
      expect(parse('12px bold').weight).to.equal('normal');
    });

    it('throws error for strings with invalid styles', function() {
      expect(parsing('bold-italic 12px')).to.throw();
      expect(parsing('bold.italic 12px')).to.throw();
      expect(parsing('bold bold 12px')).to.throw();
      expect(parsing('italic italic 12px')).to.throw();
      expect(parsing('bold italic normal 12px')).to.throw();
      expect(parsing('normal normal normal 12px')).to.throw();
      expect(parsing('bold0italic 12px')).to.throw();
      expect(parsing('foobar 12px')).to.throw();
      // Unknown tokens *after* the size are treated as family names.
      expect(parsing('12px foobar')).not.to.throw();
    });

    it('parses valid font families', function() {
      expect(parse('12px  ').family).to.eql(['']);
      expect(parse('12px Arial').family).to.eql(['Arial']);
      expect(parse('bold italic 12px Arial').family).to.eql(['Arial']);
      expect(parse('12px Arial, Fantasy').family).to.eql(['Arial', 'Fantasy']);
      expect(parse('12px Times New Roman,Fantasy').family).to.eql(['Times New Roman', 'Fantasy']);
      expect(parse('12px   Arial ,   Fantasy').family).to.eql(['Arial', 'Fantasy']);
      expect(parse('12px bold italic').family).to.eql(['bold italic']);
      expect(parse('12px Arial, Times New Roman ,Fantasy').family)
        .to.eql(['Arial', 'Times New Roman', 'Fantasy']);
      // Quoted names (single or double) are unquoted and trimmed.
      expect(parse('12px \' Arial \', "Times New Roman",Fantasy').family)
        .to.eql(['Arial', 'Times New Roman', 'Fantasy']);
    });

    it('throws error for strings with invalid family syntax', function() {
      expect(parsing('12px Arial "Times New Roman", Fantasy')).to.throw();
      expect(parsing('12px Arial "Times New Roman", Fantasy,')).to.throw();
      expect(parsing('12px\'Arial\', "Times New Roman", Fantasy')).to.throw();
      expect(parsing('12px Arial, "Times New Roman\', Fantasy')).to.throw();
      expect(parsing('12px Arial, foo "Times New Roman", Fantasy')).to.throw();
      expect(parsing('12px Arial, "Times New Roman" bar, Fantasy')).to.throw();
      expect(parsing('12px Ar\'ial, "Times New Roman", Fantasy')).to.throw();
      expect(parsing('12px Arial, Times New Roman", Fantasy')).to.throw();
      expect(parsing('12px Arial, "Times New Roman, Fantasy')).to.throw();
      expect(parsing('12px Arial,, Fantasy')).to.throw();
    });

  });

  describe('fontObjectToString', function() {

    let decode = function(arr) {
      return fontObjectToString(arr);
    };

    it('creates string from object', function() {
      // Output order is: style weight size family (family omitted if empty).
      expect(decode({family: ['Arial'], size: 12, weight: 'normal', style: 'normal'}))
        .to.equal('normal normal 12px Arial');
      expect(decode({
        family: ['Arial', 'Times New Roman'],
        size: 12,
        weight: 'normal',
        style: 'normal'
      })).to.equal('normal normal 12px Arial, Times New Roman');
      expect(decode({family: [''], size: 12, weight: 'normal', style: 'normal'}))
        .to.equal('normal normal 12px');
      expect(decode({family: [''], size: 12, weight: 'bold', style: 'normal'}))
        .to.equal('normal bold 12px');
      expect(decode({family: [''], size: 12, weight: 'normal', style: 'italic'}))
        .to.equal('italic normal 12px');
      expect(decode({family: [''], size: 12, weight: 'thin', style: 'italic'}))
        .to.equal('italic thin 12px');
      expect(decode({family: ['Arial'], size: 12, weight: 'medium', style: 'italic'}))
        .to.equal('italic medium 12px Arial');
    });

  });

});
|
Lockyz-Dev/JoiBoi | commands/kick.js | <reponame>Lockyz-Dev/JoiBoi<filename>commands/kick.js
const { embedColor } = require("../info.js");
const { MessageEmbed } = require("discord.js");
const { noBotPerms } = require("../utils/errors");
exports.run = async (client, message, args) => {
let perms = message.guild.me.permissions;
if (!perms.has("KICK_MEMBERS")) return noBotPerms(message, "KICK_MEMBERS");
const table2 = sql.prepare("SELECT count(*) FROM sqlite_master WHERE type='table' AND name = 'roleSettings';").get();
client.getroleSet = sql.prepare("SELECT * FROM roleSettings WHERE guildID = ?");
client.setroleSet = sql.prepare("INSERT OR REPLACE INTO roleSettings (guildID, adminID, modID, muteID, autoID) VALUES (@guildID, @adminID, @modID, @muteID, @autoID);");
let roleSet;
roleSet = client.getroleSet.get(guild.id);
if(message.member.hasPermission("KICK_MEMBERS") || message.member.hasPermission("ADMINISTRATOR") || message.member.roles.has(roleSet.adminID)) {
let member = message.mentions.members.first() || message.guild.members.cache.get(args[0]);
if(!member)
return message.channel.send("Please mention a valid member of this server")
if(!member.kickable)
return message.channel.send("I cannot kick this user! Do they have a higher role? Do I have kick permissions?")
let reason = args.slice(1).join(" ");
if(!reason) reason = "No reason provided";
member.kick(reason)
.catch(error => message.channel.send(`Sorry ${message.author} I couldn"t kick because of : ${error}`)
);
message.channel.send(`${member} has been kicked by **${message.author.username}** for \`${reason}\``)
const statsEmbed = new MessageEmbed()
.setAuthor(`Kick | ${member.user.tag}`, member.user.displayAvatarURL())
.setColor(embedColor)
.addField(`Kicked By:`, message.author, true)
.addField(`Reason:`, reason, true)
.setFooter(`ID: ${member.id}`)
.setTimestamp();
client.channels.cache.get(`697496140563742731`).send(statsEmbed);
}
else {
message.channel.send("You don\'t have permission to use this command");
}
};
// Command metadata consumed by the bot's command loader / help system.
exports.help = {
  name: "kick",
  aliases: [],
  description: "Kick user for a specified reason.",
  usage: "kick {user} [Reason]",
  category: "moderation",
  datause: "false"
};
gabrielmbs/Tamburetei | prog2/implementacoes/comparable/Main.java | <reponame>gabrielmbs/Tamburetei<filename>prog2/implementacoes/comparable/Main.java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/**
* Classe principal para exemplificar comparable em Java.
*
* @author <NAME>
*/
public class Main {

    public static void main(String[] args) {
        // Build an unsorted list of movies; Filme implements Comparable,
        // so the list has a natural ordering.
        List<Filme> filmes = new ArrayList<>(Arrays.asList(
                new Filme("Homem-Aranha", 2002),
                new Filme("Joรฃo e Maria: caรงadores de bruxas", 2013),
                new Filme("A volta dos que nรฃo foram", 2002)));

        // Sorts using each Filme's compareTo (release year).
        Collections.sort(filmes);

        // Print the list in its sorted order.
        System.out.println(Arrays.toString(filmes.toArray()));
    }
}
|
Cocopyth/foodshare | foodshare/handlers/cook_conversation/conclusion_selection.py | <reponame>Cocopyth/foodshare<filename>foodshare/handlers/cook_conversation/conclusion_selection.py
from telegram import InlineKeyboardButton, InlineKeyboardMarkup, ParseMode
from telegram.ext import ConversationHandler
from foodshare.bdd.database_communication import (
add_meal,
get_user_from_chat_id,
)
from foodshare.handlers.cook_conversation import ConversationStage, get_message
from foodshare.handlers.start_conversation.first_message import first_message
from foodshare.job_manager.meal_manager import handle_meals
from foodshare.keyboards.confirmation_keyboard import confirmation_keyboard
from foodshare.utils.gif_test import get_gif_url
buttons = [
[InlineKeyboardButton(text='Confirm', callback_data='confirm')],
[InlineKeyboardButton(text='Modify some infos', callback_data='modify')],
]
last_keyboard = InlineKeyboardMarkup(buttons)
def ask_for_conclusion(update, context, highlight=None):
    """Show the final meal summary and ask the cook to confirm or modify.

    Stores the triggering callback query in ``user_data`` so a later
    free-text message (see ``additional_message``) can edit this same
    message, and flags ``confirmation_stage`` before building the text.

    Returns ``ConversationStage.CONFIRMATION``.
    """
    ud = context.user_data
    query = update.callback_query
    ud['last_query'] = query
    # NOTE(review): this epilog is duplicated in additional_message();
    # keep the two in sync (or factor into a module constant).
    epilog = (
        'Now I will send a message to people if you want'
        + ' to add a text message just send it to me. '
        + 'Press confirm when you\'re ready!'
    )
    context.user_data['confirmation_stage'] = True
    text = get_message(context, epilog=epilog, highlight=highlight)
    if (
        update.message is None
    ):  # reply doesn't work if there is no message to reply to
        update.callback_query.edit_message_text(
            text=text,
            reply_markup=last_keyboard,
            parse_mode=ParseMode.MARKDOWN,
        )
    else:
        update.message.reply_text(
            text=text,
            reply_markup=last_keyboard,
            parse_mode=ParseMode.MARKDOWN,
        )
    return ConversationStage.CONFIRMATION
def additional_message(update, context):
    """Attach the cook's free-text message to the meal announcement.

    Saves the text as ``message2others``, deletes the user's chat message
    to keep the conversation tidy, then re-renders the summary on the
    message referenced by the stored ``last_query``.

    Returns ``ConversationStage.CONFIRMATION`` (stays on the same stage).
    """
    bot = context.bot
    ud = context.user_data
    ud['message2others'] = update.message.text
    # Remove the raw user message; its content now lives in the summary.
    bot.deleteMessage(update.message.chat_id, update.message.message_id)
    query = ud['last_query']
    # NOTE(review): duplicated from ask_for_conclusion(); keep in sync.
    epilog = (
        'Now I will send a message to people if you want'
        + ' to add a text message just send it to me. '
        + 'Press confirm when you\'re ready!'
    )
    text = get_message(context, epilog=epilog)
    bot.edit_message_text(
        text=text,
        chat_id=query.message.chat_id,
        message_id=query.message.message_id,
        reply_markup=last_keyboard,
        parse_mode=ParseMode.MARKDOWN,
    )
    return ConversationStage.CONFIRMATION
def end(update, context):
    """Confirm the meal: persist it, notify others and end the conversation.

    Deletes the summary message, optionally sends a GIF matching the meal
    name, stores the meal in the database, clears the per-user state and
    shows the start menu again.

    Returns ``ConversationHandler.END``.
    """
    # sticker_id = (
    #     'CAACAgIAAxkBAAIJNF6N7Cj5oZ7qs9hrRce8HdLTn'
    #     '7FdAAKcAgACa8TKChTuhP744omRGAQ'
    # )  # Lazybone ID
    bot = context.bot
    chat_id = context.user_data['chat_id']
    ud = context.user_data
    bot.deleteMessage(chat_id, ud['last_message'].message_id)
    who_cooks = get_user_from_chat_id(chat_id)
    gif_url = get_gif_url(ud['meal_name'])
    if gif_url is not None:
        bot.send_document(chat_id=chat_id, document=gif_url)
    add_meal(who_cooks, ud, gif_url)
    # bot.send_sticker(chat_id, sticker_id)
    ud.clear()
    prefix = f'Messages sent : I will update you on the answers \n'
    # NOTE(review): busy-wait polling handle_meals() in a tight loop --
    # burns CPU until the job manager reports completion; consider a
    # blocking call or a scheduled retry instead.
    while not handle_meals():
        pass
    first_message(update, context, prefix=prefix)
    return ConversationHandler.END
def modify_infos(update, context):
    """Re-show the summary with a keyboard for picking a field to change.

    Returns ``ConversationStage.MODIFICATION``.
    """
    user_data = context.user_data
    user_data['last_query'] = update.callback_query
    # get_message() may depend on this flag, so set it before rendering.
    user_data['confirmation_stage'] = True
    summary = get_message(
        context, epilog='Chose what information you want to modify!'
    )
    send_kwargs = {
        'text': summary,
        'reply_markup': confirmation_keyboard,
        'parse_mode': ParseMode.MARKDOWN,
    }
    if update.message is not None:
        # A reply needs an actual incoming message to anchor to.
        update.message.reply_text(**send_kwargs)
    else:
        # Callback-query updates carry no message, so edit in place.
        update.callback_query.edit_message_text(**send_kwargs)
    return ConversationStage.MODIFICATION
|
wyf0926/car_dev | src/main/java/io/renren/common/utils/SequenceService.java | package io.renren.common.utils;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.support.atomic.RedisAtomicLong;
import org.springframework.stereotype.Service;
import javax.annotation.Resource;
import java.util.Date;
import java.util.concurrent.TimeUnit;
/**
* @author allan
*/
@Service
public class SequenceService {

    @Resource
    private RedisTemplate<String, Long> redisTemplate;

    /**
     * Builds a prefixed identifier: prefix value + epoch milliseconds +
     * 4-digit zero-padded sequence suffix. The NULL prefix yields "".
     *
     * @param prefix which business prefix to use
     * @return the generated identifier (empty string for NULL)
     */
    public String getID(ID_Prefix prefix) {
        StringBuilder sb = new StringBuilder();
        switch (prefix) {
            case NULL:
                return "";
            default:
                sb.append(prefix.getValue());
                sb.append(System.currentTimeMillis());
                sb.append(getSuffix(prefix.name()));
                return sb.toString();
        }
    }

    /**
     * Zero-pads the next sequence value to at least four digits.
     * Values above 9999 are returned unchanged (no truncation).
     */
    private String getSuffix(String key) {
        StringBuilder seq = new StringBuilder(getSequence(key).toString());
        while (seq.length() < 4) {
            seq.insert(0, "0");
        }
        return seq.toString();
    }

    public Long getSequence(String key) {
        return getSequence(key, 1, 1);
    }

    public Long getSequence(String key, int increment) {
        return getSequence(key, increment, 1);
    }

    /**
     * Atomically advances the Redis-backed counter and returns the new value.
     *
     * Bug fix: the previous implementation called getAndAdd(increment) and
     * then re-read the counter with longValue(); between those two Redis
     * round trips a concurrent caller could increment the same key, so two
     * callers could observe identical values (duplicate IDs).
     * addAndGet(increment) is a single atomic INCRBY whose return value is
     * exclusively this caller's.
     *
     * @param key       Redis key backing the counter
     * @param increment how much to advance by
     * @param expire    TTL in seconds to (re)apply when positive
     * @return the counter value after this caller's increment
     */
    public Long getSequence(String key, int increment, long expire) {
        RedisAtomicLong counter = new RedisAtomicLong(key, redisTemplate.getConnectionFactory());
        long next = counter.addAndGet(increment);
        if (expire > 0) {
            // NOTE(review): this refreshes the TTL on every call, so a
            // frequently used counter never expires -- confirm intended.
            counter.expire(expire, TimeUnit.SECONDS);
        }
        return next;
    }

    /** Business prefixes for generated identifiers. */
    public enum ID_Prefix {
        NULL(""),
        ORDER("WX");

        private final String value;

        ID_Prefix(String value) {
            this.value = value;
        }

        /** Case-insensitive lookup by enum name; falls back to NULL. */
        public static ID_Prefix getTablePrefix(String value) {
            for (ID_Prefix status : ID_Prefix.values()) {
                if (status.toString().equalsIgnoreCase(value)) {
                    return status;
                }
            }
            return ID_Prefix.NULL;
        }

        public String getValue() {
            return this.value;
        }
    }
}
|
fakeNetflix/facebook-repo-conceal | first-party/soloader/Elf64_Phdr.java | <reponame>fakeNetflix/facebook-repo-conceal<filename>first-party/soloader/Elf64_Phdr.java
/**
* Copyright (c) 2015-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
// AUTOMATICALLY GENERATED CODE. Regenerate with genstructs.sh.
package com.facebook.soloader;
/**
 * Byte offsets of the fields of an ELF-64 program header (Elf64_Phdr),
 * matching the layout defined by the ELF-64 object file format: two
 * 32-bit fields (p_type, p_flags) followed by six 64-bit fields.
 */
final class Elf64_Phdr {
  public static final int p_type = 0x0;    // segment type (Elf64_Word)
  public static final int p_flags = 0x4;   // segment attributes (Elf64_Word)
  public static final int p_offset = 0x8;  // offset of segment in file (Elf64_Off)
  public static final int p_vaddr = 0x10;  // virtual address in memory (Elf64_Addr)
  public static final int p_paddr = 0x18;  // physical address, if relevant (Elf64_Addr)
  public static final int p_filesz = 0x20; // size of segment in file (Elf64_Xword)
  public static final int p_memsz = 0x28;  // size of segment in memory (Elf64_Xword)
  public static final int p_align = 0x30;  // alignment constraint (Elf64_Xword)
}
|
The0x539/wasp | libc/newlib/libm/machine/pru/isfinite.c | <filename>libc/newlib/libm/machine/pru/isfinite.c
/* SPDX-License-Identifier: BSD-2-Clause-FreeBSD
*
* Copyright (c) 2018-2019 <NAME> <<EMAIL>>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include <math.h>
/* GCC will not generate code calling this function, since the corresponding
builtin will produce code that uses simple ops only. In order to support
linking against TI CLPRU objects, though, provide the function mandated
by TI ABI. */
/* TI CLPRU ABI entry point: returns non-zero when 'a' is neither infinite
 * nor NaN, zero otherwise (delegates to the C99 classification macro). */
int __pruabi_isfinite(double a)
{
    return isfinite(a);
}
|
vinceh121/powercord | src/fake_node_modules/powercord/components/AsyncComponent.js | <gh_stars>10-100
const { React, getModule, getModuleByDisplayName } = require('powercord/webpack');
module.exports = class AsyncComponent extends React.PureComponent {
constructor (props) {
super(props);
this.state = {
Component: null
};
}
async componentDidMount () {
this.setState({
Component: await this.props._provider()
});
}
render () {
const { Component } = this.state;
if (Component) {
return React.createElement(Component, Object.assign({}, this.props, this.props._pass));
}
return this.props._fallback || null;
}
/**
* Creates an AsyncComponent from a promise
* @param {Promise} promise Promise of a React component
*/
static from (promise, fallback) {
return React.memo(
(props) => React.createElement(AsyncComponent, {
_provider: () => promise,
_fallback: fallback,
...props
})
);
}
static fromDisplayName (displayName, fallback) {
return AsyncComponent.from(getModuleByDisplayName(displayName), fallback);
}
static fromModule (filter, fallback) {
return AsyncComponent.from(getModule(filter), fallback);
}
static fromModuleProp (filter, prop, fallback) {
return AsyncComponent.from((async () => (await getModule(filter))[prop])(), fallback);
}
};
|
huluobo11/demo-collection | webService01_Client/src/main/java/com/ssm/webservice/service/package-info.java | <reponame>huluobo11/demo-collection<filename>webService01_Client/src/main/java/com/ssm/webservice/service/package-info.java
@javax.xml.bind.annotation.XmlSchema(namespace = "http://service.webService.ssm.com/")
package com.ssm.webservice.service;
|
ManonGros/colplus-backend | colplus-dao/src/main/java/org/col/db/type2/HstoreIssueCountTypeHandler.java | <filename>colplus-dao/src/main/java/org/col/db/type2/HstoreIssueCountTypeHandler.java
package org.col.db.type2;
import org.col.api.vocab.Issue;
/**
 * Type handler binding the generic hstore enum-count handler to the
 * {@link Issue} vocabulary, so per-issue counts stored in an hstore column
 * map to Issue-keyed counts.
 */
public class HstoreIssueCountTypeHandler extends HstoreEnumCountTypeHandlerBase<Issue> {
    public HstoreIssueCountTypeHandler() {
        super(Issue.class);
    }
}
|
pazamelin/openvino | thirdparty/fluid/modules/gapi/test/common/gapi_stereo_tests.hpp | <filename>thirdparty/fluid/modules/gapi/test/common/gapi_stereo_tests.hpp
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2021 Intel Corporation
#ifndef OPENCV_GAPI_STEREO_TESTS_HPP
#define OPENCV_GAPI_STEREO_TESTS_HPP
#include <opencv2/gapi/stereo.hpp> // fore cv::gapi::StereoOutputFormat
#include "gapi_tests_common.hpp"
#include "gapi_parsers_tests_common.hpp"
namespace opencv_test
{
// Parameterised fixture for the stereo tests. The 6 combined parameters are:
// output format (oF), numDisparities, blockSize, camera baseline, focus
// (focal length) and the Mat comparison functor (cmpF).
GAPI_TEST_FIXTURE(TestGAPIStereo, initMatsRandU, FIXTURE_API(cv::gapi::StereoOutputFormat, int, int, double, double, CompareMats), 6,
                  oF, numDisparities, blockSize, baseline,
                  focus, cmpF)
} // namespace opencv_test
#endif // OPENCV_GAPI_STEREO_TESTS_HPP
|
dllen/WeChatMina | wechat-engine/src/main/java/edu/buaa/scse/niu/wechat/engine/mina/codec/RespondMegEncoder.java | <reponame>dllen/WeChatMina
package edu.buaa.scse.niu.wechat.engine.mina.codec;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.Charset;
import org.apache.mina.core.buffer.IoBuffer;
import edu.buaa.scse.niu.wechat.engine.entity.ChatMessageType.MessageType;
import edu.buaa.scse.niu.wechat.engine.entity.msg.RespondMessage;
/**
 * Encodes a {@link RespondMessage} into its wire form: a 36-byte message id
 * field followed by the message-type enum.
 */
public class RespondMegEncoder extends JsonEncoder<RespondMessage> {

    public RespondMegEncoder() {
        super();
    }

    public RespondMegEncoder(Charset charset) {
        super(charset);
    }

    /**
     * Serialises the message. Null fields are first replaced with safe
     * defaults via {@link #fixNullField(RespondMessage)}.
     */
    @Override
    public byte[] encode(RespondMessage data) {
        fixNullField(data);
        // 100 bytes is only the initial capacity; the buffer auto-expands.
        IoBuffer buffer = IoBuffer.allocate(100, false);
        buffer.setAutoExpand(true);
        try {
            // Fixed field width of 36 characters for the id (presumably a
            // UUID string - TODO confirm against the producer side).
            buffer.putString(data.getMsgId(), 36, encoder);
            buffer.putEnum(data.getType());
        } catch (CharacterCodingException e) {
            // NOTE(review): the encoding failure is only printed and a
            // possibly-empty frame is still returned - consider propagating.
            e.printStackTrace();
        }
        buffer.flip();
        byte[] ret = new byte[buffer.limit()];
        buffer.get(ret);
        return ret;
    }

    /**
     * Replaces null fields with defaults so {@link #encode} never NPEs:
     * a placeholder id and the TEXT message type.
     */
    @Override
    public void fixNullField(RespondMessage data) {
        if (data.getMsgId() == null) {
            data.setMsgId("null id");
        }
        if (data.getType() == null) {
            data.setType(MessageType.TEXT);
        }
    }
}
|
MattHahnDesign/fetch-it | src/attributes/preRequest.js | // Validators
import isFunction from '../validators/isFunction';
/**
 * @description :: Validates that the supplied preRequest hook is callable
 * @param {any} preRequest :: Candidate preRequest hook
 * @return {any} :: The hook itself when it is a function; otherwise null
 *                  (after emitting a console warning)
 */
export const checkPreRequest = (preRequest) => {
  if (isFunction(preRequest)) {
    return preRequest;
  }
  console.warn(
    `Expected a function for preRequest but got ${typeof preRequest}`
  );
  return null;
};
/**
 * @description :: Picks the preRequest hook to use for a fetch execution
 * @param {any} profilePreRequest :: Hook configured on the profile
 * @param {any} preRequest :: Hook configured on the individual fetch
 * @return {any} :: The per-fetch hook when set, else the profile hook,
 *                  else null - the chosen hook is validated via checkPreRequest
 */
export const preparePreRequest = (profilePreRequest, preRequest) => {
  const candidate = preRequest || profilePreRequest || null;
  return candidate ? checkPreRequest(candidate) : null;
};
|
combet/CLstack2mass | pzmassfitter/bashreader.py | <filename>pzmassfitter/bashreader.py<gh_stars>10-100
#!/usr/bin/env python
######################
# @file bashreader.py
# @author <NAME>
# @date 2/26/08
#
# @brief Interprets simple bash scripts to parse them for variables
# This way python can share existing config files.
#
# This is meant as a library. Test routines will run if the script is executed.
########################
from __future__ import with_statement
import unittest
import re
import datetime
import os
class DoesNotParseException(Exception):
    """Raised by the _parse* helpers when input does not match their format."""
    pass
######################################################
class BashConfig(dict):
    '''Reads a simple bash script and returns a dictionary-interface
    object containing the bash variables set in the script; keys are the
    lower-cased variable names, and values are also exposed as attributes.
    It will also process any bash export statements (written to os.environ).
    '''

    def __init__(self, bashstr=None):
        # Passing None yields an empty config.
        self.parse(bashstr)

    def __getattr__(self, attr):
        # Expose parsed variables as attributes, e.g. config.nframes.
        try:
            return self[attr]
        except KeyError:
            raise AttributeError('Unknown Variable: %s' % attr)

    def _parseInt(self, bashstr):
        # Unsigned decimal integer, e.g. "20".
        if re.match(r'^(\d+)$', bashstr):
            return int(bashstr)
        raise DoesNotParseException

    def _parseFloat(self, bashstr):
        # Signed float with optional exponent, e.g. "-1.5e3".
        if re.match(r'^[-+]?(\d+(\.\d*)?|\.\d+)([eE][-+]?\d+)?$', bashstr):
            return float(bashstr)
        raise DoesNotParseException

    # Matches $VAR, ${VAR}, $VAR[i] and ${VAR[i]}; group 1 is the variable
    # name, group 2 the optional integer index.
    _variableRE = r'\${?(\w+)(?:\[(\d+)\])?}?'

    def _parseVar(self, bashstr):
        # A value that is exactly one variable reference.
        match = re.match(r'^%s$' % BashConfig._variableRE, bashstr)
        if match is None:
            raise DoesNotParseException
        return self._replaceVar(match)

    def _replaceVar(self, match):
        # Resolve a reference: first from this config (lower-cased keys),
        # then from the process environment; otherwise raise NameError.
        varname = match.group(1)
        if str.lower(varname) in self:
            val = getattr(self, str.lower(varname))
        elif varname in os.environ:
            val = os.environ[varname]
        else:
            raise NameError
        index = match.group(2)
        if index is None:
            return val
        return val[int(index)]

    def _parseString(self, bashstr):
        # Catch-all parser: strip surrounding quotes and substitute any
        # embedded variable references.
        def subVars(match):
            return str(self._replaceVar(match))

        strippedQuotes = bashstr.strip('"\'')
        return re.sub(BashConfig._variableRE, subVars,
                      strippedQuotes)

    def _parseTime(self, bashstr):
        # HH:MM:SS -> datetime.time.
        match = re.match(r'(\d+):(\d+):(\d+)', bashstr)
        if match is not None:
            hours = int(match.group(1))
            min = int(match.group(2))  # shadows the builtin; local only
            sec = int(match.group(3))
            return datetime.time(hours, min, sec)
        raise DoesNotParseException

    def _parseArray(self, bashstr):
        # Bash array literal: "(a b c)" -> list of parsed atomic values.
        match = re.match(r'\((.+)\)', bashstr)
        if match is None:
            raise DoesNotParseException
        arrayDef = match.group(1)
        return [self._parseAtomic(x) for x in arrayDef.split()]

    def _parseDict(self, bashstr):
        # Indexed array literal: "([1]=a [2]=b)" -> {1: a, 2: b}.
        entries = re.findall(r'\[(\d+)\]\s*=\s*(.+?)[\s\)]', bashstr)
        if len(entries) == 0:
            raise DoesNotParseException
        return dict([(int(x), self._parseAtomic(y)) for x, y in entries])

    def _parseEval(self, bashstr):
        # Arithmetic expansion "$(( ... ))": substitute variables, then
        # evaluate the expression with Python's eval.
        # NOTE(review): eval on script content - only feed trusted scripts.
        match = re.match(r'\$\(\((.+)\)\)', bashstr)
        if match is None:
            raise DoesNotParseException
        toEval = self._parseString(match.group(1))
        return eval(toEval)

    def _parseAtomic(self, bashstr):
        # Parse one scalar token (used for array/dict elements).
        return self._runParsers(bashstr, parsers=[self._parseInt,
                                                  self._parseFloat,
                                                  self._parseVar,
                                                  self._parseString])

    def _runParsers(self, bashstr, parsers):
        # Try each parser in order; the first that does not raise
        # DoesNotParseException wins. Returns None when none matches.
        for parser in parsers:
            try:
                return parser(bashstr)
            except DoesNotParseException:
                continue

    def _parseExport(self, bashstr):
        # "export VAR" or "export VAR=value"; the (stringified) value is
        # written into os.environ.
        match = re.match(r'export ((\w+)(=.+)?)', bashstr)
        if match is None:
            raise DoesNotParseException
        if match.group(3) is not None:
            self._parseAssignment(match.group(1))
        attr = match.group(2)
        os.environ[attr] = str(getattr(self, str.lower(attr)))

    def _parseAssignment(self, bashstr):
        # "VAR=value" or "VAR[i]=value". Parser order matters: the more
        # specific forms run before the catch-all string parser (and the
        # [i]=v dict form before the plain array form).
        match = re.match(r'(\w+)(?:\[(\w+)\])?=(.+)', bashstr)
        if match is None:
            raise DoesNotParseException
        attr = str.lower(str.strip(match.group(1)))
        rawIndex = match.group(2)
        rawVal = str.strip(match.group(3))
        valParsers = [self._parseInt,
                      self._parseFloat,
                      self._parseVar,
                      self._parseTime,
                      self._parseEval,
                      self._parseDict,
                      self._parseArray,
                      self._parseString]
        val = self._runParsers(rawVal, valParsers)
        if rawIndex is None:
            self[attr] = val
        else:
            # Indexed assignment: update the existing dict or create one.
            index = int(rawIndex)
            if attr in self:
                self[attr][index] = val
            else:
                self[attr] = {index: val}

    def _parseReadFile(self, bashstr):
        # ". file" sources another script into this config.
        match = re.match(r'\. (.+)', bashstr)
        if match is None:
            raise DoesNotParseException
        filename = match.group(1)
        self.parseFile(filename)

    def _parseSemicolon(self, bashstr):
        # Split "a; b; c" into individual statements and parse each.
        substatements = bashstr.split(';')
        if len(substatements) == 1:
            raise DoesNotParseException
        for substatement in substatements:
            self._parseLine(substatement)

    def _parseLine(self, bashstr):
        # Dispatch one stripped line to the statement-level parsers.
        lineParsers = [self._parseSemicolon,
                       self._parseReadFile,
                       self._parseAssignment,
                       self._parseExport]
        self._runParsers(bashstr.strip(), lineParsers)

    def parse(self, bashstr):
        """Parse a whole script given as a string; None is a no-op."""
        if bashstr is None:
            return
        lines = map(str.strip, bashstr.splitlines())
        for line in lines:
            self._parseLine(line)

    def parseFile(self, filename):
        """Parse the script contained in *filename*, line by line."""
        with open(filename) as input:
            for line in input:
                self._parseLine(line)
#######################################################
#######################################################
######################
# USER METHODS
##############
def parse(bashstr):
    """Parse a bash script given as a string and return a BashConfig."""
    result = BashConfig(bashstr)
    return result
#######################################################
def parseFile(filename):
    """Parse the bash script in *filename* and return a BashConfig."""
    result = BashConfig()
    result.parseFile(filename)
    return result
######################################################
######################################################
############################
# TESTING CLASSES
##################
class TestParseBash(unittest.TestCase):
    """Unit tests covering the bash constructs BashConfig understands:
    scalars, times, arrays, indexed arrays, variable references,
    arithmetic expansion, exports, sourcing and semicolon-separated lines.
    """

    def testReadIntVar(self):
        config = parse('NFRAMES=20')
        self.assertEquals(config.nframes, 20)
        self.assertEquals(type(config.nframes), type(5))

    def testReadFloat(self):
        config = parse('OBSLAT=19.82861111')
        self.assertAlmostEquals(config.obslat, 19.82861111, 8)

    def testReadStr(self):
        config = parse('INSTRUMENT=SUBARU')
        self.assertEquals(config.instrument, 'SUBARU')

    def testReadTime(self):
        config = parse('REFERENCETIME=22:00:00')
        self.assertEquals(config.referencetime, datetime.time(22, 0, 0))

    def testReadIntIntMap(self):
        config = parse(
            'OVSCANX1=([6]=1 [7]=1 [3]=1 [4]=1 [9]=1 [8]=2055)')
        self.assertEquals(config.ovscanx1, {
            6: 1, 7: 1, 3: 1, 4: 1, 9: 1, 8: 2055})

    def testReadMultiplelines(self):
        bash = '''OBSLAT=19.82861111
OBSLONG=155.48055556
REFERENCETIME=22:00:00
'''
        config = parse(bash)
        self.assertAlmostEquals(config.obslat, 19.82861111, 8)
        self.assertAlmostEquals(config.obslong, 155.48055556, 8)
        self.assertEquals(config.referencetime, datetime.time(22, 0, 0))

    def testReadReference(self):
        bash = '''REF=5
TEST=$REF
TEST2=${REF}
'''
        config = parse(bash)
        self.assertEquals(config.ref, 5)
        self.assertEquals(config.test, 5)
        self.assertEquals(config.test2, 5)

    def testReadReferenceInConcat(self):
        bash = '''BIN=/home/bin
P_READLINK=${BIN}/readlink'''
        config = parse(bash)
        self.assertEquals(config.bin, '/home/bin')
        self.assertEquals(config.p_readlink, '/home/bin/readlink')

    def testBadReference(self):
        # An unresolvable reference raises NameError from _replaceVar.
        self.assertRaises(NameError, parse, 'TEST=$BAD')

    def testEnvReference(self):
        # References fall back to the process environment.
        config = parse('TEST=$HOME')
        self.assertEquals(config.test, os.environ['HOME'])

    def testMapRef(self):
        bash = '''STATSALLIM=([1]=1000 [2]=2000 [3]=1000 [4]=1000)
TEST=$STATSALLIM[1]
TEST2=${STATSALLIM[1]}'''
        config = parse(bash)
        self.assertEquals(config.statsallim, {
            1: 1000, 2: 2000, 3: 1000, 4: 1000})
        self.assertEquals(config.test, 1000)
        self.assertEquals(config.test2, 1000)

    def testNonIntMap(self):
        bash = 'TEST=([1]="a" [2]="b")'
        config = parse(bash)
        self.assertEquals(config.test, {1: 'a', 2: 'b'})

    def testStripQuotes(self):
        config = parse('TEST="blab"')
        self.assertEquals(config.test, 'blab')

    def testParseArray(self):
        config = parse('TEST=(1 2 3 4)')
        self.assertEquals(config.test, [1, 2, 3, 4])

    def testParseEvaluate(self):
        # Arithmetic expansion with references into an indexed array.
        bash = '''STATSALLIM=([1]=1000 [2]=2000 [3]=1000 [4]=1000)
STATSXMIN=$(( ${STATSALLIM[1]} - ${STATSALLIM[3]} / 2 ))
STATSXMAX=$(( ${STATSALLIM[1]} + ${STATSALLIM[3]} / 2 ))
STATSYMIN=$(( ${STATSALLIM[2]} - ${STATSALLIM[4]} / 2 ))
STATSYMAX=$(( ${STATSALLIM[2]} + ${STATSALLIM[4]} / 2 ))
'''
        config = parse(bash)
        self.assertEquals(config.statsallim, {
            1: 1000, 2: 2000, 3: 1000, 4: 1000})
        self.assertEquals(config.statsxmin, 500)
        self.assertEquals(config.statsxmax, 1500)
        self.assertEquals(config.statsymin, 1500)
        self.assertEquals(config.statsymax, 2500)

    def testExportVar(self):
        bash = '''TEST=5
export TEST'''
        config = parse(bash)
        self.assertTrue('TEST' in os.environ)
        self.assertEquals(os.environ['TEST'], '5')
        del os.environ['TEST']

    def testExportVar2(self):
        # export with inline assignment.
        config = parse('export TEST=13')
        self.assertEquals(config.test, 13)
        self.assertTrue('TEST' in os.environ)
        self.assertEquals(os.environ['TEST'], '13')
        del os.environ['TEST']

    def testExportIssue1(self):
        # Regression: exporting a value of "." must survive parsing.
        config = parse('export TEMPDIR=.')
        self.assertEquals(config.tempdir, '.')
        self.assertTrue('TEMPDIR' in os.environ)
        self.assertEquals(os.environ['TEMPDIR'], '.')

    def testReadFile(self):
        # ". file" sources another script.
        cfile = 'test.ini'
        if not os.path.exists(cfile):
            output = open(cfile, 'w')
            output.write('TEST=5\n')
            output.close()
        config = parse('. test.ini')
        self.assertEquals(config.test, 5)
        os.remove(cfile)

    def testSemicolons(self):
        config = parse('TEST=5 ; export TEST; TEST2=10')
        self.assertEquals(config.test, 5)
        self.assertTrue('TEST' in os.environ)
        self.assertEquals(os.environ['TEST'], '5')
        self.assertEquals(config.test2, 10)

    def testSetDefinedMapElement(self):
        bash = '''TEST=([1]=5 [2]=10)
TEST[1]=8'''
        config = parse(bash)
        self.assertEquals(config.test, {1: 8, 2: 10})

    def testSetUndefinedMapElement(self):
        config = parse('TEST[1]=3')
        self.assertEquals(config.test, {1: 3})

    def testParseFile(self):
        cfile = 'test.ini'
        if not os.path.exists(cfile):
            output = open(cfile, 'w')
            output.write('TEST=5\n')
            output.close()
        config = parseFile(cfile)
        self.assertEquals(config.test, 5)
        os.remove(cfile)
###########################
######################################################
def test():
    """Run this module's unit tests with verbose output."""
    testcases = [TestParseBash]
    suite = unittest.TestSuite(map(unittest.TestLoader().loadTestsFromTestCase,
                                   testcases))
    unittest.TextTestRunner(verbosity=2).run(suite)


if __name__ == '__main__':
    test()
|
xander69/SkolkoDoBaniBot | src/main/java/ru/xander/telebot/util/Utils.java | <reponame>xander69/SkolkoDoBaniBot
package ru.xander.telebot.util;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import ru.xander.telebot.dto.Request;
import ru.xander.telebot.dto.TimeOfDay;
import java.io.IOException;
import java.io.InputStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.time.Clock;
import java.time.Instant;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.Month;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.time.temporal.TemporalAccessor;
import java.util.List;
import java.util.Locale;
import java.util.Random;
import java.util.function.Consumer;
/**
* @author <NAME>
*/
public abstract class Utils {

    public static final String EMPTY_STRING = "";
    public static final String[] EMPTY_STRING_ARRAY = new String[0];
    // All date/time logic in this class is pinned to Moscow time.
    public static final ZoneId ZONE_ID_MOSCOW = ZoneId.of("Europe/Moscow");

    private static final Locale LOCALE_RU = Locale.forLanguageTag("RU");
    // Fixed seed: the pseudo-random sequence repeats across restarts.
    private static final Random random = new Random(Long.MAX_VALUE);
    private static final ObjectMapper objectMapper = new ObjectMapper();

    // Month names in the genitive case; index 0 is a filler so the arrays
    // can be indexed directly by month number (1-12).
    private static final String[] slavMonths = {"",
            "ัััะฝั", "ะปััะพะณะพ", "ะฑะตัะตะทะฝั",
            "ะบะฒััะฝั", "ััะฐะฒะฝั", "ัะตัะฒะฝั",
            "ะปะธะฟะฝั", "ัะตัะฟะฝั", "ะฒะตัะตัะฝั",
            "ะถะพะฒัะฝั", "ะปะธััะพะฟะฐะดะฐ", "ะณััะดะฝั"
    };
    private static final String[] rusMonths = {"",
            "ัะฝะฒะฐัั", "ัะตะฒัะฐะปั", "ะผะฐััะฐ",
            "ะฐะฟัะตะปั", "ะผะฐั", "ะธัะฝั",
            "ะธัะปั", "ะฐะฒะณัััะฐ", "ัะตะฝััะฑัั",
            "ะพะบััะฑัั", "ะฝะพัะฑัั", "ะดะตะบะฐะฑัั"
    };

    private Utils() {
        // Static utility class - not instantiable.
        throw new IllegalStateException("Utility class");
    }

    public static boolean randomBoolean() {
        return random.nextInt(100) % 2 == 0;
    }

    public static int randomInt(int bound) {
        return random.nextInt(bound);
    }

    /** Returns a uniformly random element; throws on an empty array. */
    public static <T> T randomArray(T[] array) {
        int size = array.length;
        int index = random.nextInt(size);
        return array[index];
    }

    /** Returns a uniformly random element; throws on an empty list. */
    public static <T> T randomList(List<T> list) {
        int size = list.size();
        int index = random.nextInt(size);
        return list.get(index);
    }

    /**
     * Builds a Telegram Markdown mention of the request's user, randomly
     * choosing between the user name and the full name when both exist.
     */
    public static String randomUserMention(Request request) {
        Integer userId = request.getUserId();
        String userName = (request.getUserName() != null) && randomBoolean() ? request.getUserName() : request.getUserFullName();
        return String.format("[%s](tg://user?id=%d)", userName, userId);
    }

    /** Maximum of one or more ints. */
    public static int max(int first, int... others) {
        int max = first;
        for (int i = 0; i < others.length; i++) {
            max = Math.max(max, others[i]);
        }
        return max;
    }

    /** True on February 16, Moscow time. */
    public static boolean isHappyBirthDay() {
        LocalDate localDate = Instant.now().atZone(ZONE_ID_MOSCOW).toLocalDate();
        return (localDate.getMonth() == Month.FEBRUARY) && (localDate.getDayOfMonth() == 16);
    }

    /** True on February 17, Moscow time (the day after). */
    public static boolean isHappyBirthNextDay() {
        LocalDate localDate = Instant.now().atZone(ZONE_ID_MOSCOW).toLocalDate();
        return (localDate.getMonth() == Month.FEBRUARY) && (localDate.getDayOfMonth() == 17);
    }

    /** Current instant (an Instant is zone-independent; the clock zone is cosmetic). */
    public static Instant now() {
        return Instant.now(Clock.system(ZONE_ID_MOSCOW));
    }

    /** Parses a date string with the given pattern, interpreted in Moscow time. */
    public static Instant parseDate(String date, String format) {
        return Instant.from(DateTimeFormatter.ofPattern(format).withZone(ZONE_ID_MOSCOW).parse(date));
    }

    public static LocalDate parseLocalDate(String date, String format) {
        return LocalDate.parse(date, DateTimeFormatter.ofPattern(format));
    }

    public static Instant createDate(long millis) {
        return Instant.ofEpochMilli(millis).atZone(ZONE_ID_MOSCOW).toInstant();
    }

    /** Day id of today in Moscow time; see {@link #getDayId(LocalDate)}. */
    public static int getDayId() {
        return getDayId(LocalDate.now(ZONE_ID_MOSCOW));
    }

    /** Encodes a date as month*100 + day (e.g. February 16 -> 216). */
    public static int getDayId(LocalDate localDate) {
        return localDate.getMonthValue() * 100 + localDate.getDayOfMonth();
    }

    /** Formats a temporal value with the given pattern, Russian locale, Moscow zone. */
    public static String formatDate(TemporalAccessor date, String format) {
        return DateTimeFormatter
                .ofPattern(format)
                .withLocale(LOCALE_RU)
                .withZone(ZONE_ID_MOSCOW)
                .format(date);
    }

    /** Day-of-month plus the Russian month name in genitive case. */
    public static String formatRusDay(LocalDateTime dateTime) {
        return dateTime.getDayOfMonth() + "-ะต " + rusMonths[dateTime.getMonthValue()];
    }

    /** Day-of-month plus the month name from slavMonths, genitive case. */
    public static String formatSlavDay(int month, int day) {
        return day + "-ะต " + slavMonths[month];
    }

    /** Deserialises JSON into the given type; wraps Jackson errors in RuntimeException. */
    public static <T> T parseJson(String json, Class<T> clazz) {
        try {
            return objectMapper.readValue(json, clazz);
        } catch (JsonProcessingException e) {
            throw new RuntimeException(e.getMessage(), e);
        }
    }

    public static TimeOfDay getTimeOfDay() {
        return getTimeOfDay(now());
    }

    /** Maps the Moscow-time hour to NIGHT (&lt;6), MORNING (&lt;12), AFTERNOON (&lt;18) or EVENING. */
    public static TimeOfDay getTimeOfDay(Instant instant) {
        int hour = LocalDateTime.ofInstant(instant, ZONE_ID_MOSCOW).getHour();
        if (hour < 6) {
            return TimeOfDay.NIGHT;
        } else if (hour < 12) {
            return TimeOfDay.MORNING;
        } else if (hour < 18) {
            return TimeOfDay.AFTERNOON;
        } else {
            return TimeOfDay.EVENING;
        }
    }

    /**
     * Substitutes a duration, given in nanoseconds, into the template's
     * ${TIME}, ${NANOS}, ${MICROS}, ${MILLIS}, ${SECONDS}, ${MINUTES},
     * ${HOURS} and ${DAYS} placeholders. ${TIME} renders as d:hh:mm:ss;
     * the other placeholders are the total duration in that unit.
     */
    public static String formatBanyaTime(String template, long nanos) {
        long totalSeconds = nanos / 1_000_000_000;
        long seconds = totalSeconds % 60;
        long minutes = totalSeconds / 60 % 60;
        long hours = totalSeconds / 3600 % 24;
        long days = nanos / 1_000_000_000 / 3600 / 24;
        double inMicros = nanos / 1_000d;
        double inMillis = nanos / 1_000_000d;
        double inSeconds = nanos / 1_000_000_000d;
        double inMunites = nanos / 1_000_000_000d / 60d; // (sic: "Munites" = minutes)
        double inHours = nanos / 1_000_000_000d / 3600d;
        double inDays = nanos / 1_000_000_000d / 3600d / 24d;
        return template
                .replace("${TIME}", String.format("%d:%02d:%02d:%02d", days, hours, minutes, seconds))
                .replace("${NANOS}", String.valueOf(nanos))
                .replace("${MICROS}", String.format("%.3f", inMicros))
                .replace("${MILLIS}", String.format("%.3f", inMillis))
                .replace("${SECONDS}", String.format("%.3f", inSeconds))
                .replace("${MINUTES}", String.format("%.2f", inMunites))
                .replace("${HOURS}", String.format("%.2f", inHours))
                .replace("${DAYS}", String.format("%.2f", inDays));
    }

    /** Renders a throwable's full stack trace to a string. */
    public static String stackTraceToString(Throwable throwable) {
        try (
                StringWriter stringWriter = new StringWriter();
                PrintWriter writer = new PrintWriter(stringWriter)
        ) {
            throwable.printStackTrace(writer);
            return stringWriter.toString();
        } catch (IOException e) {
            return "getStackTrace exception: " + e.getMessage();
        }
    }

    /** Opens a classpath resource, hands the stream to the consumer, then closes it. */
    public static void tryWithResource(String resourceName, Consumer<InputStream> resourceConsumer) {
        try (InputStream resource = Utils.class.getResourceAsStream(resourceName)) {
            resourceConsumer.accept(resource);
        } catch (IOException e) {
            throw new RuntimeException("Cannot read resource '" + resourceName + "': " + e.getMessage(), e);
        }
    }

    /** Null-safe Integer comparison; null orders before any non-null value. */
    public static int compareInteger(Integer i1, Integer i2) {
        if ((i1 == null) && (i2 == null)) {
            return 0;
        }
        if (i1 == null) {
            return -1;
        }
        if (i2 == null) {
            return 1;
        }
        return Integer.compare(i1, i2);
    }

    /** Opens a classpath resource and returns the reader's result, closing the stream. */
    public static <T> T readResource(String resourceName, ResourceReader<T> resourceReader) {
        try (InputStream resource = Utils.class.getResourceAsStream(resourceName)) {
            return resourceReader.apply(resource);
        } catch (Exception e) {
            throw new RuntimeException("Cannot read resource '" + resourceName + "': " + e.getMessage(), e);
        }
    }

    /** Callback turning an open resource stream into a value. */
    public interface ResourceReader<T> {
        T apply(InputStream resource) throws Exception;
    }
}
|
visit-dav/vis | src/databases/PDB/MaterialEncoder.h | <reponame>visit-dav/vis
// Copyright (c) Lawrence Livermore National Security, LLC and other VisIt
// Project developers. See the top-level LICENSE file for dates and other
// details. No copyright assignment is required to contribute to VisIt.
#ifndef MATERIAL_ENCODER_H
#define MATERIAL_ENCODER_H
#include <string>
#include <vector>
class avtMaterial;
// ****************************************************************************
// Class: MaterialEncoder
//
// Purpose:
// Keeps track of mixed material information.
//
// Notes:
//
// Programmer: <NAME>
// Creation: Fri Jun 21 13:53:35 PST 2002
//
// Modifications:
// <NAME>, Tue Dec 7 16:15:38 PST 2004
// Changed the double argument to float on the AddMixed method.
//
// ****************************************************************************
class MaterialEncoder
{
  public:
    MaterialEncoder();
    ~MaterialEncoder();

    // Registers a material by name. NOTE(review): material numbering appears
    // to follow registration order - confirm in the implementation file.
    void AddMaterial(const std::string &mat);
    // Records a zone occupied entirely by a single material.
    void AddClean(int zoneId, int matNumber);
    // Records a mixed zone: nMats materials with their volume fractions.
    void AddMixed(int zoneId, const int *matNumbers, const float *matVf,
                  int nMats);
    // Pre-allocates the per-zone material list for nZones zones.
    void AllocClean(int nZones);
    // Number of entries currently stored in the mixed-material arrays.
    int GetMixedSize() const;
    // Builds the avtMaterial for a mesh of the given dimensions from the
    // accumulated clean/mixed information.
    avtMaterial *CreateMaterial(const int *dims, int ndims) const;

  private:
    // Grows the mix_* arrays when more mixed entries are added.
    void Resize(int nMats);

    int have_mixed;      // non-zero once any mixed zone has been recorded
    int *mix_zone;       // per-entry: owning zone id
    int *mix_mat;        // per-entry: material number
    float *mix_vf;       // per-entry: volume fraction
    int *mix_next;       // per-entry: link to the next entry of the same zone
                         //   (presumably Silo-style chaining - confirm in .C)
    int *matlist;        // per-zone material entry (clean vs mixed encoding
                         //   is defined by the implementation)
    int _array_size;     // current capacity of the mix_* arrays
    int _array_index;    // next free slot in the mix_* arrays
    int _array_growth;   // capacity increment used when resizing
    std::vector<std::string> matNames;  // registered material names
};
|
nagama-wal/electron-react-boilerplate | app/components/Dashboard/DashboardTabs/DashboardTabs.js | <filename>app/components/Dashboard/DashboardTabs/DashboardTabs.js<gh_stars>0
import React, { Component } from "react";
import { TabContent, TabPane, Nav, NavItem, NavLink, Card, Button, CardTitle, CardText, Row, Col } from 'reactstrap';
import classnames from 'classnames';
import DashboardHeader from "../DashboardHeader/DashboardHeader";
import DashboardTable from "../DashboardTable/DashboardTable"
import Calendar from "../../daysheet/daysheet";
class DashboardTabs extends Component {
constructor(props) {
super(props);
this.toggle = this.toggle.bind(this);
this.state = {
activeTab: '1'
};
}
toggle(tab) {
if (this.state.activeTab !== tab) {
this.setState({
activeTab: tab
});
}
}
render() {
return (
<div className= "tabs-wrapper">
<Nav tabs className="nav-tabs-block">
<NavItem>
<NavLink
className={classnames({ active: this.state.activeTab === '1' })}
onClick={() => { this.toggle('1'); }}
>
DASHBOARD
</NavLink>
</NavItem>
<NavItem>
<NavLink
className={classnames({ active: this.state.activeTab === '2' })}
onClick={() => { this.toggle('2'); }}
>
DAYSHEET
</NavLink>
</NavItem>
</Nav>
<TabContent activeTab={this.state.activeTab}>
<TabPane tabId="1">
<DashboardHeader/>
<DashboardTable/>
</TabPane>
<TabPane tabId="2">
<Calendar/>
</TabPane>
</TabContent>
</div>
);
}
}
export default DashboardTabs; |
szokejokepu/natural-rws | core/argo/core/optimizers/NesterovConst.py | <filename>core/argo/core/optimizers/NesterovConst.py
'''
DOCUMENTATION:
Nesterov method with constant momentum factor is given in the work of Defazio:
https://arxiv.org/abs/1812.04634
See Table 1, page 3 - 'Modern Momentum' (here: beta is the momentum factor)
The momentum coefficient is set between 0.5 and 0.9. See the work of Ruder:
https://arxiv.org/abs/1609.04747
'''
# Loading modules
from tensorflow.python.training import optimizer # Here we have the 'Optimizer' class
from tensorflow.python.framework import ops # From here we need the function that converts to 'Tensor' object
from tensorflow.python.ops import math_ops # From here we need mathematical operations for 'Tensor' objects
from tensorflow.python.ops import state_ops # From here we need 'Operations' on 'Tensor' objects
from tensorflow.python.ops import control_flow_ops # From here we need the function 'group'
# The subclass of Optimizer class, containing Nesterov method with constant momentum coefficient
class NesterovConst(optimizer.Optimizer):
    """Nesterov method with a constant momentum coefficient.

    Implements the 'modern momentum' form from Defazio,
    https://arxiv.org/abs/1812.04634 (Table 1, p. 3), where ``momentum`` is
    the constant factor beta. Typical values lie between 0.5 and 0.9
    (see Ruder, https://arxiv.org/abs/1609.04747).
    """

    def __init__(self, model, learning_rate=1e-2, momentum=0.5, use_locking=False, name='NesterovConst'):
        """Creates the optimizer.

        Args:
            model: the model being optimized (stored for later use).
            learning_rate: step size.
            momentum: constant momentum coefficient (beta).
            use_locking: if True, use locks for the update operations.
            name: optional name prefix for the created operations.
        """
        super(NesterovConst, self).__init__(use_locking, name)
        self._lr = learning_rate
        self._momentum = momentum
        self._model = model
        # Tensor versions of the hyper-parameters, created in _prepare().
        self._lr_t = None
        self._momentum_t = None

    def _prepare(self):
        # Convert the Python hyper-parameters to tensors once per apply call.
        self._lr_t = ops.convert_to_tensor(self._lr, name='learning_rate')
        self._momentum_t = ops.convert_to_tensor(self._momentum, name='momentum')

    def _create_slots(self, var_list):
        # One accumulator slot per trainable variable: the momentum buffer
        # 'p^{k+1}' in Defazio's notation.
        for v in var_list:
            self._zeros_slot(v, "accum", self._name)

    def _apply_dense(self, grad, var):
        # Cast the hyper-parameter tensors to the variable's dtype.
        lr_t = math_ops.cast(self._lr_t, var.dtype.base_dtype)
        momentum_t = math_ops.cast(self._momentum_t, var.dtype.base_dtype)
        accum = self.get_slot(var, "accum")
        # p^{k+1} <- momentum * p^k + grad.
        # Fix: honour the optimizer's use_locking flag; the original
        # hardcoded use_locking=False, silently ignoring the constructor
        # argument.
        accum_t = state_ops.assign(accum, momentum_t * accum + grad,
                                   use_locking=self._use_locking)
        # x^{k+1} <- x^k - lr * (grad + momentum * p^{k+1}); accum_t is the
        # already-updated momentum buffer.
        var_update = state_ops.assign_sub(var,
                                          lr_t * grad + lr_t * momentum_t * accum_t,
                                          use_locking=self._use_locking)
        # Group both updates into one op returned to the training loop.
        return control_flow_ops.group(*[var_update, accum_t])

    def _apply_sparse(self, grad, var):
        raise NotImplementedError("Sparse gradient updates are not supported.")
|
xbgbtx/memory-cat-app | node_modules/lit-element/development/decorators/query-assigned-elements.js | <filename>node_modules/lit-element/development/decorators/query-assigned-elements.js
/**
 * @license
 * Copyright 2021 Google LLC
 * SPDX-License-Identifier: BSD-3-Clause
 */
// Generated compatibility shim: lit-element re-exports this decorator from
// @lit/reactive-element so existing lit-element import paths keep working.
export * from '@lit/reactive-element/decorators/query-assigned-elements.js';
//# sourceMappingURL=query-assigned-elements.js.map
//# sourceMappingURL=query-assigned-elements.js.map |
tinapiao/Software-IC-Automation | bag_serdes_ec-master/scripts_test/digital/buffer_array.py | # -*- coding: utf-8 -*-
import yaml
from bag.core import BagProject
from serdes_ec.layout.digital.buffer import BufferArray
if __name__ == '__main__':
    # Load the generator block specs.
    # FIX: PyYAML >= 5.1 deprecates calling yaml.load() without an explicit
    # Loader, and PyYAML >= 6 makes it a TypeError.  FullLoader preserves the
    # full (non-arbitrary-object) YAML feature set used by spec files.
    with open('specs_test/serdes_ec/digital/buffer_array.yaml', 'r') as f:
        block_specs = yaml.load(f, Loader=yaml.FullLoader)

    # Reuse an existing BagProject instance when re-running interactively
    # (e.g. inside a BAG shell session) instead of creating a fresh one.
    local_dict = locals()
    if 'bprj' not in local_dict:
        print('creating BAG project')
        bprj = BagProject()
    else:
        print('loading BAG project')
        bprj = local_dict['bprj']

    bprj.generate_cell(block_specs, BufferArray, debug=True)
    # StdCellWrapper.generate_cells(bprj, block_specs, gen_sch=True, run_lvs=True)
|
coms/ep | eulerProject/src/euler/Problem60.java | <filename>eulerProject/src/euler/Problem60.java
package euler;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import euler.utils.Prime;
/**
Prime pair sets
Problem 60
The primes 3, 7, 109, and 673, are quite remarkable. By taking any two primes and concatenating
them in any order the result will always be prime. For example, taking 7 and 109, both 7109 and 1097 are prime.
The sum of these four primes, 792, represents the lowest sum for a set of four primes with this property.
Find the lowest sum for a set of five primes for which any two primes concatenate to produce another prime.
**/
public class Problem60 {
public static void main(String[] args) {
System.out.println("Solve problem 60");
List<Set<Long>> chains = new ArrayList();
Set<Long> chain1 = new HashSet<Long>();
chains.add(chain1);
for (long n = 11; n < 100000000; n++) {
if (Prime.isPrimeFaster(n)) {
String sn = String.valueOf(n);
for (int i = 1; i < sn.length(); i++) {
Long n1 = Long.valueOf(sn.substring(0, i));
if (sn.substring(i).startsWith("0")) {
continue;
}
Long n2 = Long.valueOf(sn.substring(i));
Long n3 = Long.valueOf(sn.substring(i) + sn.substring(0, i));
Set<Long> newChain = null;
if (Prime.isPrimeFaster(n1) && Prime.isPrimeFaster(n2) && Prime.isPrimeFaster(n3)) {
Long newPrime = n1;
for (Set<Long> primes : chains) {
if (primes.size() < 2) {
primes.add(n1);
primes.add(n2);
} else if (primes.contains(n1)) {
newPrime = n2;
}
boolean isAddable = true;
for (Long a : primes) {
Long n4 = Long.valueOf(a.toString() + newPrime.toString());
Long n5 = Long.valueOf(newPrime.toString() + a.toString());
if (!Prime.isPrimeFaster(n4) || !Prime.isPrimeFaster(n5)) {
isAddable = false;
}
}
if (isAddable) {
primes.add(newPrime);
// System.out.println(chains);
if (primes.size() == 5) {
System.out.println(primes);
System.exit(0);
}
} else {
newChain = new HashSet<Long>();
newChain.add(n1);
newChain.add(n2);
}
}
if (newChain != null) {
chains.add(newChain);
}
}
}
}
}
}
} |
ketancmaheshwari/swift-k | src/org/griphyn/vdl/mapping/nodes/ExternalDataNode.java | <gh_stars>0
/*
* Copyright 2012 University of Chicago
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.griphyn.vdl.mapping.nodes;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import k.thr.LWThread;
import org.apache.log4j.Logger;
import org.griphyn.vdl.karajan.Loader;
import org.griphyn.vdl.mapping.DSHandle;
import org.griphyn.vdl.mapping.HandleOpenException;
import org.griphyn.vdl.mapping.InvalidPathException;
import org.griphyn.vdl.mapping.Mapper;
import org.griphyn.vdl.mapping.Path;
import org.griphyn.vdl.mapping.RootHandle;
import org.griphyn.vdl.type.Field;
import org.griphyn.vdl.type.Types;
import org.griphyn.vdl.type.impl.FieldImpl;
/**
 * Root data node representing an "external" dataset: a dataset whose storage
 * and contents are managed outside of Swift.  It has no internal structure
 * (no fields, not an array) and is identified by a generated URI.
 */
public class ExternalDataNode extends AbstractFutureNonCompositeDataNode implements RootHandle {

    // URI scheme prefix used when minting identifiers for external datasets.
    static final String DATASET_URI_PREFIX = "dataset:external:";

    public static final Logger logger = Logger.getLogger(ExternalDataNode.class);

    // Monotonically increasing counter combined with a per-process UUID to
    // build unique dataset URIs.
    // NOTE(review): incremented without synchronization in
    // makeIdentifierURIString(); confirm identifiers are only minted from a
    // single thread.
    private static long datasetIDCounter = 850000000000l;

    private static final String datasetIDPartialID = Loader.getUUID();

    // previously in mapper params
    private int line = -1;          // source line that declared this dataset, -1 if unknown
    private LWThread thread;        // thread associated with this node
    private boolean input;          // whether this dataset is a program input

    /** Creates an external data node with the given name and EXTERNAL type. */
    public ExternalDataNode(String name) {
        super(new FieldImpl(name, Types.EXTERNAL));
    }

    public ExternalDataNode(Field field) {
        super(field);
    }

    public int getLine() {
        return line;
    }

    public void setLine(int line) {
        this.line = line;
    }

    public boolean isInput() {
        return input;
    }

    public void setInput(boolean input) {
        this.input = input;
    }

    public void setThread(LWThread thread) {
        this.thread = thread;
    }

    public LWThread getThread() {
        return thread;
    }

    public String getName() {
        return (String) getField().getId();
    }

    @Override
    public void setName(String name) {
        getField().setId(name);
    }

    // External datasets have no mapper, so both init callbacks are no-ops.
    @Override
    public void init(Mapper mapper) {
    }

    @Override
    public void mapperInitialized(Mapper mapper) {
    }

    public boolean isRestartable() {
        return true;
    }

    // This node is always its own root.
    public RootHandle getRoot() {
        return this;
    }

    /**
     * Only the empty path resolves (to this node itself); external datasets
     * have no internal fields.
     */
    public DSHandle getField(Path path) throws InvalidPathException {
        if (path.isEmpty()) {
            return this;
        }
        else {
            throw new InvalidPathException(path, this);
        }
    }

    protected void getFields(List<DSHandle> fields, Path path) throws InvalidPathException {
        // nothing
    }

    public void set(DSHandle handle) {
        throw new UnsupportedOperationException(this.getDisplayableName() + " is an external dataset and cannot be set");
    }

    public Map<Comparable<?>, DSHandle> getArrayValue() {
        throw new UnsupportedOperationException("cannot get value of external dataset");
    }

    public boolean isArray() {
        return false;
    }

    // The only "leaf" is the node itself, reached via the empty path.
    public Collection<Path> getFringePaths() throws HandleOpenException {
        return Collections.singletonList(Path.EMPTY_PATH);
    }

    public Path getPathFromRoot() {
        return Path.EMPTY_PATH;
    }

    public Mapper getMapper() {
        return null;
    }

    /** Mints a process-unique URI of the form dataset:external:&lt;uuid&gt;:&lt;counter&gt;. */
    protected String makeIdentifierURIString() {
        datasetIDCounter++;
        return DATASET_URI_PREFIX + datasetIDPartialID + ":" + datasetIDCounter;
    }

    public DSHandle createDSHandle(String fieldName) {
        throw new UnsupportedOperationException("cannot create new field in external dataset");
    }

    // Root nodes have no parent.
    public DSHandle getParent() {
        return null;
    }

    @Override
    protected AbstractDataNode getParentNode() {
        return null;
    }

    @Override
    public synchronized void closeDeep() {
        if (!this.isClosed()) {
            /*
             * Need to override this and set a value since
             * this is skipped by the normal stageout mechanism which
             * does that
             */
            this.setValue(FILE_VALUE);
        }
    }

    @Override
    public Mapper getActualMapper() {
        return null;
    }

    // No array structure, so there are no array sizes to close.
    @Override
    public void closeArraySizes() {
    }

    @Override
    protected void getFringePaths(List<Path> list, Path myPath) throws HandleOpenException {
    }

    @Override
    protected void getLeaves(List<DSHandle> list) throws HandleOpenException {
    }
}
|
kr056/Softuni | Java OOP Advanced November 2017/b_Generics/Exercises/p07_Threeuple/Threeuple.java | package b_Generics.Exercises.p07_Threeuple;
/**
 * Immutable generic 3-tuple.
 *
 * @param <F> type of the first element
 * @param <S> type of the second element
 * @param <T> type of the third element
 */
public class Threeuple<F, S, T> {

    // FIX: the elements were private with no accessors, making the stored
    // data unreachable from callers.  Fields are now final (immutability)
    // and exposed through getters; the existing constructor and toString()
    // are unchanged.
    private final F firstEl;
    private final S secondEl;
    private final T thirdEl;

    public Threeuple(F firstEl, S secondEl, T thirdEl) {
        this.firstEl = firstEl;
        this.secondEl = secondEl;
        this.thirdEl = thirdEl;
    }

    /** @return the first element of the tuple */
    public F getFirstElement() {
        return this.firstEl;
    }

    /** @return the second element of the tuple */
    public S getSecondElement() {
        return this.secondEl;
    }

    /** @return the third element of the tuple */
    public T getThirdElement() {
        return this.thirdEl;
    }

    /** @return the elements joined as {@code first -> second -> third} */
    @Override
    public String toString() {
        return this.firstEl + " -> " + this.secondEl + " -> " + this.thirdEl;
    }
}
|
Sundragon1993/AI-Game-Pratices | goals/Goal_DodgeSideToSide.h | <reponame>Sundragon1993/AI-Game-Pratices
#ifndef GOAL_DODGE_SIDE_H
#define GOAL_DODGE_SIDE_H
#pragma warning (disable:4786)
//-----------------------------------------------------------------------------
//
//  Name:   Goal_DodgeSideToSide.h
//
//  Author: Mat Buckland (www.ai-junkie.com)
//
//  Desc:   this goal makes the bot dodge from side to side
//
//-----------------------------------------------------------------------------
#include "Goals/Goal.h"
#include "Raven_Goal_Types.h"
#include "../Raven_Bot.h"


class Goal_DodgeSideToSide : public Goal<Raven_Bot>
{
private:

  // Position the bot is currently strafing toward.
  Vector2D    m_vStrafeTarget;

  // Current dodge direction; the initial direction is chosen at random in
  // the constructor via RandBool().
  bool        m_bClockwise;

  // Computes the strafe target for the current dodge direction
  // (implemented in the .cpp).
  Vector2D  GetStrafeTarget()const;

public:

  Goal_DodgeSideToSide(Raven_Bot* pBot):Goal<Raven_Bot>(pBot, goal_strafe),
                                        m_bClockwise(RandBool())
  {}

  // Goal<Raven_Bot> lifecycle interface.
  void Activate();

  int  Process();

  void Render();

  void Terminate();
};

#endif
m-wrona/gwt-medicapital | client_view/com/medicapital/client/user/SearchUserForm.java | package com.medicapital.client.user;
import com.google.gwt.event.dom.client.HasClickHandlers;
import com.medicapital.client.ui.table.DataTable;
import com.medicapital.common.entities.User;
import com.medicapital.common.entities.UserRole;
/**
 * Table-based view for searching users: the header row carries the search
 * input fields and buttons, and each data row shows one matching user.
 * All interaction handlers are exposed to the presenter via the
 * {@link SearchUserView} interface.
 */
final public class SearchUserForm extends DataTable<SearchUserFormHeader, SearchUserFormRow, User> implements SearchUserView {

    /** Supplies the header row containing the search fields and buttons. */
    @Override
    protected SearchUserFormHeader createHeader() {
        return new SearchUserFormHeader();
    }

    /** Appends one result row for the given user, keyed by its user id. */
    @Override
    public void display(final int userId, String firstName, String lastName, UserRole userRole) {
        SearchUserFormRow row = new SearchUserFormRow();
        row.getFirstName().setText(firstName);
        row.getLastName().setText(lastName);
        row.getUserRole().setText(userRole.toString());
        addRow(userId, row);
    }

    /** @return the first-name search criterion typed into the header. */
    @Override
    public String getSearchFirstName() {
        return getHeader().getSearchFirstName().getText();
    }

    /** @return the last-name search criterion typed into the header. */
    @Override
    public String getSearchLastName() {
        return getHeader().getSearchLastName().getText();
    }

    @Override
    public HasClickHandlers getSearchClickHandler() {
        return getHeader().getButtonSearch();
    }

    @Override
    public HasClickHandlers getCancelClickHandler() {
        return getHeader().getButtonCancel();
    }

    /** @return the "select" button of the row displaying the given user. */
    @Override
    public HasClickHandlers getDisplayDetailClickHandler(int entityId) {
        return getRowIdRowMap().get(entityId).getButtonSelect();
    }
}
letitgone/thinking_in_java | Chapter11/src/test/java/exercise/E25_WordsInfo3.java | package exercise;
import net.mindview.util.TextFile;
import java.util.*;
/**
* @Author ZhangGJ
* @Date 2019/05/28
*/
public class E25_WordsInfo3 {
public static void main(String[] args) {
Map<String, ArrayList<Integer>> stat = new HashMap<>();
int wordCount = 0;
for (String word : new TextFile("E25_WordsInfo3.java", "\\W+")) {
ArrayList<Integer> loc = stat.get(word);
if (loc == null) {
loc = new ArrayList<>();
stat.put(word, loc);
}
loc.add(++wordCount);
}
System.out.println(stat);
}
}
|
imatiach-msft/interpret-text | python/interpret_text/experimental/introspective_rationale/components.py | <filename>python/interpret_text/experimental/introspective_rationale/components.py
import os
import logging
import numpy as np
import torch
import torch.nn as nn
from torch.autograd import Variable
from tqdm import tqdm
from interpret_text.experimental.common.utils_introspective_rationale import generate_data
class ClassifierWrapper():
    """Wrapper to provide a common interface among different classifier
    modules (e.g. BERT- or RNN-based classifiers): optimizer setup,
    training loop, evaluation, and best-model checkpointing.
    """

    def __init__(self, args, model):
        """Initialize an instance of the wrapper

        :param args: arguments containing training and structure parameters
        :type args: ModelArguments
        :param model: A classifier module, ex. BERT or RNN classifier module
        :type model: BertForSequenceClassification or ClassifierModule
        """
        self.args = args
        self.model = model
        self.opt = None
        self.num_epochs = args.num_pretrain_epochs
        self.epochs_since_improv = 0
        self.best_test_acc = 0
        self.avg_accuracy = 0
        self.test_accs = []
        self.train_accs = []
        # Per-example losses; reduced explicitly via torch.mean in
        # _train_one_step.
        self.loss_func = nn.CrossEntropyLoss(reduction="none")

    def init_optimizer(self):
        """Initialize the classifier's optimizer (Adam over the trainable
        parameters only).
        """
        self.opt = torch.optim.Adam(filter(lambda x: x.requires_grad,
                                           self.model.parameters()),
                                    lr=self.args.lr)

    def test(self, df_test, verbosity=2):
        """Evaluate classification accuracy on the test set, record it in
        self.test_accs / self.avg_accuracy, checkpoint the model when it
        improves, and track epochs without improvement for early stopping.

        :param df_test: dataframe containing test data labels, tokens, masks,
            and counts
        :type df_test: pandas dataframe
        :param verbosity: {0, 1, 2}, default 2
            If 0, does not log any output
            If > 0, logs train/test accuracy
        :type verbosity: int, optional
        """
        self.model.eval()
        accuracy = 0
        for i in range(len(df_test) // self.args.test_batch_size):
            test_batch = df_test.iloc[
                i * self.args.test_batch_size: (i + 1)
                * self.args.test_batch_size
            ]
            batch_dict = generate_data(test_batch, self.args.cuda)
            batch_x_ = batch_dict["x"]
            batch_m_ = batch_dict["m"]
            batch_y_ = batch_dict["y"]
            predict, _, _ = self.model(batch_x_, batch_m_)
            # argmax over the predicted class logits
            _, y_pred = torch.max(predict, dim=1)
            accuracy += (y_pred == batch_y_).sum().item()
        self.avg_accuracy = accuracy / len(df_test)
        self.test_accs.append(self.avg_accuracy)
        if verbosity > 0:
            # NOTE: assumes fit() appended to train_accs before calling test()
            logging.info("train acc: %.4f, test acc: %.4f" %
                         (self.train_accs[-1], self.avg_accuracy))
        if self.args.save_best_model:
            if self.avg_accuracy > self.best_test_acc:
                logging.info("saving best classifier model and model stats")
                # save model
                torch.save(
                    self.model.state_dict(),
                    os.path.join(
                        self.args.model_folder_path,
                        self.args.model_prefix + "gen_classifier.pth",
                    ),
                )
        if self.avg_accuracy > self.best_test_acc:
            self.best_test_acc = self.avg_accuracy
            self.epochs_since_improv = 0
        else:
            self.epochs_since_improv += 1

    def _train_one_step(self, X_tokens, label, X_mask):
        """Train the classifier for one optimization step.

        :param X_tokens: Tokenized and embedded training example
        :type X_tokens: torch.int64
        :param label: Label of the training example
        :type label: torch.int64
        :param X_mask: Mask differentiating tokens vs not tokens
        :type X_mask: torch.FloatTensor
        :return: losses, classifier prediction logits
        :rtype: tuple
        """
        self.opt.zero_grad()
        self.model.zero_grad()
        cls_predict_logits, _, _ = self.model(
            X_tokens, attention_mask=X_mask
        )  # dimensions: (batch_size, hidden_dim, sequence_length)
        sup_loss = torch.mean(self.loss_func(cls_predict_logits, label))
        losses = {"g_sup_loss": sup_loss.cpu().data}
        sup_loss.backward()
        self.opt.step()
        return losses, cls_predict_logits

    def fit(self, df_train, df_test):
        """Train the classifier on the training data, with testing
        at the end of every epoch.

        :param df_train: training data containing labels, lists of word token
            ids, pad/word masks, and token counts for each training example
        :type df_train: pd.DataFrame
        :param df_test: testing data containing labels, lists of word token
            ids, pad/word masks, and token counts for each testing example
        :type df_test: pd.DataFrame
        """
        self.init_optimizer()
        total_train = len(df_train)
        indices = np.array(list(range(0, total_train)))
        # FIX: the epoch loop variable was `i` and was immediately shadowed
        # by the inner batch-index `i`; renamed for clarity.
        for epoch in tqdm(range(self.num_epochs)):
            self.model.train()  # pytorch fn; sets module to train mode
            # shuffle the epoch
            np.random.shuffle(indices)
            total_train_acc = 0
            for i in range(total_train // self.args.train_batch_size):
                # sample a batch of data
                start = i * self.args.train_batch_size
                end = min((i + 1) * self.args.train_batch_size, total_train)
                batch = df_train.loc[indices[start:end]]
                batch_dict = generate_data(batch, self.args.cuda)
                batch_x_ = batch_dict["x"]
                batch_m_ = batch_dict["m"]
                batch_y_ = batch_dict["y"]
                losses, predict = self._train_one_step(
                    batch_x_, batch_y_, batch_m_
                )
                # calculate classification accuracy
                _, y_pred = torch.max(predict, dim=1)
                # FIX: np.float was deprecated and removed in NumPy 1.24;
                # the builtin float performs the identical conversion.
                acc = float((y_pred == batch_y_).sum().cpu().data.item())
                total_train_acc += acc
            total_acc_percent = total_train_acc / total_train
            self.train_accs.append(total_acc_percent)
            self.test(df_test)
            # stop training if there have been no improvements
            if self.epochs_since_improv > self.args.training_stop_thresh:
                break
# Modules that can be used in the three player introspective model
class RnnModel(nn.Module):
"""RNN Module
"""
def __init__(self, input_dim, hidden_dim, layer_num, dropout_rate):
"""Initialize an RNN.
:param input_dim: dimension of input
:type input_dim: int
:param hidden_dim: dimension of filters
:type hidden_dim: int
:param layer_num: number of RNN layers
:type layer_num: int
:param dropout_rate: dropout rate
:type dropout_rate: float
"""
super(RnnModel, self).__init__()
self.rnn_layer = nn.GRU(
input_size=input_dim,
hidden_size=hidden_dim // 2,
num_layers=layer_num,
bidirectional=True,
dropout=dropout_rate,
)
def forward(self, embeddings, mask=None, h0=None):
"""Forward pass in the RNN.
:param embeddings: sequence of word embeddings with dimension
(batch_size, sequence_length, embedding_dim)
:type embeddings: torch.FloatTensor
:param mask: a float tensor of masks with dimension
(batch_size, length), defaults to None
:type mask: torch.FloatTensor, optional
:param h0: initial RNN weights with dimension
(num_layers * num_directions, batch, hidden_size), defaults to None
:type h0: torch.FloatTensor, optional
:return: hiddens, a sentence embedding tensor with dimension
(batch_size, hidden_dim, sequence_length)
:rtype: torch.FloatTensor
"""
# dimensions: (sequence_length, batch_size, embedding_dim)
embeddings_ = embeddings.transpose(0, 1)
if mask is not None:
seq_lengths = list(torch.sum(mask, dim=1).cpu().data.numpy())
seq_lengths = list(map(int, seq_lengths))
inputs_ = torch.nn.utils.rnn.pack_padded_sequence(
embeddings_, seq_lengths
)
else:
inputs_ = embeddings_
if h0 is not None:
hidden, _ = self.rnn_layer(inputs_, h0)
else:
# hidden's dimensions:
# (sequence_length, batch_size, hidden_dim (* 2 if bidirectional))
hidden, _ = self.rnn_layer(inputs_)
if mask is not None:
# hidden's dimensions: (length, batch_size, hidden_dim)
hidden, _ = torch.nn.utils.rnn.pad_packed_sequence(hidden)
# output dimensions: (batch_size, hidden_dim, sequence_length)
return hidden.permute(1, 2, 0)
class ClassifierModule(nn.Module):
    """Module for classifying text used in original paper code.

    Pipeline: embed tokens -> (optionally) mask by rationale z ->
    bidirectional RNN encode -> max-pool over valid positions -> linear
    prediction head.
    """

    def __init__(self, args, word_vocab):
        """Initialize a ClassifierModule.

        :param args: model structure parameters and hyperparameters
        :type args: ModelArguments
        :param word_vocab: a mapping of a set of words (keys) to
            indices (values)
        :type word_vocab: dict
        """
        super(ClassifierModule, self).__init__()
        self.args = args
        self.encoder = RnnModel(
            self.args.embedding_dim,
            self.args.hidden_dim,
            self.args.layer_num,
            self.args.dropout_rate,
        )
        # Linear head mapping the pooled hidden state to class logits.
        self.predictor = nn.Linear(self.args.hidden_dim, self.args.num_labels)
        self.input_dim = args.embedding_dim
        self.embedding_path = args.embedding_path
        self.fine_tuning = args.fine_tuning
        self.init_embedding_layer(word_vocab)
        # Large negative constant used to exclude positions from max-pooling.
        self.NEG_INF = -1.0e6

    def init_embedding_layer(self, word_vocab):
        """Initialize the layer that embeds tokens according to a provided embedding

        :param word_vocab: a mapping of a set of words (keys) to
            indices (values)
        :type word_vocab: dict
        """
        # get initial vocab embeddings
        vocab_size = len(word_vocab)
        # initialize a numpy embedding matrix
        embeddings = 0.1 * np.random.randn(vocab_size, self.input_dim).astype(
            np.float32
        )
        # replace the <PAD> embedding by all zero
        # NOTE(review): assumes word_vocab maps the padding token to index 0
        # — confirm against the vocabulary builder.
        embeddings[0, :] = np.zeros(self.input_dim, dtype=np.float32)
        if self.embedding_path and os.path.isfile(self.embedding_path):
            # Overwrite random rows with pretrained vectors where available.
            # Expected file format: one "<word> <v1> <v2> ..." entry per line.
            f = open(self.embedding_path, "r", encoding="utf8")
            counter = 0
            for line in f:
                data = line.strip().split(" ")
                word = data[0].strip()
                embedding = data[1::]
                embedding = list(map(np.float32, embedding))
                if word in word_vocab:
                    embeddings[word_vocab[word], :] = embedding
                    counter += 1
            f.close()
            logging.info("%d words have been switched." % counter)
        else:
            logging.info("embedding is initialized fully randomly.")
        # initialize embedding layer
        self.embed_layer = nn.Embedding(vocab_size, self.input_dim)
        self.embed_layer.weight.data = torch.from_numpy(embeddings)
        # Embeddings are only trained when fine-tuning is enabled.
        self.embed_layer.weight.requires_grad = self.fine_tuning

    def forward(self, X_tokens, attention_mask, z=None):
        """Forward pass in the classifier module

        :param X_tokens: tokenized and embedded text with shape
            (batch_size, length, embed_dim)
        :type X_tokens: torch Variable
        :param attention_mask: mask indicating word tokens (1) and padding (0)
            with shape (batch_size, length)
        :type attention_mask: torch.FloatTensor
        :param z: chosen rationales for sentence tokens (whether a given token
            is important for classification)
            with shape (batch_size, length), defaults to None
        :type z: torch.FloatTensor, optional
        :return: prediction (batch_size, num_label), word_embeddings, encoded
            input, None
        :rtype: tuple
        """
        word_embeddings = self.embed_layer(X_tokens)
        if z is None:
            # No rationale supplied: treat every token as selected.
            z = torch.ones_like(X_tokens)
            if torch.cuda.is_available():
                z = z.type(torch.cuda.FloatTensor)
            else:
                z = z.type(torch.FloatTensor)
        # Zero out embeddings of tokens not selected by the rationale.
        masked_input = word_embeddings * z.unsqueeze(-1)
        hiddens = self.encoder(masked_input, attention_mask)
        # Max-pool over the sequence, pushing padded/unselected positions to
        # NEG_INF so they never win the max.
        max_hidden = torch.max(
            hiddens + (1 - attention_mask * z).unsqueeze(1) * self.NEG_INF,
            dim=2,
        )[0]
        predict = self.predictor(max_hidden)
        # the last one is for attention in the BERT model
        return predict, [word_embeddings, hiddens], None
# extra classes needed to make model introspective
class DepGenerator(nn.Module):
    """Rationale generator: encodes token embeddings with an RNN and emits
    two logits per token (not-in-rationale vs. in-rationale).
    """

    def __init__(self, input_dim, hidden_dim, layer_num, dropout_rate):
        """Build the generator.

        :param input_dim: dimension of input embeddings
        :type input_dim: int
        :param hidden_dim: hidden size of the RNN encoder
        :type hidden_dim: int
        :param layer_num: number of RNN layers
        :type layer_num: int
        :param dropout_rate: dropout rate of the RNN
        :type dropout_rate: float
        """
        super(DepGenerator, self).__init__()
        self.generator_model = RnnModel(
            input_dim, hidden_dim, layer_num, dropout_rate
        )
        # Two logits per token: "drop" vs. "keep" in the rationale.
        self.output_layer = nn.Linear(hidden_dim, 2)

    def forward(self, X_embeddings, h0=None, mask=None):
        """Score each token's importance for the rationale.

        :param X_embeddings: (batch_size, sequence_length, embedding_dim)
        :type X_embeddings: torch.FloatTensor
        :param h0: optional initial RNN hidden state
        :type h0: torch.FloatTensor, optional
        :param mask: optional (batch_size, length) token/padding mask
        :type mask: torch.FloatTensor, optional
        :return: per-token scores of shape (batch_size, sequence_length, 2)
        :rtype: torch.FloatTensor
        """
        # Encoder returns (batch, hidden, len); the linear head wants the
        # hidden dimension last, i.e. (batch, len, hidden).
        encoded = self.generator_model(X_embeddings, mask, h0)
        per_token = encoded.transpose(1, 2).contiguous()
        return self.output_layer(per_token)
class IntrospectionGeneratorModule(nn.Module):
    """Introspective rationale generator used in paper.

    First runs an internal classifier over the full text, then conditions a
    rationale generator on the classifier's pooled hidden state and predicted
    label ("introspection"), so rationale selection is label-aware.
    """

    def __init__(self, args, classifier):
        """Initialize the IntrospectionGeneratorModule.

        :param args: model structure parameters and hyperparameters
        :type args: ModelArguments
        :param classifier: an instantiated classifier module with an embedding
            layer and forward method
        :type classifier: an instantiated classifier module
            e.g. ClassifierModule
        """
        super(IntrospectionGeneratorModule, self).__init__()
        self.args = args

        # for initializing RNN and DepGenerator
        self.input_dim = args.gen_embedding_dim
        self.hidden_dim = args.hidden_dim
        self.layer_num = args.layer_num
        self.dropout_rate = args.dropout_rate

        # for embedding labels
        self.num_labels = args.num_labels
        self.label_embedding_dim = args.label_embedding_dim

        # for training
        self.fixed_classifier = args.fixed_classifier
        # Large negative constant used to mask padded positions out of
        # max-pooling / rationale scores.
        self.NEG_INF = -1.0e6

        # should be shared with the Classifier_pred weights
        self.lab_embed_layer = self._create_label_embed_layer()

        # baseline classification model
        self.classifier = classifier

        # Projects [pooled classifier state ; label embedding] down to half
        # the hidden size, used (duplicated per direction) as the
        # generator RNN's initial state.
        self.Transformation = nn.Sequential()
        self.Transformation.add_module(
            "linear_layer",
            nn.Linear(
                self.hidden_dim + self.label_embedding_dim,
                self.hidden_dim // 2,
            ),
        )
        self.Transformation.add_module("tanh_layer", nn.Tanh())
        self.Generator = DepGenerator(
            self.input_dim,
            self.hidden_dim,
            self.layer_num,
            self.dropout_rate,
        )

    def _create_label_embed_layer(self):
        # Small trainable embedding for the classifier's predicted label.
        embed_layer = nn.Embedding(self.num_labels, self.label_embedding_dim)
        embed_layer.weight.data.normal_(mean=0, std=0.1)
        embed_layer.weight.requires_grad = True
        return embed_layer

    def forward(self, X_tokens, mask):
        """Forward pass of the introspection generator module.

        :param X_tokens: tokenized and embedded text with shape
            (batch_size, length, embed_dim)
        :type X_tokens: torch Variable
        :param mask: mask indicating word tokens (1) and padding (0)
            with shape (batch_size, length)
        :type mask: torch.FloatTensor
        :return: z_scores_ (scores of token importances),
            cls_pred_logits (internal classifier predictions),
            word_embeddings (embedded tokenized text input)
        :rtype: tuple()
        """
        cls_pred_logits, hidden_states, _ = self.classifier(
            X_tokens, attention_mask=mask
        )
        # hidden states must be in shape (batch_size, hidden_dim, length)
        # RNN returns (batch_size, hidden_dim, length)
        # BERT returns (batch_size, length, hidden_dim)
        last_hidden_state = hidden_states[-1]
        if last_hidden_state.shape[1] != self.hidden_dim:
            last_hidden_state = hidden_states[-1].transpose(1, 2)
        # max_cls_hidden dimensions: (batch_size, hidden_dim); padded
        # positions are pushed to NEG_INF so they never win the max.
        max_cls_hidden = torch.max(
            last_hidden_state + (1 - mask).unsqueeze(1) * self.NEG_INF, dim=2
        )[0]
        if self.fixed_classifier:
            # Detach so gradients do not flow back into a frozen classifier.
            max_cls_hidden = Variable(max_cls_hidden.data)

        word_embeddings = hidden_states[0]

        _, cls_pred = torch.max(cls_pred_logits, dim=1)

        # classifier label embedding dimensions: (batch_size, lab_emb_dim)
        cls_lab_embeddings = self.lab_embed_layer(cls_pred)

        # initial h0 dimensions: (batch_size, hidden_dim / 2)
        init_h0 = self.Transformation(
            torch.cat([max_cls_hidden, cls_lab_embeddings], dim=1)
        )
        # initial h0 dimensions: (2, batch_size, hidden_dim / 2)
        # (one copy per GRU direction)
        init_h0 = (
            init_h0.unsqueeze(0)
            .expand(2, init_h0.size(0), init_h0.size(1))
            .contiguous()
        )

        # z_scores' dimensions: (batch_size, length, 2)
        z_scores_ = self.Generator(word_embeddings, mask=mask, h0=init_h0)
        # Force padded positions to never be selected for the rationale.
        z_scores_[:, :, 1] = z_scores_[:, :, 1] + (1 - mask) * self.NEG_INF

        return z_scores_, cls_pred_logits, word_embeddings
|
sumitmitra255/justfornpmcommand | src/Components/Products/ProductList.js | <gh_stars>0
import { useSelector, useDispatch } from 'react-redux'
import {
productListActionGenerator,
userproductDetailsActionGenerator,
} from '../../Actions/productActions'
import { useEffect } from 'react'
import { userProductDetailsActionGenerator } from '../../Actions/productActions'
import { useHistory } from 'react-router-dom'
import { DisplayProduct } from './DisplayProduct'
import {
Paper,
Table,
TableBody,
TableCell,
TableContainer,
TableHead,
TableRow,
} from '@material-ui/core'
import { Selectsearch } from '../../selectsearch/Selectsearch'
import { useState } from 'react'
import '../../css/products.css'
export const ProductList = (props) => {
const { toggle } = props
const dispatch = useDispatch()
const history = useHistory()
const [selectedproduct, setSelectedproduct] = useState()
const token = useSelector((state) => state.logintoken.token)
const productList = useSelector((state) => state.productlist)
const options = productList.map((ele) => {
return { value: ele, label: `Name:${ele.name}` }
})
const displayproduct = () => {
dispatch(userproductDetailsActionGenerator(selectedproduct, token, history))
}
return (
<>
<div className='fixedElement'>
{toggle ? (
''
) : (
<Selectsearch
options={options}
setter={setSelectedproduct}
dispatcher={displayproduct}
/>
)}
</div>
<TableContainer component={Paper}>
<Table aria-label='simple table'>
<TableHead key={Date.now() + Math.random()}>
<TableRow key={Date.now() + Math.random()}>
<TableCell key={Date.now() + Math.random()}>
Product Name
</TableCell>
<TableCell key={Date.now() + Math.random()}>
Price Per Unit
</TableCell>
<TableCell key={Date.now() + Math.random()}>Edit</TableCell>
<TableCell key={Date.now() + Math.random()}>Delete</TableCell>
</TableRow>
</TableHead>
<TableBody>
{toggle
? productList
.slice(-5)
.reverse()
.map((ele, i) => {
return (
<>
<TableRow hover={true} key={Date.now() + Math.random()}>
<DisplayProduct ProductEle={ele} />
</TableRow>
</>
)
})
: productList.reverse().map((ele) => {
return (
<>
<TableRow hover={true} key={Date.now() + Math.random()}>
<DisplayProduct ProductEle={ele} />
</TableRow>
</>
)
})}
</TableBody>
</Table>
</TableContainer>
</>
)
}
|
part-blockchain/chainsqld | src/peersafe/app/misc/CACertSite.h | //------------------------------------------------------------------------------
/*
This file is part of chainsqld: https://github.com/chainsql/chainsqld
Copyright (c) 2016-2019 Peersafe Technology Co., Ltd.
chainsqld is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
chainsqld is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with cpp-ethereum. If not, see <http://www.gnu.org/licenses/>.
*/
//==============================================================================
#ifndef PEERSAFE_APP_MISC_CACERTSITE_H_INCLUDED
#define PEERSAFE_APP_MISC_CACERTSITE_H_INCLUDED
#include <peersafe/app/misc/ConfigSite.h>
#include <ripple/app/misc/Manifest.h>
#include <ripple/core/TimeKeeper.h>
#include <ripple/crypto/csprng.h>
#include <ripple/json/json_value.h>
#include <ripple/protocol/PublicKey.h>
#include <boost/iterator/counting_iterator.hpp>
#include <boost/range/adaptors.hpp>
#include <boost/thread/locks.hpp>
#include <boost/thread/shared_mutex.hpp>
#include <mutex>
#include <numeric>
namespace ripple {
/**
CA Cert Sites
---------------
This class manages the set of configured remote sites used to fetch the
latest published recommended validator lists.
Lists are fetched at a regular interval.
Fetched lists are expected to be in JSON format and contain the following
fields:
@li @c "blob": Base64-encoded JSON string containing a @c "sequence", @c
"expiration", and @c "validators" field. @c "expiration" contains the
Ripple timestamp (seconds since January 1st, 2000 (00:00 UTC)) for when
the list expires. @c "certs" contains an array of objects with a
@c "cert" and optional @c "manifest" field.
@c "cert" should be the hex-encoded master public key.
@c "manifest" should be the base64-encoded validator manifest.
@li @c "manifest": Base64-encoded serialization of a manifest containing the
publisher's master and signing public keys.
@li @c "signature": Hex-encoded signature of the blob using the publisher's
signing key.
@li @c "version": 1
@li @c "refreshInterval" (optional)
*/
// Fetches, verifies, and applies published CA certificate lists from
// configured remote sites (see the format description in the file comment).
class CACertSite : public ConfigSite
{
public:
    CACertSite(
        ManifestCache& validatorManifests,
        ManifestCache& publisherManifests,
        TimeKeeper& timeKeeper,
        boost::asio::io_service& ios,
        std::vector<std::string>& rootCerts,
        beast::Journal j);

    ~CACertSite();

    // Returns a JSON snapshot of this site's state (for diagnostics/RPC).
    virtual Json::Value
    getJson() const ;

    // Validates and applies a fetched publisher list; `version` must match
    // requiredListVersion.
    virtual ListDisposition applyList(
        std::string const& manifest,
        std::string const& blob,
        std::string const& signature,
        std::uint32_t version);

    /** Stop trusting publisher's list of keys.

        @param publisherKey Publisher public key

        @return `false` if key was not trusted

        @par Thread Safety

        Calling public member function is expected to lock mutex
    */
    bool
    removePublisherList(PublicKey const& publisherKey);

private:
    /** Check response for trusted valid published list

        @return `ListDisposition::accepted` if list can be applied

        @par Thread Safety

        Calling public member function is expected to lock mutex
    */
    ListDisposition
    verify(
        Json::Value& list,
        PublicKey& pubKey,
        std::string const& manifest,
        std::string const& blob,
        std::string const& signature);

    // Trusted root certificates used to anchor verification.
    std::vector<std::string>& rootCerts_;
    //ManifestCache& validatorManifests_;
    ManifestCache& publisherManifests_;
    TimeKeeper& timeKeeper_;
    // Guards this site's mutable state; mutable so const members may lock.
    boost::shared_mutex mutable mutex_;

    // Currently supported version of publisher list format
    static constexpr std::uint32_t requiredListVersion = 1;
};
} // ripple
#endif
|
vishal-panchal611/Smart_Farming_using_IoT | node_modules/@carbon/icons-react/es/task--view/16.js | import { TaskView16 } from '..';
export default TaskView16;
|
nagineni/chromium-crosswalk | chrome/browser/extensions/api/top_sites/top_sites_api.h | // Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef CHROME_BROWSER_EXTENSIONS_API_TOP_SITES_TOP_SITES_API_H_
#define CHROME_BROWSER_EXTENSIONS_API_TOP_SITES_TOP_SITES_API_H_
#include "base/memory/weak_ptr.h"
#include "chrome/browser/extensions/chrome_extension_function.h"
#include "chrome/browser/history/history_types.h"
namespace extensions {
// Implements chrome.topSites.get(): asynchronously queries the history
// TopSites service for the user's most-visited URLs and returns them to
// the extension.
class TopSitesGetFunction : public ChromeAsyncExtensionFunction {
 public:
  DECLARE_EXTENSION_FUNCTION("topSites.get", TOPSITES_GET)

  TopSitesGetFunction();

 protected:
  virtual ~TopSitesGetFunction();

  // ExtensionFunction:
  virtual bool RunImpl() OVERRIDE;

 private:
  // Completion callback invoked with the most-visited URL list.
  void OnMostVisitedURLsAvailable(const history::MostVisitedURLList& data);

  // For callbacks may be run after destruction.
  base::WeakPtrFactory<TopSitesGetFunction> weak_ptr_factory_;
};
} // namespace extensions
#endif // CHROME_BROWSER_EXTENSIONS_API_TOP_SITES_TOP_SITES_API_H_
|
1shenxi/webpack | test/watchCases/parsing/switching-harmony/1/cc.js | <reponame>1shenxi/webpack
module.exports = "cc1";
|
lwyj123/vue-endless | src/views/gameMap/js/astar.js | const Astar = function Astar(map, start, end) {
function block(x, y) {
this.x = x;
this.y = y;
this.parent = null;
this.G = 0;
this.H = null;
this.getF = function() {
return this.G + this.H;
};
}
this.map = _.cloneDeep(map);
this.init = function() {
var opt = {
startBlock : start,
endBlock : end,
stickList : _.filter(_.flattenDeep(this.map.mapData) ,{ block_type: 'X'}),
openList : [],
closeList : [],
isInList : function(block, type) {
let index = _.findIndex(this[type],{
x:block.x,
y:block.y
})
return index !== -1 && { index }
}
}
console.log(this.map)
_.assign(this.map, opt)
this.map.openList.push(this.map.startBlock);
var line = this.step();
if (!line.find) {
console.info('ๆ ๆณ็ๅญ่ทฏๅพ!');
return [];
}
line = line.endBlock;
let path = [];
while (line.parent.parent) {
line = line.parent;
path.push(line);
}
path.reverse().push(end);
return path;
}
this.step = function() {
// sort for get least f
this.map.openList = this.map.openList.sort(function(a, b) {
return a.getF() - b.getF();
})
// get a block from Open List
var currentBlock = this.map.openList.shift();
if (!currentBlock) {
return {
find: false
}
};
this.map.closeList.push(currentBlock);
var around = this.around(currentBlock);
for (var i = 0; i < around.length; i++) {
var _block = around[i];
var index = this.map.isInList(_block, 'openList');
_block.parent = currentBlock;
_block.H = this.countH(_block);
_block.G = this.countG(_block) + (currentBlock.G || 0);
if (!index) {
if (_block.x === this.map.endBlock.x && _block.y === this.map.endBlock.y) {
return {
find: true,
endBlock: _block
}
}
this.map.openList.push(_block);
continue;
}
if ((currentBlock.G + this.countG(_block)) < this.map.openList[index.index].G) {
this.map.openList[index.index].parent = currentBlock;
}
};
return this.step();
};
this.around = function(currentBlock) {
var list = [];
for (var i = -1; i <= 1; i++) {
for (var j = -1; j <= 1; j++) {
if (i === 0 && j === 0) {
continue;
};
if (i !== 0 && j !== 0) {
continue;
}
var x = currentBlock.x + i;
var y = currentBlock.y + j;
if (x >= this.map.row || y >= this.map.col || x < 0 || y < 0) {
continue;
}
var record = new block(x, y);
if (this.map.isInList(record, 'closeList')) {
continue;
}
if (this.map.isInList(record, 'stickList')) {
continue;
}
list.push(record)
}
}
return list;
}
this.countH = function(block) {
var x = Math.abs(block.x - this.map.endBlock.x);
var y = Math.abs(block.y - this.map.endBlock.y);
return (x + y) * 10;
}
this.countG = function(block) {
if (block.x !== block.parent.x && block.y !== block.parent.y) {
return 14;
} else {
return 10;
}
}
return this.init();
}
export default Astar |
NullVoxPopuli/aeonvera-ui | app/mixins/components/print/form.js | <reponame>NullVoxPopuli/aeonvera-ui
import Ember from 'ember';

/**
 * Mixin for printable form components that render a configurable number of
 * extra blank rows.
 */
export default Ember.Mixin.create({
  // How many extra blank rows to render.
  additionalRows: 0,

  // Index list [0 .. additionalRows-1] for templates to iterate over.
  additionalRowsArray: Ember.computed('additionalRows', {
    get() {
      const total = this.get('additionalRows');
      const rows = [];
      let i = 0;
      while (i < total) {
        rows.push(i);
        i += 1;
      }
      return rows;
    }
  })
});
|
BBN-E/LearnIt | kb/src/main/java/com/bbn/akbc/evaluation/tac/LoadEvalKB.java | <gh_stars>1-10
package com.bbn.akbc.evaluation.tac;
import java.io.IOException;
/**
 * Command-line entry point that loads a TAC evaluation KB and prints it.
 */
public class LoadEvalKB {

  /**
   * Expects five positional arguments: query file, assessment file,
   * aligned system KB, evaluation log, and system KB path.
   */
  public static void main(String[] argv) throws IOException {
    EvalKB evalKB = new EvalKB(argv[0], argv[1], argv[2], argv[3], argv[4]);
    evalKB.print();
  }
}
|
flufff42/fastlane | spaceship/lib/spaceship/connect_api/models/app_store_version_submission.rb | <reponame>flufff42/fastlane<filename>spaceship/lib/spaceship/connect_api/models/app_store_version_submission.rb<gh_stars>1000+
require_relative '../model'
module Spaceship
  class ConnectAPI
    # Model for the App Store Connect `appStoreVersionSubmissions` resource.
    class AppStoreVersionSubmission
      include Spaceship::ConnectAPI::Model

      attr_accessor :can_reject

      attr_mapping({
        "canReject" => "can_reject"
      })

      # JSON:API resource type name.
      def self.type
        "appStoreVersionSubmissions"
      end

      #
      # API
      #

      # Deletes (cancels) this version submission via the Connect API.
      # The filter/includes/limit/sort keywords are accepted for interface
      # symmetry with other models but are not used by this call.
      def delete!(client: nil, filter: {}, includes: nil, limit: nil, sort: nil)
        (client || Spaceship::ConnectAPI).delete_app_store_version_submission(app_store_version_submission_id: id)
      end
    end
  end
end
|
wesleyegberto/courses | nodejs/apis/api-payfast/misc/copyFileStream.js | var fs = require('fs');
fs.createReadStream('smith.jpg')
// chunk to be processed
.pipe(fs.createWriteStream('cloned_smith.jpg'))
// final event
.on('finish', function() {
console.log('Smith was cloned again!');
}); |
Doresimon/good-chain | crypto/hdk/key.go | <gh_stars>0
package hdk
import (
	"encoding/binary"
	"fmt"
	"math/big"

	"github.com/Doresimon/good-chain/crypto/bls"
	"github.com/Doresimon/good-chain/crypto/hash/hmac"
	"golang.org/x/crypto/bn256"
)
// bn256Order is the order of the bn256 group; all derived private scalars
// are reduced modulo this value.
var bn256Order = bn256.Order

// Scratch big.Int values.
// NOTE(review): bigZero is unused by the live code here, and bigTmp is only
// referenced from commented-out code in Pub2Pub — candidates for removal.
var bigZero = new(big.Int).SetInt64(0)
var bigTmp = new(big.Int).SetInt64(0)
// // HDPrivateKey ...
// type HDPrivateKey interface {
// Public() *HDPublicKey
// Sign([]byte) ([]byte, error)
// Bytes() []byte
// }
// // HDPublicKey ...
// type HDPublicKey interface {
// Bytes() []byte
// Verify(message, sigBytes []byte) bool
// }
// GenerateMasterKey derives the BLS master private key and chain code from
// HMAC-SHA512(key, seed): the left 32 bytes become the private scalar
// (reduced mod the bn256 group order) and the right 32 bytes become the
// chain code.  err is set (non-nil) when the derived scalar is zero; note
// that masterKey is still returned in that case, matching the original
// behavior — callers must check err.
func GenerateMasterKey(key, seed []byte) (masterKey *bls.PrivateKey, chainCode []byte, err error) {
	mac := hmac.SHA512(key, seed)

	privateKeyValue := new(big.Int).SetBytes(mac[0:32])
	chainCode = mac[32:64]

	privateKeyValue.Mod(privateKeyValue, bn256Order)

	// Left-pad the key material to exactly 32 bytes.
	// (Renamed from `len`, which shadowed the builtin.)
	keyBytes := privateKeyValue.Bytes()
	keyLen := len(keyBytes)
	if keyLen < 32 {
		emptyBytes := make([]byte, 32-keyLen)
		keyBytes = append(emptyBytes, keyBytes...)
	}
	if keyLen > 32 {
		panic("len > 32") // unreachable: the value was reduced mod a 256-bit order
	}
	privateKeyValue.SetBytes(keyBytes)

	if privateKeyValue.Sign() == 0 {
		err = fmt.Errorf("master key is all 0")
	}

	masterKey = new(bls.PrivateKey)
	masterKey.Set(privateKeyValue)
	return
}
// Priv2Priv The function Priv2Priv((k_p, c_p), i) → (k_c, c_c) computes a child extended private key from the parent extended private key.
//
// BIP32-style derivation: I = HMAC-SHA512(c_p, ser(point(k_p)) || ser32(i)),
// k_c = (parse256(I_L) + k_p) mod n, c_c = I_R.  Returns ok=false when the
// derived child key is zero (caller should retry with the next index).
func Priv2Priv(parentPrivKey *bls.PrivateKey, parentChainCode []byte, index uint32) (*bls.PrivateKey, []byte, bool) {
	key := parentChainCode
	data := make([]byte, 0, 0) // Data = ser(point(k)) || ser (i))

	// Serialize the parent public point (k_p * G on G2) into the MAC input.
	parentPubKey := new(bn256.G2).ScalarBaseMult(parentPrivKey.Value())
	d1 := parentPubKey.Marshal()
	d2 := int32ToBytes(index)

	data = append(data, d1...)
	data = append(data, d2...)

	mac := hmac.SHA512(key, data)

	// Child scalar = (I_L + k_p) mod n.
	childKeyValue := new(big.Int)
	childKeyValue.SetBytes(mac[0:32])
	childKeyValue.Add(childKeyValue, parentPrivKey.Value())
	childKeyValue.Mod(childKeyValue, bn256Order)

	// Child chain code = I_R (copied so it does not alias mac's backing array).
	childChainCode := make([]byte, 32, 32)
	copy(childChainCode, mac[32:64])

	// A zero scalar is not a valid private key.
	if childKeyValue.Sign() == 0 {
		return nil, nil, false
	}

	childKey := new(bls.PrivateKey)
	childKey.Set(childKeyValue)

	return childKey, childChainCode, true
}
// Pub2Pub The function Pub2Pub((K , c ), i) → (K , c ) computes a child extended public key from
// the parent extended public key.
//
// Mirrors Priv2Priv on the public side: I = HMAC-SHA512(c_p, ser(K_p) || ser32(i)),
// K_c = (I_L mod n)*G + K_p, c_c = I_R.  Returns ok=false when I_L mod n is
// zero (caller should retry with the next index).
func Pub2Pub(parentPubKey *bls.PublicKey, parentChainCode []byte, index uint32) (*bls.PublicKey, []byte, bool) {
	key := parentChainCode
	data := make([]byte, 0, 0) // Data = []byte(point(k)) || [4]byte(i))

	d1 := parentPubKey.Value().Marshal()
	d2 := int32ToBytes(index)

	data = append(data, d1...)
	data = append(data, d2...)

	mac := hmac.SHA512(key, data)

	// Tweak scalar = I_L mod n.
	tmpBN := new(big.Int)
	tmpBN.SetBytes(mac[0:32])
	tmpBN.Mod(tmpBN, bn256Order)

	if tmpBN.Sign() == 0 {
		return nil, nil, false
	}
	// if bigTmp.Sub(bn256Order, tmpBN).Sign() != 1 {
	// 	return nil, nil, false
	// }

	// Child public point = tweak*G + K_p.
	childPubKeyValue := new(bn256.G2)
	childPubKeyValue.ScalarBaseMult(tmpBN)
	childPubKeyValue.Add(childPubKeyValue, parentPubKey.Value())

	// Child chain code = I_R.
	childChainCode := make([]byte, 32, 32)
	copy(childChainCode, mac[32:64])

	childPubKey := new(bls.PublicKey)
	childPubKey.Set(childPubKeyValue)

	return childPubKey, childChainCode, true
}
// Verify checks sigBytes as a BLS signature over messageBytes using the
// serialized public key pubkBytes.  Returns false when the key fails to
// deserialize or the signature does not verify.
func Verify(pubkBytes []byte, messageBytes []byte, sigBytes []byte) bool {
	pubKey := bls.NewPubKey(pubkBytes)
	if pubKey != nil {
		return pubKey.Verify(messageBytes, sigBytes)
	}
	return false
}
func int32ToBytes(i uint32) []byte {
ret := make([]byte, 4, 4)
// ret[0] = byte((i & 0xff000000) >> 24)
// ret[1] = byte((i & 0x00ff0000) >> 16)
// ret[2] = byte((i & 0x0000ff00) >> 8)
// ret[3] = byte((i & 0x000000ff) >> 0)
ret[0] = byte((i) >> 24)
ret[1] = byte((i) >> 16)
ret[2] = byte((i) >> 8)
ret[3] = byte((i) >> 0)
return ret
}
// isBytesEmpty reports whether every byte in *b is zero
// (a nil or zero-length slice counts as empty).
func isBytesEmpty(b *[]byte) bool {
	for _, v := range *b {
		if v != 0 {
			return false
		}
	}
	return true
}
|
Narflex/sagetv | third_party/Microsoft/MpegMux/MpegMux.h | <filename>third_party/Microsoft/MpegMux/MpegMux.h
//------------------------------------------------------------------------------
// Copyright 2015 The SageTV Authors. All Rights Reserved.
// File: MpegMux.cpp
//
// Desc: DirectShow sample code - implementation of a renderer that MpegMuxs
// the samples it receives into a text file.
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//------------------------------------------------------------------------------
#define MAX_BUFFY_SIZE 2048
#define VSTREAM_ID 224
#define ASTREAM_ID 189
#define AUD_CIRC_SIZE 256000
#define VID_CIRC_SIZE 4000000
//#define GOP_TICKS 45045
//#define GOPS_PER_SEC (2997./1500.)
#define FUDGE 0.90
#define DIFF_HIST_LEN 240
#define STCODE_Q_SIZE 10
#define MPEGFRAME_Q_SIZE 128
// 200 seemed a little too high for this, I know 150 is too low
#define DIFF_HIST_AVG_THRESH_TOP 175
#define DIFF_HIST_AVG_THRESH_BOTTOM -1000
// At 100 I noticed a lot of extra corrections in the first few minutes, it was
// even noticable when watching. It never got out of sync, but you could
// hear the discontinuities.
#define MINREGLENA 10
#define MAXREGLENA 50
#define MINREGLENV 40 //100
#define MAXREGLENV 60 //125
#define LINREGLEN 500 //200 11/21 I took it off 200, I noticed too much adjust there, Greatest Love Songs example
#define TABLE_SIZE 40
// Per-GOP audio packetization pattern tables, indexed modulo TABLE_SIZE:
// for each position in the repeating cycle, how many audio frame headers a
// pack contains and the byte offset of its first access unit.
// NOTE(review): the specific values appear tied to the muxer's fixed
// audio/pack layout — confirm against the packetization code before reuse.
static BYTE number_of_frame_headers_table[TABLE_SIZE] =
{
    7, 6, 6, 7, 6, 6, 6, 7, 6, 6, 7, 6, 6, 6, 7, 6,
    6, 6, 7, 6, 6, 7, 6, 6, 6, 7, 6, 6, 6, 7, 6, 6,
    7, 6, 6, 6, 7, 6, 6, 6
};
static BYTE first_access_unit_pointer_table[TABLE_SIZE] =
{
    4, 236, 148, 60, 36, 204, 116, 28, 4, 172, 84, 60, 228, 140, 52, 28,
    196, 108, 20, 252, 164, 76, 52, 220, 132, 44, 20, 188, 100, 12, 244, 156,
    68, 44, 212, 124, 36, 12, 180, 92
};

// Records where an MPEG video frame starts in the stream, its picture type,
// and its decode timestamp.
struct MpegFrameOffset
{
    DWORD offset;
    DWORD frameType;
    LONGLONG dts;
};
typedef struct MpegFrameOffset MpegFrameOffset;

// One sample for the linear-regression rate statistics: the time a chunk of
// data was received and the associated byte length.
struct DataRecvTime
{
    __int64 time;
    LONGLONG len;
};
typedef struct DataRecvTime DataRecvTime;
class CMpegMuxInputPin;
class CMpegMux;
class CMpegMuxFilter;
// Main filter object
//
// Thin CBaseFilter shell owned by CMpegMux: pin enumeration is answered on
// the muxer's behalf, and Pause/Stop are overridden so the output file can
// be opened and closed as the graph changes state.
class CMpegMuxFilter : public CBaseFilter
{
    CMpegMux * const m_pMpegMux;    // owning muxer object (never reassigned)

public:
    // Constructor
    CMpegMuxFilter(CMpegMux *pMpegMux,
                   LPUNKNOWN pUnk,
                   CCritSec *pLock,
                   HRESULT *phr);

    // Pin enumeration
    CBasePin * GetPin(int n);
    int GetPinCount();

    // Open and close the file as necessary
    //STDMETHODIMP Run(REFERENCE_TIME tStart);
    STDMETHODIMP Pause();
    STDMETHODIMP Stop();
};
// Pin object
//
// Base rendered input pin shared by the audio and video input pins;
// streaming calls are serialized through m_pReceiveLock.
class CMpegMuxInputPin : public CRenderedInputPin
{
protected:
    CMpegMux * const m_pMpegMux;            // Main renderer object
    CCritSec * const m_pReceiveLock;        // Critical section for streaming

public:
    CMpegMuxInputPin(TCHAR *pObjectName,
                     CMpegMux *pMpegMux,
                     LPUNKNOWN pUnk,
                     CBaseFilter *pFilter,
                     CCritSec *pLock,
                     CCritSec *pReceiveLock,
                     HRESULT *phr,
                     LPCWSTR pName);

    STDMETHODIMP EndOfStream(void);
    STDMETHODIMP ReceiveCanBlock();
    HRESULT Inactive();

    // Accessor for the pin's negotiated media type.
    CMediaType *MediaType()
    {
        return &m_mt;
    }
};
// Input pin for the MPEG video elementary stream.
class CMpegMuxVideoInputPin : public CMpegMuxInputPin
{
public:
    CMpegMuxVideoInputPin(CMpegMux *pMpegMux,
                          LPUNKNOWN pUnk,
                          CBaseFilter *pFilter,
                          CCritSec *pLock,
                          CCritSec *pReceiveLock,
                          HRESULT *phr);

    // Do something with this media sample
    STDMETHODIMP Receive(IMediaSample *pSample);

    // Specify our preferred media type
    HRESULT GetMediaType(int iPosition, CMediaType *pMediaType);

    // Check if the pin can support this specific proposed type and format
    HRESULT CheckMediaType(const CMediaType *);

    // Stream-parsing state — presumably set once the first GOP start and the
    // most recent sequence header are seen (inferred from naming; confirm
    // against MpegMux.cpp).
    BOOL m_foundFirstGroupStart;
    BOOL m_foundRecentSeqHdr;
};
// Input pin for the MPEG audio elementary stream.
class CMpegMuxAudioInputPin : public CMpegMuxInputPin
{
public:
    CMpegMuxAudioInputPin(CMpegMux *pMpegMux,
                          LPUNKNOWN pUnk,
                          CBaseFilter *pFilter,
                          CCritSec *pLock,
                          CCritSec *pReceiveLock,
                          HRESULT *phr);

    // Do something with this media sample
    STDMETHODIMP Receive(IMediaSample *pSample);

    // Specify our preferred media type
    HRESULT GetMediaType(int iPosition, CMediaType *pMediaType);

    // Check if the pin can support this specific proposed type and format
    HRESULT CheckMediaType(const CMediaType *);
};
// CMpegMux object which has filter and pin members
//
// Central muxer state: owns the DirectShow filter shell and the two input
// pins, buffers incoming audio/video, packs them into MPEG program-stream
// packs on a worker thread, and writes the result to the configured file.
// Exposes IFileSinkFilter (output file) and IMpegMux (share info, circular
// file size) to clients.
class CMpegMux : public CUnknown, public IFileSinkFilter, public IMpegMux
{
    friend class CMpegMuxFilter;
    friend class CMpegMuxInputPin;
    friend class CMpegMuxVideoInputPin;
    friend class CMpegMuxAudioInputPin;

    CMpegMuxFilter *m_pFilter;       // Methods for filter interfaces
    CMpegMuxInputPin *m_pvPin;       // A simple rendered input video pin
    CMpegMuxInputPin *m_paPin;       // A simple rendered input video pin
    CCritSec m_Lock;                 // Main renderer critical section
    CCritSec m_vReceiveLock;         // Sublock for received samples
    CCritSec m_aReceiveLock;         // Sublock for received samples
    CCritSec m_seqReadyLock;
    CCritSec m_videoLock;
    CCritSec m_audioLock;
    CCritSec m_fileLock;
    HANDLE m_hFile;                  // Handle to file for MpegMuxing
    LPOLESTR m_pFileName;            // The filename where we MpegMux to

    // Bit-level staging buffer used by BufferBits()/WriteBuff().
    unsigned char m_buffy[MAX_BUFFY_SIZE];
    unsigned int m_bytePos;
    unsigned int m_bitPos;
    unsigned char m_spareByte;

    // Next timestamps to emit (DTS/PTS in 90kHz ticks, SCR clock values).
    ULONGLONG m_nextVDTS;
    ULONGLONG m_nextVSCR;
    ULONGLONG m_nextASCR;
    DWORD m_nextASCRE;
    ULONGLONG m_nextAPTS;
    ULONGLONG m_lastWrittenAPTS;

    // Video sequence and audio circular buffers plus their fill state.
    PBYTE m_seqBuf;
    DWORD m_seqBufLen;
    DWORD m_seqBufOffset;
    DWORD m_numSeqReady;
    PBYTE m_audBuf;
    DWORD m_audBufLen;
    DWORD m_audBufOffset;

    // Running byte/GOP/frame counters for each stream.
    LONGLONG m_aBytesRecvd;
    LONGLONG m_gopsRecvd;
    LONGLONG m_aBytesWritten;
    LONGLONG m_gopsWritten;
    LONGLONG m_aFrameNum;
    DWORD m_aFrameRem;

    // A/V timing-difference histories used for drift correction.
    LONG m_diffHist[DIFF_HIST_LEN];
    DWORD m_diffHistPos;
    bool m_diffHistValid;
    LONG m_scrDiffHist[DIFF_HIST_LEN];
    DWORD m_scrDiffHistPos;
    bool m_scrDiffHistValid;
    DWORD m_buffy_size;

    // Stream format parameters.
    LONGLONG m_videoBitrate; // in bits/second, video bit rate out of MPEG-2 encoder
    DWORD m_audioSampleRate; // in samples/second
    DWORD m_audioBitrate; // in bits/second
    DWORD m_aFrameSize; // in samples, dependent upon layer of MPEG audio
    DWORD m_muxRate;
    DWORD m_audioPacksPerGOP;
    DWORD m_audioBytesPerGOP;
    DWORD m_videoPacksPerGOP;

    // Queues of start-code offsets and per-frame offsets pending write.
    DWORD m_stCodeQ[STCODE_Q_SIZE];
    DWORD m_stCodeIdx;
    MpegFrameOffset m_frameCodeQ[MPEGFRAME_Q_SIZE];
    DWORD m_frameCodeIdx;
    DWORD m_numFrameCodes;
    bool m_wroteSysHdr;
    DWORD m_frameTicks;
    DWORD m_gopFrames;
    DWORD m_ipFrameDist;
    DWORD m_numIPFramesPerGop;
    ShareInfo m_shareInfo;

    // Linear-regression receive-rate statistics (see DataRecvTime).
    DWORD m_videoStatIdx;
    DWORD m_audioStatIdx;
    bool m_fullVideoStat;
    bool m_fullAudioStat;
    DataRecvTime m_videoStat[LINREGLEN];
    DataRecvTime m_audioStat[LINREGLEN];
    __int64 m_baseCpuTime;
    LONGLONG m_baseAudioTime;
    LONGLONG m_baseVideoTime;
    LONGLONG m_lastVideoRecvTime;
    LONGLONG m_totalAudioStatAdjust;
    DWORD m_audioByteGOPExtras;
    bool m_bDropNextSeq;

    // Worker-thread signalling and state.
    CAMEvent m_evWork;
    CAMEvent m_evStop; // set when thread should exit
    HANDLE m_hThread;
    bool m_bRegBoostThreads;
    ULONGLONG m_llRegVDTS;
    ULONGLONG m_llRegAPTS;
    bool m_bMpegVideo;
    bool m_bPesVideo;
    bool m_bNextPesPacketFragmented;
    long m_circFileSize;
    long m_nextCircFileSize;
    bool m_bResyncAV;
    int m_lastFrameAdjust;
    int m_numFrameAdjustRepeat;
    LONGLONG m_extraDTS;

    // start the thread
    HRESULT StartThread(void);
    // stop the thread and close the handle
    HRESULT CloseThread(void);
    // called on thread to process any active requests
    void ProcessSeqs(void);
    // initial static thread proc calls ThreadProc with DWORD
    // param as this
    static DWORD WINAPI InitialThreadProc(LPVOID pv) {
        CMpegMux * pThis = (CMpegMux*) pv;
        return pThis->ThreadProc();
    };
    DWORD ThreadProc(void);

public:
    DECLARE_IUNKNOWN

    CMpegMux(LPUNKNOWN pUnk, HRESULT *phr);
    ~CMpegMux();

    static CUnknown * WINAPI CreateInstance(LPUNKNOWN punk, HRESULT *phr);

    // Write data streams to a file
    HRESULT Write(PBYTE pbData,LONG lData);

    // Write PS Headers
    HRESULT WritePSHeaders();
    void WritePESPaddingPacket(ULONG packSize);
    BOOL PackupVideoSequence();
    void WritePackHeader(ULONGLONG scr, ULONGLONG scre, ULONG numStuffs);
    void WritePESVideoPacket(ULONGLONG pts, ULONGLONG dts, DWORD len, BOOL bDataAlignment = FALSE);
    void WritePESAudioPacket(ULONGLONG pts, DWORD len, int frameAdjust);
    void WriteSystemHeader();

    // Bit-level buffered output helpers.
    void BufferBits(ULONGLONG value, unsigned int numBits);
    HRESULT WriteBuff(unsigned int numBytes);

    // Implements the IFileSinkFilter interface
    STDMETHODIMP SetFileName(LPCOLESTR pszFileName,const AM_MEDIA_TYPE *pmt);
    STDMETHODIMP GetCurFile(LPOLESTR * ppszFileName,AM_MEDIA_TYPE *pmt);

    // Implements the IMpegMux interface
    STDMETHODIMP get_ShareInfo(ShareInfo **sharin);
    STDMETHODIMP get_FileLength(LONGLONG *fileLength);
    STDMETHODIMP put_CircularSize(long lCircSize);
    STDMETHODIMP ForceCleanUp()
    {
        return S_OK;
    }

private:
    // Overriden to say what interfaces we support where
    STDMETHODIMP NonDelegatingQueryInterface(REFIID riid, void ** ppv);

    // Open and write to the file
    HRESULT OpenFile();
    HRESULT CloseFile();

    // Start-code / frame-offset queue helpers.
    void PushSeqCode(DWORD idx);
    DWORD PopSeqLen();
    DWORD PeekSeqLen();
    void PushFrameOffset(MpegFrameOffset x);
    MpegFrameOffset PopFrameOffset();

    // Linear regression over receive statistics: estimated time at which
    // byteLen bytes will have been received.
    __int64 linRegTimeForBytes(DataRecvTime* pData, DWORD numPoints, LONGLONG byteLen, bool includeGlobalAudioOffset);
};
|
CommandPost/FinalCutProFrameworks | Headers/Frameworks/Flexo/FFTimelineToolController.h | <gh_stars>1-10
//
// Generated by class-dump 3.5 (64 bit) (Debug version compiled Mar 11 2021 20:53:35).
//
// Copyright (C) 1997-2019 <NAME>.
//
#import <objc/NSObject.h>
@class FFTool, TLKTimelineView;
__attribute__((visibility("hidden")))
// class-dump reconstruction (declarations only) of the controller that keeps
// the active timeline tool in sync between the model layer and the
// TLKTimelineView's event handlers.
@interface FFTimelineToolController : NSObject
{
    TLKTimelineView *_timelineView;
    FFTool *_activeTimelineTool;
    // Presumably re-entrancy guards for the two selection directions
    // (model->TK and TK->model) — inferred from naming, confirm in binary.
    BOOL _selectingFromModel;
    BOOL _selectingFromTK;
}

+ (BOOL)toolIsTimelineTool:(id)arg1;
+ (id)timelineTools;
@property(retain, nonatomic) TLKTimelineView *timelineView; // @synthesize timelineView=_timelineView;
@property(retain, nonatomic) FFTool *activeTimelineTool; // @synthesize activeTimelineTool=_activeTimelineTool;
- (Class)_toolClassForCurrentEventHandler;
- (void)_setEventHandlerForTool:(id)arg1;
- (void)_activeTimelineHandlerDidChange;
- (void)_activeToolDidChange:(id)arg1;
- (void)eventDispatcher:(id)arg1 didSetCurrentHandler:(id)arg2;
- (void)dealloc;
- (id)init;
- (id)initWithTimelineView:(id)arg1;

@end
|
xingmeichen/spring-cloud-shop | shop-job/shop-job-model/src/main/java/quick/pager/shop/job/response/JobGroupResponse.java | <reponame>xingmeichen/spring-cloud-shop
package quick.pager.shop.job.response;
import java.io.Serializable;
import lombok.Data;
/**
 * Job group response object.
 *
 * @author siguiyang
 */
@Data
public class JobGroupResponse implements Serializable {
    private static final long serialVersionUID = 2164846165607992992L;

    private Long id;

    /**
     * Group service name
     */
    private String groupName;

    /**
     * Sequence number (ordering of the group)
     */
    private Integer sequence;
}
|
dhyces/DinnerPlate | src/main/java/dhyces/dinnerplate/capability/bitten/MockFoodProvider.java | package dhyces.dinnerplate.capability.bitten;
import dhyces.dinnerplate.bite.BitableProperties;
import dhyces.dinnerplate.bite.Bite;
import dhyces.dinnerplate.bite.IBite;
import dhyces.dinnerplate.util.Couple;
import net.minecraft.core.particles.ItemParticleOption;
import net.minecraft.core.particles.ParticleOptions;
import net.minecraft.core.particles.ParticleTypes;
import net.minecraft.nbt.CompoundTag;
import net.minecraft.sounds.SoundEvent;
import net.minecraft.world.entity.LivingEntity;
import net.minecraft.world.entity.player.Player;
import net.minecraft.world.item.ItemStack;
import net.minecraft.world.level.Level;
/**
 * IMockFoodProvider backed by a real edible ItemStack: wraps the stack's
 * food properties as a three-bite sequence and tracks how many bites have
 * been taken.
 */
public class MockFoodProvider implements IMockFoodProvider {

    // The real edible stack being eaten; EMPTY until initialize() succeeds.
    protected ItemStack stack = ItemStack.EMPTY;
    // Number of bites taken so far.
    protected int chewCount;
    /** The first of the pair is used for first and second bites, the second of the pair is used for the last bite*/
    protected Couple<IBite> bites;

    public MockFoodProvider() {
        chewCount = 0;
        bites = Couple.coupleOf(new Bite.Builder().build(), new Bite.Builder().build());
    }

    // Captures the stack and splits its food properties into bites.
    // Non-edible stacks are ignored (provider stays in its default state).
    @Override
    public void initialize(ItemStack stack, LivingEntity entity, int chewCount) {
        if (!stack.isEdible()) return;
        this.stack = stack;
        var biteProps = BitableProperties.threeBite(stack.getItem().getFoodProperties(stack, entity));
        // TODO: there's an issue here in the case there is an array out of bounds error if the splitInto method returns a list with a
        // size less than 2
        this.bites = Couple.coupleOf(biteProps.getBite(0), biteProps.getBite(1));
    }

    @Override
    public ItemStack getRealStack() {
        return stack;
    }

    // Delegates to the wrapped stack's eating sound (ignores the passed stack).
    @Override
    public SoundEvent getEatingSound(ItemStack stack) {
        return this.stack.getEatingSound();
    }

    // Item-break particles based on the wrapped stack (ignores the passed stack).
    @Override
    public ParticleOptions getParticle(ItemStack stack) {
        return new ItemParticleOption(ParticleTypes.ITEM, this.stack);
    }

    // Applies the stack's finish-eating effects. For players, food data and
    // creative mode are snapshotted and restored around the call so the mock
    // consumption does not double-apply hunger or creative behavior.
    @Override
    public ItemStack finish(ItemStack stackIn, Level level, LivingEntity livingEntity) {
        var stackCopy = this.stack.copy();
        if (livingEntity instanceof Player p) {
            var tempStorage = new CompoundTag();
            var ability = p.getAbilities().instabuild;
            p.getFoodData().addAdditionalSaveData(tempStorage);
            p.getAbilities().instabuild = false;
            var ret = stackCopy.finishUsingItem(level, livingEntity);
            stackCopy.setCount(1);
            p.getAbilities().instabuild = ability;
            p.getFoodData().readAdditionalSaveData(tempStorage);
            // If finishUsingItem returned the (unchanged) stack itself, fall
            // back to the container item (e.g. bowl); otherwise use its result.
            return ret.equals(stackCopy) ? this.stack.getContainerItem() : ret;
        }
        return stack;
    }

    @Override
    public int getBiteCount() {
        return chewCount;
    }

    @Override
    public int getMaxBites() {
        return 3;
    }

    // NOTE(review): this compares against getMaxBiteCount(stack, null) while
    // getMaxBites() above returns a constant 3 — confirm the two are meant to
    // agree (getMaxBiteCount is presumably an interface default).
    @Override
    public boolean incrementBiteCount() {
        return ++chewCount >= getMaxBiteCount(stack, null);
    }

    @Override
    public void setBiteCount(int count) {
        this.chewCount = count;
    }

    // Bites 0 and 1 use the first IBite; the final bite uses the second.
    @Override
    public IBite getBite(int chew) {
        return chew < 2 ? bites.getFirst() : bites.getSecond();
    }

    // NOTE(review): the three delegations below call getFoodProperties()
    // without the stack/entity context used in initialize() — confirm this
    // is intentional.
    @Override
    public boolean canBeFast() {
        return stack.getItem().getFoodProperties().isFastFood();
    }

    @Override
    public boolean isMeat() {
        return stack.getItem().getFoodProperties().isMeat();
    }

    @Override
    public boolean canAlwaysEat() {
        return stack.getItem().getFoodProperties().canAlwaysEat();
    }
}
|
lamkadmi/depenses | app/src/main/java/com/project/depense/mvvm/ui/splash/SplashActivity.java |
package com.project.depense.mvvm.ui.splash;
import android.content.Intent;
import android.os.Bundle;
import com.project.depense.mvvm.BR;
import com.project.depense.mvvm.R;
import com.project.depense.mvvm.ViewModelProviderFactory;
import com.project.depense.mvvm.databinding.ActivitySplashBinding;
import com.project.depense.mvvm.ui.base.BaseActivity;
import com.project.depense.mvvm.ui.home.SpendingActivity;
import com.project.depense.mvvm.ui.login.LoginActivity;
import com.project.depense.mvvm.ui.main.MainActivity;
import javax.inject.Inject;
import androidx.lifecycle.ViewModelProviders;
/**
* Created by lamkadmi on 17/11/19.
*/
/**
 * Launch screen. Wires the SplashViewModel as navigator target and asks it
 * to decide where to go next (login or dashboard).
 */
public class SplashActivity extends BaseActivity<ActivitySplashBinding, SplashViewModel> implements SplashNavigator {

    @Inject
    ViewModelProviderFactory factory;

    private SplashViewModel mSplashViewModel;

    @Override
    public int getBindingVariable() {
        return BR.viewModel;
    }

    @Override
    public int getLayoutId() {
        return R.layout.activity_splash;
    }

    @Override
    public SplashViewModel getViewModel() {
        mSplashViewModel = ViewModelProviders.of(this, factory).get(SplashViewModel.class);
        return mSplashViewModel;
    }

    // Navigates to the login screen and removes the splash from the back stack.
    @Override
    public void openLoginActivity() {
        Intent intent = LoginActivity.newIntent(SplashActivity.this);
        startActivity(intent);
        finish();
    }

    // NOTE(review): openMainActivity and openDashBoard both launch
    // SpendingActivity (and the MainActivity import is unused) — confirm
    // whether one of them was meant to open MainActivity instead.
    @Override
    public void openMainActivity() {
        Intent intent = SpendingActivity.newIntent(SplashActivity.this);
        startActivity(intent);
        finish();
    }

    @Override
    public void openDashBoard() {
        Intent intent = SpendingActivity.newIntent(SplashActivity.this);
        startActivity(intent);
        finish();
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mSplashViewModel.setNavigator(this);
        mSplashViewModel.startDashboard();
    }
}
|
jackhutu/jackblog-api-es6 | server/model/logs.model.js | <filename>server/model/logs.model.js
'use strict'
const mongoose = require('mongoose')
const Schema = mongoose.Schema

// Schema for application log entries: who (uid), what (content), the log
// type, and when it was created.
let LogsSchema = new Schema({
  uid: {
    type: Schema.Types.ObjectId,
    ref: 'User'
  },
  content: {
    type: String,
    trim: true
  },
  type: String,
  created: {
    type: Date,
    default: Date.now
  }
})

const LogsModel = mongoose.model('Logs', LogsSchema)

// BUG FIX: the original did `exports.LogsSchema = LogsSchema` and then
// reassigned `module.exports`, which replaces the object that `exports`
// aliased — so LogsSchema was never actually exported. Attach the schema to
// the exported model instead, keeping the model as the default export.
LogsModel.LogsSchema = LogsSchema

module.exports = LogsModel
wayfinder/Wayfinder-Server | Server/MapGen/MapEditor/include/MERouteableItemLayer.h | /*
Copyright (c) 1999 - 2010, Vodafone Group Services Ltd
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of the Vodafone Group Services Ltd nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef MEROUTEABLEITEMLAYER_H
#define MEROUTEABLEITEMLAYER_H
#include "config.h"
#include "MEItemLayer.h"
/**
* Describes the RouteableItems on the map. This is a subclass to the
* general MEItemLayer since the RouteableItems are very special (nodes,
* connections etc.). This might either be an abstract or concrete
* class!
*
*/
class MERouteableItemLayer : public MEItemLayer {
   public:
      /**
       * Create a new layer for RouteableItems of the given type, drawn in
       * col and highlighted in highlightCol, at the given filter level.
       */
      MERouteableItemLayer(Glib::RefPtr<Gdk::Window>& window,
                           ItemTypes::itemType type,
                           MEGdkColors::color_t col,
                           MEGdkColors::color_t highlightCol,
                           uint8 filterLevel);
      virtual ~MERouteableItemLayer() {};

      // Clear respectively redraw every active highlight on this layer.
      virtual void clearAllHighlight();
      virtual void drawAllHighlight();

      // Highlight the connection between the two given nodes.
      bool highlightConnection(uint32 toNodeID, uint32 fromNodeID);

      // Highlight an item with an associated cost value.
      bool addCostHighlight(uint32 itemID, uint32 cost);

   private:
      // Graphics contexts for connection and cost highlighting.
      Glib::RefPtr<Gdk::GC> m_connectionHighlightGC;
      Glib::RefPtr<Gdk::GC> m_highlightCostGC;
      Glib::RefPtr<Gdk::GC> m_highlightMaxCostGC;

      // Nodes involved in the currently highlighted connection.
      uint32 m_highlightNodeID;
      uint32 m_highlightConnectionNodeID;

      // Items currently highlighted with (max-)cost markers.
      Vector m_highlightedMaxCostItems;
      Vector m_highlightedCostItems;
};
#endif
|
manu88/RenderKit | RenderKit/Modest/source/myhtml/tokenizer_script.c | /*
Copyright (C) 2015-2017 <NAME>
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Author: <EMAIL> (<NAME>)
*/
#include "myhtml/tokenizer_script.h"
/* Raw script-data state: scan forward until a '<' that may begin "</script"
   or "<!--"; everything before it is plain script text. */
size_t myhtml_tokenizer_state_script_data(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size)
{
    for (; html_offset < html_size; html_offset++)
    {
        if (html[html_offset] == '<') {
            token_node->element_begin = (tree->global_offset + html_offset);

            myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_LESS_THAN_SIGN;
            return (html_offset + 1);
        }
    }

    return html_offset;
}
/* After '<' inside script data: '/' may start an end tag, '!' may start an
   escaped "<!--" block, anything else returns to plain script data. */
size_t myhtml_tokenizer_state_script_data_less_than_sign(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size)
{
    switch (html[html_offset]) {
        case '/':
            myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_END_TAG_OPEN;
            return (html_offset + 1);

        case '!':
            myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPE_START;
            return (html_offset + 1);

        default:
            myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA;
            return html_offset;
    }
}
/* After "<!": a '-' continues toward "<!--"; otherwise fall back to plain
   script data without consuming the character. */
size_t myhtml_tokenizer_state_script_data_escape_start(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size)
{
    if (html[html_offset] != '-') {
        myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA;
        return html_offset;
    }

    myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPE_START_DASH;
    return (html_offset + 1);
}
/* After "<!-": a second '-' completes "<!--" and enters the escaped
   dash-dash state; otherwise fall back to plain script data. */
size_t myhtml_tokenizer_state_script_data_escape_start_dash(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size)
{
    if (html[html_offset] != '-') {
        myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA;
        return html_offset;
    }

    myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED_DASH_DASH;
    return (html_offset + 1);
}
/* After "</" inside script data: an ASCII letter starts a potential end-tag
   name (record where it begins); anything else is plain script data. The
   current character is not consumed in either case. */
size_t myhtml_tokenizer_state_script_data_end_tag_open(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size)
{
    if (!myhtml_ascii_char_cmp(html[html_offset])) {
        myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA;
        return html_offset;
    }

    token_node->str.length = (html_offset + tree->global_offset);
    myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_END_TAG_NAME;

    return html_offset;
}
/*
 * Script data end tag name state.
 *
 * token_node->str.length was set by the end-tag-open state to the position
 * (tree->global_offset included) where the candidate tag name begins.
 * Scan until a terminator (whitespace, '/' or '>').  Only a name that is
 * exactly 6 characters long and case-insensitively equal to "script"
 * closes the element; anything else sends the tokenizer back to the plain
 * script data state.  On a match the preceding script text is flushed as a
 * text token and token_node becomes the </script> close token.
 */
size_t myhtml_tokenizer_state_script_data_end_tag_name(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size)
{
    while(html_offset < html_size)
    {
        if(myhtml_whithspace(html[html_offset], ==, ||))
        {
            /* the name must be exactly strlen("script") characters long */
            if((html_offset - token_node->str.length) != 6) {
                myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA;
                html_offset++;
                break;
            }
            size_t tmp_size = token_node->str.length;
            const char *tem_name = myhtml_tree_incomming_buffer_make_data(tree, tmp_size, 6);
            if(mycore_strncasecmp(tem_name, "script", 6) == 0)
            {
                /* flush everything before "</script" (8 chars: "</" + name)
                   as a script text token */
                token_node = myhtml_tokenizer_queue_create_text_node_if_need(tree, token_node, html, ((html_offset + tree->global_offset) - 8), MyHTML_TOKEN_TYPE_SCRIPT);
                if(token_node == NULL) {
                    myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_PARSE_ERROR_STOP;
                    return 0;
                }
                token_node->raw_begin = tmp_size;
                token_node->raw_length = 6;
                token_node->tag_id = MyHTML_TAG_SCRIPT;
                token_node->type = MyHTML_TOKEN_TYPE_CLOSE;
                /* whitespace after the name: attributes may follow */
                myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_BEFORE_ATTRIBUTE_NAME;
            }
            else {
                myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA;
            }
            html_offset++;
            break;
        }
        else if(html[html_offset] == '/')
        {
            if((html_offset - token_node->str.length) != 6) {
                myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA;
                html_offset++;
                break;
            }
            size_t tmp_size = token_node->str.length;
            const char *tem_name = myhtml_tree_incomming_buffer_make_data(tree, tmp_size, 6);
            if(mycore_strncasecmp(tem_name, "script", 6) == 0)
            {
                token_node = myhtml_tokenizer_queue_create_text_node_if_need(tree, token_node, html, ((html_offset + tree->global_offset) - 8), MyHTML_TOKEN_TYPE_SCRIPT);
                if(token_node == NULL) {
                    myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_PARSE_ERROR_STOP;
                    return 0;
                }
                token_node->raw_begin = tmp_size;
                token_node->raw_length = 6;
                token_node->tag_id = MyHTML_TAG_SCRIPT;
                /* "</script/": mark as self-closing close tag */
                token_node->type = MyHTML_TOKEN_TYPE_CLOSE|MyHTML_TOKEN_TYPE_CLOSE_SELF;
                myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_BEFORE_ATTRIBUTE_NAME;
            }
            else {
                myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA;
            }
            html_offset++;
            break;
        }
        else if(html[html_offset] == '>')
        {
            if((html_offset - token_node->str.length) != 6) {
                myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA;
                html_offset++;
                break;
            }
            size_t tmp_size = token_node->str.length;
            const char *tem_name = myhtml_tree_incomming_buffer_make_data(tree, tmp_size, 6);
            if(mycore_strncasecmp(tem_name, "script", 6) == 0)
            {
                token_node = myhtml_tokenizer_queue_create_text_node_if_need(tree, token_node, html, ((html_offset + tree->global_offset) - 8), MyHTML_TOKEN_TYPE_SCRIPT);
                if(token_node == NULL) {
                    myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_PARSE_ERROR_STOP;
                    return 0;
                }
                token_node->raw_begin = tmp_size;
                token_node->raw_length = 6;
                token_node->tag_id = MyHTML_TAG_SCRIPT;
                token_node->type = MyHTML_TOKEN_TYPE_CLOSE;
                html_offset++;
                /* '>' ends the tag: finalize the element span and emit */
                token_node->element_length = (tree->global_offset + html_offset) - token_node->element_begin;
                if(myhtml_queue_add(tree, html_offset, token_node) != MyHTML_STATUS_OK) {
                    myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_PARSE_ERROR_STOP;
                    return 0;
                }
                myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_DATA;
            }
            else {
                myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA;
                html_offset++;
            }
            break;
        }
        else if(myhtml_ascii_char_unless_cmp(html[html_offset]))
        {
            /* non-letter inside the name: not a real end tag */
            myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA;
            break;
        }
        html_offset++;
    }
    return html_offset;
}
size_t myhtml_tokenizer_state_script_data_escaped_dash_dash(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size)
{
    /* Escaped dash-dash state: further '-' characters keep us here,
       '<' may start an end tag, '>' closes the "comment" and returns to
       plain script data, everything else resumes the escaped state.
       The current character is always consumed. */
    switch (html[html_offset]) {
        case '-':
            /* stay in this state */
            return html_offset + 1;

        case '<':
            myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED_LESS_THAN_SIGN;
            break;

        case '>':
            myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA;
            break;

        default:
            myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED;
            break;
    }

    html_offset++;
    return html_offset;
}
/*
 * Escaped less-than-sign state ('<' seen in escaped script data).
 *
 * '/'           -> consume it, a candidate end tag follows.
 * ASCII letter  -> may be the start of a nested "<script" (double escape);
 *                  record the name's global start position in str.length and
 *                  let the double-escape-start state check it (not consumed).
 * anything else -> return to the escaped state without consuming.
 */
size_t myhtml_tokenizer_state_script_data_escaped_less_than_sign(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size)
{
    if(html[html_offset] == '/') {
        myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED_END_TAG_OPEN;
        html_offset++;
    }
    else if(myhtml_ascii_char_cmp(html[html_offset])) {
        token_node->str.length = (html_offset + tree->global_offset);
        myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_DOUBLE_ESCAPE_START;
    }
    else {
        myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED;
    }
    return html_offset;
}
/*
 * Escaped end tag open state ("</" seen inside escaped script data).
 * Mirrors the non-escaped variant: a letter starts a candidate end tag
 * name (its global start position is stashed in str.length), anything
 * else falls back to the escaped state.  Nothing is consumed here.
 */
size_t myhtml_tokenizer_state_script_data_escaped_end_tag_open(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size)
{
    if(myhtml_ascii_char_cmp(html[html_offset])) {
        token_node->str.length = (html_offset + tree->global_offset);
        myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED_END_TAG_NAME;
    }
    else {
        myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED;
    }
    return html_offset;
}
/*
 * Escaped end tag name state.
 *
 * Identical in structure to the non-escaped end-tag-name state, except
 * that every mismatch falls back to SCRIPT_DATA_ESCAPED instead of
 * SCRIPT_DATA.  str.length holds the global start position of the name;
 * the tag only counts as </script> when the name is exactly 6 characters
 * and case-insensitively equals "script".
 */
size_t myhtml_tokenizer_state_script_data_escaped_end_tag_name(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size)
{
    while(html_offset < html_size)
    {
        if(myhtml_whithspace(html[html_offset], ==, ||))
        {
            /* the name must be exactly strlen("script") characters long */
            if((html_offset - token_node->str.length) != 6) {
                myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED;
                html_offset++;
                break;
            }
            size_t tmp_size = token_node->str.length;
            const char *tem_name = myhtml_tree_incomming_buffer_make_data(tree, tmp_size, 6);
            if(mycore_strncasecmp(tem_name, "script", 6) == 0)
            {
                /* flush everything before "</script" (8 chars) as text */
                token_node = myhtml_tokenizer_queue_create_text_node_if_need(tree, token_node, html, ((html_offset + tree->global_offset) - 8), MyHTML_TOKEN_TYPE_SCRIPT);
                if(token_node == NULL) {
                    myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_PARSE_ERROR_STOP;
                    return 0;
                }
                token_node->raw_begin = tmp_size;
                token_node->raw_length = 6;
                token_node->tag_id = MyHTML_TAG_SCRIPT;
                token_node->type = MyHTML_TOKEN_TYPE_CLOSE;
                myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_BEFORE_ATTRIBUTE_NAME;
            }
            else {
                myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED;
            }
            html_offset++;
            break;
        }
        else if(html[html_offset] == '/')
        {
            if((html_offset - token_node->str.length) != 6) {
                myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED;
                html_offset++;
                break;
            }
            size_t tmp_size = token_node->str.length;
            const char *tem_name = myhtml_tree_incomming_buffer_make_data(tree, tmp_size, 6);
            if(mycore_strncasecmp(tem_name, "script", 6) == 0)
            {
                token_node = myhtml_tokenizer_queue_create_text_node_if_need(tree, token_node, html, ((html_offset + tree->global_offset) - 8), MyHTML_TOKEN_TYPE_SCRIPT);
                if(token_node == NULL) {
                    myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_PARSE_ERROR_STOP;
                    return 0;
                }
                token_node->raw_begin = tmp_size;
                token_node->raw_length = 6;
                token_node->tag_id = MyHTML_TAG_SCRIPT;
                /* "</script/": self-closing close tag */
                token_node->type = MyHTML_TOKEN_TYPE_CLOSE|MyHTML_TOKEN_TYPE_CLOSE_SELF;
                myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_BEFORE_ATTRIBUTE_NAME;
            }
            else {
                myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED;
            }
            html_offset++;
            break;
        }
        else if(html[html_offset] == '>')
        {
            if((html_offset - token_node->str.length) != 6) {
                myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED;
                html_offset++;
                break;
            }
            size_t tmp_size = token_node->str.length;
            const char *tem_name = myhtml_tree_incomming_buffer_make_data(tree, tmp_size, 6);
            if(mycore_strncasecmp(tem_name, "script", 6) == 0)
            {
                token_node = myhtml_tokenizer_queue_create_text_node_if_need(tree, token_node, html, ((html_offset + tree->global_offset) - 8), MyHTML_TOKEN_TYPE_SCRIPT);
                if(token_node == NULL) {
                    myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_PARSE_ERROR_STOP;
                    return 0;
                }
                token_node->raw_begin = tmp_size;
                token_node->raw_length = 6;
                token_node->tag_id = MyHTML_TAG_SCRIPT;
                token_node->type = MyHTML_TOKEN_TYPE_CLOSE;
                myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_DATA;
                html_offset++;
                /* '>' ends the tag: finalize the element span and emit */
                token_node->element_length = (tree->global_offset + html_offset) - token_node->element_begin;
                if(myhtml_queue_add(tree, html_offset, token_node) != MyHTML_STATUS_OK) {
                    myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_PARSE_ERROR_STOP;
                    return 0;
                }
            }
            else {
                myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED;
                html_offset++;
            }
            break;
        }
        else if(myhtml_ascii_char_unless_cmp(html[html_offset]))
        {
            /* non-letter inside the name: not a real end tag */
            myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED;
            break;
        }
        html_offset++;
    }
    return html_offset;
}
size_t myhtml_tokenizer_state_script_data_escaped(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size)
{
    /* Consume escaped script data until a state-changing character
       ('-' or '<') is found; that character is consumed as well before
       the tokenizer switches states. */
    while(html_offset < html_size)
    {
        const char ch = html[html_offset];
        html_offset++;

        if(ch == '-') {
            myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED_DASH;
            break;
        }

        if(ch == '<') {
            myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED_LESS_THAN_SIGN;
            break;
        }
    }

    return html_offset;
}
/*
 * Escaped dash state (one '-' seen in escaped script data).
 *
 * '-'  -> consume, two dashes now seen (dash-dash state).
 * '<'  -> not consumed; the less-than-sign state reprocesses it.
 * '\0' -> not consumed; back to the escaped state, which consumes it on
 *         the next pass (NOTE(review): presumably intentional so the NUL
 *         is handled by the escaped state's scan loop - confirm).
 * else -> consume, back to the escaped state.
 */
size_t myhtml_tokenizer_state_script_data_escaped_dash(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size)
{
    if(html[html_offset] == '-') {
        myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED_DASH_DASH;
        html_offset++;
    }
    else if(html[html_offset] == '<') {
        myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED_LESS_THAN_SIGN;
    }
    else if(html[html_offset] == '\0') {
        myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED;
    }
    else {
        myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED;
        html_offset++;
    }
    return html_offset;
}
/*
 * Double escape start state: checks whether the tag name that followed
 * "<" inside escaped script data is exactly "script" (6 characters,
 * case-insensitive).  If so the tokenizer enters the double-escaped
 * state; otherwise it stays escaped.  str.length holds the global start
 * position of the candidate name.
 */
size_t myhtml_tokenizer_state_script_data_double_escape_start(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size)
{
    while(html_offset < html_size)
    {
        if(myhtml_whithspace(html[html_offset], ==, ||) || html[html_offset] == '/' || html[html_offset] == '>')
        {
            /* the name must be exactly strlen("script") characters long */
            if((html_offset - token_node->str.length) != 6) {
                myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED;
                html_offset++;
                break;
            }
            size_t tmp_size = token_node->str.length;
            const char *tem_name = myhtml_tree_incomming_buffer_make_data(tree, tmp_size, 6);
            if(mycore_strncasecmp(tem_name, "script", 6) == 0) {
                myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_DOUBLE_ESCAPED;
            }
            else {
                myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED;
            }
            html_offset++;
            break;
        }
        else if(myhtml_ascii_char_unless_cmp(html[html_offset]))
        {
            /* non-letter inside the name: abort the double-escape check */
            myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED;
            break;
        }
        html_offset++;
    }
    return html_offset;
}
size_t myhtml_tokenizer_state_script_data_double_escaped(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size)
{
    /* Consume double-escaped script data until a state-changing character
       ('-' or '<') is found; that character is consumed as well before
       the tokenizer switches states. */
    while(html_offset < html_size)
    {
        const char ch = html[html_offset];
        html_offset++;

        if(ch == '-') {
            myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_DOUBLE_ESCAPED_DASH;
            break;
        }

        if(ch == '<') {
            myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_DOUBLE_ESCAPED_LESS_THAN_SIGN;
            break;
        }
    }

    return html_offset;
}
size_t myhtml_tokenizer_state_script_data_double_escaped_dash(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size)
{
    /* One '-' seen in double-escaped data; the current character is
       always consumed here. */
    switch (html[html_offset]) {
        case '-':
            myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_DOUBLE_ESCAPED_DASH_DASH;
            break;

        case '<':
            myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_DOUBLE_ESCAPED_LESS_THAN_SIGN;
            break;

        default:
            myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_DOUBLE_ESCAPED;
            break;
    }

    html_offset++;
    return html_offset;
}
size_t myhtml_tokenizer_state_script_data_double_escaped_dash_dash(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size)
{
    /* Double-escaped dash-dash: extra '-' characters keep us here, '>'
       returns to plain script data, '<' may start a closing "</script",
       anything else resumes the double-escaped state.  The current
       character is always consumed. */
    switch (html[html_offset]) {
        case '-':
            /* stay in this state */
            return html_offset + 1;

        case '<':
            myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_DOUBLE_ESCAPED_LESS_THAN_SIGN;
            break;

        case '>':
            myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA;
            break;

        default:
            myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_DOUBLE_ESCAPED;
            break;
    }

    html_offset++;
    return html_offset;
}
/*
 * Double-escaped less-than-sign state.
 * A '/' is consumed and its following position (global offset included)
 * is stashed in str.length so the double-escape-end state can measure
 * the tag name; anything else returns, unconsumed, to the double-escaped
 * state.
 */
size_t myhtml_tokenizer_state_script_data_double_escaped_less_than_sign(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size)
{
    if(html[html_offset] == '/') {
        myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_DOUBLE_ESCAPE_END;
        html_offset++;
        /* name starts right after the '/' */
        token_node->str.length = (html_offset + tree->global_offset);
    }
    else {
        myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_DOUBLE_ESCAPED;
    }
    return html_offset;
}
/*
 * Double escape end state: checks whether the name after "</" inside
 * double-escaped script data is exactly "script" (6 characters,
 * case-insensitive).  If so the tokenizer drops back to the (single)
 * escaped state; otherwise it remains double-escaped.  str.length holds
 * the global start position of the candidate name.
 */
size_t myhtml_tokenizer_state_script_data_double_escape_end(myhtml_tree_t* tree, myhtml_token_node_t* token_node, const char* html, size_t html_offset, size_t html_size)
{
    while(html_offset < html_size)
    {
        if(myhtml_whithspace(html[html_offset], ==, ||) || html[html_offset] == '/' || html[html_offset] == '>')
        {
            /* the name must be exactly strlen("script") characters long */
            if((html_offset - token_node->str.length) != 6) {
                myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_DOUBLE_ESCAPED;
                html_offset++;
                break;
            }
            size_t tmp_size = token_node->str.length;
            const char *tem_name = myhtml_tree_incomming_buffer_make_data(tree, tmp_size, 6);
            if(mycore_strncasecmp(tem_name, "script", 6) == 0) {
                myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_ESCAPED;
            }
            else {
                myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_DOUBLE_ESCAPED;
            }
            html_offset++;
            break;
        }
        else if(myhtml_ascii_char_unless_cmp(html[html_offset]))
        {
            /* non-letter inside the name: abort the check */
            myhtml_tokenizer_state_set(tree) = MyHTML_TOKENIZER_STATE_SCRIPT_DATA_DOUBLE_ESCAPED;
            break;
        }
        html_offset++;
    }
    return html_offset;
}
|
MCS-Lite/mcs-lite | packages/mcs-lite-ui/src/utils/__tests__/getScrollTop.test.js | // @flow
import getScrollTop from '../getScrollTop';
it('should return correct scrollTop', () => {
expect(getScrollTop()).toBe(0);
});
|
sizeofvoid/ifconfigd | usr/src/sys/arch/mvme68k/stand/libsa/libsa.h | <filename>usr/src/sys/arch/mvme68k/stand/libsa/libsa.h<gh_stars>1-10
/* $OpenBSD: libsa.h,v 1.7 2011/03/13 00:13:53 deraadt Exp $ */

/*
 * libsa prototypes
 */

#include "libbug.h"

/* bugdev.c -- open_file device hooks backed by the BUG monitor */
int bugscopen(struct open_file *);
int bugscclose(struct open_file *);
int bugscioctl(struct open_file *, u_long, void *);
int bugscstrategy(void *, int, daddr32_t, size_t, void *, size_t *);

/* exec_mvme.c */
void exec_mvme(char *, int);

/* parse_args.c */
int parse_args(char **, int *);
|
rahulr4/RahulUdacity | Build-it-bigger/joke-lib/src/main/java/com/example/TellJoke.java | <gh_stars>0
package com.example;
import java.util.ArrayList;
import java.util.Random;
public class TellJoke {
private ArrayList<String> jokes;
private Random random;
public TellJoke() {
jokes = new ArrayList<>();
jokes.add("There are only 10 types of people in the world: those that understand binary and those that donโt.");
jokes.add("Computers make very fast, very accurate mistakes.");
jokes.add("Be nice to the nerds, for all you know they might be the next Bill Gates!");
jokes.add("CAPS LOCK โ Preventing Login Since 1980.");
jokes.add("The Internet: where men are men, women are men, and children are FBI agents.");
jokes.add("The box said โRequires Windows Vista or betterโ. So I installed LINUX.");
jokes.add("Microsoft: โYouโve got questions. Weโve got dancing paperclips.โ");
jokes.add("The more I C, the less I see.");
jokes.add("Java: write once, debug everywhere.");
random = new Random();
}
public ArrayList<String> getJokes() {
return jokes;
}
public String getRandomJoke() {
return jokes.get(random.nextInt(jokes.size()));
}
}
|
wino45/FPGE | src/filename.h | /*
* filename.h
*
* Created on: 2010-03-10
* Author: wino
*/
#ifndef FILENAME_H_
#define FILENAME_H_
//FPGE
extern char fpge_config[];
extern char fpge_mapfrg[];
extern char fpge_tiles[];
extern char fpge_countries[];
extern char fpge_icons[];
extern char fpge_bmp2ctry[];
extern char fpge_mapfrgt[];
extern char fpge_mapfrg_fgm[];
extern char fpge_0str_bmp[];
extern char fpge_pgcam_gv[];
//NULP
extern char nulp_pslots[];
extern char nulp_nulp[];
extern char nulp_unit[];
//EXE
extern char exe_ag[];
extern char exe_pgwin[];
extern char exe_panzer[];
extern char exe_pgf[];
extern char exe_pacgen[];
//AG
extern char ag_tileart[];
//PGDOS
extern char til_file[];
extern char mtil_file[];
extern char uicons_file[];
extern char muicons_file[];
extern char sicons_file[];
extern char equip_file[];
extern char flag_file[];
extern char mflag_file[];
extern char strength_file[];
extern char names_file[];
extern char description_file[];
//PGF
extern char pgf_equip_file[];
extern char pgf_units_bmp[];
extern char pgf_flags_bmp[];
extern char pgf_stackicn_bmp[];
extern char pgf_strength_bmp[];
extern char pgf_tacmap_dry_bmp[];
extern char pgf_tacmap_muddy_bmp[];
extern char pgf_tacmap_frozen_bmp[];
extern char pgf_pg_pgcam[];
// PacGen-FPGE
extern char pac_tt2tiles[];
extern char pac_cl2pg_cl[];
extern char pac_mt2pg_mt[];
// PacGen
extern char pac_ext[];
extern char pac_pfpdata[];
extern char pac_til[];
extern char pac_shp[];
extern char pac_pal[];
extern char pac_txt[];
extern char pac_pacequip[];
extern char pac_pacequip_txt[];
//PZC
extern char pzc_equip_file[];
extern char pgu2pzcu_file[];
extern char pgc2pzcc_file[];
extern char pgtt2pzctt_file[];
//PG2
extern char pg2_equip_file[];
extern char pg2_equip_name_file[];
extern char pg2_icons_name_file[];
extern char pg2u2pgu_file[];
extern char pg2tt2pgtr_file[];
extern char pg2tt2pgtt_file[];
extern char pg2c2pgc_file[];
extern char pg2cl2pgcl_file[];
#define SCN_NAME_LEN 128
extern int ScenarioNumber;
extern char ScenarioName[SCN_NAME_LEN];
int getScenarioNumber();
void setScenarioNumber(int);
void getScenarioNameRaw(char *);
void setScenarioNameRaw(char *);
#endif /* FILENAME_H_ */
|
yufeiminds/ucloud-sdk-java | ucloud-sdk-java-unet/src/test/java/cn/ucloud/unet/client/GetEIPPayModeTest.java | package cn.ucloud.unet.client;
import cn.ucloud.unet.model.GetEIPPayModeParam;
import cn.ucloud.unet.model.GetEIPPayModeResult;
import cn.ucloud.common.pojo.Account;
import cn.ucloud.unet.pojo.UnetConfig;
import org.junit.Before;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
/**
* @description:
* @author: codezhang
* @date: 2018-09-27 14:14
**/
public class GetEIPPayModeTest {
private UnetClient client;
private GetEIPPayModeParam param;
@Before
public void initData() {
client = new DefaultUnetClient(new UnetConfig(
new Account(System.getenv("UCloudPrivateKey"),
System.getenv("UCloudPublicKey"))));
param = new GetEIPPayModeParam("cn-sh2");
List<String> ids = new ArrayList<>();
ids.add("eip-vcsnbgi4");
param.setEipIds(ids);
}
@Test
public void getEIPPayMode() {
try {
GetEIPPayModeResult eipPayMode = client.getEIPPayMode(param);
JSONComparator.jsonComparator(eipPayMode);
} catch (Exception e) {
e.printStackTrace();
}
}
} |
dports/dxup | src/d3d9/d3d9_constant_buffer.h | <gh_stars>100-1000
#pragma once
#include "../dx9asm/dx9asm_meta.h"
#include "d3d9_base.h"
#include <array>
#include <memory>
#include <cstring>
#include "../util/vectypes.h"
#include "d3d11_dynamic_buffer.h"
namespace dxup {
  // Host-side mirror of the d3d9 shader constant registers:
  // 256 float4 (c#), 16 int4 (i#) and 16 bool (b#) registers.
  struct D3D9ShaderConstants {
    D3D9ShaderConstants() {
      // Zero every register up front so constants that are never set by
      // the application read back as 0.
      std::memset(floatConstants.data(), 0, floatConstants.size() * sizeof(floatConstants[0]));
      std::memset(intConstants.data(), 0, intConstants.size() * sizeof(intConstants[0]));
      std::memset(boolConstants.data(), 0, boolConstants.size() * sizeof(boolConstants[0]));
    }

    std::array<Vector<float, 4>, 256> floatConstants;
    std::array<Vector<int, 4>, 16> intConstants;
    std::array<int, 16> boolConstants;
  };
  // Uploads D3D9ShaderConstants into a D3D11 constant buffer and binds it
  // to slot 0 of either the pixel or the vertex stage (template parameter).
  template <bool Pixel>
  class D3D9ConstantBuffer {

  public:

    D3D9ConstantBuffer(ID3D11Device1* device, ID3D11DeviceContext1* context)
      : m_device{ device }
      , m_context{ context }
      , m_buffer{ device, D3D11_BIND_CONSTANT_BUFFER }
      , m_offset{ 0 } {
    }

    // Size of one constant register (a float4).
    constexpr uint32_t getConstantSize() {
      return 4 * sizeof(float);
    }

    // Total byte size of the uploaded constant data; bool constants are
    // expanded to one int4 each, and the result is aligned to 16 registers.
    constexpr uint32_t getLength() {
      uint32_t length = sizeof(D3D9ShaderConstants::floatConstants) + sizeof(D3D9ShaderConstants::intConstants) + (4 * sizeof(D3D9ShaderConstants::boolConstants));

      return alignTo(length, 16 * getConstantSize());
    }

    // Number of constant registers covered by getLength().
    constexpr uint32_t getConstantCount() {
      return getLength() / (getConstantSize());
    }

    // Copy the constants into the dynamic buffer and (re)bind it.
    void update(const D3D9ShaderConstants& constants) {
      const uint32_t length = getLength();
      m_buffer.reserve(length); // TODO make bool constants a bitfield.

      uint8_t* data;
      m_buffer.map(m_context, (void**)(&data), length);

      // This can probably be consolidated into a single one.
      std::memcpy(data, constants.floatConstants.data(), sizeof(constants.floatConstants));
      std::memcpy(data + sizeof(constants.floatConstants), constants.intConstants.data(), sizeof(constants.intConstants));

      // Broadcast each bool constant into all four components of an int4.
      int* boolData = (int*)(data + sizeof(constants.floatConstants) + sizeof(constants.intConstants));
      for (uint32_t i = 0; i < constants.boolConstants.size(); i++) {
        for (uint32_t j = 0; j < 4; j++)
          boolData[i * 4 + j] = constants.boolConstants[i];
      }

      m_offset = m_buffer.unmap(m_context, length);

      bind();
    }

    // Bind the buffer range [m_offset, m_offset + length) to stage slot 0.
    void bind() {
      const uint32_t constantOffset = m_offset / getConstantSize();
      const uint32_t constantCount  = getConstantCount();

      ID3D11Buffer* buffer = m_buffer.getBuffer();

      if constexpr (Pixel)
        m_context->PSSetConstantBuffers1(0, 1, &buffer, &constantOffset, &constantCount);
      else
        m_context->VSSetConstantBuffers1(0, 1, &buffer, &constantOffset, &constantCount);
    }

    void endFrame() {
      m_buffer.endFrame();
    }

  private:

    // I exist as long as my parent D3D9 device exists. No need for COM.
    ID3D11Device1* m_device;
    ID3D11DeviceContext1* m_context;

    D3D11DynamicBuffer m_buffer;
    uint32_t m_offset;
  };
} |
phlo/concubine | src/main.cc | /* ConcuBinE
*
* Copyright (C) 2020 <NAME>.
*
* This file is part of ConcuBinE.
* See LICENSE for more information on using this software.
*/
#include <cstring>
#include <iostream>
#include "mmap.hh"
#include "trace.hh"
#include "parser.hh"
#include "simulator.hh"
#include "encoder_btor2.hh"
#include "encoder_smtlib_functional.hh"
#include "encoder_smtlib_relational.hh"
#include "boolector.hh"
#include "btormc.hh"
#include "z3.hh"
#include "cvc4.hh"
#include "runtime.hh"
namespace ConcuBinE {
//==============================================================================
// global variables
//==============================================================================
// global verbosity flag (enabled with the -v option of 'solve')
bool verbose = false;

// seed for pseudo random number generation (overridable via 'simulate -s')
uint64_t seed = static_cast<uint64_t>(time(NULL));

// per-encoder counters for the number of generated expressions
namespace btor2 { long expressions = 0; }
namespace smtlib { long expressions = 0; }
//==============================================================================
// usage
//==============================================================================
// Print the top-level usage text listing all available commands.
void print_usage_main (const char * name)
{
  std::cout << "usage: " << name <<
  " <command> [options]" <<
  eol << eol <<
  "available commands:" << eol <<
  " help print help for a specific <command>" << eol <<
  " simulate simulate concurrent programs" << eol <<
  " replay reevaluates a given trace" << eol <<
  " solve solve concurrent programs using SMT" << eol;
}
// Print usage for the 'help' command.
void print_usage_help (const char * name)
{
  std::cout << "usage: " << name << " help <command>" << eol;
}
// Print usage for the 'simulate' command, including all options.
void print_usage_simulate (const char * name)
{
  std::cout << "usage: " << name <<
  " simulate [options] <program> ..." <<
  eol << eol <<
  "options:" << eol <<
  " -c run until an exit code > 0 is encountered" << eol <<
  " -k bound execute a specific number of steps" << eol <<
  " -m mmap read initial heap contents from file" << eol <<
  " -o name output file name (default: sim.{trace,mmap})" << eol <<
  " -s seed random number generator's seed" << eol <<
  " program one ore more source files, each being executed as a separate thread" << eol;
}
// Print usage for the 'solve' command, including encoder/solver choices.
void print_usage_solve (const char * name)
{
  std::cout << "usage: " << name <<
  " solve [options] <bound> <program> ..."
  << eol << eol <<
  "options:" << eol <<
  " -c file read constraints from file" << eol <<
  " -e encoder use a specific encoding, options are:" << eol <<
  " * btor2 (default)" << eol <<
  " * smtlib" << eol <<
  " * smtlib-relational" << eol <<
  " -m mmap read initial heap contents from file" << eol <<
  " -o name output file name (default: smt.{trace,mmap})" << eol <<
  " -p prints the generated formula and exits" << eol <<
  " -s solver use a specific solver, options are:" << eol <<
  " * btormc (default)" << eol <<
  " * boolector" << eol <<
  " * cvc4" << eol <<
  " * z3" << eol <<
  " -v verbose formula output" << eol <<
  " bound execute a specific number of steps" << eol <<
  " program one or more programs to encode" << eol;
}
// Print usage for the 'replay' command.
void print_usage_replay (const char * name)
{
  std::cout << "usage: " << name << " replay <trace>" << eol;
}
//==============================================================================
// submodules
//==============================================================================
// Serialize a trace to "<path>.trace" and, when present, its memory map
// to "<path>.mmap" (the mmap's own path member is updated accordingly).
void write (Trace & trace, const std::string & path)
{
  // write mmap
  if (trace.mmap)
    {
      trace.mmap->path = path + ".mmap";

      std::ofstream mmap_ofs(trace.mmap->path);
      mmap_ofs << trace.mmap->print();
    }

  // write trace
  std::ofstream trace_ofs(path + ".trace");
  trace_ofs << trace.print();
}
void print_error (const std::string & m) { std::cerr << "error: " << m << eol; }
//------------------------------------------------------------------------------
// help
//------------------------------------------------------------------------------
// Dispatch "help <command>" to the matching usage printer.
// Returns 0 on success, -1 for a missing or unknown command.
int help (const char * name, const int argc, const char **argv)
{
  if (argc < 1)
    {
      print_error("no command given");
      print_usage_help(name);
      return -1;
    }

  const char * command = argv[0];

  if (!strcmp(command, "help"))
    {
      print_usage_help(name);
      return 0;
    }

  if (!strcmp(command, "simulate"))
    {
      print_usage_simulate(name);
      return 0;
    }

  if (!strcmp(command, "replay"))
    {
      print_usage_replay(name);
      return 0;
    }

  if (!strcmp(command, "solve"))
    {
      print_usage_solve(name);
      return 0;
    }

  print_error("unknown command " + std::string(command));
  print_usage_help(name);
  return -1;
}
//------------------------------------------------------------------------------
// simulate
//------------------------------------------------------------------------------
// Entry point of the 'simulate' command: parse options, load the given
// programs (one thread each), run the simulator and write the resulting
// trace/mmap to <outfile>.{trace,mmap}.  Returns the simulated programs'
// exit code, or -1 on a usage or runtime error.
int simulate (const char * name, const int argc, const char ** argv)
{
  if (argc < 1)
    {
      print_error("too few arguments");
      print_usage_simulate(name);
      return -1;
    }

  try
    {
      // bound (0 = unbounded)
      size_t bound = 0;

      // memory map (initial heap contents)
      std::shared_ptr<MMap> mmap;

      // output file name
      std::string outfile = "sim";

      // run until exit > 0 (rerun with incremented seeds)
      bool check = false;

      // parse options
      int i = 0;
      do
        {
          if (!strcmp(argv[i], "-c"))
            {
              check = true;
            }
          else if (!strcmp(argv[i], "-k"))
            {
              if (++i >= argc)
                {
                  print_error("missing bound");
                  print_usage_simulate(name);
                  return -1;
                }

              try { bound = std::stoul(argv[i], nullptr, 0); }
              catch (...)
                {
                  print_error("illegal bound [" + std::string(argv[i]) + "]");
                  return -1;
                }
            }
          else if (!strcmp(argv[i], "-m"))
            {
              if (++i >= argc)
                {
                  print_error("missing path to memory map");
                  print_usage_simulate(name);
                  return -1;
                }

              mmap = std::make_shared<MMap>(create_from_file<MMap>(argv[i]));
            }
          else if (!strcmp(argv[i], "-o"))
            {
              if (++i >= argc)
                {
                  print_error("missing output file name");
                  print_usage_simulate(name);
                  return -1;
                }

              outfile = argv[i];
            }
          else if (!strcmp(argv[i], "-s"))
            {
              if (++i >= argc)
                {
                  print_error("missing seed");
                  print_usage_simulate(name);
                  return -1;
                }

              try { seed = std::stoul(argv[i], nullptr, 0); }
              catch (...)
                {
                  print_error("illegal seed [" + std::string(argv[i]) + "]");
                  return -1;
                }
            }
          else if (argv[i][0] == '-')
            {
              print_error("unknown option [" + std::string(argv[i]) + "]");
              print_usage_simulate(name);
              return -1;
            }
          else
            break; // first non-option argument: start of the program list
        }
      while (++i < argc);

      // check programs
      if (i >= argc)
        {
          print_error("missing programs");
          print_usage_simulate(name);
          return -1;
        }

      // program list
      auto programs = std::make_shared<Program::List>();

      // parse programs (each remaining argument is one thread)
      while (i < argc)
        programs->push_back(create_from_file<Program>(argv[i++]));

      // simulate; with -c keep rerunning with fresh seeds until exit > 0
      Simulator simulator;
      auto trace = simulator.simulate(programs, mmap, bound);

      for (; check && !trace->exit; seed++)
        trace = simulator.simulate(programs, mmap, bound);

      write(*trace, outfile);

      return trace->exit;
    }
  catch (const std::exception & e)
    {
      print_error(e.what());
      return -1;
    }
}
//------------------------------------------------------------------------------
// solve
//------------------------------------------------------------------------------
// Entry point of the 'solve' command: parse options, encode the given
// programs up to <bound> steps with the selected encoder, then either
// print the formula (-p) or hand it to the selected solver and write the
// resulting trace/mmap.  Returns the solved trace's exit code, 0 when no
// trace was produced, or -1 on error.
int solve (const char * name, const int argc, const char ** argv)
{
  if (argc < 2)
    {
      print_error("too few arguments");
      print_usage_solve(name);
      return -1;
    }

  try
    {
      // only print formula
      bool pretend = false;

      // constraints file path (replaces the default exit-code assertion)
      std::string constraints;

      // memory map (initial heap contents)
      std::shared_ptr<MMap> mmap;

      // encoder type
      enum
        {
          btor2,
          smtlib_functional,
          smtlib_relational
        }
      encoder_type = btor2;

      const char * encoder_names[] = {
        "btor2",
        "smtlib",
        "smtlib-relational"
      };

      // solver type
      enum
        {
          btormc,
          boolector,
          cvc4,
          z3
        }
      solver_type = btormc;

      const char * solver_names[] = {
        "btormc",
        "boolector",
        "cvc4",
        "z3"
      };

      // output file name
      std::string outfile = "smt";

      // parse options
      int i = 0;
      do
        if (!strcmp(argv[i], "-c"))
          {
            if (++i >= argc)
              {
                print_error("missing constraints file");
                print_usage_solve(name);
                return -1;
              }

            constraints = argv[i];
          }
        else if (!strcmp(argv[i], "-e"))
          {
            if (++i >= argc)
              {
                print_error("missing encoder");
                print_usage_solve(name);
                return -1;
              }

            if (!strcmp(argv[i], encoder_names[btor2]))
              encoder_type = btor2;
            else if (!strcmp(argv[i], encoder_names[smtlib_functional]))
              encoder_type = smtlib_functional;
            else if (!strcmp(argv[i], encoder_names[smtlib_relational]))
              encoder_type = smtlib_relational;
            else
              {
                print_error("unknown encoder [" + std::string(argv[i]) + "]");
                print_usage_solve(name);
                return -1;
              }
          }
        else if (!strcmp(argv[i], "-m"))
          {
            if (++i >= argc)
              {
                print_error("missing path to memory map");
                print_usage_solve(name);
                return -1;
              }

            mmap = std::make_shared<MMap>(create_from_file<MMap>(argv[i]));
          }
        else if (!strcmp(argv[i], "-o"))
          {
            if (++i >= argc)
              {
                print_error("missing output file name");
                print_usage_solve(name);
                return -1;
              }

            outfile = argv[i];
          }
        else if (!strcmp(argv[i], "-p"))
          {
            pretend = true;
          }
        else if (!strcmp(argv[i], "-s"))
          {
            if (++i >= argc)
              {
                print_error("missing solver");
                print_usage_solve(name);
                return -1;
              }

            if (!strcmp(argv[i], solver_names[btormc]))
              solver_type = btormc;
            else if (!strcmp(argv[i], solver_names[boolector]))
              solver_type = boolector;
            else if (!strcmp(argv[i], solver_names[cvc4]))
              solver_type = cvc4;
            else if (!strcmp(argv[i], solver_names[z3]))
              solver_type = z3;
            else
              {
                print_error("unknown solver [" + std::string(argv[i]) + "]");
                print_usage_solve(name);
                return -1;
              }
          }
        else if (!strcmp(argv[i], "-v"))
          {
            verbose = true;
          }
        else if (argv[i][0] == '-')
          {
            print_error("unknown option [" + std::string(argv[i]) + "]");
            print_usage_solve(name);
            return -1;
          }
        else
          break; // first non-option argument: the bound
      while (++i < argc);

      // check bound
      if (i >= argc)
        {
          print_error("missing bound");
          print_usage_solve(name);
          return -1;
        }

      // parse bound (must be a positive integer)
      size_t bound = 0;
      try
        {
          bound = std::stoul(argv[i++], nullptr, 0);

          if (!bound) throw std::runtime_error("");
        }
      catch (...)
        {
          print_error("illegal bound [" + std::string(argv[i - 1]) + "]");
          return -1;
        }

      // check programs
      if (i >= argc)
        {
          print_error("missing programs");
          print_usage_solve(name);
          return -1;
        }

      // list of programs
      auto programs = std::make_shared<Program::List>();

      // parse programs
      while (i < argc)
        programs->push_back(create_from_file<Program>(argv[i++]));

      // encode programs with the selected encoder
      std::unique_ptr<Encoder> encoder;

      if (encoder_type == btor2)
        encoder = std::make_unique<btor2::Encoder>(programs, mmap, bound);
      else if (encoder_type == smtlib_functional)
        encoder = std::make_unique<smtlib::Functional>(programs, mmap, bound);
      else if (encoder_type == smtlib_relational)
        encoder = std::make_unique<smtlib::Relational>(programs, mmap, bound);

      double encoder_time = runtime::measure([&encoder] {
        encoder->encode();
      });

      if (verbose)
        std::cout << "[concubine>main] encoding took "
                  << encoder_time
                  << " seconds"
                  << eol
                  << "[concubine>main] generated "
                  << (btor2::expressions + smtlib::expressions)
                  << " commands"
                  << eol;

      // append constraints: either the default exit-code assertion or the
      // user-supplied constraints file
      if (constraints.empty())
        encoder->assert_exit();
      else
        {
          std::ifstream ifs(constraints);

          if (!ifs.is_open())
            {
              print_error(constraints + " not found");
              return -1;
            }

          encoder->formula << ifs.rdbuf();
        }

      // select solver
      std::unique_ptr<Solver> solver;

      if (solver_type == btormc)
        solver = std::make_unique<BtorMC>();
      else if (solver_type == boolector)
        solver = std::make_unique<Boolector>();
      else if (solver_type == cvc4)
        solver = std::make_unique<CVC4>();
      else if (solver_type == z3)
        solver = std::make_unique<Z3>();

      // check compatibility: btor2 formulas only work with btormc and
      // vice versa
      if ((encoder_type == btor2 && solver_type != btormc) ||
          (encoder_type != btor2 && solver_type == btormc))
        {
          print_error(
            '[' +
            std::string(solver_names[solver_type]) +
            "] cannot be used with encoder [" +
            std::string(encoder_names[encoder_type]) +
            ']');
          return -1;
        }

      // solve (or just print the formula with -p)
      if (pretend)
        {
          std::cout << solver->formula(*encoder);
        }
      else
        {
          auto trace = solver->solve(*encoder);

          if (verbose)
            std::cout << solver->stdout.str()
                      << "[concubine>main] solving took "
                      << solver->time
                      << " seconds"
                      << eol;

          if (!trace->empty())
            {
              write(*trace, outfile);
              return trace->exit;
            }
        }

      return 0;
    }
  catch (const std::exception & e)
    {
      print_error(e.what());
      return -1;
    }
}
//------------------------------------------------------------------------------
// replay
//------------------------------------------------------------------------------
// Replay a previously recorded trace and verify that simulation reproduces it.
//
// Returns 0 when the replayed trace matches the recorded one, 1 on a length
// mismatch, the first differing step on an element mismatch and -1 on error.
int replay (const char * name, const int argc, const char ** argv)
{
  // a trace file is mandatory
  if (!argc)
    {
      print_error("missing trace file");
      print_usage_replay(name);
      return -1;
    }
  try
    {
      // parse the given trace and feed it through the simulator again
      auto expected = std::make_unique<Trace>(create_from_file<Trace>(argv[0]));
      auto replayed = Simulator().replay(*expected);

      // differing lengths are reported before any element-wise comparison
      if (expected->size() != replayed->size())
        {
          std::cout
            << "size differs: "
            << std::to_string(expected->size())
            << " vs. "
            << std::to_string(replayed->size())
            << eol;
          return 1;
        }

      // walk both traces in lockstep and report the first mismatch
      auto it_exp = expected->begin();
      auto it_rep = replayed->begin();
      for (; it_exp != expected->end(); ++it_exp, ++it_rep)
        {
          if (*it_exp != *it_rep)
            {
              std::cout << "< " << expected->print(*it_exp)
                        << "> " << replayed->print(*it_rep);
              return *it_exp;
            }
        }

      // traces are identical
      return 0;
    }
  catch (const std::exception & e)
    {
      print_error(e.what());
      return -1;
    }
}
} // namespace ConcuBinE
//==============================================================================
// main
//==============================================================================
// Entry point: dispatch to the sub-command named by argv[1], forwarding the
// remaining arguments; print usage and fail when no known command is given.
int main (const int argc, const char ** argv)
{
  if (argc > 1)
    {
      // dispatch table mapping each command word to its handler
      using handler_t = int (*) (const char *, const int, const char **);
      const struct { const char * command; handler_t run; } dispatch[] =
        {
          {"help", ConcuBinE::help},
          {"simulate", ConcuBinE::simulate},
          {"replay", ConcuBinE::replay},
          {"solve", ConcuBinE::solve}
        };

      for (const auto & entry : dispatch)
        if (!strcmp(argv[1], entry.command))
          return entry.run(argv[0], argc - 2, argv + 2);
    }

  // unknown or missing command
  ConcuBinE::print_usage_main(argv[0]);
  return -1;
}
|
jpchagas/hfa3 | hellfireos-master/usr/doc/doxygen/html/search/variables_6f.js | var searchData=
[
['other_5fdata',['other_data',['../structtcb__entry.html#accd675f017bb0ec5ae63b4d729bd73aa',1,'tcb_entry']]]
];
|
MeirBon/rendering-fw | RFW/system/utils/src/rfw/utils/mersenne_twister.h | #pragma once
#include "rng.h"
#include <random>
namespace rfw::utils
{
// Mersenne-Twister-backed implementation of the rng interface.
class mersenne_twister : public rfw::utils::rng
{
  public:
	// Seed the engine once from the OS entropy source (std::random_device).
	mersenne_twister() : mt_gen(std::random_device()()) {}

	// Uniform float in [0, range): scales a raw 32-bit draw by 2^-32 (~2.3283e-10).
	float rand(float range) override final { return rand_uint() * 2.3283064365387e-10f * range; }

	// Raw uniform 32-bit random integer straight from the engine.
	unsigned int rand_uint() override final { return mt_gen(); }

  private:
	std::mt19937 mt_gen;  // deterministic for a given seed
};
} // namespace rfw::utils |
wingnet/leetcode | hard/array_string/FirstMissingPositive.java | package hard.array_string;
public class FirstMissingPositive {

    // Working array shared by the helper methods below.
    int[] nums;

    /**
     * Returns the smallest positive integer (>= 1) that does not occur in nums.
     *
     * Uses in-place cyclic placement: every value v with 1 <= v <= n is swapped
     * into index v - 1; afterwards the first index i with nums[i] != i + 1
     * reveals the answer. Runs in O(n) time and O(1) extra space; note that the
     * input array is reordered in place.
     *
     * @param nums the array to inspect (may be empty; may contain duplicates,
     *             zeros and negatives)
     * @return the first missing positive integer (1 for an empty array)
     */
    public int firstMissingPositive(int[] nums) {
        this.nums = nums;
        int n = nums.length;
        for (int i = 0; i < n; i++) {
            // Keep moving nums[i] to its home slot until this cell holds a
            // value that is out of range or already correctly placed; the
            // inequality check also terminates on duplicates.
            while (nums[i] > 0 && nums[i] <= n && nums[nums[i] - 1] != nums[i]) {
                swap(i, nums[i] - 1);
            }
        }
        for (int i = 0; i < n; i++) {
            if (nums[i] != i + 1) {
                return i + 1;
            }
        }
        // All of 1..n are present, so the answer is n + 1.
        return n + 1;
    }

    /**
     * Partitions non-positive values to the front of {@code nums} while moving
     * the remaining values to the back, returning {pivot position, count of
     * non-positive values}.
     *
     * NOTE(review): appears to be an unfinished alternative approach and is
     * currently unused — confirm before removing.
     */
    int[] quickSelect() {
        int curPos = 0;
        int targetPos = nums.length - 1;
        int nagetiveCount = 0;
        int target = -1;
        while (curPos < targetPos) {
            int curNumber = nums[curPos];
            if (curNumber <= 0) {
                curPos++;
                nagetiveCount++;
            } else if (target < 0) {
                // first positive value seen becomes the partition pivot
                target = curNumber;
                swap(curPos, targetPos);
                targetPos--;
            } else if (curNumber >= target) {
                swap(curPos, targetPos);
                targetPos--;
            } else {
                curPos++;
            }
        }
        return new int[]{targetPos, nagetiveCount};
    }

    /** Swaps nums[i] and nums[j] in place. */
    void swap(int i, int j) {
        int tmp = nums[i];
        nums[i] = nums[j];
        nums[j] = tmp;
    }
}
peanut-chenzhong/huaweicloud-mrs-example | src/graphbase-examples/graphbase-core-example/src/com/huawei/graphbase/rest/request/AddEdgeReqObj.java | package com.huawei.graphbase.rest.request;
import java.util.List;
/**
 * Request payload for adding an edge between two vertices in GraphBase:
 * carries the endpoint vertex ids, the edge label and an optional list of
 * properties to set on the new edge.
 */
public class AddEdgeReqObj {

    // id of the vertex the edge starts from
    private String outVertexId;

    // id of the vertex the edge points to
    private String inVertexId;

    // label (type) of the edge to create
    private String edgeLabel;

    // optional properties to attach to the edge on creation
    private List<PropertyReqObj> propertyList;

    public String getOutVertexId() {
        return outVertexId;
    }

    public void setOutVertexId(String outVertexId) {
        this.outVertexId = outVertexId;
    }

    public String getInVertexId() {
        return inVertexId;
    }

    public void setInVertexId(String inVertexId) {
        this.inVertexId = inVertexId;
    }

    public String getEdgeLabel() {
        return edgeLabel;
    }

    public void setEdgeLabel(String edgeLabel) {
        this.edgeLabel = edgeLabel;
    }

    public List<PropertyReqObj> getPropertyList() {
        return propertyList;
    }

    public void setPropertyList(List<PropertyReqObj> propertyList) {
        this.propertyList = propertyList;
    }

    @Override
    public String toString() {
        return "AddEdgeReqObj{" + "outVertexId='" + outVertexId + '\'' + ", inVertexId='" + inVertexId + '\''
            + ", edgeLabel='" + edgeLabel + '\'' + ", propertyList=" + propertyList + '}';
    }
}
|
vinothsparrow/SparrowToolkit | Work/Source/Sparrow.DirectX/DirectX/DXGI/DXGIObject.cpp | <reponame>vinothsparrow/SparrowToolkit
// Copyright (c) Microsoft Corporation. All rights reserved.
#include "stdafx.h"
#include "DXGIObject.h"
using namespace Microsoft::WindowsAPICodePack::DirectX::Utilities;
using namespace Microsoft::WindowsAPICodePack::DirectX::Graphics;
// Returns the parent of this DXGI object as a managed wrapper of type T.
// Resolves the COM IID for T, queries the underlying IDXGIObject for its
// parent interface and wraps the result (VerifyResult throws on failed HRESULT).
generic <typename T> where T : GraphicsObject
T GraphicsObject::GetParent(void)
{
    void* tempParent = NULL;
    GUID guid = CommonUtils::GetGuid(T::typeid);
    Validate::VerifyResult(CastInterface<IDXGIObject>()->GetParent(guid, &tempParent));
    // Wrap the raw IUnknown* in a managed wrapper of type T; ownership of the
    // COM reference is presumably transferred to the wrapper — see
    // CreateIUnknownWrapper for the exact semantics.
    return Utilities::Convert::CreateIUnknownWrapper<T>(static_cast<IUnknown*>(tempParent));
}
trespasserw/MPS | languages/languageDesign/constraints/rules/kinds/generator/source_gen/util/KindUtil.java | <filename>languages/languageDesign/constraints/rules/kinds/generator/source_gen/util/KindUtil.java
package util;
/*Generated by MPS */
import org.jetbrains.mps.openapi.model.SNodeReference;
import org.jetbrains.mps.openapi.model.SNode;
import jetbrains.mps.generator.template.TemplateQueryContext;
import jetbrains.mps.lang.smodel.generator.smodelAdapter.SNodeOperations;
import java.util.Objects;
import jetbrains.mps.smodel.SNodePointer;
import org.jetbrains.mps.openapi.language.SConcept;
import jetbrains.mps.smodel.adapter.structure.MetaAdapterFactory;
/**
 * Generator-time helpers mapping a constraints RuleKind node to its runtime
 * representation. The hard-coded SNodePointer ids identify the four kind
 * instances declared in jetbrains.mps.lang.constraints.rules.kinds.constraints.
 */
public final class KindUtil {

    // static utility holder — not instantiable
    private KindUtil() {
    }

    /**
     * Returns a pointer to the runtime context class for the given kind node
     * (ContainmentContext for child/parent kinds, CanBeRootContext and
     * CanBeAncestorContext for the other two). Reports a generator error and
     * returns null for an unknown kind.
     */
    public static SNodeReference matchContextForRuleKind(SNode kind, TemplateQueryContext genContext) {
        // original copied for the case when we are in the same model where the kind is located
        kind = SNodeOperations.cast(genContext.getOriginalCopiedInputNode(kind), CONCEPTS.RuleKind$7C);
        SNodeReference pointer = SNodeOperations.getPointer(kind);
        if (Objects.equals(pointer, new SNodePointer("r:52ea8481-08b2-4cbd-ad9d-1b42825f7d09(jetbrains.mps.lang.constraints.rules.kinds.constraints)", "8018723092206382583"))) {
            // "can be child" — containment context
            return new SNodePointer("8e98f4e2-decf-4e97-bf80-9109e8b759ee/java:jetbrains.mps.core.aspects.constraints.rules.kinds(jetbrains.mps.lang.constraints.rules.runtime/)", "~ContainmentContext");
        } else if (Objects.equals(pointer, new SNodePointer("r:52ea8481-08b2-4cbd-ad9d-1b42825f7d09(jetbrains.mps.lang.constraints.rules.kinds.constraints)", "8018723092206382590"))) {
            // "can be parent" — shares the containment context with "can be child"
            return new SNodePointer("8e98f4e2-decf-4e97-bf80-9109e8b759ee/java:jetbrains.mps.core.aspects.constraints.rules.kinds(jetbrains.mps.lang.constraints.rules.runtime/)", "~ContainmentContext");
        } else if (Objects.equals(pointer, new SNodePointer("r:52ea8481-08b2-4cbd-ad9d-1b42825f7d09(jetbrains.mps.lang.constraints.rules.kinds.constraints)", "8018723092206373487"))) {
            // "can be root"
            return new SNodePointer("8e98f4e2-decf-4e97-bf80-9109e8b759ee/java:jetbrains.mps.core.aspects.constraints.rules.kinds(jetbrains.mps.lang.constraints.rules.runtime/)", "~CanBeRootContext");
        } else if (Objects.equals(pointer, new SNodePointer("r:52ea8481-08b2-4cbd-ad9d-1b42825f7d09(jetbrains.mps.lang.constraints.rules.kinds.constraints)", "8018723092206382576"))) {
            // "can be ancestor"
            return new SNodePointer("8e98f4e2-decf-4e97-bf80-9109e8b759ee/java:jetbrains.mps.core.aspects.constraints.rules.kinds(jetbrains.mps.lang.constraints.rules.runtime/)", "~CanBeAncestorContext");
        }
        genContext.showErrorMessage(kind, "Unknown kind");
        return null;
    }

    /**
     * Returns the runtime kind constant name ("CAN_BE_CHILD" etc.) for the
     * given kind node; same id mapping as matchContextForRuleKind. Reports a
     * generator error and returns null for an unknown kind.
     */
    public static String matchKindForRuleKind(SNode kind, TemplateQueryContext genContext) {
        // original copied for the case when we are in the same model where the kind is located
        kind = SNodeOperations.cast(genContext.getOriginalCopiedInputNode(kind), CONCEPTS.RuleKind$7C);
        SNodeReference pointer = SNodeOperations.getPointer(kind);
        if (Objects.equals(pointer, new SNodePointer("r:52ea8481-08b2-4cbd-ad9d-1b42825f7d09(jetbrains.mps.lang.constraints.rules.kinds.constraints)", "8018723092206382583"))) {
            return "CAN_BE_CHILD";
        } else if (Objects.equals(pointer, new SNodePointer("r:52ea8481-08b2-4cbd-ad9d-1b42825f7d09(jetbrains.mps.lang.constraints.rules.kinds.constraints)", "8018723092206382590"))) {
            return "CAN_BE_PARENT";
        } else if (Objects.equals(pointer, new SNodePointer("r:52ea8481-08b2-4cbd-ad9d-1b42825f7d09(jetbrains.mps.lang.constraints.rules.kinds.constraints)", "8018723092206373487"))) {
            return "CAN_BE_ROOT";
        } else if (Objects.equals(pointer, new SNodePointer("r:52ea8481-08b2-4cbd-ad9d-1b42825f7d09(jetbrains.mps.lang.constraints.rules.kinds.constraints)", "8018723092206382576"))) {
            return "CAN_BE_ANCESTOR";
        }
        genContext.showErrorMessage(kind, "Unknown kind");
        return null;
    }

    // Concept handle for the RuleKind structure concept.
    private static final class CONCEPTS {
        /*package*/ static final SConcept RuleKind$7C = MetaAdapterFactory.getConcept(0x5dae8159ab9946bbL, 0xa40d0cee30ee7018L, 0x6530303593554248L, "jetbrains.mps.lang.constraints.rules.kinds.structure.RuleKind");
    }
}
|
Carlosvva/PS-T-2016 | www/modules/partner/partnerController.js | <reponame>Carlosvva/PS-T-2016<gh_stars>0
// Detail controller for a single partner: loads the partner, its profile name
// and its child partners/agents, and exposes edit/enable/disable/delete actions.
//
// Fixes: restored mojibake-corrupted Spanish UI strings to proper UTF-8 and
// repaired the broken `password:` field expression in the edit form.
app.controller('partnerController', function($scope, $rootScope, $state, $timeout, $http, api, localStorage, dialog, $mdDialog, $mdToast, $interval) {

    // ---- Variables & defaults ----
    $scope.tabSelected = 1;
    $scope.total = 1000;
    var date = new Date();
    var today = date.getDate();
    $scope.tableHeaders = [{
        title: 'NAME'
    }, {
        title: 'EMAIL'
    }, {
        title: 'CONTRASEÑA'
    }, {
        title: 'PERFIL'
    }];

    // ---- Methods ----
    // Switch the visible tab (no-op when already selected).
    $scope.changeTab = function(tab) {
        if ($scope.tabSelected != tab) {
            $scope.tabSelected = tab;
        }
    }

    $scope.profile = '';

    // ---- Fetch data ----
    // Load the partner for the current route, then its profile name and the
    // partners/agents created by it.
    api.partner.getOne({
        id: $state.params.id,
        success: function(data) {
            console.log('AQUI AQUI AQUI');
            console.log(data);
            $scope.partner = data;
            // Fetch partner profile
            if (data.profile == null) {
                $scope.profile = 'No definido';
            } else {
                api.profile.getOne({
                    // NOTE(review): `name` is not defined in this scope (it
                    // resolves to the global window.name) — confirm intent.
                    params: name,
                    id: data.profile,
                    success: function(partnerProfile) {
                        $scope.profile = partnerProfile.name;
                    },
                    error: function(error) {
                        console.log(error);
                    }
                });
            }
            // Partners created by this partner
            api.partner.getFromPartner({
                id: $scope.partner.id,
                success: function(data) {
                    console.log('-------partners-----')
                    console.log(data);
                    $scope.partners = data;
                    console.log('-------partners-----')
                },
                error: function(error) {
                    console.log(error);
                }
            });
            // Agents created by this partner
            api.agent.getFromPartner({
                id: $scope.partner.id,
                success: function(data) {
                    console.log('-------agents-----')
                    console.log(data);
                    $scope.agents = data;
                    console.log('-------agents-----')
                },
                error: function(error) {
                    console.log(error);
                }
            });
        },
        error: function(error) {
            console.log(error);
        }
    });

    // ---- Model ----
    // CRUD actions for the partner currently on screen.
    $scope.partnerActions = {
        form: {
            fields: {
                name: '',
                email: '',
                password: '',
                confirm: '',
                profile: null,
            },
            // Open the edit dialog pre-filled with the partner's current data.
            edit: function() {
                api.profile.getAll({
                    params: 'entity=1&id&name',
                    success: function(data) {
                        console.log(data);
                        $scope.profiles = data;
                    },
                    error: function(error) {
                        console.log(error);
                    }
                })
                $scope.partnerActions.form.fields = {
                    name: $scope.partner.name,
                    email: $scope.partner.email,
                    password: $scope.partner.password,
                    confirm: $scope.partner.password,
                    profile: $scope.partner.profile
                }
                dialog.create({
                    "title": "Nuevo partner",
                    "body": "<div layout='row' layout-wrap>" +
                        '<md-input-container flex="100">' +
                        '<label>Nombre</label>' +
                        '<input ng-model="partnerActions.form.fields.name">' +
                        '</md-input-container>' +
                        '<md-input-container flex="100">' +
                        '<label>Email</label>' +
                        '<input ng-model="partnerActions.form.fields.email">' +
                        '</md-input-container>' +
                        '<md-input-container flex="100">' +
                        '<label>Contraseña</label>' +
                        '<input ng-model="partnerActions.form.fields.password">' +
                        '</md-input-container>' +
                        '<md-input-container flex="100">' +
                        '<label>Confirmar contraseña</label>' +
                        '<input ng-model="partnerActions.form.fields.confirm">' +
                        '</md-input-container>' +
                        '<md-input-container flex="100">' +
                        '<md-select ng-model="partnerActions.form.fields.profile" placeholder="Selecciona un perfil">' +
                        '<md-option ng-value="{{profile.id}}" ng-repeat="profile in profiles">{{profile.name}}</md-option>' +
                        '</md-select>' +
                        '</md-input-container>' +
                        "</div>",
                    "data": "",
                    "buttons": [{
                        "text": "Guardar",
                        "action": "partnerActions.update"
                    }, {
                        "text": "Cancelar"
                    }]
                })
            },
            // Validate the edit form; returns { count, message, check } where
            // check === true means the form may be submitted.
            validate: function() {
                var error = {
                    count: 0,
                    message: "<p>Debe llenar los siguientes campos:</p><p>",
                    check: false
                }
                var form = $scope.partnerActions.form.fields;
                var re = /^([\w-]+(?:\.[\w-]+)*)@((?:[\w-]+\.)*\w[\w-]{0,66})\.([a-z]{2,6}(?:\.[a-z]{2})?)$/i;
                if (form.name == '') {
                    error.count++;
                    error.message = error.message + 'Nombre, ';
                }
                if (form.email == '') {
                    error.count++;
                    error.message = error.message + 'Email, ';
                }
                if (form.password == '') {
                    error.count++;
                    error.message = error.message + 'Contraseña, ';
                }
                if (form.confirm == '') {
                    error.count++;
                    error.message = error.message + 'Confirmar contraseña, ';
                }
                // Trim the trailing ", " of the missing-field list
                error.message = error.message.substring(0, error.message.length - 2) + ' .</p> '
                var testMail = re.test(form.email);
                if (!testMail) {
                    error.count++;
                    error.message = error.message + '<p>Ingresa un Email válido.</p>';
                }
                if (form.password != form.confirm) {
                    error.count++;
                    error.message = error.message + '<p>Las contraseñas no coinciden.</p>';
                }
                if (error.count > 0) {
                    error.check = false;
                } else {
                    error.check = true;
                }
                return error;
            }
        },
        // Persist the edit form; toast on success, dialog listing problems on
        // validation failure.
        update: function() {
            $rootScope.coreDialog = null;
            var error = $scope.partnerActions.form.validate();
            if (error.check) {
                api.partner.update({
                    id: $scope.partner.id,
                    partner: $scope.partnerActions.form.fields,
                    success: function(data) {
                        console.log(data);
                        $timeout(function() {
                            $mdToast.showSimple('¡Datos del partner actualizados correctamente!');
                        }, 1000);
                        $scope.partner = window.mergeObj($scope.partner, $scope.partnerActions.form.fields);
                    },
                    error: function(error) {
                        console.log(error);
                    }
                })
            } else {
                var confirm = $mdDialog.confirm()
                    .title('Formulario incompleto.')
                    .content(error.message)
                    .ok('Completar formulario')
                $mdDialog.show(confirm).then(function() {});
            }
        },
        // Block access for this partner and all of its agents (status = 3).
        disable: function() {
            var confirm = $mdDialog.confirm()
                .title('Inhabilitar partner')
                .content('Al confirmar esta acción bloqueas el acceso al partner y todos los agentes creados por el partner. Ningún dato será eliminado y puedes reestablecer el acceso en cualquier momento utilizando este mismo botón.')
                .ok('Bloquear compañía')
                .cancel('Regresar');
            $mdDialog.show(confirm).then(function() {
                api.partner.update({
                    id: $scope.partner.id,
                    partner: {
                        status: 3
                    },
                    success: function(data) {
                        $scope.partner.status = 3;
                        $timeout(function() {
                            $mdToast.showSimple('¡El partner ha sido Inhabilitado con éxito!');
                        }, 500);
                    },
                    error: function(error) {
                        $mdToast.showSimple('Hubo un error al procesar tu solicitud, inténtalo nuevamente más tarde.');
                    }
                })
            }, function() {});
        },
        // Restore access for this partner and all of its agents (status = 2).
        enable: function() {
            var confirm = $mdDialog.confirm()
                .title('Habilitar partner')
                .content('Al confirmar esta acción devuelves el acceso al partner y todos los agentes creados por el partner. Puedes volver a bloquear el acceso en cualquier momento utilizando este mismo botón.')
                .ok('Desbloquear compañía')
                .cancel('Regresar');
            $mdDialog.show(confirm).then(function() {
                api.partner.update({
                    id: $scope.partner.id,
                    partner: {
                        status: 2
                    },
                    success: function(data) {
                        $scope.partner.status = 2;
                        $timeout(function() {
                            $mdToast.showSimple('¡El partner ha sido Habilitado con éxito!');
                        }, 500);
                    },
                    error: function(error) {
                        $mdToast.showSimple('Hubo un error al procesar tu solicitud, inténtalo nuevamente más tarde.');
                    }
                })
            }, function() {});
        },
        // Irreversibly delete the partner, its agents and all of its data.
        delete: function() {
            var confirm = $mdDialog.confirm()
                .title('Eliminar partner.')
                .content('Al confirmar esta acción eliminas por completo a este partner, sus agentes y todos sus datos de manera irreversible, ¿Estás seguro que quieres continuar?')
                .ok('Eliminar compañía')
                .cancel('Regresar');
            $mdDialog.show(confirm).then(function() {
                api.partner.delete({
                    id: $scope.partner.id,
                    success: function(data) {
                        $state.go('main.partners');
                        $timeout(function() {
                            $mdToast.showSimple('¡El partner ha sido eliminado con éxito!');
                        }, 2000);
                    },
                    error: function(error) {
                        $mdToast.showSimple('Hubo un error al procesar tu solicitud, inténtalo nuevamente más tarde.');
                    }
                })
            }, function() {});
        }
    }

    // Toolbar actions bound to the view.
    $scope.actions = [{
        icon: 'ion-edit',
        execute: $scope.partnerActions.form.edit,
        title: 'Editar'
    }, {
        icon: 'ion-ios-trash',
        execute: $scope.partnerActions.delete,
        title: 'Eliminar'
    }, {
        icon: 'ion-close-circled',
        execute: $scope.partnerActions.disable,
        title: 'Inhabilitar'
    }];

    // ---- Access control ----
    // Redirect to the first module the profile may access; leave the app if none.
    if (!$rootScope.profile.partners.module) {
        var none = true;
        if ($rootScope.profile.customers.module) {
            $state.go('main.customers');
            none = false;
        }
        // NOTE(review): this branch is unreachable (the outer guard already
        // requires partners.module to be falsy) — confirm and remove.
        if ($rootScope.profile.partners.module) {
            $state.go('main.partners');
            none = false;
        }
        if ($rootScope.profile.agents.module) {
            $state.go('main.agents');
            none = false;
        }
        if ($rootScope.profile.profiles.module) {
            $state.go('main.profiles');
            none = false;
        }
        if (none) {
            location.href = "http://www.pentcloud.com"
        }
    }
});
cthacker-udel/NCT-AndroidGUI | app/src/main/java/com/example/nctai_trading/coinbasePro/coinBaseKeys.java | package com.example.nctai_trading.coinbasePro;
import androidx.appcompat.app.AlertDialog;
import androidx.appcompat.app.AppCompatActivity;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Toast;
import com.example.nctai_trading.R;
import com.example.nctai_trading.mainPage;
public class coinBaseKeys extends AppCompatActivity {
Button coinBaseBtn;
EditText coinBaseApiKeyEditText;
EditText coinBaseSecretKeyEditText;
String coinBaseApiText;
String coinBaseSecretKeyText;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_coin_base_keys);
coinBaseBtn = findViewById(R.id.coinbaseKeysButton);
coinBaseApiKeyEditText = findViewById(R.id.coinbaseKeysApiKey);
coinBaseSecretKeyEditText = findViewById(R.id.coinBaseSecretKeyEditText);
AlertDialog.Builder coinBaseAlert = new AlertDialog.Builder(this);
coinBaseAlert.setTitle("Api Keys Applied");
coinBaseAlert.setMessage("Coinbase Api Key and Secret Key have been applied");
coinBaseBtn.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
coinBaseApiText = coinBaseApiKeyEditText.getText().toString();
coinBaseSecretKeyText = coinBaseSecretKeyEditText.getText().toString();
SharedPreferences sharedPreferences = getSharedPreferences("test",MODE_PRIVATE);
SharedPreferences.Editor editor = sharedPreferences.edit();
//editor.putString("coinBaseApiKey",coinBaseApiText);
//editor.putString("coinBaseSecretKey",coinBaseSecretKeyText);
// temp
editor.putString("coinBaseApiKey","17e5d0f33c9074a2f67c95cf0436fca9");
editor.putString("coinBaseSecretKey","<KEY>);
editor.putString("coinBasePassPhrase","<PASSWORD>");
//editor.putString("coinBaseApiKey","<KEY>");
//editor.putString("coinBaseSecretKey","<KEY>);
//editor.putString("coinBasePassPhrase","<PASSWORD>");
editor.apply();
editor.commit();
coinBaseAlert.setPositiveButton("Ok", new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
Intent toMainPage = new Intent(getApplicationContext(), mainPage.class);
startActivity(toMainPage);
}
});
coinBaseAlert.create().show();
return;
}
});
}
} |
DianwodaCompany/vera | piper/src/main/java/com/dianwoda/usercenter/vera/piper/data/ActivePiperData.java | package com.dianwoda.usercenter.vera.piper.data;
import com.dianwoda.usercenter.vera.common.protocol.route.PiperData;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* active piper data
* @author seam
*/
/**
 * Registry of currently active pipers, keyed by their location string.
 *
 * @author seam
 */
public class ActivePiperData {

    // active pipers indexed by location; concurrent map so that registration
    // and lookup may happen from different threads
    private Map<String /* location */, PiperData> piperDataMap = new ConcurrentHashMap<>();

    // timestamp of the most recent registry update
    private long updateTime;

    /** @return true if a piper at the same location is already registered */
    public boolean containPiperData(PiperData piperData) {
        return containPiperData(piperData.getLocation());
    }

    /** @return true if a piper is registered for the given location */
    public boolean containPiperData(String location) {
        return piperDataMap.containsKey(location);
    }

    /** Registers (or replaces) the piper under its location. */
    public void addPiperData(PiperData piperData) {
        piperDataMap.put(piperData.getLocation(), piperData);
    }

    /** Removes the piper registered at the given piper's location. */
    public void removePiperData(PiperData piperData) {
        piperDataMap.remove(piperData.getLocation());
    }

    /** Registers every piper in the list; a null list is ignored. */
    public void addPiperDatas(List<PiperData> list) {
        if (list == null) {
            return;
        }
        list.forEach(this::addPiperData);
    }

    /** Removes every piper in the list; a null list is ignored. */
    public void removePiperDatas(List<PiperData> list) {
        if (list == null) {
            return;
        }
        list.forEach(this::removePiperData);
    }

    public void setUpdateTime(long updateTime) {
        this.updateTime = updateTime;
    }

    public Map<String, PiperData> getPiperDataMap() {
        return piperDataMap;
    }

    public void setPiperDataMap(Map<String, PiperData> piperDataMap) {
        this.piperDataMap = piperDataMap;
    }

    /** @return a live view of all registered pipers */
    public Collection<PiperData> values() {
        return piperDataMap.values();
    }
}
|
Masriyan/gojek-commons | gojek-commons-kafka/src/main/java/com/gojek/kafka/event/KafkaProducer.java | /**
*
*/
package com.gojek.kafka.event;
import java.util.Map;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import com.gojek.core.event.Destination;
import com.gojek.core.event.Producer;
/**
* @author ganesh.s
*
*/
/**
 * Kafka-backed {@link Producer} implementation: publishes events to topics and
 * flushes after every send.
 */
public class KafkaProducer<K, E> implements Producer<E> {

    // underlying Kafka client used for the actual publishing
    private org.apache.kafka.clients.producer.Producer<K, E> producer;

    /**
     * Creates a producer from raw Kafka client configs, using whatever
     * serializers the configs themselves specify.
     *
     * @param configs Kafka {@link ProducerConfig} properties
     */
    public KafkaProducer(Map<String, Object> configs) {
        this(configs, null, null);
    }

    /**
     * Wraps an already constructed Kafka producer (useful for testing).
     *
     * @param producer the client to delegate to
     */
    public KafkaProducer(org.apache.kafka.clients.producer.Producer<K, E> producer) {
        this.producer = producer;
    }

    /**
     * Creates a producer, optionally overriding the key/value serializer
     * classes in the given configs; a null serializer leaves the configs
     * untouched. Note: mutates the passed-in configs map.
     *
     * @param configs Kafka {@link ProducerConfig} properties
     * @param keySerializer key serializer class, or null
     * @param valueSerializer value serializer class, or null
     */
    public KafkaProducer(Map<String, Object> configs, Class<?> keySerializer, Class<?> valueSerializer) {
        if (keySerializer != null) {
            configs.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, keySerializer);
        }
        if (valueSerializer != null) {
            configs.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, valueSerializer);
        }
        this.producer = new org.apache.kafka.clients.producer.KafkaProducer<>(configs);
    }

    /** Publishes the event (unkeyed) to the topic named by the destination's exchange. */
    @Override
    public void send(E event, Destination destination) {
        send(null, event, destination.getExchange());
    }

    /**
     * Publishes a single record and flushes immediately. Per the Kafka client
     * contract, flush() blocks until previously sent records complete — this
     * trades throughput for delivery certainty on every call.
     *
     * @param key record key, or null for an unkeyed record
     * @param value record payload
     * @param topic destination topic
     */
    public void send(K key, E value, String topic) {
        ProducerRecord<K, E> record = null;
        if (key == null) {
            record = new ProducerRecord<K, E>(topic, value);
        } else {
            record = new ProducerRecord<K, E>(topic, key, value);
        }
        producer.send(record);
        producer.flush();
    }
}
|
ZaidKaleem/60-212 | Lab7_P1/Q3/Page.java | <filename>Lab7_P1/Q3/Page.java<gh_stars>0
package Lab7_P1.Q3;
/** Trivial {@link Turner} implementation representing a book page. */
public class Page implements Turner {

    /** @return a message describing the page-turn action */
    public String turn() {
        return "Going to the next page.";
    }
}
|
jmartisk/hibernate-validator | documentation/src/test/java/org/hibernate/validator/referenceguide/chapter04/resourcebundlelocator/ResourceBundleLocatorTest.java | <reponame>jmartisk/hibernate-validator
package org.hibernate.validator.referenceguide.chapter04.resourcebundlelocator;
import java.util.Arrays;
import java.util.Set;
import javax.validation.ConstraintViolation;
import javax.validation.Validation;
import javax.validation.Validator;
import org.junit.Test;
import org.hibernate.validator.messageinterpolation.ResourceBundleMessageInterpolator;
import org.hibernate.validator.resourceloading.AggregateResourceBundleLocator;
import org.hibernate.validator.resourceloading.PlatformResourceBundleLocator;
import static org.junit.Assert.assertEquals;
/**
 * Reference-guide tests showing how to plug custom ResourceBundleLocators into
 * Hibernate Validator's message interpolation.
 */
public class ResourceBundleLocatorTest {

    // Messages are resolved from the single custom bundle "MyMessages".
    @Test
    public void messagesRetrievedFromSpecificBundle() {
        Validator validator = Validation.byDefaultProvider()
                .configure()
                .messageInterpolator(
                        new ResourceBundleMessageInterpolator(
                                new PlatformResourceBundleLocator( "MyMessages" )
                        )
                )
                .buildValidatorFactory()
                .getValidator();

        Set<ConstraintViolation<Car>> violations = validator.validateProperty(
                new Car(),
                "licensePlate"
        );
        assertEquals( 1, violations.size() );
        assertEquals( "null is not supported", violations.iterator().next().getMessage() );
    }

    // Messages are resolved from several bundles aggregated in order; keys from
    // earlier bundles win, later bundles fill in the rest.
    @Test
    public void usingAggregateResourceBundleLocator() {
        Validator validator = Validation.byDefaultProvider()
                .configure()
                .messageInterpolator(
                        new ResourceBundleMessageInterpolator(
                                new AggregateResourceBundleLocator(
                                        Arrays.asList(
                                                "MyMessages",
                                                "MyOtherMessages"
                                        )
                                )
                        )
                )
                .buildValidatorFactory()
                .getValidator();

        Set<ConstraintViolation<Car>> violations = validator.validateProperty(
                new Car(),
                "licensePlate"
        );
        assertEquals( 1, violations.size() );
        assertEquals( "null is not supported", violations.iterator().next().getMessage() );

        violations = validator.validateProperty( new Car(), "topSpeed" );
        assertEquals( 1, violations.size() );
        assertEquals( "too high", violations.iterator().next().getMessage() );
    }
}
|
goroda/Compressed-Continuous-Computation | c3/lib_probability/probability.c | <filename>c3/lib_probability/probability.c
// Copyright (c) 2015-2016, Massachusetts Institute of Technology
// Copyright (c) 2016-2017 Sandia Corporation
// Copyright (c) 2017 NTESS, LLC.
// This file is part of the Compressed Continuous Computation (C3) Library
// Author: <NAME>
// Contact: <EMAIL>
// All rights reserved.
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
// 3. Neither the name of the copyright holder nor the names of its contributors
// may be used to endorse or promote products derived from this software
// without specific prior written permission.
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//Code
/** \file probability.c
* Provides routines for working with probability with function trains
*/
#include <stdlib.h>
#include <stdio.h>
#include <assert.h>
#include <string.h>
#include <math.h>
#include <float.h>
#include "stringmanip.h"
#include "array.h"
#include "lib_optimization.h"
#include "probability.h"
#include "linalg.h"
#include "lib_clinalg.h"
// Linked list of vectors of indices
struct IndexList
{
size_t nelem;
size_t * vals;
struct IndexList * next;
};
// create an element of the list
struct IndexList * index_list_create(size_t nelem, size_t * vals)
{
    // Allocate a list node holding its own copy of the nelem indices in vals;
    // aborts the process on allocation failure.
    struct IndexList * il = malloc(sizeof(struct IndexList));
    if (il == NULL){
        fprintf(stderr,"Failure to allocate index list for sobol indices\n");
        exit(1);
    }
    il->nelem = nelem;
    // the node owns its own copy, so the caller may reuse / free vals
    il->vals = calloc_size_t(nelem);
    memmove(il->vals,vals,nelem*sizeof(size_t));
    il->next = NULL;
    return il;
}
// destroy the linked list
// Release an entire linked list of index vectors (safe on NULL).
void index_list_destroy(struct IndexList * list)
{
    // iterative traversal: free each node's payload, then the node itself
    while (list != NULL){
        struct IndexList * next = list->next;
        free(list->vals); list->vals = NULL;
        free(list);
        list = next;
    }
}
// append a new element to the end of the list
void index_list_append(struct IndexList ** list, size_t nelem, size_t * vals)
{
    // Append a copy of the nelem indices in vals to the end of *list;
    // *list may point to NULL, in which case the new node becomes the head.
    struct IndexList * newitem = index_list_create(nelem,vals);
    if (*list == NULL){
        *list = newitem;
    }
    else{
        // walk to the tail and link the new node there (O(length))
        struct IndexList * temp = *list;
        while (temp->next != NULL){
            temp = temp->next;
        }
        temp->next = newitem;
    }
}
// n choose k, order doesn't matter, return ordered sequences in list
void n_choose_k_no_order(size_t s, size_t n, size_t k,
                         struct IndexList ** list,
                         size_t nprev, size_t * prev){
    // Recursively enumerate all strictly increasing k-tuples drawn from
    // {s, ..., n-1}, each prefixed by the nprev indices already fixed in prev;
    // every completed tuple is appended to *list.
    size_t * base = calloc_size_t(nprev+1);
    if (nprev != 0){
        memmove(base,prev,nprev * sizeof(size_t));
    }
    if (k == 1){
        // base case: one slot left, emit a tuple for every remaining index
        for (size_t ii = s; ii < n; ii++){
            base[nprev] = ii;
            index_list_append(list,nprev+1,base);
        }
    }
    else{
        // fix index ii and recurse on the k-1 remaining slots, starting at
        // ii+1 so tuples stay strictly increasing (no permutations emitted)
        for (size_t ii = s; ii < n; ii++){
            base[nprev] = ii;
            n_choose_k_no_order(ii+1,n,k-1,list,nprev+1,base);
        }
    }
    free(base); base = NULL;
}
//
/** \struct C3SobolSensitivity
* \brief Stores sobol sensitivity information
* \var C3SobolSensitivity::dim
* number of variables
* \var C3SobolSensitivity::total_effects
* vector of total effect sensitivities
* \var C3SobolSensitivity::variance
* Total variance of the output
* \var C3SobolSensitivity::max_order
* The maximum number of variable interactions (<= dim)
* \var C3SobolSensitivity::interactions
* A tree structure holding the interaction information
*/
typedef struct C3SobolSensitivity
{
size_t dim;
double * total_effects;
double variance;
size_t max_order;
struct SInteract ** interactions;
} c3_sobol_t;
/**********************************************************//**
Compute the variance due to the inteaction of certain
variables
\param[in] ft - function train representing a random variable
\param[in] ninteract - number of variables interacting
\param[in] interacting_vars - variables that are interacting
\returns Variance_{interacting_vars}(Expectation(ft|interacting_vars))
**************************************************************/
double function_train_sobol_interact_var(
    const struct FunctionTrain * ft,
    size_t ninteract,
    const size_t * interacting_vars)
{
    if (ft->dim == ninteract){
        // all variables interact: the conditional expectation is ft itself,
        // so just return Var(ft) = E[ft^2] - E[ft]^2
        double fm = function_train_integrate_weighted(ft);
        double sm = function_train_inner_weighted(ft,ft);
        double vari = sm - fm*fm;
        return vari;
    }

    // collect the indices of the variables NOT in interacting_vars; these
    // are the dimensions that get integrated out below
    struct FunctionTrain * ft_1 = NULL;
    size_t * ind_contract = calloc_size_t(ft->dim-ninteract);
    size_t on_contract = 0;
    for (size_t ii = 0; ii < ft->dim; ii++){
        size_t non_interacting = 1;
        for (size_t jj = 0; jj < ninteract; jj++){
            if (ii == interacting_vars[jj]){
                non_interacting = 0;
                break;
            }
        }
        if (non_interacting)
        {
            ind_contract[on_contract] = ii;
            on_contract++;
        }
    }

    // ft_1 is the conditional expectation E[ft | interacting_vars], obtained
    // by integrating out the non-interacting dimensions
    ft_1 = function_train_integrate_weighted_subset(ft,
                                                    ft->dim-ninteract,
                                                    ind_contract);
    free(ind_contract); ind_contract = NULL;

    // Var over the interacting variables of that conditional expectation
    double fm = function_train_integrate_weighted(ft_1);
    double sm = function_train_inner_weighted(ft_1,ft_1);
    double vari = sm - fm*fm;
    function_train_free(ft_1); ft_1 = NULL;
    return vari;
}
// Tree representing sobol indices.  Each root corresponds to one variable;
// the path from a root to a node spells out a set of interacting variables
// (stored in `label`), so the tree enumerates variable combinations without
// repetition.
struct SInteract
{
    size_t val; // variable that denotes this node of the tree
    size_t nlabels; // total interactions considered before this node + 1
    size_t * label; // denotes variables higher in the tree label[nlabels-1] = val

    double vari; // Variance_{interacting_vars}(Expectation(ft|interacting_vars))
    double var_subtract; // variance of all interactions of combinations of variables
                         // given in label.
    int set_sub; // whether or not var_subtract is set

    // the variance attributed to the variable interactions specified in label is
    // vari - var_subtract

    // branches to the rest of the variables (entries may be NULL)
    size_t nleaves;
    struct SInteract ** leaves;
};
/* Recursively release an interaction-tree node, its label, and all subtrees.
   A NULL argument is a no-op. */
void sinteract_free(struct SInteract * si)
{
    if (si == NULL){
        return;
    }
    free(si->label); si->label = NULL;
    for (size_t leaf = 0; leaf < si->nleaves; leaf++){
        sinteract_free(si->leaves[leaf]);
        si->leaves[leaf] = NULL;
    }
    free(si->leaves); si->leaves = NULL;
    free(si);
}
/* Create one node of the sobol interaction tree.
 *
 *   val           - variable this node represents
 *   nleaves       - number of child slots to allocate (initialized to NULL)
 *   n_prev_labels - number of labels inherited from ancestors
 *   prev_labels   - the ancestor labels (copied; may be NULL when count is 0)
 *   ft            - function train used to evaluate the interaction variance
 *
 * The node's `vari` is computed immediately via
 * function_train_sobol_interact_var; `var_subtract` starts unset. */
struct SInteract * sinteract_create(size_t val, size_t nleaves,
                                    size_t n_prev_labels, size_t * prev_labels,
                                    const struct FunctionTrain * ft)
{
    struct SInteract * si = malloc(sizeof(struct SInteract));
    if (si == NULL){
        fprintf(stderr, "Failure to allocate sobol interaction list\n");
        exit(1);
    }
    si->val = val;
    si->nleaves = nleaves;
    if (nleaves > 0){
        si->leaves = malloc(nleaves * sizeof(struct SInteract *));
        // BUG FIX: this allocation was previously unchecked, unlike the
        // node allocation above; a failure would have crashed on the
        // NULL-initialization loop below.
        if (si->leaves == NULL){
            fprintf(stderr, "Failure to allocate sobol interaction list\n");
            exit(1);
        }
        for (size_t ii = 0; ii < si->nleaves; ii++){
            si->leaves[ii] = NULL;
        }
    }
    else{
        si->leaves = NULL;
    }
    // label = ancestor labels + this node's variable
    if (n_prev_labels == 0){
        si->nlabels = 1;
        si->label = calloc_size_t(1);
        si->label[0] = val;
    }
    else{
        si->nlabels = n_prev_labels+1;
        si->label = calloc_size_t(si->nlabels);
        memmove(si->label,prev_labels,n_prev_labels * sizeof(size_t));
        si->label[si->nlabels-1] = val;
    }
    si->vari = function_train_sobol_interact_var(ft,si->nlabels,si->label);
    si->set_sub = 0;
    return si;
}
/* Get the raw interaction variance stored at the node addressed by `vars`,
   `distance` hops below `si`.  Each hop indexes a child as vars[k]-val-1. */
double sinteract_get_vari(struct SInteract * si, size_t distance, size_t * vars)
{
    while (distance > 0){
        si = si->leaves[vars[0] - si->val - 1];
        vars += 1;
        distance -= 1;
    }
    return si->vari;
}
/* Get the sensitivity of the node addressed by `vars`: its interaction
   variance minus the variance of all sub-combinations (when that correction
   has been computed). */
double sinteract_get_sensitivity(struct SInteract * si,
                                 size_t distance,
                                 const size_t * vars)
{
    while (distance > 0){
        si = si->leaves[vars[0] - si->val - 1];
        vars += 1;
        distance -= 1;
    }
    if (si->set_sub == 1){
        return si->vari - si->var_subtract;
    }
    return si->vari;
}
/* Print a node's label set and interaction variance, then recurse into all
   children (NULL nodes are skipped). */
void sinteract_print(struct SInteract * si)
{
    if (si == NULL){
        return;
    }
    printf("Interaction: ");
    for (size_t jj = 0; jj < si->nlabels; jj++){
        printf("%zu ",si->label[jj]);
    }
    printf("--> Variance: %G\n",si->vari);
    for (size_t jj = 0; jj < si->nleaves; jj++){
        sinteract_print(si->leaves[jj]);
    }
}
/* Apply callback f(sensitivity, nlabels, label, arg) to this node and every
   descendant.  The sensitivity passed is vari - var_subtract when the
   correction has been computed, otherwise the raw vari. */
void sinteract_apply_external(struct SInteract *si, void (*f)(double,size_t,size_t*,void*),void * arg)
{
    if (si == NULL){
        return;
    }
    double sens = si->vari;
    if (si->set_sub){
        sens -= si->var_subtract;
    }
    f(sens, si->nlabels, si->label, arg);
    for (size_t leaf = 0; leaf < si->nleaves; leaf++){
        sinteract_apply_external(si->leaves[leaf], f, arg);
    }
}
/* Attach a freshly created child for variable `val` at slot `which_leaf`,
   giving the child `nleaves` slots of its own.  The child inherits this
   node's label list. */
void sinteract_push(struct SInteract ** si, size_t val, size_t which_leaf,
                    size_t nleaves,
                    const struct FunctionTrain * ft)
{
    (*si)->leaves[which_leaf] =
        sinteract_create(val, nleaves, (*si)->nlabels, (*si)->label, ft);
}
/* Compute `var_subtract` for a node: the sum of the sensitivities of every
   strict sub-combination of the node's label set, looked up in the already
   populated interaction trees hanging off `head_of_tree`.  After this call
   the node's attributable sensitivity is vari - var_subtract.
   NOTE(review): assumes all smaller combinations have already been inserted
   into the tree (the construction order in get_combination provides this) --
   confirm before reusing elsewhere. */
void sinteract_compute_var_subtract(struct SInteract * element,
                                    struct C3SobolSensitivity * head_of_tree)
{
    size_t nlabels = element->nlabels;
    double var_subtract = 0.0;
    // jj enumerates subset sizes; n_choose_k_no_order produces all index
    // subsets of that size as a linked IndexList
    for (size_t jj = 0; jj < nlabels; jj++){
        struct IndexList * il = NULL;
        n_choose_k_no_order(0,nlabels,jj,&il,0,NULL);
        struct IndexList * temp = il;
        while (temp != NULL){
            // map subset indices back to actual variable ids
            size_t * vars = calloc_size_t(temp->nelem);
            for (size_t kk = 0; kk < temp->nelem; kk++){
                vars[kk] = element->label[temp->vals[kk]];
            }
            double vari = c3_sobol_sensitivity_get_interaction(
                head_of_tree,temp->nelem,vars);
            var_subtract += vari;
            temp = temp->next;
            free(vars); vars = NULL;
        }
        index_list_destroy(il); il = NULL;
    }
    element->set_sub = 1;
    element->var_subtract = var_subtract;
}
/* Recursively enumerate variable combinations starting at index `start`,
 * attaching a child node for each and computing its var_subtract correction.
 * `elements_left` counts how many more interaction levels to build.
 * `index` is unused (kept for interface compatibility).
 *
 * BUG FIX: the descending loop was `for (size_t ii = num_elements-1;
 * ii >= start; ii--)`, which wraps around (infinite loop / wild index) when
 * start == 0 because size_t can never go negative.  Callers in view always
 * pass start >= 1, but the rewritten loop below is safe for all inputs and
 * visits exactly the same indices in the same order. */
void get_combination(size_t num_elements, size_t elements_left, size_t start,
                     struct SInteract ** elements, size_t * index,
                     const struct FunctionTrain * ft,
                     struct C3SobolSensitivity * head_of_tree)
{
    sinteract_compute_var_subtract(*elements,head_of_tree);
    if (elements_left == 1){
        for (size_t ii = start; ii < num_elements; ii++){
            sinteract_push(elements,ii,ii-start,0,ft);
            sinteract_compute_var_subtract((*elements)->leaves[ii-start],
                                           head_of_tree);
        }
    }
    else{
        /* iterate ii = num_elements-1 down to start without size_t wrap */
        for (size_t ii = num_elements; ii-- > start; ){
            sinteract_push(elements,ii,ii-start,num_elements-ii-1,ft);
            get_combination(num_elements, elements_left-1, ii+1,
                            &((*elements)->leaves[ii-start]),
                            index, ft, head_of_tree);
        }
    }
}
/**********************************************************//**
    Perform sobol sensitivity analysis

    \param[in] ft    - function train over which to compute sensitivities
    \param[in] order - maximum order of interactions to calculate

    \return sobol sensitivity structure

    \note
    NOTE(review): order is presumably >= 2; with order == 1 the recursion in
    get_combination is entered with elements_left == 0, which underflows --
    confirm callers' contract.
**************************************************************/
struct C3SobolSensitivity *
c3_sobol_sensitivity_calculate(const struct FunctionTrain * ft,
                               size_t order)
{
    size_t dim = function_train_get_dim(ft);
    c3_sobol_t * sobol = malloc(sizeof(c3_sobol_t));
    if (sobol == NULL){
        fprintf(stderr, "Failure to allocate space for sobol sensitivities\n");
        exit(1);
    }
    sobol->dim = dim;
    sobol->max_order = order;
    sobol->interactions = malloc(dim * sizeof(struct SInteract *));
    if (sobol->interactions == NULL){
        fprintf(stderr, "Failure to allocate space for sobol sensitivities\n");
        exit(1);
    }
    // Build the interaction trees from the last variable backwards so that
    // when a combination is inserted, all of its sub-combinations (which use
    // larger variable indices) already exist for var_subtract lookups.
    // Each root is given dim-1 leaf slots (an upper bound; unused slots stay
    // NULL).
    for (size_t ii = 0; ii < dim; ii++){
        sobol->interactions[dim-1-ii] = sinteract_create(dim-1-ii,dim-1,0,NULL,ft);
        get_combination(dim,order-1,dim-ii,&(sobol->interactions[dim-1-ii]),NULL,
                        ft,sobol);
    }

    // Total variance of the output: E[f^2] - E[f]^2
    double mean = function_train_integrate_weighted(ft);
    double second_moment = function_train_inner_weighted(ft,ft);
    sobol->variance = second_moment - mean*mean;
    sobol->total_effects = calloc_double(ft->dim);
    struct FunctionTrain * ft_1 = NULL;;
    // Total effect of variable ii: 1 - Var(E[f | all-but-ii]) / Var(f)
    for (size_t ii = 0; ii < ft->dim; ii++){
        ft_1 = function_train_integrate_weighted_subset(ft,1,&ii);
        double fm = function_train_integrate_weighted(ft_1);
        double sm = function_train_inner_weighted(ft_1,ft_1);
        double vari = sm - fm*fm;
        sobol->total_effects[ii] = (sobol->variance - vari)/sobol->variance;
        function_train_free(ft_1); ft_1 = NULL;
    }

    return sobol;
}
/**********************************************************//**
    Free memory allocated during sobol sensitivity analysis
    (a NULL argument is a no-op)
**************************************************************/
void c3_sobol_sensitivity_free(struct C3SobolSensitivity * si)
{
    if (si == NULL){
        return;
    }
    free(si->total_effects); si->total_effects = NULL;
    for (size_t kk = 0; kk < si->dim; kk++){
        sinteract_free(si->interactions[kk]);
        si->interactions[kk] = NULL;
    }
    free(si->interactions); si->interactions = NULL;
    free(si);
}
/**********************************************************//**
    Get the contribution to the variance from the interaction
    of a set of variables

    \param[in] sobol     - sobol sensitivity structure
    \param[in] ninteract - number of variables interacting
    \param[in] vars      - variables that are interacting (ascending)

    \return variance contribution of the interaction of *vars*
**************************************************************/
double c3_sobol_sensitivity_get_interaction(
    const c3_sobol_t * sobol, size_t ninteract, const size_t * vars)
{
    struct SInteract * root = sobol->interactions[vars[0]];
    if (ninteract == 1){
        // single variable: the root node stores its main-effect variance
        return root->vari;
    }
    return sinteract_get_sensitivity(root, ninteract-1, vars+1);
}
/**********************************************************//**
    Get a total-effect sensitivity

    \param[in] sobol - sobol sensitivity structure
    \param[in] var   - variable whose total sensitivity to get

    \return total sensitivity of variable *var*
**************************************************************/
double c3_sobol_sensitivity_get_total(const c3_sobol_t * sobol, size_t var)
{
    const double * effects = sobol->total_effects;
    return effects[var];
}
/**********************************************************//**
    Get the first-order (main-effect) sensitivity

    \param[in] sobol - sobol sensitivity structure
    \param[in] var   - variable whose main sensitivity to get

    \return main-effect sensitivity of variable *var*
**************************************************************/
double c3_sobol_sensitivity_get_main(const c3_sobol_t * sobol, size_t var)
{
    // each interaction tree root stores its variable's main-effect variance
    const struct SInteract * root = sobol->interactions[var];
    return root->vari;
}
/**********************************************************//**
    Get the total variance of the random variable

    \param[in] sobol - sobol sensitivity structure

    \return the total output variance
**************************************************************/
double c3_sobol_sensitivity_get_variance(const c3_sobol_t * sobol)
{
    const double total = sobol->variance;
    return total;
}
/**********************************************************//**
    Print sobol sensitivities: total variance, then every computed
    interaction (main effects and higher orders), then total effects
**************************************************************/
void c3_sobol_sensitivity_print(const c3_sobol_t * sobol)
{
    printf("\nTotal variance:\n%G\n",sobol->variance);
    printf("\n\n\n");

    printf("\nMain effects:\n");
    size_t kk = 0;
    while (kk < sobol->dim){
        sinteract_print(sobol->interactions[kk]);
        kk++;
    }

    printf("\nTotal effects:\n");
    dprint(sobol->dim,sobol->total_effects);
}
/**********************************************************//**
    Apply an external function to the sensitivities along every
    interaction tree of the sobol structure
**************************************************************/
void c3_sobol_sensitivity_apply_external(const c3_sobol_t * sobol,
                                         void (*f)(double, size_t, size_t*,void*),
                                         void * arg)
{
    size_t kk = 0;
    while (kk < sobol->dim){
        sinteract_apply_external(sobol->interactions[kk],f,arg);
        kk++;
    }
}
/***********************************************************//**
    Allocate a linear transform Ax + b

    \param[in] dimin  - input dimension
    \param[in] dimout - output dimension
    \param[in] A      - slopes of transformation (may be NULL)
    \param[in] b      - offset of transformation (may be NULL)
    \param[in] det    - determinant of A

    \return linear transform struct

    \note
    Makes a copy of A/b. If A/b are NULL, zero-initialized space is still
    allocated so callers can fill them in afterwards.
***************************************************************/
struct LinearTransform *
linear_transform_alloc(size_t dimin, size_t dimout, double * A,
                       double * b, double det)
{
    struct LinearTransform * lt = malloc(sizeof(struct LinearTransform));
    if (lt == NULL){
        fprintf(stderr, "Failed to allocate LinearTransform\n");
        exit(1);
    }
    lt->dimin = dimin;
    lt->dimout = dimout;
    lt->A = calloc_double(dimin*dimout);
    if (A != NULL){
        memmove(lt->A,A,dimin*dimout*sizeof(double));
    }
    // BUG FIX: det was previously stored only when A != NULL, leaving
    // lt->det uninitialized (and later read by serialization) for callers
    // that pass A == NULL and fill the matrix in afterwards.
    lt->det = det;
    lt->b = calloc_double(dimout);
    if (b!= NULL){
        memmove(lt->b,b,dimout*sizeof(double));
    }
    // NOTE(review): lt->mt is not initialized here; the callers in view set
    // it after allocation -- confirm before relying on it elsewhere.

    // inverse data is only allocated when needed (linear_transform_invert)
    lt->Ainv = NULL;
    lt->binv = NULL;
    return lt;
}
/***********************************************************//**
    Copy a linear transform

    \param[in] lt - linear transform to copy

    \return newlt - deep copy of the linear transform
***************************************************************/
struct LinearTransform * linear_transform_copy(struct LinearTransform * lt)
{
    struct LinearTransform * newlt =
        linear_transform_alloc(lt->dimin, lt->dimout, lt->A, lt->b, lt->det);
    // BUG FIX: the matrix-type tag was not copied before, leaving newlt->mt
    // uninitialized and breaking later inversion/serialization of the copy.
    newlt->mt = lt->mt;
    if (lt->Ainv != NULL){
        newlt->Ainv = calloc_double(lt->dimin*lt->dimout);
        memmove(newlt->Ainv, lt->Ainv, lt->dimin * lt->dimout * sizeof(double));
        newlt->binv = calloc_double(lt->dimin);
        memmove(newlt->binv, lt->binv, lt->dimin * sizeof(double));
        newlt->detinv = lt->detinv;
        // BUG FIX: inverse matrix-type tag was also left uncopied
        newlt->mti = lt->mti;
    }
    return newlt;
}
/***********************************************************//**
    Frees memory allocated to a Linear Transform

    \param[in,out] lt - linear transform to free (NULL is a no-op)
***************************************************************/
void linear_transform_free(struct LinearTransform * lt){
    if (lt == NULL){
        return;
    }
    free(lt->A);    lt->A = NULL;
    free(lt->b);    lt->b = NULL;
    free(lt->Ainv); lt->Ainv = NULL;
    free(lt->binv); lt->binv = NULL;
    free(lt);
}
/***********************************************************//**
    Apply a linear transform y = Ax + b

    \param[in] dimx - dimension of input
    \param[in] dimb - dimension of output
    \param[in] A    - A matrix (column-major, dimb x dimx)
    \param[in] x    - input vector
    \param[in] b    - offset vector

    \return newly allocated transformed vector (caller frees)
***************************************************************/
double *
linear_transform_apply(size_t dimx, size_t dimb, double * A,
                       double * x, double * b)
{
    /* start from the offset b, then accumulate A*x on top of it */
    double * result = calloc_double(dimb);
    memmove(result, b, dimb * sizeof(double));
    cblas_dgemv(CblasColMajor, CblasNoTrans, dimb, dimx, 1.0, A,
                dimb, x, 1, 1.0, result, 1);
    return result;
}
/***********************************************************//**
    Invert a linear transform in place: computes Ainv, binv and detinv so
    that x = Ainv*y + binv inverts y = A*x + b.  No-op if the inverse has
    already been computed.  Only lower-triangular (mt == LT) transforms are
    supported; anything else prints an error and leaves the inverse unset.

    \param[in,out] lt - linear transform
***************************************************************/
void linear_transform_invert(struct LinearTransform * lt)
{
    if (lt->Ainv == NULL){
        if (lt->mt == LT){
            lt->Ainv = calloc_double(lt->dimin*lt->dimout);
            memmove(lt->Ainv, lt->A, lt->dimin * lt->dimout * sizeof(double));
            int info;
            // LAPACK: invert the lower-triangular factor in place.
            // NOTE(review): casting size_t* to int* assumes compatible
            // layout; questionable on LP64 -- confirm against the library's
            // other LAPACK call sites.
            dtrtri_("L","N",(int*)&(lt->dimout),lt->Ainv,(int*)&(lt->dimout),&info);
            if (info != 0){
                fprintf(stderr, "Error inverting lower triangular lin transform %d\n",info);
            }
            size_t ii, jj;
            lt->binv = calloc_double(lt->dimin);
            // zero the (unreferenced) upper triangle and stage -b for the
            // offset computation binv = -Ainv * b
            for (ii = 0; ii < lt->dimout; ii++){
                lt->binv[ii] = -lt->b[ii];
                for (jj = 0; jj < ii; jj++){
                    lt->Ainv[ii*lt->dimout+jj] = 0.0;//lt->Ainv[jj*lt->dimout+ii];
                }
            }
            // binv <- Ainv * binv (triangular matrix-vector product)
            cblas_dtrmv(CblasColMajor,CblasLower,CblasNoTrans,CblasNonUnit,lt->dimout,lt->Ainv,
                        lt->dimout,lt->binv,1);
            //cblas_dsymv(CblasColMajor,CblasLower,lt->dimout,-1.0,lt->Ainv,
            //            lt->dimout,lt->b,1,0.0,lt->binv,1);

            lt->detinv = 1.0/lt->det;
            lt->mti = LT;
        }
        else{
            fprintf(stderr, "Cannot invert this type of transform\n");
        }
    }
}
/***********************************************************//**
    Serialize a linear transform

    \param[in,out] ser       - stream to serialize to
    \param[in]     lt        - linear transform
    \param[in,out] totSizeIn - if NULL then serialize, if not NULL then
                               only the required byte count is returned
                               through it (ser is returned unchanged)

    \return ptr - ser shifted by the number of bytes written
***************************************************************/
unsigned char *
linear_transform_serialize(unsigned char * ser, struct LinearTransform * lt,
                           size_t *totSizeIn)
{
    // inverse data is optional; a flag records whether it follows
    int invexists = (lt->Ainv != NULL);
    // byte budget mirrors the writes below; each doublep adds a size_t
    // length prefix.  NOTE(review): assumes sizeof(enum lt_matrix_type)
    // matches the int written by serialize_int -- confirm.
    size_t totSize = (lt->dimin * lt->dimout) * sizeof(double) + sizeof(size_t) + // A
                     (lt->dimout) * sizeof(double) + sizeof(size_t) +//
                     sizeof(size_t) + sizeof(size_t) + // dimin dimout
                     sizeof(double) + sizeof(enum lt_matrix_type) + // det and mat type
                     sizeof(int); // flag for whether inverse exists

    if (invexists){
        totSize += (lt->dimin * lt->dimout) * sizeof(double) + sizeof(size_t) + // Ainv
                   (lt->dimin) * sizeof(double) + sizeof(size_t) + //binv
                   sizeof(double) + sizeof(int); // det and mat type
    }

    if (totSizeIn != NULL){
        *totSizeIn = totSize;
        return ser;
    }

    // write order must match linear_transform_deserialize exactly
    unsigned char * ptr = ser;
    ptr = serialize_size_t(ptr, lt->dimin);
    ptr = serialize_size_t(ptr, lt->dimout);
    ptr = serialize_int(ptr, lt->mt);
    ptr = serialize_int(ptr, invexists);
    ptr = serialize_doublep(ptr,lt->A, lt->dimin * lt->dimout);
    ptr = serialize_doublep(ptr,lt->b, lt->dimout);
    ptr = serialize_double(ptr,lt->det);
    if (invexists){
        ptr = serialize_int(ptr, lt->mti);
        ptr = serialize_doublep(ptr,lt->Ainv, lt->dimin * lt->dimout);
        ptr = serialize_doublep(ptr,lt->binv, lt->dimin);
        ptr = serialize_double(ptr,lt->detinv);
    }
    return ptr;
}
/********************************************************//**
*   Deserialize a linear transform (inverse of
*   linear_transform_serialize; the read order must match its writes).
*
*   \param[in]     ser - byte stream to deserialize
*   \param[in,out] lt  - receives a newly allocated linear transform
*
*   \return ptr - ser + number of bytes consumed
*************************************************************/
unsigned char *
linear_transform_deserialize(unsigned char * ser, struct LinearTransform ** lt)
{
    if ( NULL == ( (*lt) = malloc(sizeof(struct LinearTransform)))){
        fprintf(stderr, "failed to allocate memory for linear transform.\n");
        exit(1);
    }
    int invexists;
    unsigned char * ptr;
    int mat_type;
    ptr = deserialize_size_t(ser,&((*lt)->dimin));
    ptr = deserialize_size_t(ptr,&((*lt)->dimout));
    size_t Asize; // length prefixes recovered from the doublep fields
    size_t bsize;
    ptr = deserialize_int(ptr,&mat_type);
    (*lt)->mt = (enum lt_matrix_type) mat_type;
    ptr = deserialize_int(ptr,&invexists);
    ptr = deserialize_doublep(ptr,&((*lt)->A),&Asize);
    ptr = deserialize_doublep(ptr,&((*lt)->b),&bsize);
    ptr = deserialize_double(ptr,&((*lt)->det));
    if (invexists){
        ptr = deserialize_int(ptr,&mat_type);
        (*lt)->mti = (enum lt_matrix_type) mat_type;
        ptr = deserialize_doublep(ptr,&((*lt)->Ainv),&Asize);
        ptr = deserialize_doublep(ptr,&((*lt)->binv),&bsize);
        ptr = deserialize_double(ptr,&((*lt)->detinv));
    }
    else{
        // no inverse stored; mti is left unset, mirroring a fresh transform
        (*lt)->Ainv = NULL;
        (*lt)->binv = NULL;
    }
    return ptr;
}
/***********************************************************//**
    Allocate a Probability Density function

    \return pdf - allocated pdf (every member is NULL / zero;
                  `type` is left for the caller to set)
***************************************************************/
struct ProbabilityDensity *
probability_density_alloc()
{
    struct ProbabilityDensity * pdf = malloc(sizeof(struct ProbabilityDensity));
    if (pdf == NULL){
        // BUG FIX: message previously said "LinearTransform" (copy-paste)
        fprintf(stderr, "Failed to allocate ProbabilityDensity\n");
        exit(1);
    }
    pdf->pdf = NULL;
    pdf->lt = NULL;
    pdf->transform = 0;
    pdf->extra = NULL;
    return pdf;
}
/***********************************************************//**
    Frees memory allocated to a ProbabilityDensity

    \param[in,out] pdf - pdf to free (NULL is a no-op)
***************************************************************/
void probability_density_free(struct ProbabilityDensity * pdf){
    if (pdf == NULL){
        return;
    }
    function_train_free(pdf->pdf); pdf->pdf = NULL;
    linear_transform_free(pdf->lt); pdf->lt = NULL;
    assert (pdf->extra == NULL); // dont have this yet
    free(pdf);
}
/***********************************************************//**
    Serialize a probability density function

    \param[in,out] ser       - stream to serialize to
    \param[in]     pdf       - density function (pdf->extra must be NULL;
                               serialization of *extra* is unimplemented)
    \param[in,out] totSizeIn - if NULL then serialize, if not NULL then
                               only the byte count is returned through it

    \return ptr - ser shifted by number of bytes written
***************************************************************/
unsigned char *
probability_density_serialize(unsigned char * ser,
                              struct ProbabilityDensity * pdf, size_t * totSizeIn)
{

    assert(pdf->extra == NULL);
    // size = function train + linear transform + transform flag + type tag
    size_t totSize;
    if (totSizeIn != NULL){
        size_t size_pdf, size_lt;
        function_train_serialize(NULL,pdf->pdf,&size_pdf);
        linear_transform_serialize(NULL,pdf->lt,&size_lt);
        totSize = size_pdf + size_lt + sizeof(int) + sizeof(enum pdf_type);
        *totSizeIn = totSize;
        return ser;
    }
    // write order must match probability_density_deserialize
    unsigned char * ptr = ser;
    ptr = serialize_int(ptr,pdf->transform);
    ptr = serialize_int(ptr,pdf->type);
    ptr = function_train_serialize(ptr,pdf->pdf,NULL);
    ptr = linear_transform_serialize(ptr,pdf->lt,NULL);
    // NOTE(review): pdf->lt is serialized unconditionally; a density with
    // lt == NULL (e.g. standard normal, transform == 0) would crash here --
    // confirm callers only save transformed densities.
    return ptr;
}
/********************************************************//**
*   Deserialize a probability density function (inverse of
*   probability_density_serialize; read order matches its writes)
*
*   \param[in] ser - byte stream to deserialize
*   \param[in] pdf - receives a newly allocated density function
*
*   \return ptr - ser + number of bytes consumed
*************************************************************/
unsigned char *
probability_density_deserialize(unsigned char * ser,
                                struct ProbabilityDensity ** pdf)
{
    unsigned char * ptr = ser;

    int ptype; // staging int for the enum pdf_type tag
    *pdf = probability_density_alloc();
    ptr = deserialize_int(ptr,&((*pdf)->transform));
    ptr = deserialize_int(ptr,&ptype);
    (*pdf)->type = (enum pdf_type) ptype;
    ptr = function_train_deserialize(ptr,&((*pdf)->pdf));
    ptr = linear_transform_deserialize(ptr,&((*pdf)->lt));
    return ptr;
}
/***********************************************************//**
    Save a probability density function to a file

    \param[in] pdf      - pdf to save
    \param[in] filename - name of file to save to

    \return success (1) or failure (0)
***************************************************************/
int probability_density_save(struct ProbabilityDensity * pdf, char * filename)
{
    FILE *fp;
    // BUG FIX: opened with "w" (text mode) before; the payload is raw
    // serialized bytes, which text mode corrupts on some platforms
    fp = fopen(filename, "wb");
    if (fp == NULL){
        fprintf(stderr, "cat: can't open %s\n", filename);
        return 0;
    }

    size_t totsize;
    probability_density_serialize(NULL,pdf,&totsize);

    // leading size_t records the payload length for probability_density_load
    unsigned char * data = malloc(totsize+sizeof(size_t));
    if (data == NULL){
        fprintf(stderr, "can't allocate space for saving density\n");
        fclose(fp); // BUG FIX: stream was leaked on this path
        return 0;
    }

    unsigned char * ptr = serialize_size_t(data,totsize);
    ptr = probability_density_serialize(ptr,pdf,NULL);

    // BUG FIX: the fwrite result was previously ignored
    size_t nwrote = fwrite(data,sizeof(unsigned char),totsize+sizeof(size_t),fp);

    free(data); data = NULL;
    fclose(fp);
    return (nwrote == totsize+sizeof(size_t)) ? 1 : 0;
}
/***********************************************************//**
    Load a probability density function from a file

    \param[in] filename - filename to load

    \return pdf if successful, NULL otherwise
***************************************************************/
struct ProbabilityDensity * probability_density_load(char * filename)
{
    FILE *fp;
    // BUG FIX: opened with "r" (text mode) before; must match the binary
    // format written by probability_density_save
    fp = fopen(filename, "rb");
    if (fp == NULL){
        fprintf(stderr, "cat: can't open %s\n", filename);
        return NULL;
    }

    // leading size_t holds the payload length
    size_t totsize;
    size_t k = fread(&totsize,sizeof(size_t),1,fp);
    if ( k != 1){
        printf("error reading file %s\n",filename);
        fclose(fp); // BUG FIX: stream was leaked on this path
        return NULL;
    }

    unsigned char * data = malloc(totsize);
    if (data == NULL){
        fprintf(stderr, "can't allocate space for loading density\n");
        fclose(fp); // BUG FIX: stream was leaked on this path
        return NULL;
    }

    k = fread(data,sizeof(unsigned char),totsize,fp);
    // BUG FIX: a short or failed read was previously ignored and the
    // truncated buffer handed to the deserializer
    if (k != totsize){
        printf("error reading file %s\n",filename);
        free(data); data = NULL;
        fclose(fp);
        return NULL;
    }

    struct ProbabilityDensity * pdf = NULL;
    probability_density_deserialize(data,&pdf);

    free(data); data = NULL;
    fclose(fp);
    return pdf;
}
// Coordinate-wise evaluator for a standard multivariate normal density,
// in the fiber-evaluation form expected by the function-train wrapper.
//   n    - number of evaluation points
//   dim  - which coordinate is being evaluated (must be < *totdim)
//   x    - the n evaluation locations
//   out  - preallocated output array (length n)
//   args - pointer to the total dimension (size_t)
// Returns 0 on success; exits if dim is out of range.
int pdf_stdmvn_helper(size_t n, size_t dim,const double *x, double * out, void * args)
{
    size_t * totdim = args;
    if (dim >= *totdim){
        fprintf(stderr, "Arguments to pdf_stdmvn_helper specify dimension larger than totalt\n");
        exit(1);
    }
    /* assert (dim < *totdim); */
    // full normalization is 1/sqrt((2*pi)^d); take its d-th root so that the
    // product over all d coordinates reproduces the full constant
    double scale = 1.0 / sqrt(pow(2.0*M_PI,*totdim));
    /* double scale[4] = {1.0, 2.0, 3.0, 4.0}; */
    /* double width[4] = { 0.1, 0.2, 0.3, 0.4} */
    scale = pow(scale,1.0/( (double) *totdim));
    for (size_t ii = 0; ii < n; ii++){
        out[ii] = scale * exp(-0.5*pow(x[ii],2));
        /* out[ii] = scale[dim] * exp(-1.0/width[dim] * pow(x[ii],2)); // for dimension dependent output */
    }

    return 0;
}
/***********************************************************//**
    Construct a standard normal pdf as a rank-one function train of
    linear-element approximations on [-10, 10] per dimension

    \param[in] dim - number of dimensions

    \return gaussian pdf (type GAUSSIAN, no linear transform)
***************************************************************/
struct ProbabilityDensity * probability_density_standard_normal(size_t dim)
{
    /* struct BoundingBox * bds = bounding_box_init_std(dim); */
    /* size_t ii; */
    /* for (ii = 0; ii < dim; ii++){ */
    /*     bds->lb[ii] = -10.0; */
    /*     bds->ub[ii] = 10.0; */
    /* } */

    // adaptive piecewise-linear 1d approximation options
    double lb = -10.0;
    double ub = 10.0;
    double hmin = 1e-3;
    double delta = 1e-5;
    struct LinElemExpAopts * opts = lin_elem_exp_aopts_alloc_adapt(0,NULL,lb,ub,delta,hmin);
    struct OneApproxOpts * qmopts = one_approx_opts_alloc(LINELM,opts);
    struct MultiApproxOpts * fopts = multi_approx_opts_alloc(dim);
    // NOTE(review): qmopts is created and later freed but never attached to
    // fopts (e.g. via a set-all call) before function_train_rankone uses
    // fopts -- confirm this is intentional.

    /* size_t init_rank = 3; */
    /* double round_tol = 1e-3; */
    /* double cross_tol = 1e-8; */
    /* struct C3Approx * c3a = c3approx_create(CROSS,dim,bds->lb,bds->ub); */
    /* c3approx_init_cross(c3a,init_rank,0); */
    /* c3approx_set_round_tol(c3a,round_tol); */
    /* c3approx_set_cross_tol(c3a,cross_tol); */
    /* struct FtApproxArgs * fapp = c3approx_get_approx_args(c3a); */

    struct ProbabilityDensity * pdf = probability_density_alloc();
    // wrap the per-coordinate gaussian evaluator for rank-one construction
    struct Fwrap * fw = fwrap_create(1,"MOVEC");
    fwrap_set_mofvec(fw,pdf_stdmvn_helper,&dim);
    pdf->pdf = function_train_rankone(fopts,fw);
    pdf->type = GAUSSIAN;

    /* bounding_box_free(bds); */
    /* c3approx_destroy(c3a); */
    one_approx_opts_free_deep(&qmopts);
    multi_approx_opts_free(fopts);
    fwrap_destroy(fw);
    return pdf;
}
/***********************************************************//**
    Construct a multivariate normal distribution as a standard normal
    plus a linear transform x = L z + mean, where L is the lower
    cholesky factor of the covariance

    \param[in] dim  - number of dimensions
    \param[in] mean - mean of distribution (length dim)
    \param[in] cov  - covariance of distribution (dim x dim, column-major;
                      lower triangle is used)

    \return gaussian pdf
***************************************************************/
struct ProbabilityDensity *
probability_density_mvn(size_t dim, double * mean, double * cov)
{
    struct ProbabilityDensity * pdf =
        probability_density_standard_normal(dim);

    //printf("in mvn!\n");
    pdf->transform = 1;
    pdf->lt = linear_transform_alloc(dim,dim,NULL,NULL,0.0);
    memmove(pdf->lt->b, mean, sizeof(double) * dim);
    memmove(pdf->lt->A, cov, dim*dim*sizeof(double));

    // compute cholesky (in place: A becomes the factor)
    // NOTE(review): casting size_t* to int* for the LAPACK call assumes
    // compatible layout; questionable on LP64 -- confirm.
    int info;
    dpotrf_("L",(int*)&dim,pdf->lt->A,(int*)&dim,&info);
    pdf->lt->mt = LT;
    if (info != 0){
       fprintf(stdout,"Warning: cholesky finished with info=%d\n",info);
    }

    // det(L) = product of diagonal entries; also zero the upper triangle,
    // which dpotrf leaves untouched
    size_t ii, jj;
    pdf->lt->det = 1.0;
    for (ii = 0; ii < dim; ii++){
        pdf->lt->det *= pdf->lt->A[ii*dim+ii];
        for (jj = 0; jj < ii; jj++){
            pdf->lt->A[ii*dim+jj] = 0.0;
        }
    }
    pdf->type = GAUSSIAN;
    return pdf;
}
/***********************************************************//**
    Generate a sample from a probability density function by
    transforming a standard-normal draw through the density's
    linear transform

    \param[in] pdf - probability density function (must be GAUSSIAN;
                     anything else aborts)

    \return newly allocated sample (caller frees)

    \note
    NOTE(review): assumes pdf->lt is set (as done by
    probability_density_mvn); a density straight from
    probability_density_standard_normal has lt == NULL -- confirm.
***************************************************************/
double * probability_density_sample(struct ProbabilityDensity * pdf)
{
    if (pdf->type != GAUSSIAN){
        fprintf(stderr,"Cannot sample from a non-Gaussian pdf");
        exit(1);
    }
    // BUG FIX (cleanup): a second `if (pdf->type == GAUSSIAN)` check here
    // was always true after the guard above; the dead branch is removed.
    size_t dim = pdf->pdf->dim;
    double * norm = calloc_double(dim);
    for (size_t ii = 0; ii < dim; ii++){
        norm[ii] = randn();
    }
    // x = A*z + b maps the standard normal draw to the target gaussian
    double * out = linear_transform_apply(dim,dim,pdf->lt->A,
                                          norm,pdf->lt->b);
    free(norm); norm = NULL;
    return out;
}
/***********************************************************//**
    Construct a laplace (gaussian) approximation to a posterior:
    minimize the negative log posterior with BFGS to find the mode,
    then use the pseudo-inverse of the hessian there as covariance

    \param[in] f           - objective (negative log posterior) in
                             c3opt form
    \param[in] hessLogPost - hessian of the log posterior at a point
    \param[in] args        - arguments to f and hessLogPost
    \param[in] dim         - dimension of state space
    \param[in] start       - initial guess for the optimization (copied)

    \return gaussian pdf centered at the mode
***************************************************************/
struct ProbabilityDensity *
/* probability_density_laplace(double *(*gradLogPost)(double * x, void * args),  */
/*                             void * args, size_t dim, double * start) */
probability_density_laplace(double (*f)(size_t,const double *,double*,void *),
                            double *(*hessLogPost)(double * x, void * args),
                            void * args, size_t dim, double * start)
{
    //double tol = 1e-4;
    double * mean = calloc_double(dim);
    memmove(mean,start,dim*sizeof(double));

    // unconstrained BFGS: bounds set to +/- DBL_MAX
    struct c3Opt * opt = c3opt_alloc(BFGS,dim);
    double * lb = calloc_double(dim);
    double * ub = calloc_double(dim);
    for (size_t ii = 0; ii < dim; ii++){
        lb[ii] = -DBL_MAX;
        ub[ii] = DBL_MAX;
    }
    c3opt_add_lb(opt,lb);
    c3opt_add_ub(opt,ub);
    c3opt_add_objective(opt,f,args);
    c3opt_set_verbose(opt,0);
    // NOTE(review): lb, ub and opt are never freed before returning --
    // possible leak unless c3opt takes ownership of the bounds; confirm
    // against the c3opt API.

    double val;
    int res = c3opt_minimize(opt,mean,&val);
    /* printf("res = %d\n",res); */
    if (res <= -1){
        fprintf(stderr, "Failure to Optimize in probability_density_laplace\n");
        exit(1);
    }
    /* assert (res >- 1); */
    /* printf("do newtons method\n"); */
    /* newton(&mean, dim, 1.0, tol, gradLogPost,hessLogPost,args); */

    // covariance = pseudo-inverse of the hessian at the mode
    double * hess = hessLogPost(mean,args);

    double * cov = calloc_double(dim*dim);
    pinv(dim,dim,dim,hess,cov,0.0);

    struct ProbabilityDensity * pdf =
        probability_density_mvn(dim, mean, cov);

    free(mean); mean = NULL;
    free(hess); hess = NULL;
    free(cov); cov= NULL;
    return pdf;
}
/***********************************************************//**
    Evaluate the probability density function; returns 0 when x (after any
    inverse transform) falls outside the function train's base domain

    \param[in] pdf - pdf to evaluate
    \param[in] x   - location at which to evaluate

    \return out - evaluation
***************************************************************/
double probability_density_eval(struct ProbabilityDensity * pdf, double * x)
{
    double out = 0.0;
    // base-domain bounds of the underlying function train (freshly
    // allocated; freed below)
    double * lb = probability_density_lb_base(pdf);
    double * ub = probability_density_ub_base(pdf);
    //assert(pdf->transform == 0); // havent implemented transform yet
    if (pdf->transform == 1){
        //printf("not implemented yet!\n");
        // map x back to base coordinates: z = Ainv*x + binv
        linear_transform_invert(pdf->lt);
        double * temp = linear_transform_apply(pdf->lt->dimout,
                pdf->lt->dimin, pdf->lt->Ainv, x, pdf->lt->binv);

        size_t ii;
        int good = 1;
        // zero density outside the base hyper-rectangle
        for (ii = 0; ii < pdf->pdf->dim; ii++){
            if (temp[ii] < lb[ii]){
                good = 0;
                out = 0.0;
                break;
            }
            else if (temp[ii] > ub[ii]){
                good = 0;
                out = 0.0;
                break;
            }
        }
        if (good == 1){
            // change-of-variables jacobian: |det(Ainv)|
            out = function_train_eval(pdf->pdf,temp) * fabs(pdf->lt->detinv);
        }

        free(temp); temp = NULL;
    }
    else{
        size_t ii;
        int good = 1;
        for (ii = 0; ii < pdf->pdf->dim; ii++){
            if (x[ii] < lb[ii]){
                good = 0;
                out = 0.0;
                break;
            }
            else if (x[ii] > ub[ii]){
                good = 0;
                out = 0.0;
                break;
            }
        }
        if (good == 1){
            out = function_train_eval(pdf->pdf,x);
        }
    }
    free(lb); lb = NULL;
    free(ub); ub = NULL;

    return out;
}
/***********************************************************//**
    Evaluate the gradient of the log probability density function,
    using grad log p = (grad p)/p (with the chain rule through the
    linear transform when one is present)

    \param[in] pdf - gradient of log(pdf) is obtained
    \param[in] x   - location at which to obtain the gradient

    \return out - gradient of log pdf at *x* (newly allocated), or NULL
                  for an unsupported transform type
***************************************************************/
double *
probability_density_log_gradient_eval(struct ProbabilityDensity * pdf,
                                      double * x)
{
    double * out = NULL;
    //printf("here!\n");
    struct FT1DArray * grad = function_train_gradient(pdf->pdf);
    //printf("got grad\n");
    if (pdf->transform == 0){
        // grad log p(x) = grad p(x) / p(x)
        double val = 1.0 / probability_density_eval(pdf,x);
        out = ft1d_array_eval(grad,x);
        size_t ii;
        for (ii = 0; ii < grad->size; ii++){
            out[ii] = out[ii] * val;
        }
    }
    else if (pdf->transform == 1){
        if (pdf->lt->Ainv == NULL){
            linear_transform_invert(pdf->lt);
        }
        // z = Ainv*x + binv; chain rule gives Ainv^T * grad log p(z)
        double * xtemp = linear_transform_apply(pdf->lt->dimout,
                pdf->lt->dimin, pdf->lt->Ainv, x, pdf->lt->binv);
        double val = 1.0 / function_train_eval(pdf->pdf, xtemp);
        double * temp = ft1d_array_eval(grad,xtemp);
        out = calloc_double(pdf->pdf->dim);
        cblas_dgemv(CblasColMajor, CblasTrans, pdf->pdf->dim, pdf->pdf->dim,
                    val, pdf->lt->Ainv, pdf->pdf->dim,temp, 1,
                    0.0, out, 1);
        free(xtemp); xtemp = NULL;
        free(temp); temp = NULL;
    }
    else{
        fprintf(stderr,
                "gradient of LOG pdf with transform type %d is not available\n",
                pdf->transform);
    }
    ft1d_array_free(grad); grad = NULL;
    return out;
}
/***********************************************************//**
    Evaluate the hessian of the log probability density function, using
    H log p = H(p)/p - (grad p)(grad p)^T / p^2 (conjugated by Ainv when
    a linear transform is present)

    \param[in] pdf - hessian of log(pdf) is obtained
    \param[in] x   - location at which to obtain the hessian

    \return out - hessian of log pdf at *x* (newly allocated dim x dim,
                  column-major), or NULL for an unsupported transform type
***************************************************************/
double *
probability_density_log_hessian_eval(struct ProbabilityDensity * pdf,
                                     double * x)
{
    double * out = NULL;
    if (pdf->transform == 0){
        double val = 1.0 / probability_density_eval(pdf,x);
        double val_squared = pow(val,2);

        struct FT1DArray * grad = function_train_gradient(pdf->pdf);
        double * grad_eval = ft1d_array_eval(grad, x);

        struct FT1DArray * hess = function_train_hessian(pdf->pdf);
        double * hess_eval = ft1d_array_eval(hess,x);

        out = calloc_double(pdf->pdf->dim * pdf->pdf->dim);
        size_t ii,jj;
        // out[i][j] = -grad_i grad_j / p^2 + hess_ij / p
        for (ii = 0; ii < pdf->pdf->dim; ii++){
            for (jj = 0; jj < pdf->pdf->dim; jj++){
                out[jj*pdf->pdf->dim+ii] =
                    -val_squared * grad_eval[ii] * grad_eval[jj] +
                    val * hess_eval[jj*pdf->pdf->dim+ii];
            }
        }
        free(grad_eval); grad_eval = NULL;
        free(hess_eval); hess_eval = NULL;
        ft1d_array_free(grad); grad = NULL;
        ft1d_array_free(hess); hess = NULL;
    }
    else if (pdf->transform == 1){
        if (pdf->lt->Ainv == NULL){
            linear_transform_invert(pdf->lt);
        }
        // evaluate everything in base coordinates z = Ainv*x + binv
        double * xtemp = linear_transform_apply(pdf->lt->dimout,
                pdf->lt->dimin, pdf->lt->Ainv, x, pdf->lt->binv);

        double val = 1.0 / function_train_eval(pdf->pdf,xtemp);
        double val_squared = pow(val,2.0);

        struct FT1DArray * grad = function_train_gradient(pdf->pdf);
        double * grad_eval = ft1d_array_eval(grad, xtemp);

        struct FT1DArray * hess = function_train_hessian(pdf->pdf);
        double * hess_eval = ft1d_array_eval(hess,xtemp);

        out = calloc_double(pdf->lt->dimout * pdf->lt->dimout);
        size_t ii, jj;
        // grad_prod_temp[i] = (Ainv^T grad)_i, i.e. grad dotted with
        // column i of Ainv
        double * grad_prod_temp = calloc_double(pdf->lt->dimout);
        for (ii = 0; ii < pdf->lt->dimout; ii++){
            grad_prod_temp[ii] =
                cblas_ddot(pdf->lt->dimout,grad_eval,1,
                           pdf->lt->Ainv + pdf->lt->dimout*ii,1);
        }
        // out = Ainv^T * (H/p) * Ainv - (Ainv^T grad)(Ainv^T grad)^T / p^2
        double * temp = calloc_double(pdf->lt->dimout);
        for (ii = 0; ii < pdf->lt->dimout; ii++){
            cblas_dgemv(CblasColMajor, CblasTrans, pdf->pdf->dim,
                        pdf->pdf->dim, val, hess_eval, pdf->pdf->dim,
                        pdf->lt->Ainv+ii*pdf->lt->dimout, 1, 0.0, temp, 1);

            for (jj = 0; jj < pdf->lt->dimout; jj++){
                out[jj*pdf->lt->dimout + ii] = - grad_prod_temp[jj];
                out[jj*pdf->lt->dimout + ii] *= grad_prod_temp[ii];
                out[jj*pdf->lt->dimout + ii] *= val_squared;
                out[jj*pdf->lt->dimout+ii] +=
                        cblas_ddot(pdf->lt->dimout,temp,1,
                                   pdf->lt->Ainv+jj*pdf->lt->dimout,1);
            }
        }
        free(grad_prod_temp); grad_prod_temp = NULL;
        free(temp); temp = NULL;
        free(grad_eval); grad_eval = NULL;
        free(hess_eval); hess_eval = NULL;
        free(xtemp); xtemp = NULL;
        ft1d_array_free(grad); grad = NULL;
        ft1d_array_free(hess); hess = NULL;
    }
    else{
        fprintf(stderr,
                "gradient of LOG pdf with transform type %d is not available\n",
                pdf->transform);
    }

    return out;
}
/***********************************************************//**
Compute the mean from the pdf
\param[in] pdf whose mean to compute
\return out - mean of the pdf
\note
I can make this faster by precomputing the integrals of each
core first.
***************************************************************/
/* double * probability_density_mean(struct ProbabilityDensity * pdf) */
/* { */
/* size_t ii; */
/* size_t dimft = pdf->pdf->dim; // dimension of ft */
/* size_t dimpdf = dimft; // dimension of pdf variable */
/* double * mean = NULL; */
/* struct BoundingBox * bds = bounding_box_init_std(dimft); */
/* for (ii = 0; ii < dimft; ii++){ */
/* bds->lb[ii] = */
/* generic_function_get_lb(pdf->pdf->cores[ii]->funcs[0]); */
/* bds->ub[ii] = */
/* generic_function_get_ub(pdf->pdf->cores[ii]->funcs[0]); */
/* } */
/* if (pdf->transform == 1){ */
/* dimpdf = pdf->lt->dimout; */
/* mean = calloc_double(dimpdf); */
/* double * offset = calloc_double(dimft); */
/* for (ii = 0; ii < dimpdf; ii++){ */
/* offset[0] = pdf->lt->b[ii]; */
/* struct FunctionTrain * ftlin = */
/* function_train_linear2(LINELM,NULL,dimft, */
/* bds,pdf->lt->A+ii,dimpdf, */
/* offset,1,NULL); */
/* mean[ii] = function_train_inner(ftlin,pdf->pdf); */
/* //printf("mean[%zu]=%G\n",ii,mean[ii]); */
/* function_train_free(ftlin); */
/* } */
/* free(offset); */
/* } */
/* else{ */
/* mean = calloc_double(dimft); */
/* for (ii = 0; ii < dimpdf; ii++){ */
/* struct Qmarray * temp = qmarray_copy(pdf->pdf->cores[ii]); */
/* struct Qmarray * tempx = qmarray_alloc(1,1); */
/* tempx->funcs[0] = */
/* generic_function_linear(1.0,0.0,LINELM,NULL, */
/* bds->lb[ii],bds->ub[ii],NULL); */
/* qmarray_free(pdf->pdf->cores[ii]); */
/* pdf->pdf->cores[ii] = qmarray_kron(tempx,temp); */
/* mean[ii] = function_train_integrate(pdf->pdf); */
/* qmarray_free(pdf->pdf->cores[ii]); */
/* pdf->pdf->cores[ii] = qmarray_copy(temp); */
/* qmarray_free(temp); */
/* qmarray_free(tempx); */
/* } */
/* } */
/* bounding_box_free(bds); bds = NULL; */
/* return mean; */
/* } */
/***********************************************************//**
Compute the covariance from the pdf
\param[in] pdf - pdf whose mean to compute
\return covariance matrix
\note
I can make this faster by precomputing the integrals of each
core first.
***************************************************************/
/* double * probability_density_cov(struct ProbabilityDensity * pdf) */
/* { */
/* size_t ii, jj; */
/* size_t dimft = pdf->pdf->dim; // dimension of ft */
/* size_t dimpdf = dimft; // dimension of pdf variable */
/* double * cov = NULL; */
/* struct BoundingBox * bds = bounding_box_init_std(dimft); */
/* for (ii = 0; ii < dimft; ii++){ */
/* bds->lb[ii] = generic_function_get_lb(pdf->pdf->cores[ii]->funcs[0]); */
/* bds->ub[ii] = generic_function_get_ub(pdf->pdf->cores[ii]->funcs[0]); */
/* } */
/* double * mean = probability_density_mean(pdf); */
/* if (pdf->transform == 1){ */
/* dimpdf = pdf->lt->dimout; */
/* cov = calloc_double(dimpdf*dimpdf); */
/* double * offset = calloc_double(dimft); */
/* for (ii = 0; ii < dimpdf; ii++){ */
/* offset[0] = pdf->lt->b[ii] - mean[ii]; */
/* struct FunctionTrain * ftleft = */
/* function_train_linear2(LINELM,NULL,dimft,bds, */
/* pdf->lt->A+ii,dimpdf,offset,1,NULL); */
/* struct FunctionTrain * temp = function_train_product(ftleft,ftleft); */
/* cov[ii*dimpdf+ii] = function_train_inner(temp,pdf->pdf); */
/* function_train_free(temp); */
/* for (jj = ii+1; jj < dimpdf; jj++){ */
/* offset[0] = pdf->lt->b[jj] - mean[jj]; */
/* struct FunctionTrain * ftright = */
/* function_train_linear2(LINELM,NULL,dimft,bds, */
/* pdf->lt->A+jj,dimpdf,offset,1,NULL); */
/* struct FunctionTrain * ftprod = NULL; */
/* ftprod = function_train_product(ftleft,ftright); */
/* cov[ii*dimpdf+jj] = function_train_inner(ftprod,pdf->pdf); */
/* cov[jj*dimpdf+ii] = cov[ii*dimpdf+jj]; */
/* function_train_free(ftright); ftright = NULL; */
/* function_train_free(ftprod); ftprod = NULL; */
/* function_train_free(ftright); */
/* function_train_free(ftprod); */
/* } */
/* function_train_free(ftleft); */
/* } */
/* free(offset); */
/* } */
/* else{ */
/* cov = calloc_double(dimpdf * dimpdf); */
/* // enum poly_type ptype = LEGENDRE; */
/* struct FunctionTrain * ftc = function_train_copy(pdf->pdf); */
/* struct FunctionTrain * xvals = function_train_alloc(dimft); */
/* for (ii = 0; ii < dimpdf; ii++){ */
/* double lb = generic_function_get_lb(ftc->cores[ii]->funcs[0]); */
/* double ub = generic_function_get_ub(ftc->cores[ii]->funcs[0]); */
/* xvals->cores[ii] = qmarray_alloc(1,1); */
/* xvals->cores[ii]->funcs[0] = */
/* generic_function_linear(1.0,-mean[ii],LINELM,NULL,lb,ub,NULL); */
/* } */
/* for (ii = 0; ii < dimpdf; ii++){ */
/* struct Qmarray * temp = qmarray_kron(xvals->cores[ii],pdf->pdf->cores[ii]); */
/* qmarray_free(ftc->cores[ii]); */
/* ftc->cores[ii] = qmarray_kron(xvals->cores[ii],temp); */
/* cov[ii*dimpdf+ii] = function_train_integrate(ftc); */
/* qmarray_free(ftc->cores[ii]); */
/* //maybe dont have to copy bc freeing later */
/* ftc->cores[ii] = qmarray_copy(temp); */
/* qmarray_free(temp); */
/* for (jj = ii+1; jj < dimpdf; jj++){ */
/* qmarray_free(ftc->cores[jj]); */
/* ftc->cores[jj] = qmarray_kron(xvals->cores[jj],pdf->pdf->cores[jj]); */
/* cov[ii*dimpdf+jj] = function_train_integrate(ftc); */
/* cov[jj*dimpdf+ii] = cov[ii*dimpdf+jj]; */
/* qmarray_free(ftc->cores[jj]); */
/* ftc->cores[jj] = qmarray_copy(pdf->pdf->cores[jj]); */
/* } */
/* qmarray_free(ftc->cores[ii]); */
/* ftc->cores[ii] = qmarray_copy(pdf->pdf->cores[ii]); */
/* } */
/* function_train_free(xvals); */
/* function_train_free(ftc); */
/* } */
/* bounding_box_free(bds); bds=NULL; */
/* free(mean); mean=NULL; */
/* return cov; */
/* } */
/***********************************************************//**
Compute the variance (only diagonal of covariance matrix) from the pdf
\param[in] pdf - pdf whose mean to compute
\return out - variance of all variables
\note
I can make this faster by precomputing the integrals of each
core first.
***************************************************************/
/* double * probability_density_var(struct ProbabilityDensity * pdf) */
/* { */
/* size_t ii; */
/* size_t dimft = pdf->pdf->dim; // dimension of ft */
/* size_t dimpdf = dimft; // dimension of pdf variable */
/* double * var = NULL; */
/* struct BoundingBox * bds = bounding_box_init_std(dimft); */
/* for (ii = 0; ii < dimft; ii++){ */
/* bds->lb[ii] = generic_function_get_lb(pdf->pdf->cores[ii]->funcs[0]); */
/* bds->ub[ii] = generic_function_get_ub(pdf->pdf->cores[ii]->funcs[0]); */
/* } */
/* double * mean = probability_density_mean(pdf); */
/* if (pdf->transform == 1){ */
/* dimpdf = pdf->lt->dimout; */
/* var = calloc_double(dimpdf); */
/* double * offset = calloc_double(dimft); */
/* for (ii = 0; ii < dimpdf; ii++){ */
/* offset[0] = pdf->lt->b[ii] - mean[ii]; */
/* struct FunctionTrain * ftleft = */
/* function_train_linear2(LINELM,NULL,dimft,bds, */
/* pdf->lt->A+ii,dimpdf,offset,1,NULL); */
/* struct FunctionTrain * temp = function_train_product(ftleft,ftleft); */
/* var[ii] = function_train_inner(temp,pdf->pdf); */
/* function_train_free(temp); */
/* function_train_free(ftleft); */
/* } */
/* free(offset); offset=NULL; */
/* } */
/* else{ */
/* var = calloc_double(dimpdf); */
/* struct FunctionTrain * ftc = function_train_copy(pdf->pdf); */
/* struct FunctionTrain * xvals = function_train_alloc(dimft); */
/* for (ii = 0; ii < dimpdf; ii++){ */
/* double lb = generic_function_get_lb(ftc->cores[ii]->funcs[0]); */
/* double ub = generic_function_get_ub(ftc->cores[ii]->funcs[0]); */
/* xvals->cores[ii] = qmarray_alloc(1,1); */
/* xvals->cores[ii]->funcs[0] = */
/* generic_function_linear(1.0,-mean[ii],LINELM,NULL,lb,ub,NULL); */
/* } */
/* for (ii = 0; ii < dimpdf; ii++){ */
/* struct Qmarray * temp = qmarray_kron(xvals->cores[ii],pdf->pdf->cores[ii]); */
/* qmarray_free(ftc->cores[ii]); */
/* ftc->cores[ii] = qmarray_kron(xvals->cores[ii],temp); */
/* qmarray_free(temp); temp = NULL; */
/* var[ii] = function_train_integrate(ftc); */
/* qmarray_free(ftc->cores[ii]); */
/* //maybe dont have to copy bc freeing later */
/* ftc->cores[ii] = qmarray_copy(pdf->pdf->cores[ii]); */
/* } */
/* function_train_free(xvals); xvals = NULL; */
/* function_train_free(ftc); ftc = NULL; */
/* } */
/* bounding_box_free(bds); */
/* free(mean); mean = NULL; */
/* return var; */
/* } */
/***********************************************************//**
    Get the lower bounds of the underlying (transformed) pdf

    \param[in] pdf - pdf whose lower bounds to obtain

    \return array of per-dimension lower bounds (caller frees)
***************************************************************/
double * probability_density_lb_base(struct ProbabilityDensity * pdf)
{
    size_t ndim = pdf->pdf->dim;
    double * bounds = calloc_double(ndim);
    /* take the lower bound of each core's first univariate function */
    size_t kk = 0;
    while (kk < ndim){
        bounds[kk] = generic_function_get_lb(pdf->pdf->cores[kk]->funcs[0]);
        kk++;
    }
    return bounds;
}
/***********************************************************//**
    Get the upper bounds of the underlying (transformed) pdf

    \param[in] pdf - pdf whose upper bounds to obtain

    \return array of per-dimension upper bounds (caller frees)
***************************************************************/
double * probability_density_ub_base(struct ProbabilityDensity * pdf)
{
    size_t ndim = pdf->pdf->dim;
    double * bounds = calloc_double(ndim);
    /* take the upper bound of each core's first univariate function */
    size_t kk = 0;
    while (kk < ndim){
        bounds[kk] = generic_function_get_ub(pdf->pdf->cores[kk]->funcs[0]);
        kk++;
    }
    return bounds;
}
struct LikeParamDataCouple {
double * param;
double * data;
};
double like_poisson_helper(double x, size_t dim, void * args)
{
struct LikeParamDataCouple * pdc = args;
double out = pdc->data[dim] * x -
pdc->param[0] * exp(x);
//printf("out=%G\n",out);
out = exp(out);
return out;
}
/***********************************************************//**
Allocate a likelihood function
\param[in] datadim - dimension of the data
\param[in] data - data
\param[in] paramdim - dimension of extra parameters
\param[in] param - parameters
\param[in] inputdim - dimension of the input
\param[in] lb - lower bound of each dimension
\param[in] ub - upper bound of each dimension
\param[in] type - likelihood type
\return like - likelihood function
***************************************************************/
/* struct Likelihood * */
/* likelihood_alloc(size_t datadim, double * data, size_t paramdim, */
/* double * param, size_t inputdim, */
/* double lb, double ub, enum likelihood_type type) */
/* { */
/* struct Likelihood * like = malloc(sizeof(struct Likelihood)); */
/* if (like == NULL){ */
/* fprintf(stderr, "Failed to allocate Likelihood\n"); */
/* exit(1); */
/* } */
/* like->datadim = datadim; */
/* like->paramdim = paramdim; */
/* like->inputdim = inputdim; */
/* like->type = type; */
/* if (type == POISSON_LIKE){ */
/* enum poly_type ptype = LEGENDRE; */
/* size_t start_num = 7; */
/* size_t c_check = 2; */
/* struct OpeAdaptOpts ao; */
/* ao.start_num = start_num; */
/* ao.coeffs_check = c_check; */
/* ao.tol = 1e-10; */
/* struct FtApproxArgs * fapp = */
/* ft_approx_args_createpoly(inputdim,&ptype,&ao); */
/* struct BoundingBox * bds = bounding_box_init_std(inputdim); */
/* size_t ii; */
/* for (ii = 0; ii < inputdim; ii++){ */
/* bds->lb[ii] = lb; */
/* bds->ub[ii] = ub; */
/* } */
/* struct LikeParamDataCouple args = {param,data}; */
/* //printf("compute here\n"); */
/* like->like = function_train_rankone(inputdim,like_poisson_helper, */
/* &args,bds,fapp); */
/* //printf("compute there\n"); */
/* like->loglike = 0; */
/* bounding_box_free(bds); */
/* ft_approx_args_free(fapp); */
/* } */
/* else{ */
/* like->like = NULL; */
/* } */
/* return like; */
/* } */
/***********************************************************
Allocate and initialize a Gaussian likelihood
\param[in] noise_type - type of noise
\param[in] ndata - number of data points
\param[in] datadim - dimension of the data
\param[in] data - data (ndata * datadim,)
\param[in] paramdim - dimension of parameters
\param[in] param - parameters defining noise
\param[in] inputdim - dimension of the input
\param[in] meanfunc - function array evaluate the mean of the Gaussian
(ndata * datadim,)
\return like - likelihood function
\note
noise_type = 0
- paramdim should be = 1, param indicates noise std)
noise_type = 1
- paramdim should be = datadim,
param specifies diff noise std for each dimension)
***************************************************************/
/* struct Likelihood * likelihood_gaussian(int noise_type, size_t ndata, */
/* size_t datadim, double * data, size_t paramdim, double * param, */
/* size_t inputdim, struct FT1DArray * meanfunc) */
/* { */
/* struct Likelihood * like = malloc(sizeof(struct Likelihood)); */
/* if (like == NULL){ */
/* fprintf(stderr, "Failed to allocate Likelihood\n"); */
/* exit(1); */
/* } */
/* like->type = GAUSSIAN_LIKE; */
/* like->datadim = datadim; */
/* like->paramdim = paramdim; */
/* like->inputdim = inputdim; */
/* struct FunctionTrain * temp = NULL; */
/* struct FunctionTrain * temp2 = NULL; */
/* size_t ii,jj; */
/* like->logextra = 0.0; */
/* for (jj = 0; jj < ndata; jj++){ */
/* // printf("jj = %zu\n",jj); */
/* struct FT1DArray * moff = ft1d_array_alloc(datadim); */
/* for (ii = 0; ii < datadim; ii++){ */
/* // printf("ii=%zu, data=%G \n",ii,data[jj*datadim+ii]); */
/* moff->ft[ii] = function_train_afpb(1.0,-data[jj*datadim+ii], */
/* meanfunc->ft[jj*datadim+ii],1e-12); */
/* } */
/* // printf("got moff noise_type = %d\n",noise_type); */
/* if (noise_type == 0){ */
/* assert (paramdim == 1); */
/* double * coeff = darray_val(datadim,-0.5/pow(param[0],2)); */
/* // printf("datadim = %zu\n",datadim); */
/* temp = ft1d_array_sum_prod(datadim,coeff,moff,moff,1e-10); */
/* // printf("got sum\n"); */
/* free(coeff); coeff = NULL; */
/* //printf("here! %G \n", log(pow(pow(pow(param[0],2),datadim),-0.5))); */
/* like->logextra += log(pow(2.0*M_PI,- ((double) datadim) /2.0) * */
/* pow(pow(param[0] * param[0],datadim),-0.5)); */
/* // printf("log extra is now = %G\n", like->logextra); */
/* } */
/* else if (noise_type == 1){ */
/* assert (paramdim == datadim); */
/* double * coeff = calloc_double(paramdim); */
/* double tempdet = 1.0; */
/* for (ii = 0; ii < paramdim; ii++){ */
/* coeff[ii] = -0.5 / pow(param[ii],2); */
/* tempdet *= pow(param[ii],2); */
/* } */
/* like->logextra += log( pow(2*M_PI,-(double) datadim/2.0) * */
/* pow(tempdet,-0.5)); */
/* temp = ft1d_array_sum_prod(datadim,coeff,moff,moff,1e-10); */
/* free(coeff); coeff = NULL; */
/* } */
/* else{ */
/* fprintf(stderr, */
/* "Noise type (%d) not available for Gaussian likelihood", */
/* noise_type); */
/* exit(1); */
/* } */
/* if (jj == 0){ */
/* like->like = function_train_copy(temp); */
/* } */
/* else{ */
/* temp2 = function_train_sum(temp,like->like); */
/* function_train_free(like->like); like->like = NULL; */
/* like->like = function_train_round(temp2,1e-10); */
/* function_train_free(temp2); temp2 = NULL; */
/* } */
/* function_train_free(temp); temp = NULL; */
/* ft1d_array_free(moff); moff = NULL; */
/* } */
/* like->loglike = 1; */
/* return like; */
/* } */
/* struct Likelihood * likelihood_linear_regression(size_t dim, size_t N, */
/* double * data, double * covariates, double noise, struct BoundingBox * bds) */
/* { */
/* // enum poly_type ptype = LEGENDRE; */
/* struct FT1DArray * meanfunc = ft1d_array_alloc(N); */
/* size_t ii; */
/* double * pt = calloc_double(dim+1); */
/* pt[0] = 1.0; */
/* for (ii = 0; ii < N; ii++){ */
/* //printf("ii = %zu\n",ii); */
/* memmove(pt+1,covariates + ii*dim,dim*sizeof(double)); */
/* // meanfunc->ft[ii] = function_train_linear(POLYNOMIAL,&ptype,dim+1,bds,pt,NULL); */
/* meanfunc->ft[ii] = function_train_linear(LINELM,NULL,dim+1,bds,pt,NULL); */
/* } */
/* struct Likelihood * like = */
/* likelihood_gaussian(0,N,1,data,1,&noise,dim+1,meanfunc); */
/* free(pt); */
/* pt = NULL; */
/* ft1d_array_free(meanfunc); */
/* meanfunc = NULL; */
/* return like; */
/* } */
/***********************************************************//**
    Free memory allocated to a likelihood function

    \param[in,out] like - likelihood function (NULL is a no-op)
***************************************************************/
void likelihood_free(struct Likelihood * like)
{
    if (like == NULL){
        return;
    }
    function_train_free(like->like);
    like->like = NULL;
    free(like);
}
/* Pairs a likelihood with the linear transform applied to its inputs. */
struct LikeLinCouple
{
    struct Likelihood * like;    // likelihood evaluated in original coordinates
    struct LinearTransform * lt; // map applied to inputs before evaluation
};
/* Evaluate the likelihood at the image of *x* under the linear transform.
   Out-of-domain coordinates are clamped to the likelihood's bounds rather
   than zeroed (the zeroing alternative is left commented out below). */
double like_transform_helper(double * x, void * args)
{
    struct LikeLinCouple * lc = args;

    /* xnew = A x + b, in the likelihood's coordinate system */
    double * xnew = linear_transform_apply(lc->lt->dimout, lc->lt->dimin,
                                           lc->lt->A, x, lc->lt->b);

    double out = 0.0;
    int outofbounds = 0; /* never set: clamping strategy is active */
    size_t ii;
    for (ii = 0; ii < lc->lt->dimout; ii++){
        /* clamp to the domain of each core's first univariate function */
        double lb = generic_function_get_lb(lc->like->like->cores[ii]->funcs[0]);
        if (xnew[ii] < lb){
            xnew[ii] = lb;
            //out = 0.0;
            //outofbounds = 1;
            //break;
        }
        double ub = generic_function_get_ub(lc->like->like->cores[ii]->funcs[0]);
        if (xnew[ii] > ub){
            xnew[ii] = ub;
            //out = 0.0;
            //outofbounds = 1;
            //break;
        }
    }
    if (outofbounds == 0){ /* always true while clamping is used */
        out = function_train_eval(lc->like->like,xnew);
    }

    free(xnew); xnew = NULL;
    return out;
}
/***********************************************************//**
Transform the likelihood using a linear variable transformation
\param[in] like - likelihood
\param[in] lt - linear transformation
\param[in] bds - domain
\return newlike - new likelihood
***************************************************************/
/* struct Likelihood * */
/* likelihood_transform(struct Likelihood * like, struct LinearTransform * lt, */
/* struct BoundingBox * bds) */
/* { */
/* struct LikeLinCouple lc = {like,lt}; */
/* // */
/* size_t dim = lt->dimin; */
/* size_t init_rank = 2; */
/* double hmin = 1e-2; */
/* double delta = 1e-5; */
/* double round_tol = 1e-3; */
/* double cross_tol = 1e-5; */
/* struct C3Approx * c3a = c3approx_create(CROSS,dim,bds->lb,bds->ub); */
/* c3approx_init_lin_elem(c3a); */
/* c3approx_set_lin_elem_delta(c3a,delta); */
/* c3approx_set_lin_elem_hmin(c3a,hmin); */
/* c3approx_init_cross(c3a,init_rank,0); */
/* c3approx_set_round_tol(c3a,round_tol); */
/* c3approx_set_cross_tol(c3a,cross_tol); */
/* /\* struct FtCrossArgs temp; *\/ */
/* /\* struct FtApproxArgs * fapp = NULL; *\/ */
/* //size_t ii; */
/* /\* size_t * init_ranks = calloc_size_t(dim+1); *\/ */
/* /\* for (ii = 0; ii < dim ;ii++){ *\/ */
/* /\* init_ranks[ii] = init_rank; *\/ */
/* /\* } *\/ */
/* /\* init_ranks[0] = 1; *\/ */
/* /\* init_ranks[dim] = 1; *\/ */
/* /\* struct OpeAdaptOpts aopts; *\/ */
/* /\* aopts.start_num = 10; *\/ */
/* /\* aopts.coeffs_check = 0; *\/ */
/* /\* aopts.tol = 1e-5; *\/ */
/* /\* enum poly_type ptype = LEGENDRE; *\/ */
/* /\* fapp = ft_approx_args_createpoly(dim,&ptype,&aopts); *\/ */
/* /\* temp.dim = dim; *\/ */
/* /\* temp.ranks = init_ranks; *\/ */
/* /\* temp.epsilon = 1e-5; *\/ */
/* /\* temp.maxiter = 10; *\/ */
/* /\* temp.verbose = 2; *\/ */
/* /\* temp.epsround = 100.0*DBL_EPSILON; *\/ */
/* /\* temp.kickrank = 4; *\/ */
/* /\* temp.maxiteradapt = 5; *\/ */
/* struct Likelihood * newlike = malloc(sizeof(struct Likelihood)); */
/* if (newlike == NULL){ */
/* fprintf(stderr, "Failed to allocate Likelihood\n"); */
/* exit(1); */
/* } */
/* newlike->datadim = like->datadim; */
/* newlike->paramdim = 0; */
/* newlike->inputdim = dim; */
/* newlike->type = GENERIC_LIKE; */
/* //printf("here!!!\n"); */
/* /\* newlike->like = function_train_cross(like_transform_helper,&lc,bds,NULL,&temp,fapp); *\/ */
/* newlike->like = c3approx_do_cross(c3a,like_transform_helper,&lc); */
/* //printf("there!!!\n"); */
/* /\* free(init_ranks); init_ranks = NULL; *\/ */
/* /\* ft_approx_args_free(fapp); fapp = NULL; *\/ */
/* return newlike; */
/* } */
/* /\***********************************************************\//\** */
/* Compute the gradient of the log posterior from bayes rule */
/* \param[in] x - location at which to obtain the gradient log posterior */
/* \param[in] args - BayesRule structure holding likelihood and prior */
/* \return out - gradient of log posterior */
/* ***************************************************************\/ */
/* double * bayes_rule_log_gradient(double * x, void * args){ */
/* struct BayesRule * br = args; */
/* size_t dim = br->like->inputdim; */
/* double * gradp = probability_density_log_gradient_eval(br->prior,x); */
/* struct FT1DArray * gradl = function_train_gradient(br->like->like); */
/* double * gradl_eval = ft1d_array_eval(gradl,x); */
/* double * out = calloc_double(dim); */
/* size_t ii; */
/* if (br->like->loglike == 0){ */
/* double likeval = function_train_eval(br->like->like,x); */
/* for (ii = 0; ii < dim; ii++){ */
/* out[ii] = 1.0/likeval * gradl_eval[ii] + gradp[ii]; */
/* } */
/* } */
/* else{ */
/* for (ii = 0; ii < dim; ii++){ */
/* out[ii] = gradl_eval[ii] + gradp[ii]; */
/* } */
/* } */
/* ft1d_array_free(gradl); gradl = NULL; */
/* free(gradl_eval); gradl_eval = NULL; */
/* free(gradp); gradp = NULL; */
/* return out; */
/* } */
/***********************************************************//**
Compute the gradient of the negative log posterior from bayes rule
\param[in] x - location at which to obtain the hessian of log posterior
\param[in] args - BayesRule structure holding likelihood and prior
\return out - gradient of negative log posterior
***************************************************************/
/* double * bayes_rule_log_gradient_negative(double * x, void * args){ */
/* struct BayesRule * br = args; */
/* size_t dim = br->like->inputdim; */
/* double * grad = bayes_rule_log_gradient(x,args); */
/* size_t ii; */
/* for (ii=0; ii < dim; ii++){ */
/* grad[ii] *= -1.0; */
/* } */
/* return grad; */
/* } */
/***********************************************************//**
Compute the hessian of the log posterior from bayes rule
\param[in] x - location at which to obtain the hessian of log posterior
\param[in] args - BayesRule structure holding likelihood and prior
\return out - hessian of log posterior
***************************************************************/
/* double * bayes_rule_log_hessian(double * x, void * args){ */
/* struct BayesRule * br = args; */
/* size_t dim = br->like->inputdim; */
/* double * hessp = probability_density_log_hessian_eval(br->prior,x); */
/* struct FT1DArray * hessl = function_train_hessian(br->like->like); */
/* double * hessl_eval = ft1d_array_eval(hessl,x); */
/* double * out = calloc_double(dim*dim); */
/* size_t ii,jj; */
/* if (br->like->loglike == 0){ */
/* double likeval = function_train_eval(br->like->like,x); */
/* double likeval_squared = pow(likeval,2); */
/* struct FT1DArray * gradl = function_train_gradient(br->like->like); */
/* double * gradl_eval = ft1d_array_eval(gradl,x); */
/* for (ii = 0; ii < dim; ii++){ */
/* for (jj = 0; jj < dim; jj++){ */
/* out[ii*dim+jj] = */
/* -1.0/likeval_squared * gradl_eval[ii] * gradl_eval[jj] + */
/* 1.0/likeval * hessl_eval[ii*dim+jj] + hessp[ii*dim+jj]; */
/* } */
/* } */
/* ft1d_array_free(gradl); gradl = NULL; */
/* free(gradl_eval); gradl_eval = NULL; */
/* } */
/* else{ */
/* for (ii = 0; ii < dim; ii++){ */
/* for (jj = 0; jj < dim; jj++){ */
/* out[ii*dim+jj] = hessl_eval[ii*dim+jj] + hessp[ii*dim+jj]; */
/* } */
/* } */
/* } */
/* ft1d_array_free(hessl); hessl = NULL; */
/* free(hessl_eval); hessl_eval = NULL; */
/* free(hessp); hessp = NULL; */
/* return out; */
/* } */
/***********************************************************//**
Compute the hessian of the negative log posterior from bayes rule
\param x [in] - location at which to obtain the hessian of log posterior
\param args [in] - BayesRule structure holding likelihood and prior
\return out - hessian of negative log posterior
***************************************************************/
/* double * bayes_rule_log_hessian_negative(double * x, void * args){ */
/* struct BayesRule * br = args; */
/* size_t dim = br->like->inputdim; */
/* double * hess = bayes_rule_log_hessian(x,args); */
/* size_t ii,jj; */
/* for (ii=0; ii < dim; ii++){ */
/* for (jj=0; jj < dim; jj++){ */
/* hess[ii*dim+jj] *= -1.0; */
/* } */
/* } */
/* return hess; */
/* } */
/***********************************************************//**
Compute the Laplace approximation to Bayes rule
\param[in] br - Bayes Rule structure holding likelihood and prior
\return posterior - posterior distribution
***************************************************************/
/* struct ProbabilityDensity * bayes_rule_laplace(struct BayesRule * br) */
/* { */
/* double * start = probability_density_mean(br->prior); */
/* // size_t dim = br->like->inputdim; */
/* struct ProbabilityDensity * pdf = NULL; */
/* /\* struct ProbabilityDensity * pdf = *\/ */
/* /\* probability_density_laplace(bayes_rule_log_gradient_negative, *\/ */
/* /\* bayes_rule_log_hessian_negative, *\/ */
/* /\* br, dim, start); *\/ */
/* free(start); start = NULL; */
/* return pdf; */
/* } */
/* Couples a Bayes rule (likelihood + prior) with the linear transform and
   a multiplicative scale used when evaluating the unnormalized posterior. */
struct BrTrans
{
    struct BayesRule * br;       // likelihood and prior pair
    struct LinearTransform * lt; // map from evaluation coords to original coords
    double mult;                 // multiplicative scaling applied to the output
};
/* Evaluate the (scaled, unnormalized) posterior density at *x*:
   prior(T(x)) * likelihood(T(x)) * mult, where T is the linear transform. */
double bayes_rule_evaluate(double * x, void * arg)
{
    struct BrTrans * brt = arg;

    /* map the evaluation point into the original coordinate system */
    double * xorig = linear_transform_apply(brt->lt->dimout,
                                            brt->lt->dimin,
                                            brt->lt->A, x, brt->lt->b);

    double prior_density = probability_density_eval(brt->br->prior, xorig);
    double like_density = function_train_eval(brt->br->like->like, xorig);
    if (brt->br->like->loglike == 1){
        /* stored value is a log-likelihood; exponentiate with its offset */
        like_density = exp(like_density + brt->br->like->logextra);
    }

    free(xorig); xorig = NULL;
    return prior_density * like_density * brt->mult;
}
/* Scaled reciprocal of the unnormalized log-posterior at *x* (after mapping
   through the linear transform).  NOTE(review): adds logextra without
   checking the loglike flag -- presumably assumes the likelihood is stored
   in log form; confirm against callers. */
double bayes_rule_log(double * x, void * arg)
{
    struct BrTrans * brt = arg;

    /* map the evaluation point into the original coordinate system */
    double * xtemp = linear_transform_apply(brt->lt->dimout,
                                            brt->lt->dimin, brt->lt->A, x, brt->lt->b);

    double prior_val = log(probability_density_eval(brt->br->prior,xtemp));
    double like_val = function_train_eval(brt->br->like->like,xtemp)
                      + brt->br->like->logextra;

    free(xtemp); xtemp = NULL;
    /* the /800 looks like an ad-hoc scaling for cross approximation --
       TODO confirm its origin */
    double out = 1.0/((prior_val + like_val)/800);
    return out;
}
/***********************************************************//**
Compute the posterior from Bayes Rule
\param br [in] - BayesRule structure holding likelihood and prior
\return posterior - posterior distribution
***************************************************************/
/* struct ProbabilityDensity * bayes_rule_compute(struct BayesRule * br) */
/* { */
/* size_t dim = br->like->inputdim; */
/* // laplace approximation */
/* struct ProbabilityDensity * lap = bayes_rule_laplace(br); */
/* // Initialize transform with laplace approximation */
/* struct ProbabilityDensity * posterior = probability_density_alloc(); */
/* posterior->type = GENERAL; */
/* posterior->transform = 1; */
/* posterior->lt = linear_transform_alloc(dim,dim,NULL,NULL,0.0); */
/* memmove(posterior->lt->b, lap->lt->b, sizeof(double) * dim); */
/* memmove(posterior->lt->A, lap->lt->A, dim*dim*sizeof(double)); */
/* posterior->lt->mt = lap->lt->mt; */
/* /\* printf("mean = \n"); *\/ */
/* /\* dprint(dim,posterior->lt->b); *\/ */
/* struct BrTrans brt; */
/* brt.br = br; */
/* brt.lt = posterior->lt; */
/* brt.mult = 1.0; */
/* //double normalize; */
/* double prior_val = log(probability_density_eval(br->prior,lap->lt->b)); */
/* double like_val = function_train_eval(br->like->like,lap->lt->b); */
/* if (br->like->loglike == 0){ */
/* like_val = log(like_val); */
/* } */
/* /\* printf("log prior at map = %G\n",prior_val); *\/ */
/* /\* printf("log likelihood at map = %G\n",like_val); *\/ */
/* br->like->logextra = -(like_val+prior_val); */
/* /\* printf("log extra = %G\n",br->like->logextra); *\/ */
/* double val_at_map = bayes_rule_evaluate(lap->lt->b,&brt); */
/* /\* printf("the value at the map is %G\n",val_at_map); *\/ */
/* brt.mult = 0.5/val_at_map; */
/* val_at_map = bayes_rule_evaluate(lap->lt->b,&brt); */
/* /\* printf("the value at the map 2 is %G\n",val_at_map); *\/ */
/* /\* printf("BAYESRUL IS BROKE!!!\n"); *\/ */
/* /\* exit(1); *\/ */
/* // */
/* struct BoundingBox * bds = bounding_box_init(dim,-6.0,6.0); */
/* double hmin = 1e-2; */
/* double delta = 1e-5; */
/* size_t init_rank = 3; */
/* double round_tol = 1e-3; */
/* double cross_tol = 1e-8; */
/* struct C3Approx * c3a = c3approx_create(CROSS,dim,bds->lb,bds->ub); */
/* c3approx_init_lin_elem(c3a); */
/* c3approx_set_lin_elem_delta(c3a,delta); */
/* c3approx_set_lin_elem_hmin(c3a,hmin); */
/* c3approx_init_cross(c3a,init_rank,0); */
/* c3approx_set_round_tol(c3a,round_tol); */
/* c3approx_set_cross_tol(c3a,cross_tol); */
/* /\* struct FunctionTrain * roughpdf = c3approx_do_cross(c3a,bayes_rule_log,&brt); *\/ */
/* /\* double * temp = calloc_double(dim); *\/ */
/* /\* double postval = function_train_eval(roughpdf,temp); *\/ */
/* /\* double postval_check = bayes_rule_log(temp,&brt); *\/ */
/* /\* free(temp); temp = NULL; *\/ */
/* /\* printf("log post at map = %G, %G\n", 1.0/postval,1.0/postval_check); *\/ */
/* /\* double normalize = function_train_integrate(roughpdf); *\/ */
/* /\* printf("normalizing constant is %G \n", normalize); *\/ */
/* /\* printf("integral of log %G \n", normalize); *\/ */
/* /\* brt.mult = fabs(1.0/normalize); *\/ */
/* /\* function_train_free(roughpdf); roughpdf = NULL; *\/ */
/* /\* exit(1); *\/ */
/* /\* posterior->pdf = *\/ */
/* /\* function_train_cross(bayes_rule_evaluate, &brt, bds, NULL,NULL,NULL); *\/ */
/* posterior->pdf = c3approx_do_cross(c3a,bayes_rule_evaluate,&brt); */
/* double normalize = function_train_integrate(posterior->pdf); */
/* /\* printf("second normalizing constant is %G \n", normalize); *\/ */
/* function_train_scale(posterior->pdf,1.0/normalize); */
/* // exit(1); */
/* bounding_box_free(bds); bds = NULL; */
/* probability_density_free(lap); lap = NULL; */
/* return posterior; */
/* } */
|
Gaboso/java-design-patterns | src/main/java/com/github/gaboso/behavior/interpreter/expression/TerminalExpression.java | package com.github.gaboso.behavior.interpreter.expression;
import java.util.StringTokenizer;
/**
 * Terminal expression of the interpreter pattern: matches a single literal
 * against the whitespace-delimited tokens of an interpreted context string.
 */
public class TerminalExpression implements Expression {

    private final String data;

    public TerminalExpression(String data) {
        this.data = data;
    }

    /**
     * Returns {@code true} when {@code data} occurs as a whitespace-delimited
     * token of {@code context}.
     */
    @Override
    public boolean interpret(String context) {
        var tokens = new StringTokenizer(context);
        boolean matched = false;
        while (!matched && tokens.hasMoreTokens()) {
            matched = tokens.nextToken().equals(data);
        }
        return matched;
    }
}
|
testarOpenshift/redmineigen | lib/redmine/search.rb | # Redmine - project management software
# Copyright (C) 2006-2014 <NAME>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
module Redmine
  # Registry of search providers, plus controller helpers for declaring
  # which search scope an action should default to.
  module Search

    # Class-level list of registered search type names (strings).
    mattr_accessor :available_search_types

    @@available_search_types = []

    class << self
      # Yields the module itself, enabling a block-style DSL:
      #   Redmine::Search.map { |search| search.register :issues }
      def map(&block)
        yield self
      end

      # Registers a search provider
      # NOTE(review): +options+ is accepted but currently unused.
      def register(search_type, options={})
        search_type = search_type.to_s
        @@available_search_types << search_type unless @@available_search_types.include?(search_type)
      end
    end

    # Mixed into controllers to provide per-action default search scopes.
    module Controller
      def self.included(base)
        base.extend(ClassMethods)
      end

      module ClassMethods
        # Maps controller name => {:default => scope, :actions => {action => scope}}.
        @@default_search_scopes = Hash.new {|hash, key| hash[key] = {:default => nil, :actions => {}}}
        mattr_accessor :default_search_scopes

        # Set the default search scope for a controller or specific actions
        # Examples:
        #   * search_scope :issues # => sets the search scope to :issues for the whole controller
        #   * search_scope :issues, :only => :index
        #   * search_scope :issues, :only => [:index, :show]
        def default_search_scope(id, options = {})
          if actions = options[:only]
            # Normalize a single action symbol into a one-element array.
            actions = [] << actions unless actions.is_a?(Array)
            actions.each {|a| default_search_scopes[controller_name.to_sym][:actions][a.to_sym] = id.to_s}
          else
            default_search_scopes[controller_name.to_sym][:default] = id.to_s
          end
        end
      end

      def default_search_scopes
        self.class.default_search_scopes
      end

      # Returns the default search scope according to the current action
      # (action-specific mapping wins over the controller-wide default).
      def default_search_scope
        @default_search_scope ||= default_search_scopes[controller_name.to_sym][:actions][action_name.to_sym] ||
                                      default_search_scopes[controller_name.to_sym][:default]
      end
    end
  end
end
|
GregEakin/NutrishSr28 | sr28/src/test/java/dev/eakin/dao/entities/FoodGroupTests.java | <gh_stars>1-10
/*
* Copyright (c) 2019. <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dev.eakin.dao.entities;
import dev.eakin.dao.utilities.NutrishRepositoryExtension;
import org.hibernate.Session;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.function.Executable;
import static dev.eakin.dao.entities.FoodDescriptionTests.createFoodDescription;
import static org.junit.jupiter.api.Assertions.*;
// JUnit 5 tests for the FoodGroup <-> FoodDescription association.
// NutrishRepositoryExtension resolves the Hibernate Session constructor parameter.
@ExtendWith(NutrishRepositoryExtension.class)
public class FoodGroupTests {

    // Injected by the extension; kept for parity with sibling test classes.
    private final Session session;

    FoodGroupTests(Session session) {
        this.session = session;
    }

    // Builds a minimal FoodGroup fixture with a dummy food-group code.
    public static FoodGroup createFoodGroup() {
        FoodGroup foodGroup = new FoodGroup();
        foodGroup.setFdGrp_Cd("0000");
        return foodGroup;
    }

    @Test
    public void addNullFoodDescriptionTest() {
        // Adding a null description must be rejected with IllegalArgumentException.
        FoodGroup foodGroup = createFoodGroup();
        Executable closureContainingCodeToTest = () -> foodGroup.addFoodDescriptionSet(null);
        assertThrows(IllegalArgumentException.class, closureContainingCodeToTest, "null FoodDescription");
    }

    @Test
    public void addFoodDescriptionTest() {
        // Adding a description must wire both sides of the bidirectional link.
        FoodGroup foodGroup = createFoodGroup();
        FoodDescription foodDescription = createFoodDescription();
        foodGroup.addFoodDescriptionSet(foodDescription);
        assertSame(foodGroup, foodDescription.getFoodGroup());
        assertTrue(foodGroup.getFoodDescriptionSet().contains(foodDescription));
    }
}
|
zdenda/aosp_platform_frameworks_support | work/workmanager/src/main/java/androidx/work/impl/utils/LiveDataUtils.java | <gh_stars>1-10
/*
* Copyright 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.work.impl.utils;
import android.arch.core.util.Function;
import android.arch.lifecycle.LiveData;
import android.arch.lifecycle.MediatorLiveData;
import android.arch.lifecycle.Observer;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.annotation.RestrictTo;
import androidx.work.impl.utils.taskexecutor.TaskExecutor;
/**
 * Utility methods for {@link LiveData}.
 *
 * @hide
 */
@RestrictTo(RestrictTo.Scope.LIBRARY_GROUP)
public class LiveDataUtils {

    /**
     * Creates a new {@link LiveData} object that maps the values of {@code inputLiveData} using
     * {@code mappingMethod} on a background thread, but only triggers its observers when the mapped
     * values actually change.
     *
     * @param inputLiveData An input {@link LiveData}
     * @param mappingMethod A {@link Function} that maps input of type {@code In} to output of type
     *                      {@code Out}
     * @param workTaskExecutor The {@link TaskExecutor} that will run this operation on a background
     *                         thread
     * @param <In> The type of data for {@code inputLiveData}
     * @param <Out> The type of data to output
     * @return A new {@link LiveData} of type {@code Out}
     */
    public static <In, Out> LiveData<Out> dedupedMappedLiveDataFor(
            @NonNull LiveData<In> inputLiveData,
            @NonNull final Function<In, Out> mappingMethod,
            @NonNull final TaskExecutor workTaskExecutor) {

        final MediatorLiveData<Out> outputLiveData = new MediatorLiveData<>();

        outputLiveData.addSource(inputLiveData, new Observer<In>() {
            @Override
            public void onChanged(@Nullable final In input) {
                // Snapshot the current output before hopping to the background thread.
                // NOTE(review): if several inputs arrive before their background tasks
                // run, each task compares against this possibly-stale snapshot rather
                // than the latest posted value — confirm this is acceptable for the
                // dedup guarantee.
                final Out previousOutput = outputLiveData.getValue();
                workTaskExecutor.executeOnBackgroundThread(new Runnable() {
                    @Override
                    public void run() {
                        // Serialize mapping + publish so concurrent tasks do not interleave.
                        synchronized (outputLiveData) {
                            Out newOutput = mappingMethod.apply(input);
                            // Post only when the mapped value actually changed
                            // (null-aware comparison, split into the two cases).
                            if (previousOutput == null && newOutput != null) {
                                outputLiveData.postValue(newOutput);
                            } else if (
                                    previousOutput != null && !previousOutput.equals(newOutput)) {
                                outputLiveData.postValue(newOutput);
                            }
                        }
                    }
                });
            }
        });
        return outputLiveData;
    }

    // Static utility class: not meant to be instantiated.
    private LiveDataUtils() {
    }
}
|
GenaGeng/ObtainTrace | mcr-test/src/main/java/edu/tamu/aser/tests/simple/SimpleRWW.java | <reponame>GenaGeng/ObtainTrace
package edu.tamu.aser.tests.simple;
import edu.tamu.aser.reex.JUnit4MCRRunner;
import org.junit.Test;
import org.junit.runner.RunWith;
/**
 * @author Gena
 * @description Minimal two-thread read/write race used as an exploration
 *              target for the MCR (JUnit4MCRRunner) schedule explorer.
 * @date 2020/3/26 0026
 */
@RunWith(JUnit4MCRRunner.class)
public class SimpleRWW {

    // Shared and unsynchronized on purpose — the data race on x is the test subject.
    static int x;

    // Reads and writes x without synchronization (y is purely local noise).
    static void readAndWrite() {
        int y = 2;
        y = y + 10;
        x = x + 3;
    }

    // Writes x without synchronization.
    static void write() {
        x = 5;
    }

    @Test
    public void testcase() {
        x = 1;
        Thread t1 = new Thread(new Runnable() {
            @Override
            public void run() {
                readAndWrite();
            }
        });
        Thread t2 = new Thread(new Runnable() {
            @Override
            public void run() {
                write();
            }
        });
        t1.start();
        t2.start();
        // NOTE(review): threads are deliberately not joined here — presumably the
        // MCR runner controls scheduling/termination; confirm before adding join().
    }
}
|
lechium/tvOS135Headers | System/Library/PrivateFrameworks/CoreDuet.framework/_CDInteractionAdviceEngine.h | <filename>System/Library/PrivateFrameworks/CoreDuet.framework/_CDInteractionAdviceEngine.h<gh_stars>1-10
/*
* This header is generated by classdump-dyld 1.0
* on Sunday, June 7, 2020 at 11:15:44 AM Mountain Standard Time
* Operating System: Version 13.4.5 (Build 17L562)
* Image Source: /System/Library/PrivateFrameworks/CoreDuet.framework/CoreDuet
* classdump-dyld is licensed under GPLv3, Copyright ยฉ 2013-2016 by <NAME>.
*/
#import <libobjc.A.dylib/_CDInteractionAdvising.h>
@class _CDInteractionStore, _CDTemporalInteractionAdvisor, _CDSocialInteractionAdvisor, _CDQueryInteractionAdvisor;
// classdump-generated facade over a _CDInteractionStore: forwards advice
// queries to the temporal / social / query advisor instances held below.
@interface _CDInteractionAdviceEngine : NSObject <_CDInteractionAdvising> {

	_CDInteractionStore* _store;
	_CDTemporalInteractionAdvisor* _temporalAdvisor;
	_CDSocialInteractionAdvisor* _socialAdvisor;
	_CDQueryInteractionAdvisor* _queryAdvisor;

}

@property (nonatomic,readonly) _CDSocialInteractionAdvisor * socialAdvisor;
@property (nonatomic,readonly) _CDTemporalInteractionAdvisor * temporalAdvisor;
// Convenience constructor wrapping -initWithStore:.
+(id)interactionAdviceEngineWithStore:(id)arg1 ;
-(id)initWithStore:(id)arg1 ;
-(id)rankCandidateContacts:(id)arg1 usingSettings:(id)arg2 ;
-(id)adviseInteractionsUsingSettings:(id)arg1 ;
-(id)adviseInteractionsForDate:(id)arg1 usingSettings:(id)arg2 ;
-(id)adviseInteractionsForLocation:(id)arg1 usingSettings:(id)arg2 ;
-(id)adviseSocialInteractionsForDate:(id)arg1 andSeedContacts:(id)arg2 usingSettings:(id)arg3 ;
-(id)adviseInteractionsForKeywordsInString:(id)arg1 usingSettings:(id)arg2 ;
-(void)tuneSocialAdvisorUsingSettings:(id)arg1 heartBeatHandler:(id)arg2 ;
-(_CDSocialInteractionAdvisor *)socialAdvisor;
-(_CDTemporalInteractionAdvisor *)temporalAdvisor;
@end
|
TorinAsakura/cooking | povary/apps/gallery/urls.py | <reponame>TorinAsakura/cooking
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url
# URL routes for the gallery app.
# NOTE(review): patterns() is the legacy (pre-Django 1.8) URLconf style;
# kept as-is to match the Django version this project targets.
urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'povary.views.home', name='home'),
    # url(r'^povary/', include('povary.foo.urls')),

    # Upload endpoint for a recipe's image gallery, keyed by recipe slug.
    url(r'^recipe_gallery/(?P<recipe_slug>.*)/$',
        'gallery.views.recipe_gallery_upload',
        name='recipe_gallery_upload'
    ),
    # url(r'^$', 'recipes.views.recipe_list', name='recipe_list'),
    # url(r'^(?P<recipe_slug>.*)/$', 'recipes.views.recipe_details', name='recipe_details'),
)
|
osgcc/descent-mac | bios/gtimer.h | /*
THE COMPUTER CODE CONTAINED HEREIN IS THE SOLE PROPERTY OF PARALLAX
SOFTWARE CORPORATION ("PARALLAX"). PARALLAX, IN DISTRIBUTING THE CODE TO
END-USERS, AND SUBJECT TO ALL OF THE TERMS AND CONDITIONS HEREIN, GRANTS A
ROYALTY-FREE, PERPETUAL LICENSE TO SUCH END-USERS FOR USE BY SUCH END-USERS
IN USING, DISPLAYING, AND CREATING DERIVATIVE WORKS THEREOF, SO LONG AS
SUCH USE, DISPLAY OR CREATION IS FOR NON-COMMERCIAL, ROYALTY OR REVENUE
FREE PURPOSES. IN NO EVENT SHALL THE END-USER USE THE COMPUTER CODE
CONTAINED HEREIN FOR REVENUE-BEARING PURPOSES. THE END-USER UNDERSTANDS
AND AGREES TO THE TERMS HEREIN AND ACCEPTS THE SAME BY USE OF THIS FILE.
COPYRIGHT 1993-1998 PARALLAX SOFTWARE CORPORATION. ALL RIGHTS RESERVED.
*/
/*
* $Source: Smoke:miner:source:bios::RCS:gtimer.h $
* $Revision: 1.2 $
* $Author: allender $
* $Date: 1995/05/11 13:04:25 $
*
* Header for timer functions
*
* $Log: gtimer.h $
* Revision 1.2 1995/05/11 13:04:25 allender
* no a5 references if under powerPC code
*
* Revision 1.1 1995/05/05 09:31:09 allender
* Initial revision
*
* Revision 1.1 1995/03/21 13:32:43 allender
* Initial revision
*
*
* --- PC RCS information ---
* Revision 1.5 1994/11/15 12:04:15 john
* Cleaned up timer code a bit... took out unused functions
* like timer_get_milliseconds, etc.
*
* Revision 1.4 1994/04/28 23:50:08 john
* Changed calling for init_timer. Made the function that the
* timer calls be a far function. All of this was done to make
* our timer system compatible with the HMI sound stuff.
*
* Revision 1.3 1994/02/17 15:57:12 john
* Changed key libary to C.
*
* Revision 1.2 1994/01/18 10:58:34 john
* Added timer_get_fixed_seconds
*
* Revision 1.1 1993/07/10 13:10:41 matt
* Initial revision
*
*
*/
/* Include guard added: this header previously had none, so including it
 * twice in one translation unit redefined timer_info and the macros below. */
#ifndef GTIMER_H
#define GTIMER_H

#include "fix.h"

/* State for one installed Time Manager task.  On 68K builds the A5 world
 * is saved so the interrupt-time callback can reach application globals. */
typedef struct timer_info
{
	TMTask timer_task;
#ifndef __powerc
	ulong current_a5;
#endif
} timer_info;

//==========================================================================
// This installs the timer services and interrupts at the rate specified by
// count_val. If 'function' isn't 0, the function pointed to by function will
// be called 'freq' times per second. Should be > 19 and anything around
// 2-3000 is gonna start slowing down the system. Count_val should be
// 1,193,180 divided by your target frequency. Use 0 for the normal 18.2 Hz
// interrupt rate.

#define TIMER_FREQUENCY 1193180

extern void timer_init();
extern void timer_close();
extern void timer_set_rate(int count_val);
extern void timer_set_function( void *function );
extern void delay(int d_time);

//==========================================================================
// These functions return the time since the timer was initialized in
// some various units. The total length of reading time varies for each
// one. They will roll around after they read 2^32.
// There are milliseconds, milliseconds times 10, milliseconds times 100,
// and microseconds. They time out after 1000 hrs, 100 hrs, 10 hrs, and
// 1 hr, respectively.

extern fix timer_get_fixed_seconds(); // Rolls about every 9 hours...
extern fix timer_get_fixed_secondsX(); // Assume interrupts already disabled

//NOT_USED extern unsigned int timer_get_microseconds();
//NOT_USED extern unsigned int timer_get_milliseconds100();
//NOT_USED extern unsigned int timer_get_milliseconds10();
//NOT_USED extern unsigned int timer_get_milliseconds();
//NOT_USED extern unsigned int timer_get_millisecondsX(); // Assume interrupts disabled

//==========================================================================
// Use to access the BIOS ticker... ie... i = TICKER

#define TICKER (*(volatile int *)0x46C)

#define USECS_PER_READING( start, stop, frames ) (((stop-start)*54945)/frames)

#endif /* GTIMER_H */
|
juanfelipe82193/opensap | sapui5-sdk-1.74.0/resources/sap/ovp/app/TemplateBaseExtension-dbg.js | sap.ui.define([
"sap/ui/core/mvc/ControllerExtension",
"sap/ui/core/mvc/OverrideExecution"
], function (
ControllerExtension,
OverrideExecution
) {
"use strict";
    // Base ControllerExtension for OVP apps. The metadata marks every hook as
    // public, non-final, and executed after the base implementation
    // (OverrideExecution.After), so applications may override them; the
    // default bodies below are intentionally empty.
    return ControllerExtension.extend("sap.ovp.app.TemplateBaseExtension", {
        metadata: {
            methods: {
                provideExtensionAppStateData: {
                    "public": true,
                    "final": false,
                    overrideExecution: OverrideExecution.After
                },
                restoreExtensionAppStateData: {
                    "public": true,
                    "final": false,
                    overrideExecution: OverrideExecution.After
                },
                addFilters: {
                    "public": true,
                    "final": false,
                    overrideExecution: OverrideExecution.After
                },
                provideStartupExtension: {
                    "public": true,
                    "final": false,
                    overrideExecution: OverrideExecution.After
                },
                provideExtensionNavigation: {
                    "public": true,
                    "final": false,
                    overrideExecution: OverrideExecution.After
                },
                provideCustomActionPress: {
                    "public": true,
                    "final": false,
                    overrideExecution: OverrideExecution.After
                },
                provideCustomParameter: {
                    "public": true,
                    "final": false,
                    overrideExecution: OverrideExecution.After
                }
            }
        },
        // Lets an extension contribute its own app-state data via fnSetAppStateData.
        provideExtensionAppStateData: function (fnSetAppStateData) {
        },
        // Lets an extension restore previously stored app-state data.
        restoreExtensionAppStateData: function (fnGetAppStateData) {
        },
        // allows extension to add filters. They will be combined via AND with all other filters
        // For each filter the extension must call fnAddFilter(oControllerExtension, oFilter)
        // oControllerExtension must be the ControllerExtension instance which adds the filter
        // oFilter must be an instance of sap.ui.model.Filter
        addFilters: function(fnAddFilter){},
        //allow extension to modify iappStateData
        provideStartupExtension: function(oCustomSelectionVariant){
        },
        //allow extension to do custom navigation
        provideExtensionNavigation: function (sCardId, oContext, oNavigationEntry) {
        },
        //allow extension to add press event for custom actions added in the stack card
        provideCustomActionPress: function (sCustomAction) {
        },
        //allow extension to add custom parameters for navigation
        provideCustomParameter: function (sCustomParams) {
        }
    });
});
|
nickoliasxii/Class-Activities | Week-19/107-Ins_ArrowExample/Solved/arrows-lexical-this.js | function Person () {
    this.age = 0;
    // Arrow functions have no own `this`; they close over Person's `this`.
    setInterval(() => {
        this.age++; // |this| is parent's context - properly refers to the person object
    }, 1000);
}

// ---------- OLD Methods (no arrow function)
// compare to old methods, where new functions built their own scope
// (The repeated `function Person` declarations below are deliberate —
//  each one demonstrates an alternative pre-ES6 technique.)
function Person() {
    var that = this;
    that.age = 0;
    setInterval(function () {
        // have to store |this| to a new variable at the higher scope
        that.age++;
    }, 1000);
}

// or other old method:
function Person() {
    this.age = 0;
    setInterval(function () {
        this.age++;
        // bind this function's 'this' value to current scope's 'this'
    }.bind(this), 1000);
}
|
Caio-Moretti/115.Exercicios-Python | PythonExercicios/ex088.py | from random import randint
from time import sleep
from random import sample

# Mega Sena number drawer: asks how many games to generate, then draws
# 6 distinct numbers from 1-60 for each game and prints them one per second.
print('==' * 20)
print('MEGA SENA')
print('==' * 20)
quant = int(input(f'Quantos jogos você quer jogar? '))

# random.sample draws 6 distinct numbers in one call, replacing the original
# hand-rolled rejection loop that re-rolled duplicates with randint.
jogos = [sorted(sample(range(1, 61), 6)) for _ in range(quant)]

print('==' * 20)
print(f'Sorteando {quant} Jogos')
print('==' * 20)
for i, l in enumerate(jogos):
    print(f'Jogo {i + 1}: {l}')
    sleep(1)
|
Spiritdude/zencad | utest/ops1d2d_test.py | import unittest
import zencad
class Ops1d2dProbe(unittest.TestCase):
    """Smoke tests for zencad 1D/2D operations (fill/interpolate/sew/fillet2d)."""

    def setUp(self):
        # Disable lazy-evaluation caching so every test evaluates fresh geometry.
        zencad.lazy.encache = False
        zencad.lazy.decache = False
        zencad.lazy.fastdo = True

    def test_fill(self):
        # Both the free function and the method form must evaluate.
        zencad.fill(zencad.circle(5, wire=True)).unlazy()
        zencad.circle(5, wire=True).fill().unlazy()

    def test_interpolate(self):
        # Positional and keyword forms, open and closed, with/without tangents.
        zencad.interpolate([(0, 0, 0), (1, 1, 0), (1, 1, 1)])
        zencad.interpolate(pnts=[(0, 0, 0), (1, 1, 0), (1, 1, 1)], closed=True)
        zencad.interpolate(
            [(0, 0, 0), (1, 1, 0), (1, 1, 1)], [
                (0, 0, 0), (1, 1, 0), (1, 1, 1)]
        )
        zencad.interpolate(
            pnts=[(0, 0, 0), (1, 1, 0), (1, 1, 1)],
            tangs=[(0, 0, 0), (1, 0, 0), (0, 0, 1)],
            closed=True,
        )

    def test_sew(self):
        # Sew three segments forming a closed triangle into one shape.
        pnts = [(0, 0, 0), (1, 1, 1), (1, 0, 0)]
        zencad.sew(
            [
                zencad.segment(pnts[0], pnts[1]),
                zencad.segment(pnts[1], pnts[2]),
                zencad.segment(pnts[2], pnts[0]),
            ]
        )

    def test_fillet2d(self):
        # All calling conventions: positional, keyword, free function,
        # and with explicit reference vertices.
        zencad.square(20).fillet2d(1)
        zencad.square(20).fillet2d(r=1)
        zencad.fillet2d(shp=zencad.square(20), r=1)
        zencad.square(20).fillet2d(1, [(0, 0, 0)])
        zencad.square(20).fillet2d(refs=[(0, 0, 0)], r=1)
        zencad.fillet2d(shp=zencad.square(20), refs=[(0, 0, 0)], r=1)

    def test_chamfer2d(self):
        # not supported
        pass
|
rickkas7/AB1805_RK | docs/html/search/all_d.js | <reponame>rickkas7/AB1805_RK<gh_stars>1-10
// Auto-generated Doxygen search index (variables tab) — do not edit by hand.
var searchData=
[
  ['timeset_234',['timeSet',['../class_a_b1805.html#a65393e3d43980d222b7942cec25f57b8',1,'AB1805']]],
  ['tmtoregisters_235',['tmToRegisters',['../class_a_b1805.html#a6673da5d88733e6457f161c9a17f2997',1,'AB1805']]],
  ['tmtostring_236',['tmToString',['../class_a_b1805.html#ae0d37f7e9b4bed655065e66cc0fe9bd1',1,'AB1805']]]
];
|
peniakoff/commercetools-sync-java | src/test/java/com/commercetools/sync/products/helpers/productreferenceresolver/ProductTypeReferenceResolverTest.java | <filename>src/test/java/com/commercetools/sync/products/helpers/productreferenceresolver/ProductTypeReferenceResolverTest.java
package com.commercetools.sync.products.helpers.productreferenceresolver;
import static com.commercetools.sync.commons.MockUtils.getMockTypeService;
import static com.commercetools.sync.commons.helpers.BaseReferenceResolver.BLANK_KEY_VALUE_ON_RESOURCE_IDENTIFIER;
import static com.commercetools.sync.inventories.InventorySyncMockUtils.getMockChannelService;
import static com.commercetools.sync.inventories.InventorySyncMockUtils.getMockSupplyChannel;
import static com.commercetools.sync.products.ProductSyncMockUtils.getBuilderWithProductTypeRefKey;
import static com.commercetools.sync.products.ProductSyncMockUtils.getBuilderWithRandomProductType;
import static com.commercetools.sync.products.ProductSyncMockUtils.getMockCustomObjectService;
import static com.commercetools.sync.products.ProductSyncMockUtils.getMockCustomerService;
import static com.commercetools.sync.products.ProductSyncMockUtils.getMockProductService;
import static com.commercetools.sync.products.ProductSyncMockUtils.getMockProductTypeService;
import static com.commercetools.sync.products.ProductSyncMockUtils.getMockStateService;
import static com.commercetools.sync.products.ProductSyncMockUtils.getMockTaxCategoryService;
import static com.commercetools.sync.products.helpers.ProductReferenceResolver.FAILED_TO_RESOLVE_REFERENCE;
import static com.commercetools.sync.products.helpers.ProductReferenceResolver.PRODUCT_TYPE_DOES_NOT_EXIST;
import static java.lang.String.format;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import com.commercetools.sync.commons.exceptions.ReferenceResolutionException;
import com.commercetools.sync.products.ProductSyncOptions;
import com.commercetools.sync.products.ProductSyncOptionsBuilder;
import com.commercetools.sync.products.helpers.ProductReferenceResolver;
import com.commercetools.sync.services.CategoryService;
import com.commercetools.sync.services.CustomerGroupService;
import com.commercetools.sync.services.ProductTypeService;
import io.sphere.sdk.client.SphereClient;
import io.sphere.sdk.models.ResourceIdentifier;
import io.sphere.sdk.models.SphereException;
import io.sphere.sdk.products.ProductDraftBuilder;
import io.sphere.sdk.producttypes.ProductType;
import java.util.Objects;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
// Unit tests for ProductReferenceResolver.resolveProductTypeReference():
// key-based resolution, missing product type, blank/null keys, fetch
// failures, and pass-through of id-based resource identifiers.
class ProductTypeReferenceResolverTest {

  private static final String CHANNEL_KEY = "channel-key_1";
  private static final String CHANNEL_ID = "1";
  private static final String PRODUCT_TYPE_ID = "productTypeId";
  private static final String TAX_CATEGORY_ID = "taxCategoryId";
  private static final String STATE_ID = "stateId";
  private static final String PRODUCT_ID = "productId";
  private static final String CUSTOM_OBJECT_ID = "customObjectId";
  private static final String CUSTOMER_ID = "customerId";

  private ProductTypeService productTypeService;
  private ProductReferenceResolver referenceResolver;

  /** Sets up the services and the options needed for reference resolution. */
  @BeforeEach
  void setup() {
    productTypeService = getMockProductTypeService(PRODUCT_TYPE_ID);
    final ProductSyncOptions syncOptions =
        ProductSyncOptionsBuilder.of(mock(SphereClient.class)).build();
    referenceResolver =
        new ProductReferenceResolver(
            syncOptions,
            productTypeService,
            mock(CategoryService.class),
            getMockTypeService(),
            getMockChannelService(getMockSupplyChannel(CHANNEL_ID, CHANNEL_KEY)),
            mock(CustomerGroupService.class),
            getMockTaxCategoryService(TAX_CATEGORY_ID),
            getMockStateService(STATE_ID),
            getMockProductService(PRODUCT_ID),
            getMockCustomObjectService(CUSTOM_OBJECT_ID),
            getMockCustomerService(CUSTOMER_ID));
  }

  @Test
  void resolveProductTypeReference_WithKeys_ShouldResolveReference() {
    // Happy path: a resource identifier carrying a key resolves to the cached id.
    final ProductDraftBuilder productBuilder = getBuilderWithProductTypeRefKey("productTypeKey");

    final ProductDraftBuilder resolvedDraft =
        referenceResolver.resolveProductTypeReference(productBuilder).toCompletableFuture().join();

    assertThat(resolvedDraft.getProductType()).isNotNull();
    assertThat(resolvedDraft.getProductType().getId()).isEqualTo(PRODUCT_TYPE_ID);
  }

  @Test
  void resolveProductTypeReference_WithNonExistentProductType_ShouldNotResolveReference() {
    // When the service cannot find the key, resolution fails with a
    // ReferenceResolutionException carrying a descriptive message.
    final ProductDraftBuilder productBuilder =
        getBuilderWithProductTypeRefKey("anyKey").key("dummyKey");
    when(productTypeService.fetchCachedProductTypeId(anyString()))
        .thenReturn(CompletableFuture.completedFuture(Optional.empty()));

    final String expectedMessageWithCause =
        format(
            FAILED_TO_RESOLVE_REFERENCE,
            ProductType.resourceTypeId(),
            "dummyKey",
            format(PRODUCT_TYPE_DOES_NOT_EXIST, "anyKey"));

    referenceResolver
        .resolveProductTypeReference(productBuilder)
        .exceptionally(
            exception -> {
              assertThat(exception).hasCauseExactlyInstanceOf(ReferenceResolutionException.class);
              assertThat(exception.getCause().getMessage()).isEqualTo(expectedMessageWithCause);
              return null;
            })
        .toCompletableFuture()
        .join();
  }

  @Test
  void resolveProductTypeReference_WithNullKeyOnProductTypeReference_ShouldNotResolveReference() {
    // A null key on the resource identifier is rejected up front.
    final ProductDraftBuilder productBuilder =
        getBuilderWithProductTypeRefKey(null).key("dummyKey");

    assertThat(referenceResolver.resolveProductTypeReference(productBuilder).toCompletableFuture())
        .hasFailed()
        .hasFailedWithThrowableThat()
        .isExactlyInstanceOf(ReferenceResolutionException.class)
        .hasMessage(
            format(
                "Failed to resolve '%s' resource identifier on ProductDraft"
                    + " with key:'%s'. Reason: %s",
                ProductType.referenceTypeId(),
                productBuilder.getKey(),
                BLANK_KEY_VALUE_ON_RESOURCE_IDENTIFIER));
  }

  @Test
  void resolveProductTypeReference_WithEmptyKeyOnProductTypeReference_ShouldNotResolveReference() {
    // An empty-string key is rejected just like a null key.
    final ProductDraftBuilder productBuilder = getBuilderWithProductTypeRefKey("").key("dummyKey");

    assertThat(referenceResolver.resolveProductTypeReference(productBuilder).toCompletableFuture())
        .hasFailed()
        .hasFailedWithThrowableThat()
        .isExactlyInstanceOf(ReferenceResolutionException.class)
        .hasMessage(
            format(
                "Failed to resolve '%s' resource identifier on ProductDraft"
                    + " with key:'%s'. Reason: %s",
                ProductType.referenceTypeId(),
                productBuilder.getKey(),
                BLANK_KEY_VALUE_ON_RESOURCE_IDENTIFIER));
  }

  @Test
  void resolveProductTypeReference_WithExceptionOnProductTypeFetch_ShouldNotResolveReference() {
    // A failed service call propagates the underlying SphereException.
    final ProductDraftBuilder productBuilder =
        getBuilderWithProductTypeRefKey("anyKey").key("dummyKey");

    final CompletableFuture<Optional<String>> futureThrowingSphereException =
        new CompletableFuture<>();
    futureThrowingSphereException.completeExceptionally(new SphereException("CTP error on fetch"));
    when(productTypeService.fetchCachedProductTypeId(anyString()))
        .thenReturn(futureThrowingSphereException);

    assertThat(referenceResolver.resolveProductTypeReference(productBuilder))
        .hasFailedWithThrowableThat()
        .isExactlyInstanceOf(SphereException.class)
        .hasMessageContaining("CTP error on fetch");
  }

  @Test
  void resolveProductTypeReference_WithIdOnProductTypeReference_ShouldNotResolveReference() {
    // An id-based resource identifier needs no resolution and passes through unchanged.
    final ProductDraftBuilder productBuilder =
        getBuilderWithRandomProductType()
            .productType(ResourceIdentifier.ofId("existing-id"))
            .key("dummyKey");

    assertThat(referenceResolver.resolveProductTypeReference(productBuilder).toCompletableFuture())
        .hasNotFailed()
        .isCompletedWithValueMatching(
            resolvedDraft ->
                Objects.equals(resolvedDraft.getProductType(), productBuilder.getProductType()));
  }
}
|
RiftValleySoftware/open-source-docs | docs/baobab/search/variables_1.js | <filename>docs/baobab/search/variables_1.js
// Auto-generated Doxygen search index (variables tab) — do not edit by hand.
var searchData=
[
  ['_5f_5fandisol_5fversion_5f_5f_1214',['__ANDISOL_VERSION__',['../a00098.html#a2e865e0ec885a77cb02801d064f468ac',1,'co_andisol.class.php']]],
  ['_5f_5fbadger_5fversion_5f_5f_1215',['__BADGER_VERSION__',['../a00176.html#a3f5839931ef01a2516f08b8b4d45b9dc',1,'co_access.class.php']]],
  ['_5f_5fbasalt_5fversion_5f_5f_1216',['__BASALT_VERSION__',['../a00086.html#aad70326c8c3b91568adb74ad0c9f2d9b',1,'co_basalt.class.php']]],
  ['_5f_5fchameleon_5fversion_5f_5f_1217',['__CHAMELEON_VERSION__',['../a00140.html#aadc78b810840f1f13431192109c201bd',1,'co_chameleon.class.php']]],
  ['_5f_5fcobra_5fversion_5f_5f_1218',['__COBRA_VERSION__',['../a00107.html#a276eb68c30701b62d24301edd4092fee',1,'co_cobra.class.php']]],
  ['_5fplugin_5fname_5f_1219',['_PLUGIN_NAME_',['../a00086.html#afe4313db916476596ca3f10bd3ef175c',1,'co_basalt.class.php']]]
];
|
DDeAlmeida/near-wallet | packages/frontend/src/components/wallet/Sidebar.js | import React, { useState, useEffect } from 'react';
import styled from 'styled-components';
import CreateCustomName from './CreateCustomName';
import ExploreApps from './ExploreApps';
// Dark card wrapper for the sidebar, with pager "dots" styling; the active
// dot is highlighted and non-clickable.
const StyledContainer = styled.div`
    background-color: black;
    border-radius: 8px;
    padding-bottom: 30px;
    margin-bottom: 40px;

    .dots {
        margin-top: -30px;
        display: flex;
        align-items: center;
        justify-content: center;

        .dot {
            width: 8px;
            height: 8px;
            background-color: #D5D4D8;
            border-radius: 50%;
            margin: 0 5px;
            cursor: pointer;

            &.active {
                cursor: default;
                background-color: #8FCDFF;
            }
        }
    }
`;
// Sidebar card that shows either the "Explore Apps" promo or the
// "Create Custom Name" prompt, switchable via the pager dots below.
export default ({ availableAccounts }) => {
    const [activeComponent, setActiveComponent] = useState('ExploreApps');

    useEffect(() => {
        if (availableAccounts.length > 0) {
            // Account IDs of 64 chars are implicit accounts; anything shorter is
            // a named account. Users with no named account get the naming prompt.
            const numNonImplicitAccounts = availableAccounts.filter(a => a.length < 64).length;
            setActiveComponent(numNonImplicitAccounts === 0 ? 'CreateCustomName' : 'ExploreApps');
        }
    }, [availableAccounts]);

    return (
        <StyledContainer>
            {activeComponent === 'ExploreApps' ? <ExploreApps /> : <CreateCustomName />}
            <div className='dots'>
                <div className={`dot ${activeComponent === 'CreateCustomName' ? 'active' : ''}`} onClick={() => setActiveComponent('CreateCustomName')}></div>
                <div className={`dot ${activeComponent === 'ExploreApps' ? 'active' : ''}`} onClick={() => setActiveComponent('ExploreApps')}></div>
            </div>
        </StyledContainer>
    );
}; |
wayshall/onetwo | core/modules/boot/src/main/java/org/onetwo/boot/core/web/socket/ConectionLogHandlerDecoratorFactory.java | <filename>core/modules/boot/src/main/java/org/onetwo/boot/core/web/socket/ConectionLogHandlerDecoratorFactory.java
package org.onetwo.boot.core.web.socket;
import org.onetwo.boot.core.web.socket.event.WebsocketClosedEvent;
import org.onetwo.boot.core.web.socket.event.WebsocketConnectedEvent;
import org.onetwo.common.log.JFishLoggerFactory;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.web.socket.CloseStatus;
import org.springframework.web.socket.WebSocketHandler;
import org.springframework.web.socket.WebSocketSession;
import org.springframework.web.socket.handler.WebSocketHandlerDecorator;
import org.springframework.web.socket.handler.WebSocketHandlerDecoratorFactory;
/**
 * Decorates every {@link WebSocketHandler} so that connection open/close
 * events are logged and republished as Spring application events
 * ({@link WebsocketConnectedEvent} / {@link WebsocketClosedEvent}).
 */
public class ConectionLogHandlerDecoratorFactory implements WebSocketHandlerDecoratorFactory {

    @Autowired
    private ApplicationContext applicationContext;

    @Override
    public WebSocketHandler decorate(WebSocketHandler handler) {
        return new LogWebSocketHandlerDecorator(applicationContext, handler);
    }

    public static class LogWebSocketHandlerDecorator extends WebSocketHandlerDecorator {
        private final Logger logger = JFishLoggerFactory.getLogger(getClass());
        private ApplicationContext applicationContext;

        public LogWebSocketHandlerDecorator(ApplicationContext applicationContext, WebSocketHandler delegate) {
            super(delegate);
            this.applicationContext = applicationContext;
        }

        /** Logs the new connection and publishes a {@link WebsocketConnectedEvent}. */
        @Override
        public void afterConnectionEstablished(WebSocketSession session) throws Exception {
            if (logger.isInfoEnabled()) {
                logger.info("websocket 连接已监听, 用户:{}, sid: {}", session.getPrincipal()==null?"null":session.getPrincipal().getName(), session.getId());
            }
            applicationContext.publishEvent(new WebsocketConnectedEvent(this, session));
            super.afterConnectionEstablished(session);
        }

        /** Logs the closed connection and publishes a {@link WebsocketClosedEvent}. */
        @Override
        public void afterConnectionClosed(WebSocketSession session, CloseStatus closeStatus) throws Exception {
            if (logger.isInfoEnabled()) {
                // BUG FIX: this previously logged the same "connection established"
                // message (连接已监听) as afterConnectionEstablished — a copy-paste
                // error; it now reports that the connection was closed (连接已关闭).
                logger.info("websocket 连接已关闭, 用户:{}, sid: {}", session.getPrincipal()==null?"null":session.getPrincipal().getName(), session.getId());
            }
            applicationContext.publishEvent(new WebsocketClosedEvent(this, session, closeStatus));
            super.afterConnectionClosed(session, closeStatus);
        }
    }
}
|
STMicroelectronics/fp-sns-flight1 | Drivers/BSP/Components/Common/idd.h | /**
******************************************************************************
* @file idd.h
* @author MCD Application Team
* @brief This file contains all the functions prototypes for the IDD driver.
******************************************************************************
* @attention
*
* Copyright (c) 2018 STMicroelectronics.
* All rights reserved.
*
* This software is licensed under terms that can be found in the LICENSE file
* in the root directory of this software component.
* If no LICENSE file comes with this software, it is provided AS-IS.
*
******************************************************************************
*/
/* Define to prevent recursive inclusion -------------------------------------*/
#ifndef IDD_H
#define IDD_H
#ifdef __cplusplus
extern "C" {
#endif
/* Includes ------------------------------------------------------------------*/
#include <stdint.h>
/** @addtogroup BSP
* @{
*/
/** @addtogroup Components
* @{
*/
/** @addtogroup IDD
* @{
*/
/** @defgroup IDD_Exported_Types IDD Exported Types
* @{
*/
/** @defgroup IDD_Driver_structure IDD Driver structure
* @{
*/
/* Driver operations table for an IDD (supply-current measurement) component.
   A concrete component exposes one instance of this vtable; every operation
   receives the component object handle as its untyped first argument and
   returns an int32_t status code. Per-member notes below are derived from the
   member names (ST BSP convention); exact semantics are component-specific. */
typedef struct
{
  int32_t (*Init)(void *);                 /* Initialize the component           */
  int32_t (*DeInit)(void *);               /* Release the component              */
  int32_t (*ReadID)(void *, uint32_t *);   /* Read the device identifier         */
  int32_t (*Reset)(void *);                /* Reset the measurement block        */
  int32_t (*LowPower)(void *);             /* Enter low-power mode               */
  int32_t (*WakeUp)(void *);               /* Leave low-power mode               */
  int32_t (*Start)(void *);                /* Start a current measurement        */
  int32_t (*Config)(void *, void *);       /* Apply a device-specific config     */
  int32_t (*GetValue)(void *, uint32_t *); /* Fetch the measured value           */
  int32_t (*EnableIT)(void *);             /* Enable the measurement interrupt   */
  int32_t (*DisableIT)(void *);            /* Disable the measurement interrupt  */
  int32_t (*ITStatus)(void *);             /* Poll the measurement interrupt     */
  int32_t (*ClearIT)(void *);              /* Clear the measurement interrupt    */
  int32_t (*ErrorEnableIT)(void *);        /* Enable the error interrupt         */
  int32_t (*ErrorClearIT)(void *);         /* Clear the error interrupt          */
  int32_t (*ErrorGetITStatus)(void *);     /* Poll the error interrupt flag      */
  int32_t (*ErrorDisableIT)(void *);       /* Disable the error interrupt        */
  int32_t (*ErrorGetSrc)(void *);          /* Report the error source            */
  int32_t (*ErrorGetCode)(void *);         /* Report the error code              */
} IDD_Drv_t;
/**
* @}
*/
/**
* @}
*/
/**
* @}
*/
/**
* @}
*/
/**
* @}
*/
#ifdef __cplusplus
}
#endif
#endif /* IDD_H */
|
findhappyman/blockchain | brownie_fund_me/node_modules/zer/lib/factories.js | 'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.createChainCreator = createChainCreator;
var _lodash = require('lodash');
var _lodash2 = _interopRequireDefault(_lodash);
var _chain = require('./chain');
var _chainBuilder = require('./chain-builder');
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
 * Builds the chain entry point: a Proxy over `createProxiedChain` whose every
 * property access starts a fresh Chain named after that property.
 *
 * @param {Function} render - function used to render a finished chain
 * @param {Object} syntax - syntax definition driving the chain builder
 * @returns {Proxy} proxy turning `proxy.anyName` into a new proxied chain
 */
function createChainCreator(render, syntax) {
  return new Proxy(createProxiedChain, {
    // `target` is createProxiedChain itself; forward the accessed property
    // name so the resulting chain starts with it.
    get: (target, name) => target(name, render, syntax, {})
  });
}
/**
 * Wraps `createChainBuilder` in a Proxy so that both property access and
 * direct invocation continue a chain that begins with `chainName`.
 *
 * @param {string|symbol} chainName - first link of the chain
 * @param {Function} render - renderer passed through to the builder
 * @param {Object} syntax - syntax definition passed through to the builder
 * @returns {Proxy} callable proxy producing ChainBuilder steps
 */
function createProxiedChain(chainName, render, syntax) {
  // Every trap begins from a brand-new chain so builders never share state.
  const freshBuilder = (factory) => {
    const chain = (0, _chain.createChain)().startWith(chainName);
    return factory(chain, render, syntax);
  };
  const traps = {
    get: (target, prop) => freshBuilder(target)[prop],
    apply: (target, thisArg, args) => freshBuilder(target)(...args)
  };
  return new Proxy(_chainBuilder.createChainBuilder, traps);
}
hixio-mh/citadel_sdk_2.1.1 | drivers/broadcom/usb/cv/cvmain.h | /******************************************************************************
*
* Copyright 2007
* Broadcom Corporation
* 16215 <NAME>
* PO Box 57013
* Irvine CA 92619-7013
*
*****************************************************************************/
/*
* Broadcom Corporation Credential Vault API
*/
/*
* cvmain.h: PHX2 CV main header file
*/
/*
* Revision History:
*
* 01/08/07 DPA Created.
*/
#ifndef _CVMAIN_H_
#define _CVMAIN_H_ 1
#include "string.h"
#include "stdio.h"
#include "cvapi.h"
#include "cvinternal.h"
#include "phx_scapi.h"
#include "phx_otp.h"
#ifdef USH_BOOTROM /*AAI */
#include "volatile_mem.h"
#include "open_volatile_mem.h"
#include "extintf.h"
#include "fp_upk_api.h"
#include "fp_at_api.h"
#include "DPFR_API.h"
#include "dpResults.h"
#include "usbclient.h"
#include "fmalloc.h"
#endif /* USH_BOOTROM */
#endif /* end _CVMAIN_H_ */
|
siretty/BrotBoxEngine | BrotBoxEngine/IcoSphere.cpp | #include "BBE/IcoSphere.h"
#include "BBE/VertexWithNormal.h"
#include "BBE/Math.h"
#include "BBE/List.h"
#include <string.h>
bbe::INTERNAL::vulkan::VulkanBuffer bbe::IcoSphere::s_indexBuffer;
bbe::INTERNAL::vulkan::VulkanBuffer bbe::IcoSphere::s_vertexBuffer;
uint32_t bbe::IcoSphere::amountOfVertices = 0;
uint32_t bbe::IcoSphere::amountOfIndices = 0;
// Returns the index of the point midway between a and b, re-projected onto the
// radius-0.5 sphere; appends it to `vertices` when not already present. Exact
// float comparison is used for deduplication, matching the original behavior
// (the midpoint of an edge is always computed from the same operand values).
uint32_t getHalfPointIndex(bbe::List<bbe::Vector3> &vertices, bbe::Vector3 &a, bbe::Vector3 &b)
{
	const bbe::Vector3 midpoint = (a + b).normalize() / 2;
	const uint32_t count = static_cast<uint32_t>(vertices.getLength());
	for (uint32_t index = 0; index < count; index++)
	{
		if (midpoint == vertices[index])
		{
			return index;
		}
	}
	// Not found: append and return its (new last) index.
	vertices.add(midpoint);
	return count;
}
// Fills `indices`/`vertices` with an icosphere of radius 0.5 centered at the
// origin: a regular icosahedron subdivided `iterations` times, each inserted
// vertex re-projected onto the sphere. The vertex position is reused as the
// normal (it points radially outward; note its length is 0.5, not 1).
void createIcoSphereMesh(bbe::List<uint32_t> &indices, bbe::List<bbe::INTERNAL::VertexWithNormal> &vertices, int iterations)
{
	indices.clear();
	vertices.clear();
	// x = ((1 + sqrt(5)) / 2) / 2, i.e. half the golden ratio; the icosahedron
	// corners are the cyclic permutations of (0, +-0.5, +-x).
	float x = (1 + bbe::Math::sqrt(5)) / 4;
	bbe::List<bbe::Vector3> simpleVertices = {
		bbe::Vector3(-0.5, x, 0).normalize() / 2,
		bbe::Vector3( 0.5, x, 0).normalize() / 2,
		bbe::Vector3(-0.5, -x, 0).normalize() / 2,
		bbe::Vector3( 0.5, -x, 0).normalize() / 2,
		bbe::Vector3(0, -0.5, x).normalize() / 2,
		bbe::Vector3(0, 0.5, x).normalize() / 2,
		bbe::Vector3(0, -0.5, -x).normalize() / 2,
		bbe::Vector3(0, 0.5, -x).normalize() / 2,
		bbe::Vector3(x, 0, -0.5).normalize() / 2,
		bbe::Vector3(x, 0, 0.5).normalize() / 2,
		bbe::Vector3(-x, 0, -0.5).normalize() / 2,
		bbe::Vector3(-x, 0, 0.5).normalize() / 2,
	};
	// The 20 triangular faces of the base icosahedron.
	indices = {
		5, 11, 0,
		1, 5, 0,
		7, 1, 0,
		10, 7, 0,
		11, 10, 0,
		9, 5, 1,
		4, 11, 5,
		2, 10, 11,
		6, 7, 10,
		8, 1, 7,
		4, 9, 3,
		2, 4, 3,
		6, 2, 3,
		8, 6, 3,
		9, 8, 3,
		5, 9, 4,
		11, 4, 2,
		10, 2, 6,
		7, 6, 8,
		1, 8, 9,
	};
	for (int i = 0; i < iterations; i++)
	{
		bbe::List<uint32_t> newIndices;
		// Split every triangle into four by inserting the three edge midpoints;
		// getHalfPointIndex deduplicates midpoints shared between neighbors.
		for (size_t k = 0; k < indices.getLength(); k += 3)
		{
			uint32_t a = getHalfPointIndex(simpleVertices, simpleVertices[indices[k + 0]], simpleVertices[indices[k + 1]]);
			uint32_t b = getHalfPointIndex(simpleVertices, simpleVertices[indices[k + 1]], simpleVertices[indices[k + 2]]);
			uint32_t c = getHalfPointIndex(simpleVertices, simpleVertices[indices[k + 2]], simpleVertices[indices[k + 0]]);
			// NOTE(review): the two branches emit the same four triangles with opposite
			// winding, but the test is on `iterations` (constant for all passes), not
			// the pass counter `i` — presumably chosen so the final winding comes out
			// consistent for even vs. odd subdivision counts; confirm against the
			// renderer's face-culling setup.
			if (iterations % 2 == 0)
			{
				newIndices.addAll(
					c, a, indices[k + 0],
					a, b, indices[k + 1],
					b, c, indices[k + 2],
					c, b, a
				);
			}
			else
			{
				newIndices.addAll(
					a, c, indices[k + 0],
					b, a, indices[k + 1],
					c, b, indices[k + 2],
					b, c, a
				);
			}
		}
		indices = std::move(newIndices);
	}
	// Emit the vertex data; position doubles as the (radially outward) normal.
	for (size_t i = 0; i < simpleVertices.getLength(); i++)
	{
		vertices.add(bbe::INTERNAL::VertexWithNormal(simpleVertices[i], simpleVertices[i]));
	}
}
// One-time GPU setup shared by every IcoSphere instance: generates a
// twice-subdivided icosphere mesh and uploads its index and vertex data into
// the static Vulkan buffers.
void bbe::IcoSphere::s_init(VkDevice device, VkPhysicalDevice physicalDevice, INTERNAL::vulkan::VulkanCommandPool & commandPool, VkQueue queue)
{
	bbe::List<uint32_t> indices;
	bbe::List<bbe::INTERNAL::VertexWithNormal> vertices;
	createIcoSphereMesh(indices, vertices, 2);
	// Index buffer: create, fill via mapped memory, then upload through the
	// command pool/queue.
	amountOfIndices = static_cast<uint32_t>(indices.getLength());
	s_indexBuffer.create(device, physicalDevice, sizeof(uint32_t) * amountOfIndices, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_INDEX_BUFFER_BIT);
	void* dataBuf = s_indexBuffer.map();
	memcpy(dataBuf, indices.getRaw(), sizeof(uint32_t) * amountOfIndices);
	s_indexBuffer.unmap();
	s_indexBuffer.upload(commandPool, queue);
	// Vertex buffer: same create / fill / upload sequence.
	amountOfVertices = static_cast<uint32_t>(vertices.getLength());
	s_vertexBuffer.create(device, physicalDevice, sizeof(INTERNAL::VertexWithNormal) * amountOfVertices, VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_VERTEX_BUFFER_BIT);
	dataBuf = s_vertexBuffer.map();
	memcpy(dataBuf, vertices.getRaw(), sizeof(INTERNAL::VertexWithNormal) * amountOfVertices);
	s_vertexBuffer.unmap();
	s_vertexBuffer.upload(commandPool, queue);
}
// Releases the shared GPU buffers created by s_init.
void bbe::IcoSphere::s_destroy()
{
	s_indexBuffer.destroy();
	s_vertexBuffer.destroy();
}
// Default constructor: leaves m_transform default-constructed (presumably the
// identity matrix — see Matrix4's default constructor).
bbe::IcoSphere::IcoSphere()
{
}
// Convenience constructor: composes the transform from position, scale and an
// axis/angle rotation via set().
bbe::IcoSphere::IcoSphere(const Vector3 & pos, const Vector3 & scale, const Vector3 & rotationVector, float radians)
{
	set(pos, scale, rotationVector, radians);
}
// Constructs directly from a precomposed model matrix.
bbe::IcoSphere::IcoSphere(const Matrix4 & transform)
	: m_transform(transform)
{
}
void bbe::IcoSphere::set(const Vector3 & pos, const Vector3 & scale, const Vector3 & rotationVector, float radians)
{
Matrix4 matTranslation = Matrix4::createTranslationMatrix(pos);
Matrix4 matScale = Matrix4::createScaleMatrix(scale);
Matrix4 matRotation = Matrix4::createRotationMatrix(radians, rotationVector);
m_transform = matTranslation * matRotation * matScale;
}
// Translation component of the model transform.
bbe::Vector3 bbe::IcoSphere::getPos() const
{
	return m_transform.extractTranslation();
}
// X coordinate of the position.
float bbe::IcoSphere::getX() const
{
	return getPos().x;
}
// Y coordinate of the position.
float bbe::IcoSphere::getY() const
{
	return getPos().y;
}
// Z coordinate of the position.
float bbe::IcoSphere::getZ() const
{
	return getPos().z;
}
// Scale component of the model transform.
bbe::Vector3 bbe::IcoSphere::getScale() const
{
	return m_transform.extractScale();
}
// Width is the x extent of the scale.
float bbe::IcoSphere::getWidth() const
{
	return getScale().x;
}
// NOTE(review): height maps to scale.z and depth to scale.y, which suggests a
// z-up convention — confirm against the engine's coordinate system.
float bbe::IcoSphere::getHeight() const
{
	return getScale().z;
}
// Depth is the y extent of the scale (see note on getHeight).
float bbe::IcoSphere::getDepth() const
{
	return getScale().y;
}
// Full model matrix, as composed by set() or supplied to the constructor.
bbe::Matrix4 bbe::IcoSphere::getTransform() const
{
	return m_transform;
}
|
Duffney/azure-sdk-for-go | sdk/resourcemanager/securityinsights/armsecurityinsights/zz_generated_models_serde.go | <filename>sdk/resourcemanager/securityinsights/armsecurityinsights/zz_generated_models_serde.go
//go:build go1.18
// +build go1.18
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
package armsecurityinsights
import (
"encoding/json"
"github.com/Azure/azure-sdk-for-go/sdk/azcore"
"reflect"
)
// GetDataConnectorsCheckRequirements implements the DataConnectorsCheckRequirementsClassification interface for type AADCheckRequirements.
func (a *AADCheckRequirements) GetDataConnectorsCheckRequirements() *DataConnectorsCheckRequirements {
	// Down-converts to the base type; only the discriminator field is shared.
	return &DataConnectorsCheckRequirements{
		Kind: a.Kind,
	}
}
// MarshalJSON implements the json.Marshaller interface for type AADCheckRequirements.
func (a AADCheckRequirements) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	// The "kind" discriminator is always emitted as this type's fixed constant,
	// regardless of what a.Kind currently holds.
	objectMap["kind"] = DataConnectorKindAzureActiveDirectory
	populate(objectMap, "properties", a.Properties)
	return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type AADCheckRequirements.
func (a *AADCheckRequirements) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	// Decode known keys field by field; keys not listed in the switch are
	// silently ignored (no default case).
	for key, val := range rawMsg {
		var err error
		switch key {
		case "kind":
			err = unpopulate(val, &a.Kind)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &a.Properties)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// GetDataConnector implements the DataConnectorClassification interface for type AADDataConnector.
func (a *AADDataConnector) GetDataConnector() *DataConnector {
return &DataConnector{
Kind: a.Kind,
Etag: a.Etag,
ID: a.ID,
Name: a.Name,
Type: a.Type,
SystemData: a.SystemData,
}
}
// MarshalJSON implements the json.Marshaller interface for type AADDataConnector.
func (a AADDataConnector) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "etag", a.Etag)
populate(objectMap, "id", a.ID)
objectMap["kind"] = DataConnectorKindAzureActiveDirectory
populate(objectMap, "name", a.Name)
populate(objectMap, "properties", a.Properties)
populate(objectMap, "systemData", a.SystemData)
populate(objectMap, "type", a.Type)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type AADDataConnector.
func (a *AADDataConnector) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
for key, val := range rawMsg {
var err error
switch key {
case "etag":
err = unpopulate(val, &a.Etag)
delete(rawMsg, key)
case "id":
err = unpopulate(val, &a.ID)
delete(rawMsg, key)
case "kind":
err = unpopulate(val, &a.Kind)
delete(rawMsg, key)
case "name":
err = unpopulate(val, &a.Name)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &a.Properties)
delete(rawMsg, key)
case "systemData":
err = unpopulate(val, &a.SystemData)
delete(rawMsg, key)
case "type":
err = unpopulate(val, &a.Type)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// GetDataConnectorsCheckRequirements implements the DataConnectorsCheckRequirementsClassification interface for type AATPCheckRequirements.
func (a *AATPCheckRequirements) GetDataConnectorsCheckRequirements() *DataConnectorsCheckRequirements {
return &DataConnectorsCheckRequirements{
Kind: a.Kind,
}
}
// MarshalJSON implements the json.Marshaller interface for type AATPCheckRequirements.
func (a AATPCheckRequirements) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
objectMap["kind"] = DataConnectorKindAzureAdvancedThreatProtection
populate(objectMap, "properties", a.Properties)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type AATPCheckRequirements.
func (a *AATPCheckRequirements) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
for key, val := range rawMsg {
var err error
switch key {
case "kind":
err = unpopulate(val, &a.Kind)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &a.Properties)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// GetDataConnector implements the DataConnectorClassification interface for type AATPDataConnector.
func (a *AATPDataConnector) GetDataConnector() *DataConnector {
return &DataConnector{
Kind: a.Kind,
Etag: a.Etag,
ID: a.ID,
Name: a.Name,
Type: a.Type,
SystemData: a.SystemData,
}
}
// MarshalJSON implements the json.Marshaller interface for type AATPDataConnector.
func (a AATPDataConnector) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "etag", a.Etag)
populate(objectMap, "id", a.ID)
objectMap["kind"] = DataConnectorKindAzureAdvancedThreatProtection
populate(objectMap, "name", a.Name)
populate(objectMap, "properties", a.Properties)
populate(objectMap, "systemData", a.SystemData)
populate(objectMap, "type", a.Type)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type AATPDataConnector.
func (a *AATPDataConnector) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
for key, val := range rawMsg {
var err error
switch key {
case "etag":
err = unpopulate(val, &a.Etag)
delete(rawMsg, key)
case "id":
err = unpopulate(val, &a.ID)
delete(rawMsg, key)
case "kind":
err = unpopulate(val, &a.Kind)
delete(rawMsg, key)
case "name":
err = unpopulate(val, &a.Name)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &a.Properties)
delete(rawMsg, key)
case "systemData":
err = unpopulate(val, &a.SystemData)
delete(rawMsg, key)
case "type":
err = unpopulate(val, &a.Type)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// GetDataConnectorsCheckRequirements implements the DataConnectorsCheckRequirementsClassification interface for type ASCCheckRequirements.
func (a *ASCCheckRequirements) GetDataConnectorsCheckRequirements() *DataConnectorsCheckRequirements {
return &DataConnectorsCheckRequirements{
Kind: a.Kind,
}
}
// MarshalJSON implements the json.Marshaller interface for type ASCCheckRequirements.
func (a ASCCheckRequirements) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
objectMap["kind"] = DataConnectorKindAzureSecurityCenter
populate(objectMap, "properties", a.Properties)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type ASCCheckRequirements.
func (a *ASCCheckRequirements) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
for key, val := range rawMsg {
var err error
switch key {
case "kind":
err = unpopulate(val, &a.Kind)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &a.Properties)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// GetDataConnector implements the DataConnectorClassification interface for type ASCDataConnector.
func (a *ASCDataConnector) GetDataConnector() *DataConnector {
return &DataConnector{
Kind: a.Kind,
Etag: a.Etag,
ID: a.ID,
Name: a.Name,
Type: a.Type,
SystemData: a.SystemData,
}
}
// MarshalJSON implements the json.Marshaller interface for type ASCDataConnector.
func (a ASCDataConnector) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "etag", a.Etag)
populate(objectMap, "id", a.ID)
objectMap["kind"] = DataConnectorKindAzureSecurityCenter
populate(objectMap, "name", a.Name)
populate(objectMap, "properties", a.Properties)
populate(objectMap, "systemData", a.SystemData)
populate(objectMap, "type", a.Type)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type ASCDataConnector.
func (a *ASCDataConnector) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
for key, val := range rawMsg {
var err error
switch key {
case "etag":
err = unpopulate(val, &a.Etag)
delete(rawMsg, key)
case "id":
err = unpopulate(val, &a.ID)
delete(rawMsg, key)
case "kind":
err = unpopulate(val, &a.Kind)
delete(rawMsg, key)
case "name":
err = unpopulate(val, &a.Name)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &a.Properties)
delete(rawMsg, key)
case "systemData":
err = unpopulate(val, &a.SystemData)
delete(rawMsg, key)
case "type":
err = unpopulate(val, &a.Type)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// GetEntity implements the EntityClassification interface for type AccountEntity.
func (a *AccountEntity) GetEntity() *Entity {
return &Entity{
Kind: a.Kind,
ID: a.ID,
Name: a.Name,
Type: a.Type,
SystemData: a.SystemData,
}
}
// MarshalJSON implements the json.Marshaller interface for type AccountEntity.
func (a AccountEntity) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "id", a.ID)
objectMap["kind"] = EntityKindAccount
populate(objectMap, "name", a.Name)
populate(objectMap, "properties", a.Properties)
populate(objectMap, "systemData", a.SystemData)
populate(objectMap, "type", a.Type)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type AccountEntity.
func (a *AccountEntity) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
for key, val := range rawMsg {
var err error
switch key {
case "id":
err = unpopulate(val, &a.ID)
delete(rawMsg, key)
case "kind":
err = unpopulate(val, &a.Kind)
delete(rawMsg, key)
case "name":
err = unpopulate(val, &a.Name)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &a.Properties)
delete(rawMsg, key)
case "systemData":
err = unpopulate(val, &a.SystemData)
delete(rawMsg, key)
case "type":
err = unpopulate(val, &a.Type)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// MarshalJSON implements the json.Marshaller interface for type AccountEntityProperties.
func (a AccountEntityProperties) MarshalJSON() ([]byte, error) {
	// Build the wire map via the shared populate helper (defined elsewhere in
	// this file; presumably it omits unset fields — see its definition).
	objectMap := make(map[string]interface{})
	populate(objectMap, "aadTenantId", a.AADTenantID)
	populate(objectMap, "aadUserId", a.AADUserID)
	populate(objectMap, "accountName", a.AccountName)
	populate(objectMap, "additionalData", a.AdditionalData)
	populate(objectMap, "dnsDomain", a.DNSDomain)
	populate(objectMap, "displayName", a.DisplayName)
	populate(objectMap, "friendlyName", a.FriendlyName)
	populate(objectMap, "hostEntityId", a.HostEntityID)
	populate(objectMap, "isDomainJoined", a.IsDomainJoined)
	populate(objectMap, "ntDomain", a.NtDomain)
	populate(objectMap, "objectGuid", a.ObjectGUID)
	populate(objectMap, "puid", a.Puid)
	populate(objectMap, "sid", a.Sid)
	populate(objectMap, "upnSuffix", a.UpnSuffix)
	return json.Marshal(objectMap)
}
// MarshalJSON implements the json.Marshaller interface for type ActionsList.
func (a ActionsList) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "nextLink", a.NextLink)
populate(objectMap, "value", a.Value)
return json.Marshal(objectMap)
}
// GetCustomEntityQuery implements the CustomEntityQueryClassification interface for type ActivityCustomEntityQuery.
func (a *ActivityCustomEntityQuery) GetCustomEntityQuery() *CustomEntityQuery {
return &CustomEntityQuery{
Kind: a.Kind,
Etag: a.Etag,
ID: a.ID,
Name: a.Name,
Type: a.Type,
SystemData: a.SystemData,
}
}
// MarshalJSON implements the json.Marshaller interface for type ActivityCustomEntityQuery.
func (a ActivityCustomEntityQuery) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "etag", a.Etag)
populate(objectMap, "id", a.ID)
objectMap["kind"] = CustomEntityQueryKindActivity
populate(objectMap, "name", a.Name)
populate(objectMap, "properties", a.Properties)
populate(objectMap, "systemData", a.SystemData)
populate(objectMap, "type", a.Type)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type ActivityCustomEntityQuery.
func (a *ActivityCustomEntityQuery) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
for key, val := range rawMsg {
var err error
switch key {
case "etag":
err = unpopulate(val, &a.Etag)
delete(rawMsg, key)
case "id":
err = unpopulate(val, &a.ID)
delete(rawMsg, key)
case "kind":
err = unpopulate(val, &a.Kind)
delete(rawMsg, key)
case "name":
err = unpopulate(val, &a.Name)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &a.Properties)
delete(rawMsg, key)
case "systemData":
err = unpopulate(val, &a.SystemData)
delete(rawMsg, key)
case "type":
err = unpopulate(val, &a.Type)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// MarshalJSON implements the json.Marshaller interface for type ActivityEntityQueriesProperties.
func (a ActivityEntityQueriesProperties) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "content", a.Content)
populateTimeRFC3339(objectMap, "createdTimeUtc", a.CreatedTimeUTC)
populate(objectMap, "description", a.Description)
populate(objectMap, "enabled", a.Enabled)
populate(objectMap, "entitiesFilter", a.EntitiesFilter)
populate(objectMap, "inputEntityType", a.InputEntityType)
populateTimeRFC3339(objectMap, "lastModifiedTimeUtc", a.LastModifiedTimeUTC)
populate(objectMap, "queryDefinitions", a.QueryDefinitions)
populate(objectMap, "requiredInputFieldsSets", a.RequiredInputFieldsSets)
populate(objectMap, "templateName", a.TemplateName)
populate(objectMap, "title", a.Title)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type ActivityEntityQueriesProperties.
func (a *ActivityEntityQueriesProperties) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
for key, val := range rawMsg {
var err error
switch key {
case "content":
err = unpopulate(val, &a.Content)
delete(rawMsg, key)
case "createdTimeUtc":
err = unpopulateTimeRFC3339(val, &a.CreatedTimeUTC)
delete(rawMsg, key)
case "description":
err = unpopulate(val, &a.Description)
delete(rawMsg, key)
case "enabled":
err = unpopulate(val, &a.Enabled)
delete(rawMsg, key)
case "entitiesFilter":
err = unpopulate(val, &a.EntitiesFilter)
delete(rawMsg, key)
case "inputEntityType":
err = unpopulate(val, &a.InputEntityType)
delete(rawMsg, key)
case "lastModifiedTimeUtc":
err = unpopulateTimeRFC3339(val, &a.LastModifiedTimeUTC)
delete(rawMsg, key)
case "queryDefinitions":
err = unpopulate(val, &a.QueryDefinitions)
delete(rawMsg, key)
case "requiredInputFieldsSets":
err = unpopulate(val, &a.RequiredInputFieldsSets)
delete(rawMsg, key)
case "templateName":
err = unpopulate(val, &a.TemplateName)
delete(rawMsg, key)
case "title":
err = unpopulate(val, &a.Title)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// GetEntityQuery implements the EntityQueryClassification interface for type ActivityEntityQuery.
func (a *ActivityEntityQuery) GetEntityQuery() *EntityQuery {
return &EntityQuery{
Kind: a.Kind,
Etag: a.Etag,
ID: a.ID,
Name: a.Name,
Type: a.Type,
SystemData: a.SystemData,
}
}
// MarshalJSON implements the json.Marshaller interface for type ActivityEntityQuery.
func (a ActivityEntityQuery) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "etag", a.Etag)
populate(objectMap, "id", a.ID)
objectMap["kind"] = EntityQueryKindActivity
populate(objectMap, "name", a.Name)
populate(objectMap, "properties", a.Properties)
populate(objectMap, "systemData", a.SystemData)
populate(objectMap, "type", a.Type)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type ActivityEntityQuery.
func (a *ActivityEntityQuery) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
for key, val := range rawMsg {
var err error
switch key {
case "etag":
err = unpopulate(val, &a.Etag)
delete(rawMsg, key)
case "id":
err = unpopulate(val, &a.ID)
delete(rawMsg, key)
case "kind":
err = unpopulate(val, &a.Kind)
delete(rawMsg, key)
case "name":
err = unpopulate(val, &a.Name)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &a.Properties)
delete(rawMsg, key)
case "systemData":
err = unpopulate(val, &a.SystemData)
delete(rawMsg, key)
case "type":
err = unpopulate(val, &a.Type)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// GetEntityQueryTemplate implements the EntityQueryTemplateClassification interface for type ActivityEntityQueryTemplate.
func (a *ActivityEntityQueryTemplate) GetEntityQueryTemplate() *EntityQueryTemplate {
return &EntityQueryTemplate{
Kind: a.Kind,
ID: a.ID,
Name: a.Name,
Type: a.Type,
SystemData: a.SystemData,
}
}
// MarshalJSON implements the json.Marshaller interface for type ActivityEntityQueryTemplate.
func (a ActivityEntityQueryTemplate) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "id", a.ID)
objectMap["kind"] = EntityQueryTemplateKindActivity
populate(objectMap, "name", a.Name)
populate(objectMap, "properties", a.Properties)
populate(objectMap, "systemData", a.SystemData)
populate(objectMap, "type", a.Type)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type ActivityEntityQueryTemplate.
func (a *ActivityEntityQueryTemplate) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
for key, val := range rawMsg {
var err error
switch key {
case "id":
err = unpopulate(val, &a.ID)
delete(rawMsg, key)
case "kind":
err = unpopulate(val, &a.Kind)
delete(rawMsg, key)
case "name":
err = unpopulate(val, &a.Name)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &a.Properties)
delete(rawMsg, key)
case "systemData":
err = unpopulate(val, &a.SystemData)
delete(rawMsg, key)
case "type":
err = unpopulate(val, &a.Type)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// MarshalJSON implements the json.Marshaller interface for type ActivityEntityQueryTemplateProperties.
func (a ActivityEntityQueryTemplateProperties) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "content", a.Content)
populate(objectMap, "dataTypes", a.DataTypes)
populate(objectMap, "description", a.Description)
populate(objectMap, "entitiesFilter", a.EntitiesFilter)
populate(objectMap, "inputEntityType", a.InputEntityType)
populate(objectMap, "queryDefinitions", a.QueryDefinitions)
populate(objectMap, "requiredInputFieldsSets", a.RequiredInputFieldsSets)
populate(objectMap, "title", a.Title)
return json.Marshal(objectMap)
}
// GetEntityTimelineItem implements the EntityTimelineItemClassification interface for type ActivityTimelineItem.
func (a *ActivityTimelineItem) GetEntityTimelineItem() *EntityTimelineItem {
return &EntityTimelineItem{
Kind: a.Kind,
}
}
// MarshalJSON implements the json.Marshaller interface for type ActivityTimelineItem.
func (a ActivityTimelineItem) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populateTimeRFC3339(objectMap, "bucketEndTimeUTC", a.BucketEndTimeUTC)
populateTimeRFC3339(objectMap, "bucketStartTimeUTC", a.BucketStartTimeUTC)
populate(objectMap, "content", a.Content)
populateTimeRFC3339(objectMap, "firstActivityTimeUTC", a.FirstActivityTimeUTC)
objectMap["kind"] = EntityTimelineKindActivity
populateTimeRFC3339(objectMap, "lastActivityTimeUTC", a.LastActivityTimeUTC)
populate(objectMap, "queryId", a.QueryID)
populate(objectMap, "title", a.Title)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type ActivityTimelineItem.
func (a *ActivityTimelineItem) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
for key, val := range rawMsg {
var err error
switch key {
case "bucketEndTimeUTC":
err = unpopulateTimeRFC3339(val, &a.BucketEndTimeUTC)
delete(rawMsg, key)
case "bucketStartTimeUTC":
err = unpopulateTimeRFC3339(val, &a.BucketStartTimeUTC)
delete(rawMsg, key)
case "content":
err = unpopulate(val, &a.Content)
delete(rawMsg, key)
case "firstActivityTimeUTC":
err = unpopulateTimeRFC3339(val, &a.FirstActivityTimeUTC)
delete(rawMsg, key)
case "kind":
err = unpopulate(val, &a.Kind)
delete(rawMsg, key)
case "lastActivityTimeUTC":
err = unpopulateTimeRFC3339(val, &a.LastActivityTimeUTC)
delete(rawMsg, key)
case "queryId":
err = unpopulate(val, &a.QueryID)
delete(rawMsg, key)
case "title":
err = unpopulate(val, &a.Title)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// GetAlertRule implements the AlertRuleClassification interface for type AlertRule.
func (a *AlertRule) GetAlertRule() *AlertRule { return a }
// GetAlertRuleTemplate implements the AlertRuleTemplateClassification interface for type AlertRuleTemplate.
func (a *AlertRuleTemplate) GetAlertRuleTemplate() *AlertRuleTemplate { return a }
// MarshalJSON implements the json.Marshaller interface for type AlertRuleTemplateDataSource.
func (a AlertRuleTemplateDataSource) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "connectorId", a.ConnectorID)
populate(objectMap, "dataTypes", a.DataTypes)
return json.Marshal(objectMap)
}
// MarshalJSON implements the json.Marshaller interface for type AlertRuleTemplatePropertiesBase.
func (a AlertRuleTemplatePropertiesBase) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "alertRulesCreatedByTemplateCount", a.AlertRulesCreatedByTemplateCount)
populateTimeRFC3339(objectMap, "createdDateUTC", a.CreatedDateUTC)
populate(objectMap, "description", a.Description)
populate(objectMap, "displayName", a.DisplayName)
populateTimeRFC3339(objectMap, "lastUpdatedDateUTC", a.LastUpdatedDateUTC)
populate(objectMap, "requiredDataConnectors", a.RequiredDataConnectors)
populate(objectMap, "status", a.Status)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type AlertRuleTemplatePropertiesBase.
func (a *AlertRuleTemplatePropertiesBase) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
for key, val := range rawMsg {
var err error
switch key {
case "alertRulesCreatedByTemplateCount":
err = unpopulate(val, &a.AlertRulesCreatedByTemplateCount)
delete(rawMsg, key)
case "createdDateUTC":
err = unpopulateTimeRFC3339(val, &a.CreatedDateUTC)
delete(rawMsg, key)
case "description":
err = unpopulate(val, &a.Description)
delete(rawMsg, key)
case "displayName":
err = unpopulate(val, &a.DisplayName)
delete(rawMsg, key)
case "lastUpdatedDateUTC":
err = unpopulateTimeRFC3339(val, &a.LastUpdatedDateUTC)
delete(rawMsg, key)
case "requiredDataConnectors":
err = unpopulate(val, &a.RequiredDataConnectors)
delete(rawMsg, key)
case "status":
err = unpopulate(val, &a.Status)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// MarshalJSON implements the json.Marshaller interface for type AlertRuleTemplateWithMitreProperties.
// Extends the base template properties with the MITRE ATT&CK "tactics" and "techniques" fields.
func (a AlertRuleTemplateWithMitreProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "alertRulesCreatedByTemplateCount", a.AlertRulesCreatedByTemplateCount)
	populateTimeRFC3339(objectMap, "createdDateUTC", a.CreatedDateUTC)
	populate(objectMap, "description", a.Description)
	populate(objectMap, "displayName", a.DisplayName)
	populateTimeRFC3339(objectMap, "lastUpdatedDateUTC", a.LastUpdatedDateUTC)
	populate(objectMap, "requiredDataConnectors", a.RequiredDataConnectors)
	populate(objectMap, "status", a.Status)
	populate(objectMap, "tactics", a.Tactics)
	populate(objectMap, "techniques", a.Techniques)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type AlertRuleTemplateWithMitreProperties.
// Unrecognized keys are silently ignored (no default case in the switch).
func (a *AlertRuleTemplateWithMitreProperties) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "alertRulesCreatedByTemplateCount":
			err = unpopulate(val, &a.AlertRulesCreatedByTemplateCount)
			delete(rawMsg, key)
		case "createdDateUTC":
			err = unpopulateTimeRFC3339(val, &a.CreatedDateUTC)
			delete(rawMsg, key)
		case "description":
			err = unpopulate(val, &a.Description)
			delete(rawMsg, key)
		case "displayName":
			err = unpopulate(val, &a.DisplayName)
			delete(rawMsg, key)
		case "lastUpdatedDateUTC":
			err = unpopulateTimeRFC3339(val, &a.LastUpdatedDateUTC)
			delete(rawMsg, key)
		case "requiredDataConnectors":
			err = unpopulate(val, &a.RequiredDataConnectors)
			delete(rawMsg, key)
		case "status":
			err = unpopulate(val, &a.Status)
			delete(rawMsg, key)
		case "tactics":
			err = unpopulate(val, &a.Tactics)
			delete(rawMsg, key)
		case "techniques":
			err = unpopulate(val, &a.Techniques)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type AlertRuleTemplatesList.
func (a AlertRuleTemplatesList) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "nextLink", a.NextLink)
	populate(objectMap, "value", a.Value)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type AlertRuleTemplatesList.
// "value" holds a polymorphic array, so it is decoded through the discriminator-aware
// unmarshalAlertRuleTemplateClassificationArray helper rather than unpopulate.
func (a *AlertRuleTemplatesList) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "nextLink":
			err = unpopulate(val, &a.NextLink)
			delete(rawMsg, key)
		case "value":
			a.Value, err = unmarshalAlertRuleTemplateClassificationArray(val)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type AlertRulesList.
func (a AlertRulesList) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "nextLink", a.NextLink)
	populate(objectMap, "value", a.Value)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type AlertRulesList.
// "value" is a polymorphic array decoded via the discriminator-aware helper.
func (a *AlertRulesList) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "nextLink":
			err = unpopulate(val, &a.NextLink)
			delete(rawMsg, key)
		case "value":
			a.Value, err = unmarshalAlertRuleClassificationArray(val)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// GetSettings implements the SettingsClassification interface for type Anomalies.
// It adapts the concrete type to the base Settings by copying the shared fields
// into a freshly constructed Settings value.
func (a *Anomalies) GetSettings() *Settings {
	return &Settings{
		Kind: a.Kind,
		Etag: a.Etag,
		ID: a.ID,
		Name: a.Name,
		Type: a.Type,
		SystemData: a.SystemData,
	}
}

// MarshalJSON implements the json.Marshaller interface for type Anomalies.
// The "kind" discriminator is always written as the fixed constant
// SettingKindAnomalies, not the struct field, so output is always well-typed.
func (a Anomalies) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "etag", a.Etag)
	populate(objectMap, "id", a.ID)
	objectMap["kind"] = SettingKindAnomalies
	populate(objectMap, "name", a.Name)
	populate(objectMap, "properties", a.Properties)
	populate(objectMap, "systemData", a.SystemData)
	populate(objectMap, "type", a.Type)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type Anomalies.
// Unrecognized keys are silently ignored.
func (a *Anomalies) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "etag":
			err = unpopulate(val, &a.Etag)
			delete(rawMsg, key)
		case "id":
			err = unpopulate(val, &a.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &a.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &a.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &a.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &a.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &a.Type)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// GetAutomationRuleAction implements the AutomationRuleActionClassification interface for type AutomationRuleAction.
// The base type returns itself; derived types construct a new base value instead.
func (a *AutomationRuleAction) GetAutomationRuleAction() *AutomationRuleAction { return a }

// GetAutomationRuleCondition implements the AutomationRuleConditionClassification interface for type AutomationRuleCondition.
// The base type returns itself; derived types construct a new base value instead.
func (a *AutomationRuleCondition) GetAutomationRuleCondition() *AutomationRuleCondition { return a }
// GetAutomationRuleAction implements the AutomationRuleActionClassification interface for type AutomationRuleModifyPropertiesAction.
// Adapts the concrete action to the base type by copying the shared fields.
func (a *AutomationRuleModifyPropertiesAction) GetAutomationRuleAction() *AutomationRuleAction {
	return &AutomationRuleAction{
		Order: a.Order,
		ActionType: a.ActionType,
	}
}

// MarshalJSON implements the json.Marshaller interface for type AutomationRuleModifyPropertiesAction.
// The "actionType" discriminator is written as the constant ActionTypeModifyProperties.
func (a AutomationRuleModifyPropertiesAction) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "actionConfiguration", a.ActionConfiguration)
	objectMap["actionType"] = ActionTypeModifyProperties
	populate(objectMap, "order", a.Order)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type AutomationRuleModifyPropertiesAction.
// Unrecognized keys are silently ignored.
func (a *AutomationRuleModifyPropertiesAction) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "actionConfiguration":
			err = unpopulate(val, &a.ActionConfiguration)
			delete(rawMsg, key)
		case "actionType":
			err = unpopulate(val, &a.ActionType)
			delete(rawMsg, key)
		case "order":
			err = unpopulate(val, &a.Order)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type AutomationRuleProperties.
func (a AutomationRuleProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "actions", a.Actions)
	populate(objectMap, "createdBy", a.CreatedBy)
	populateTimeRFC3339(objectMap, "createdTimeUtc", a.CreatedTimeUTC)
	populate(objectMap, "displayName", a.DisplayName)
	populate(objectMap, "lastModifiedBy", a.LastModifiedBy)
	populateTimeRFC3339(objectMap, "lastModifiedTimeUtc", a.LastModifiedTimeUTC)
	populate(objectMap, "order", a.Order)
	populate(objectMap, "triggeringLogic", a.TriggeringLogic)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type AutomationRuleProperties.
// "actions" is a polymorphic array decoded via the discriminator-aware helper;
// unrecognized keys are silently ignored.
func (a *AutomationRuleProperties) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "actions":
			a.Actions, err = unmarshalAutomationRuleActionClassificationArray(val)
			delete(rawMsg, key)
		case "createdBy":
			err = unpopulate(val, &a.CreatedBy)
			delete(rawMsg, key)
		case "createdTimeUtc":
			err = unpopulateTimeRFC3339(val, &a.CreatedTimeUTC)
			delete(rawMsg, key)
		case "displayName":
			err = unpopulate(val, &a.DisplayName)
			delete(rawMsg, key)
		case "lastModifiedBy":
			err = unpopulate(val, &a.LastModifiedBy)
			delete(rawMsg, key)
		case "lastModifiedTimeUtc":
			err = unpopulateTimeRFC3339(val, &a.LastModifiedTimeUTC)
			delete(rawMsg, key)
		case "order":
			err = unpopulate(val, &a.Order)
			delete(rawMsg, key)
		case "triggeringLogic":
			err = unpopulate(val, &a.TriggeringLogic)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type AutomationRulePropertyValuesCondition.
func (a AutomationRulePropertyValuesCondition) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "operator", a.Operator)
	populate(objectMap, "propertyName", a.PropertyName)
	populate(objectMap, "propertyValues", a.PropertyValues)
	return json.Marshal(objectMap)
}
// GetAutomationRuleAction implements the AutomationRuleActionClassification interface for type AutomationRuleRunPlaybookAction.
// Adapts the concrete action to the base type by copying the shared fields.
func (a *AutomationRuleRunPlaybookAction) GetAutomationRuleAction() *AutomationRuleAction {
	return &AutomationRuleAction{
		Order: a.Order,
		ActionType: a.ActionType,
	}
}

// MarshalJSON implements the json.Marshaller interface for type AutomationRuleRunPlaybookAction.
// The "actionType" discriminator is written as the constant ActionTypeRunPlaybook.
func (a AutomationRuleRunPlaybookAction) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "actionConfiguration", a.ActionConfiguration)
	objectMap["actionType"] = ActionTypeRunPlaybook
	populate(objectMap, "order", a.Order)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type AutomationRuleRunPlaybookAction.
// Unrecognized keys are silently ignored.
func (a *AutomationRuleRunPlaybookAction) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "actionConfiguration":
			err = unpopulate(val, &a.ActionConfiguration)
			delete(rawMsg, key)
		case "actionType":
			err = unpopulate(val, &a.ActionType)
			delete(rawMsg, key)
		case "order":
			err = unpopulate(val, &a.Order)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type AutomationRuleTriggeringLogic.
func (a AutomationRuleTriggeringLogic) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "conditions", a.Conditions)
	populateTimeRFC3339(objectMap, "expirationTimeUtc", a.ExpirationTimeUTC)
	populate(objectMap, "isEnabled", a.IsEnabled)
	populate(objectMap, "triggersOn", a.TriggersOn)
	populate(objectMap, "triggersWhen", a.TriggersWhen)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type AutomationRuleTriggeringLogic.
// "conditions" is a polymorphic array decoded via the discriminator-aware helper;
// unrecognized keys are silently ignored.
func (a *AutomationRuleTriggeringLogic) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "conditions":
			a.Conditions, err = unmarshalAutomationRuleConditionClassificationArray(val)
			delete(rawMsg, key)
		case "expirationTimeUtc":
			err = unpopulateTimeRFC3339(val, &a.ExpirationTimeUTC)
			delete(rawMsg, key)
		case "isEnabled":
			err = unpopulate(val, &a.IsEnabled)
			delete(rawMsg, key)
		case "triggersOn":
			err = unpopulate(val, &a.TriggersOn)
			delete(rawMsg, key)
		case "triggersWhen":
			err = unpopulate(val, &a.TriggersWhen)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type AutomationRulesList.
func (a AutomationRulesList) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "nextLink", a.NextLink)
	populate(objectMap, "value", a.Value)
	return json.Marshal(objectMap)
}
// GetDataConnectorsCheckRequirements implements the DataConnectorsCheckRequirementsClassification interface for type AwsCloudTrailCheckRequirements.
func (a *AwsCloudTrailCheckRequirements) GetDataConnectorsCheckRequirements() *DataConnectorsCheckRequirements {
	return &DataConnectorsCheckRequirements{
		Kind: a.Kind,
	}
}

// MarshalJSON implements the json.Marshaller interface for type AwsCloudTrailCheckRequirements.
// The type has no payload fields; only the fixed "kind" discriminator is emitted.
func (a AwsCloudTrailCheckRequirements) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	objectMap["kind"] = DataConnectorKindAmazonWebServicesCloudTrail
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type AwsCloudTrailCheckRequirements.
// Unrecognized keys are silently ignored.
func (a *AwsCloudTrailCheckRequirements) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "kind":
			err = unpopulate(val, &a.Kind)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// GetDataConnector implements the DataConnectorClassification interface for type AwsCloudTrailDataConnector.
// Adapts the concrete connector to the base DataConnector by copying shared fields.
func (a *AwsCloudTrailDataConnector) GetDataConnector() *DataConnector {
	return &DataConnector{
		Kind: a.Kind,
		Etag: a.Etag,
		ID: a.ID,
		Name: a.Name,
		Type: a.Type,
		SystemData: a.SystemData,
	}
}

// MarshalJSON implements the json.Marshaller interface for type AwsCloudTrailDataConnector.
// The "kind" discriminator is written as the constant DataConnectorKindAmazonWebServicesCloudTrail.
func (a AwsCloudTrailDataConnector) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "etag", a.Etag)
	populate(objectMap, "id", a.ID)
	objectMap["kind"] = DataConnectorKindAmazonWebServicesCloudTrail
	populate(objectMap, "name", a.Name)
	populate(objectMap, "properties", a.Properties)
	populate(objectMap, "systemData", a.SystemData)
	populate(objectMap, "type", a.Type)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type AwsCloudTrailDataConnector.
// Unrecognized keys are silently ignored.
func (a *AwsCloudTrailDataConnector) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "etag":
			err = unpopulate(val, &a.Etag)
			delete(rawMsg, key)
		case "id":
			err = unpopulate(val, &a.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &a.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &a.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &a.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &a.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &a.Type)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// GetDataConnectorsCheckRequirements implements the DataConnectorsCheckRequirementsClassification interface for type AwsS3CheckRequirements.
func (a *AwsS3CheckRequirements) GetDataConnectorsCheckRequirements() *DataConnectorsCheckRequirements {
	return &DataConnectorsCheckRequirements{
		Kind: a.Kind,
	}
}

// MarshalJSON implements the json.Marshaller interface for type AwsS3CheckRequirements.
// The type has no payload fields; only the fixed "kind" discriminator is emitted.
func (a AwsS3CheckRequirements) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	objectMap["kind"] = DataConnectorKindAmazonWebServicesS3
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type AwsS3CheckRequirements.
// Unrecognized keys are silently ignored.
func (a *AwsS3CheckRequirements) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "kind":
			err = unpopulate(val, &a.Kind)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// GetDataConnector implements the DataConnectorClassification interface for type AwsS3DataConnector.
// Adapts the concrete connector to the base DataConnector by copying shared fields.
func (a *AwsS3DataConnector) GetDataConnector() *DataConnector {
	return &DataConnector{
		Kind: a.Kind,
		Etag: a.Etag,
		ID: a.ID,
		Name: a.Name,
		Type: a.Type,
		SystemData: a.SystemData,
	}
}

// MarshalJSON implements the json.Marshaller interface for type AwsS3DataConnector.
// The "kind" discriminator is written as the constant DataConnectorKindAmazonWebServicesS3.
func (a AwsS3DataConnector) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "etag", a.Etag)
	populate(objectMap, "id", a.ID)
	objectMap["kind"] = DataConnectorKindAmazonWebServicesS3
	populate(objectMap, "name", a.Name)
	populate(objectMap, "properties", a.Properties)
	populate(objectMap, "systemData", a.SystemData)
	populate(objectMap, "type", a.Type)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type AwsS3DataConnector.
// Unrecognized keys are silently ignored.
func (a *AwsS3DataConnector) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "etag":
			err = unpopulate(val, &a.Etag)
			delete(rawMsg, key)
		case "id":
			err = unpopulate(val, &a.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &a.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &a.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &a.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &a.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &a.Type)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type AwsS3DataConnectorProperties.
func (a AwsS3DataConnectorProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "dataTypes", a.DataTypes)
	populate(objectMap, "destinationTable", a.DestinationTable)
	populate(objectMap, "roleArn", a.RoleArn)
	populate(objectMap, "sqsUrls", a.SqsUrls)
	return json.Marshal(objectMap)
}
// GetEntity implements the EntityClassification interface for type AzureResourceEntity.
// Adapts the concrete entity to the base Entity by copying shared fields.
func (a *AzureResourceEntity) GetEntity() *Entity {
	return &Entity{
		Kind: a.Kind,
		ID: a.ID,
		Name: a.Name,
		Type: a.Type,
		SystemData: a.SystemData,
	}
}

// MarshalJSON implements the json.Marshaller interface for type AzureResourceEntity.
// The "kind" discriminator is written as the constant EntityKindAzureResource.
func (a AzureResourceEntity) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "id", a.ID)
	objectMap["kind"] = EntityKindAzureResource
	populate(objectMap, "name", a.Name)
	populate(objectMap, "properties", a.Properties)
	populate(objectMap, "systemData", a.SystemData)
	populate(objectMap, "type", a.Type)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type AzureResourceEntity.
// Unrecognized keys are silently ignored.
func (a *AzureResourceEntity) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "id":
			err = unpopulate(val, &a.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &a.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &a.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &a.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &a.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &a.Type)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}

// MarshalJSON implements the json.Marshaller interface for type AzureResourceEntityProperties.
func (a AzureResourceEntityProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "additionalData", a.AdditionalData)
	populate(objectMap, "friendlyName", a.FriendlyName)
	populate(objectMap, "resourceId", a.ResourceID)
	populate(objectMap, "subscriptionId", a.SubscriptionID)
	return json.Marshal(objectMap)
}
// MarshalJSON implements the json.Marshaller interface for type BookmarkEntityMappings.
func (b BookmarkEntityMappings) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "entityType", b.EntityType)
	populate(objectMap, "fieldMappings", b.FieldMappings)
	return json.Marshal(objectMap)
}
// MarshalJSON implements the json.Marshaller interface for type BookmarkExpandParameters.
func (b BookmarkExpandParameters) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populateTimeRFC3339(objectMap, "endTime", b.EndTime)
	populate(objectMap, "expansionId", b.ExpansionID)
	populateTimeRFC3339(objectMap, "startTime", b.StartTime)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type BookmarkExpandParameters.
// Unrecognized keys are silently ignored.
func (b *BookmarkExpandParameters) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "endTime":
			err = unpopulateTimeRFC3339(val, &b.EndTime)
			delete(rawMsg, key)
		case "expansionId":
			err = unpopulate(val, &b.ExpansionID)
			delete(rawMsg, key)
		case "startTime":
			err = unpopulateTimeRFC3339(val, &b.StartTime)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type BookmarkExpandResponseValue.
func (b BookmarkExpandResponseValue) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "edges", b.Edges)
	populate(objectMap, "entities", b.Entities)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type BookmarkExpandResponseValue.
// "entities" is a polymorphic array decoded via the discriminator-aware helper.
func (b *BookmarkExpandResponseValue) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "edges":
			err = unpopulate(val, &b.Edges)
			delete(rawMsg, key)
		case "entities":
			b.Entities, err = unmarshalEntityClassificationArray(val)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type BookmarkList.
func (b BookmarkList) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "nextLink", b.NextLink)
	populate(objectMap, "value", b.Value)
	return json.Marshal(objectMap)
}
// MarshalJSON implements the json.Marshaller interface for type BookmarkProperties.
// All time-valued fields go through populateTimeRFC3339 for RFC 3339 formatting.
func (b BookmarkProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populateTimeRFC3339(objectMap, "created", b.Created)
	populate(objectMap, "createdBy", b.CreatedBy)
	populate(objectMap, "displayName", b.DisplayName)
	populate(objectMap, "entityMappings", b.EntityMappings)
	populateTimeRFC3339(objectMap, "eventTime", b.EventTime)
	populate(objectMap, "incidentInfo", b.IncidentInfo)
	populate(objectMap, "labels", b.Labels)
	populate(objectMap, "notes", b.Notes)
	populate(objectMap, "query", b.Query)
	populateTimeRFC3339(objectMap, "queryEndTime", b.QueryEndTime)
	populate(objectMap, "queryResult", b.QueryResult)
	populateTimeRFC3339(objectMap, "queryStartTime", b.QueryStartTime)
	populate(objectMap, "tactics", b.Tactics)
	populate(objectMap, "techniques", b.Techniques)
	populateTimeRFC3339(objectMap, "updated", b.Updated)
	populate(objectMap, "updatedBy", b.UpdatedBy)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type BookmarkProperties.
// Unrecognized keys are silently ignored.
func (b *BookmarkProperties) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "created":
			err = unpopulateTimeRFC3339(val, &b.Created)
			delete(rawMsg, key)
		case "createdBy":
			err = unpopulate(val, &b.CreatedBy)
			delete(rawMsg, key)
		case "displayName":
			err = unpopulate(val, &b.DisplayName)
			delete(rawMsg, key)
		case "entityMappings":
			err = unpopulate(val, &b.EntityMappings)
			delete(rawMsg, key)
		case "eventTime":
			err = unpopulateTimeRFC3339(val, &b.EventTime)
			delete(rawMsg, key)
		case "incidentInfo":
			err = unpopulate(val, &b.IncidentInfo)
			delete(rawMsg, key)
		case "labels":
			err = unpopulate(val, &b.Labels)
			delete(rawMsg, key)
		case "notes":
			err = unpopulate(val, &b.Notes)
			delete(rawMsg, key)
		case "query":
			err = unpopulate(val, &b.Query)
			delete(rawMsg, key)
		case "queryEndTime":
			err = unpopulateTimeRFC3339(val, &b.QueryEndTime)
			delete(rawMsg, key)
		case "queryResult":
			err = unpopulate(val, &b.QueryResult)
			delete(rawMsg, key)
		case "queryStartTime":
			err = unpopulateTimeRFC3339(val, &b.QueryStartTime)
			delete(rawMsg, key)
		case "tactics":
			err = unpopulate(val, &b.Tactics)
			delete(rawMsg, key)
		case "techniques":
			err = unpopulate(val, &b.Techniques)
			delete(rawMsg, key)
		case "updated":
			err = unpopulateTimeRFC3339(val, &b.Updated)
			delete(rawMsg, key)
		case "updatedBy":
			err = unpopulate(val, &b.UpdatedBy)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// GetEntityTimelineItem implements the EntityTimelineItemClassification interface for type BookmarkTimelineItem.
func (b *BookmarkTimelineItem) GetEntityTimelineItem() *EntityTimelineItem {
	return &EntityTimelineItem{
		Kind: b.Kind,
	}
}

// MarshalJSON implements the json.Marshaller interface for type BookmarkTimelineItem.
// The "kind" discriminator is written as the constant EntityTimelineKindBookmark.
func (b BookmarkTimelineItem) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "azureResourceId", b.AzureResourceID)
	populate(objectMap, "createdBy", b.CreatedBy)
	populate(objectMap, "displayName", b.DisplayName)
	populateTimeRFC3339(objectMap, "endTimeUtc", b.EndTimeUTC)
	populateTimeRFC3339(objectMap, "eventTime", b.EventTime)
	objectMap["kind"] = EntityTimelineKindBookmark
	populate(objectMap, "labels", b.Labels)
	populate(objectMap, "notes", b.Notes)
	populateTimeRFC3339(objectMap, "startTimeUtc", b.StartTimeUTC)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type BookmarkTimelineItem.
// Unrecognized keys are silently ignored.
func (b *BookmarkTimelineItem) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "azureResourceId":
			err = unpopulate(val, &b.AzureResourceID)
			delete(rawMsg, key)
		case "createdBy":
			err = unpopulate(val, &b.CreatedBy)
			delete(rawMsg, key)
		case "displayName":
			err = unpopulate(val, &b.DisplayName)
			delete(rawMsg, key)
		case "endTimeUtc":
			err = unpopulateTimeRFC3339(val, &b.EndTimeUTC)
			delete(rawMsg, key)
		case "eventTime":
			err = unpopulateTimeRFC3339(val, &b.EventTime)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &b.Kind)
			delete(rawMsg, key)
		case "labels":
			err = unpopulate(val, &b.Labels)
			delete(rawMsg, key)
		case "notes":
			err = unpopulate(val, &b.Notes)
			delete(rawMsg, key)
		case "startTimeUtc":
			err = unpopulateTimeRFC3339(val, &b.StartTimeUTC)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// GetEntity implements the EntityClassification interface for type CloudApplicationEntity.
// Adapts the concrete entity to the base Entity by copying shared fields.
func (c *CloudApplicationEntity) GetEntity() *Entity {
	return &Entity{
		Kind: c.Kind,
		ID: c.ID,
		Name: c.Name,
		Type: c.Type,
		SystemData: c.SystemData,
	}
}

// MarshalJSON implements the json.Marshaller interface for type CloudApplicationEntity.
// The "kind" discriminator is written as the constant EntityKindCloudApplication.
func (c CloudApplicationEntity) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "id", c.ID)
	objectMap["kind"] = EntityKindCloudApplication
	populate(objectMap, "name", c.Name)
	populate(objectMap, "properties", c.Properties)
	populate(objectMap, "systemData", c.SystemData)
	populate(objectMap, "type", c.Type)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type CloudApplicationEntity.
// Unrecognized keys are silently ignored.
func (c *CloudApplicationEntity) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "id":
			err = unpopulate(val, &c.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &c.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &c.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &c.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &c.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &c.Type)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}

// MarshalJSON implements the json.Marshaller interface for type CloudApplicationEntityProperties.
func (c CloudApplicationEntityProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "additionalData", c.AdditionalData)
	populate(objectMap, "appId", c.AppID)
	populate(objectMap, "appName", c.AppName)
	populate(objectMap, "friendlyName", c.FriendlyName)
	populate(objectMap, "instanceName", c.InstanceName)
	return json.Marshal(objectMap)
}
// GetDataConnector implements the DataConnectorClassification interface for type CodelessAPIPollingDataConnector.
// Adapts the concrete connector to the base DataConnector by copying shared fields.
func (c *CodelessAPIPollingDataConnector) GetDataConnector() *DataConnector {
	return &DataConnector{
		Kind: c.Kind,
		Etag: c.Etag,
		ID: c.ID,
		Name: c.Name,
		Type: c.Type,
		SystemData: c.SystemData,
	}
}

// MarshalJSON implements the json.Marshaller interface for type CodelessAPIPollingDataConnector.
// The "kind" discriminator is written as the constant DataConnectorKindAPIPolling.
func (c CodelessAPIPollingDataConnector) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "etag", c.Etag)
	populate(objectMap, "id", c.ID)
	objectMap["kind"] = DataConnectorKindAPIPolling
	populate(objectMap, "name", c.Name)
	populate(objectMap, "properties", c.Properties)
	populate(objectMap, "systemData", c.SystemData)
	populate(objectMap, "type", c.Type)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type CodelessAPIPollingDataConnector.
// Unrecognized keys are silently ignored.
func (c *CodelessAPIPollingDataConnector) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "etag":
			err = unpopulate(val, &c.Etag)
			delete(rawMsg, key)
		case "id":
			err = unpopulate(val, &c.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &c.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &c.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &c.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &c.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &c.Type)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type CodelessConnectorPollingResponseProperties.
func (c CodelessConnectorPollingResponseProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "eventsJsonPaths", c.EventsJSONPaths)
	populate(objectMap, "isGzipCompressed", c.IsGzipCompressed)
	populate(objectMap, "successStatusJsonPath", c.SuccessStatusJSONPath)
	populate(objectMap, "successStatusValue", c.SuccessStatusValue)
	return json.Marshal(objectMap)
}
// MarshalJSON implements the json.Marshaller interface for type CodelessUIConnectorConfigProperties.
func (c CodelessUIConnectorConfigProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "availability", c.Availability)
	populate(objectMap, "connectivityCriteria", c.ConnectivityCriteria)
	populate(objectMap, "customImage", c.CustomImage)
	populate(objectMap, "dataTypes", c.DataTypes)
	populate(objectMap, "descriptionMarkdown", c.DescriptionMarkdown)
	populate(objectMap, "graphQueries", c.GraphQueries)
	populate(objectMap, "graphQueriesTableName", c.GraphQueriesTableName)
	populate(objectMap, "instructionSteps", c.InstructionSteps)
	populate(objectMap, "permissions", c.Permissions)
	populate(objectMap, "publisher", c.Publisher)
	populate(objectMap, "sampleQueries", c.SampleQueries)
	populate(objectMap, "title", c.Title)
	return json.Marshal(objectMap)
}

// MarshalJSON implements the json.Marshaller interface for type CodelessUIConnectorConfigPropertiesConnectivityCriteriaItem.
func (c CodelessUIConnectorConfigPropertiesConnectivityCriteriaItem) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "type", c.Type)
	populate(objectMap, "value", c.Value)
	return json.Marshal(objectMap)
}

// MarshalJSON implements the json.Marshaller interface for type CodelessUIConnectorConfigPropertiesInstructionStepsItem.
func (c CodelessUIConnectorConfigPropertiesInstructionStepsItem) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "description", c.Description)
	populate(objectMap, "instructions", c.Instructions)
	populate(objectMap, "title", c.Title)
	return json.Marshal(objectMap)
}
// GetDataConnector implements the DataConnectorClassification interface for type CodelessUIDataConnector.
func (c *CodelessUIDataConnector) GetDataConnector() *DataConnector {
return &DataConnector{
Kind: c.Kind,
Etag: c.Etag,
ID: c.ID,
Name: c.Name,
Type: c.Type,
SystemData: c.SystemData,
}
}
// MarshalJSON implements the json.Marshaller interface for type CodelessUIDataConnector.
func (c CodelessUIDataConnector) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "etag", c.Etag)
populate(objectMap, "id", c.ID)
objectMap["kind"] = DataConnectorKindGenericUI
populate(objectMap, "name", c.Name)
populate(objectMap, "properties", c.Properties)
populate(objectMap, "systemData", c.SystemData)
populate(objectMap, "type", c.Type)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type CodelessUIDataConnector.
func (c *CodelessUIDataConnector) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
for key, val := range rawMsg {
var err error
switch key {
case "etag":
err = unpopulate(val, &c.Etag)
delete(rawMsg, key)
case "id":
err = unpopulate(val, &c.ID)
delete(rawMsg, key)
case "kind":
err = unpopulate(val, &c.Kind)
delete(rawMsg, key)
case "name":
err = unpopulate(val, &c.Name)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &c.Properties)
delete(rawMsg, key)
case "systemData":
err = unpopulate(val, &c.SystemData)
delete(rawMsg, key)
case "type":
err = unpopulate(val, &c.Type)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// MarshalJSON implements the json.Marshaller interface for type ConnectivityCriteria.
func (c ConnectivityCriteria) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "type", c.Type)
populate(objectMap, "value", c.Value)
return json.Marshal(objectMap)
}
// GetCustomEntityQuery implements the CustomEntityQueryClassification interface for type CustomEntityQuery.
func (c *CustomEntityQuery) GetCustomEntityQuery() *CustomEntityQuery { return c }
// GetEntity implements the EntityClassification interface for type DNSEntity.
func (d *DNSEntity) GetEntity() *Entity {
return &Entity{
Kind: d.Kind,
ID: d.ID,
Name: d.Name,
Type: d.Type,
SystemData: d.SystemData,
}
}
// MarshalJSON implements the json.Marshaller interface for type DNSEntity.
func (d DNSEntity) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "id", d.ID)
objectMap["kind"] = EntityKindDNSResolution
populate(objectMap, "name", d.Name)
populate(objectMap, "properties", d.Properties)
populate(objectMap, "systemData", d.SystemData)
populate(objectMap, "type", d.Type)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type DNSEntity.
func (d *DNSEntity) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
for key, val := range rawMsg {
var err error
switch key {
case "id":
err = unpopulate(val, &d.ID)
delete(rawMsg, key)
case "kind":
err = unpopulate(val, &d.Kind)
delete(rawMsg, key)
case "name":
err = unpopulate(val, &d.Name)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &d.Properties)
delete(rawMsg, key)
case "systemData":
err = unpopulate(val, &d.SystemData)
delete(rawMsg, key)
case "type":
err = unpopulate(val, &d.Type)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// MarshalJSON implements the json.Marshaller interface for type DNSEntityProperties.
func (d DNSEntityProperties) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "additionalData", d.AdditionalData)
populate(objectMap, "dnsServerIpEntityId", d.DNSServerIPEntityID)
populate(objectMap, "domainName", d.DomainName)
populate(objectMap, "friendlyName", d.FriendlyName)
populate(objectMap, "hostIpAddressEntityId", d.HostIPAddressEntityID)
populate(objectMap, "ipAddressEntityIds", d.IPAddressEntityIDs)
return json.Marshal(objectMap)
}
// GetDataConnector implements the DataConnectorClassification interface for type DataConnector.
func (d *DataConnector) GetDataConnector() *DataConnector { return d }
// MarshalJSON implements the json.Marshaller interface for type DataConnectorConnectBody.
func (d DataConnectorConnectBody) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "apiKey", d.APIKey)
populate(objectMap, "authorizationCode", d.AuthorizationCode)
populate(objectMap, "clientId", d.ClientID)
populate(objectMap, "clientSecret", d.ClientSecret)
populate(objectMap, "kind", d.Kind)
populate(objectMap, "password", <PASSWORD>)
populate(objectMap, "requestConfigUserInputValues", d.RequestConfigUserInputValues)
populate(objectMap, "userName", d.UserName)
return json.Marshal(objectMap)
}
// MarshalJSON implements the json.Marshaller interface for type DataConnectorList.
func (d DataConnectorList) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "nextLink", d.NextLink)
	populate(objectMap, "value", d.Value)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type DataConnectorList.
func (d *DataConnectorList) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "nextLink":
			err = unpopulate(val, &d.NextLink)
			delete(rawMsg, key)
		case "value":
			// "value" holds a polymorphic array; the helper dispatches each
			// element to the concrete type named by its "kind" discriminator.
			d.Value, err = unmarshalDataConnectorClassificationArray(val)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}

// GetDataConnectorsCheckRequirements implements the DataConnectorsCheckRequirementsClassification interface for type DataConnectorsCheckRequirements.
func (d *DataConnectorsCheckRequirements) GetDataConnectorsCheckRequirements() *DataConnectorsCheckRequirements {
	return d
}

// MarshalJSON implements the json.Marshaller interface for type Deployment.
func (d Deployment) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "deploymentId", d.DeploymentID)
	populate(objectMap, "deploymentLogsUrl", d.DeploymentLogsURL)
	populate(objectMap, "deploymentResult", d.DeploymentResult)
	populate(objectMap, "deploymentState", d.DeploymentState)
	// Timestamps are serialized in RFC 3339 format via the dedicated helper.
	populateTimeRFC3339(objectMap, "deploymentTime", d.DeploymentTime)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type Deployment.
func (d *Deployment) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "deploymentId":
			err = unpopulate(val, &d.DeploymentID)
			delete(rawMsg, key)
		case "deploymentLogsUrl":
			err = unpopulate(val, &d.DeploymentLogsURL)
			delete(rawMsg, key)
		case "deploymentResult":
			err = unpopulate(val, &d.DeploymentResult)
			delete(rawMsg, key)
		case "deploymentState":
			err = unpopulate(val, &d.DeploymentState)
			delete(rawMsg, key)
		case "deploymentTime":
			err = unpopulateTimeRFC3339(val, &d.DeploymentTime)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}

// GetDataConnectorsCheckRequirements implements the DataConnectorsCheckRequirementsClassification interface for type Dynamics365CheckRequirements.
func (d *Dynamics365CheckRequirements) GetDataConnectorsCheckRequirements() *DataConnectorsCheckRequirements {
	return &DataConnectorsCheckRequirements{
		Kind: d.Kind,
	}
}

// MarshalJSON implements the json.Marshaller interface for type Dynamics365CheckRequirements.
func (d Dynamics365CheckRequirements) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	// Discriminator is written unconditionally for this concrete type.
	objectMap["kind"] = DataConnectorKindDynamics365
	populate(objectMap, "properties", d.Properties)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type Dynamics365CheckRequirements.
func (d *Dynamics365CheckRequirements) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "kind":
			err = unpopulate(val, &d.Kind)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &d.Properties)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}

// GetDataConnector implements the DataConnectorClassification interface for type Dynamics365DataConnector.
func (d *Dynamics365DataConnector) GetDataConnector() *DataConnector {
	return &DataConnector{
		Kind:       d.Kind,
		Etag:       d.Etag,
		ID:         d.ID,
		Name:       d.Name,
		Type:       d.Type,
		SystemData: d.SystemData,
	}
}

// MarshalJSON implements the json.Marshaller interface for type Dynamics365DataConnector.
func (d Dynamics365DataConnector) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "etag", d.Etag)
	populate(objectMap, "id", d.ID)
	objectMap["kind"] = DataConnectorKindDynamics365
	populate(objectMap, "name", d.Name)
	populate(objectMap, "properties", d.Properties)
	populate(objectMap, "systemData", d.SystemData)
	populate(objectMap, "type", d.Type)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type Dynamics365DataConnector.
func (d *Dynamics365DataConnector) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "etag":
			err = unpopulate(val, &d.Etag)
			delete(rawMsg, key)
		case "id":
			err = unpopulate(val, &d.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &d.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &d.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &d.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &d.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &d.Type)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}

// MarshalJSON implements the json.Marshaller interface for type EnrichmentDomainWhois.
func (e EnrichmentDomainWhois) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populateTimeRFC3339(objectMap, "created", e.Created)
	populate(objectMap, "domain", e.Domain)
	populateTimeRFC3339(objectMap, "expires", e.Expires)
	populate(objectMap, "parsedWhois", e.ParsedWhois)
	populate(objectMap, "server", e.Server)
	populateTimeRFC3339(objectMap, "updated", e.Updated)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type EnrichmentDomainWhois.
func (e *EnrichmentDomainWhois) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "created":
			err = unpopulateTimeRFC3339(val, &e.Created)
			delete(rawMsg, key)
		case "domain":
			err = unpopulate(val, &e.Domain)
			delete(rawMsg, key)
		case "expires":
			err = unpopulateTimeRFC3339(val, &e.Expires)
			delete(rawMsg, key)
		case "parsedWhois":
			err = unpopulate(val, &e.ParsedWhois)
			delete(rawMsg, key)
		case "server":
			err = unpopulate(val, &e.Server)
			delete(rawMsg, key)
		case "updated":
			err = unpopulateTimeRFC3339(val, &e.Updated)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}

// MarshalJSON implements the json.Marshaller interface for type EnrichmentDomainWhoisContact.
func (e EnrichmentDomainWhoisContact) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "city", e.City)
	populate(objectMap, "country", e.Country)
	populate(objectMap, "email", e.Email)
	populate(objectMap, "fax", e.Fax)
	populate(objectMap, "name", e.Name)
	populate(objectMap, "org", e.Org)
	populate(objectMap, "phone", e.Phone)
	populate(objectMap, "postal", e.Postal)
	populate(objectMap, "state", e.State)
	populate(objectMap, "street", e.Street)
	return json.Marshal(objectMap)
}

// MarshalJSON implements the json.Marshaller interface for type EnrichmentDomainWhoisDetails.
func (e EnrichmentDomainWhoisDetails) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "contacts", e.Contacts)
	populate(objectMap, "nameServers", e.NameServers)
	populate(objectMap, "registrar", e.Registrar)
	populate(objectMap, "statuses", e.Statuses)
	return json.Marshal(objectMap)
}

// GetEntity implements the EntityClassification interface for type Entity.
func (e *Entity) GetEntity() *Entity { return e }

// GetSettings implements the SettingsClassification interface for type EntityAnalytics.
func (e *EntityAnalytics) GetSettings() *Settings {
	return &Settings{
		Kind:       e.Kind,
		Etag:       e.Etag,
		ID:         e.ID,
		Name:       e.Name,
		Type:       e.Type,
		SystemData: e.SystemData,
	}
}

// MarshalJSON implements the json.Marshaller interface for type EntityAnalytics.
func (e EntityAnalytics) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "etag", e.Etag)
	populate(objectMap, "id", e.ID)
	objectMap["kind"] = SettingKindEntityAnalytics
	populate(objectMap, "name", e.Name)
	populate(objectMap, "properties", e.Properties)
	populate(objectMap, "systemData", e.SystemData)
	populate(objectMap, "type", e.Type)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type EntityAnalytics.
func (e *EntityAnalytics) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "etag":
			err = unpopulate(val, &e.Etag)
			delete(rawMsg, key)
		case "id":
			err = unpopulate(val, &e.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &e.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &e.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &e.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &e.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &e.Type)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}

// MarshalJSON implements the json.Marshaller interface for type EntityAnalyticsProperties.
func (e EntityAnalyticsProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "entityProviders", e.EntityProviders)
	return json.Marshal(objectMap)
}

// MarshalJSON implements the json.Marshaller interface for type EntityCommonProperties.
func (e EntityCommonProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "additionalData", e.AdditionalData)
	populate(objectMap, "friendlyName", e.FriendlyName)
	return json.Marshal(objectMap)
}
// MarshalJSON implements the json.Marshaller interface for type EntityEdges.
func (e EntityEdges) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "additionalData", e.AdditionalData)
	populate(objectMap, "targetEntityId", e.TargetEntityID)
	return json.Marshal(objectMap)
}

// MarshalJSON implements the json.Marshaller interface for type EntityExpandParameters.
func (e EntityExpandParameters) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	// Time-range bounds are serialized in RFC 3339 format.
	populateTimeRFC3339(objectMap, "endTime", e.EndTime)
	populate(objectMap, "expansionId", e.ExpansionID)
	populateTimeRFC3339(objectMap, "startTime", e.StartTime)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type EntityExpandParameters.
func (e *EntityExpandParameters) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "endTime":
			err = unpopulateTimeRFC3339(val, &e.EndTime)
			delete(rawMsg, key)
		case "expansionId":
			err = unpopulate(val, &e.ExpansionID)
			delete(rawMsg, key)
		case "startTime":
			err = unpopulateTimeRFC3339(val, &e.StartTime)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}

// MarshalJSON implements the json.Marshaller interface for type EntityExpandResponseValue.
func (e EntityExpandResponseValue) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "edges", e.Edges)
	populate(objectMap, "entities", e.Entities)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type EntityExpandResponseValue.
func (e *EntityExpandResponseValue) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "edges":
			err = unpopulate(val, &e.Edges)
			delete(rawMsg, key)
		case "entities":
			// Polymorphic array: each element is decoded to the concrete
			// Entity subtype selected by its "kind" discriminator.
			e.Entities, err = unmarshalEntityClassificationArray(val)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}

// MarshalJSON implements the json.Marshaller interface for type EntityGetInsightsParameters.
func (e EntityGetInsightsParameters) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "addDefaultExtendedTimeRange", e.AddDefaultExtendedTimeRange)
	populateTimeRFC3339(objectMap, "endTime", e.EndTime)
	populate(objectMap, "insightQueryIds", e.InsightQueryIDs)
	populateTimeRFC3339(objectMap, "startTime", e.StartTime)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type EntityGetInsightsParameters.
func (e *EntityGetInsightsParameters) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "addDefaultExtendedTimeRange":
			err = unpopulate(val, &e.AddDefaultExtendedTimeRange)
			delete(rawMsg, key)
		case "endTime":
			err = unpopulateTimeRFC3339(val, &e.EndTime)
			delete(rawMsg, key)
		case "insightQueryIds":
			err = unpopulate(val, &e.InsightQueryIDs)
			delete(rawMsg, key)
		case "startTime":
			err = unpopulateTimeRFC3339(val, &e.StartTime)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}

// MarshalJSON implements the json.Marshaller interface for type EntityGetInsightsResponse.
func (e EntityGetInsightsResponse) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "metaData", e.MetaData)
	populate(objectMap, "value", e.Value)
	return json.Marshal(objectMap)
}

// MarshalJSON implements the json.Marshaller interface for type EntityInsightItem.
func (e EntityInsightItem) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "chartQueryResults", e.ChartQueryResults)
	populate(objectMap, "queryId", e.QueryID)
	populate(objectMap, "queryTimeInterval", e.QueryTimeInterval)
	populate(objectMap, "tableQueryResults", e.TableQueryResults)
	return json.Marshal(objectMap)
}

// MarshalJSON implements the json.Marshaller interface for type EntityInsightItemQueryTimeInterval.
func (e EntityInsightItemQueryTimeInterval) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populateTimeRFC3339(objectMap, "endTime", e.EndTime)
	populateTimeRFC3339(objectMap, "startTime", e.StartTime)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type EntityInsightItemQueryTimeInterval.
func (e *EntityInsightItemQueryTimeInterval) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "endTime":
			err = unpopulateTimeRFC3339(val, &e.EndTime)
			delete(rawMsg, key)
		case "startTime":
			err = unpopulateTimeRFC3339(val, &e.StartTime)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}

// MarshalJSON implements the json.Marshaller interface for type EntityList.
func (e EntityList) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "nextLink", e.NextLink)
	populate(objectMap, "value", e.Value)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type EntityList.
func (e *EntityList) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "nextLink":
			err = unpopulate(val, &e.NextLink)
			delete(rawMsg, key)
		case "value":
			e.Value, err = unmarshalEntityClassificationArray(val)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}

// MarshalJSON implements the json.Marshaller interface for type EntityMapping.
func (e EntityMapping) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "entityType", e.EntityType)
	populate(objectMap, "fieldMappings", e.FieldMappings)
	return json.Marshal(objectMap)
}

// GetEntityQuery implements the EntityQueryClassification interface for type EntityQuery.
func (e *EntityQuery) GetEntityQuery() *EntityQuery { return e }

// GetEntityQueryItem implements the EntityQueryItemClassification interface for type EntityQueryItem.
func (e *EntityQueryItem) GetEntityQueryItem() *EntityQueryItem { return e }

// MarshalJSON implements the json.Marshaller interface for type EntityQueryItemProperties.
func (e EntityQueryItemProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "dataTypes", e.DataTypes)
	// NOTE(review): address-of differs from sibling populate calls —
	// presumably because EntitiesFilter is an interface{} field in the
	// generated model; matches the generator's output. Verify before changing.
	populate(objectMap, "entitiesFilter", &e.EntitiesFilter)
	populate(objectMap, "inputEntityType", e.InputEntityType)
	populate(objectMap, "requiredInputFieldsSets", e.RequiredInputFieldsSets)
	return json.Marshal(objectMap)
}

// MarshalJSON implements the json.Marshaller interface for type EntityQueryList.
func (e EntityQueryList) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "nextLink", e.NextLink)
	populate(objectMap, "value", e.Value)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type EntityQueryList.
func (e *EntityQueryList) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "nextLink":
			err = unpopulate(val, &e.NextLink)
			delete(rawMsg, key)
		case "value":
			e.Value, err = unmarshalEntityQueryClassificationArray(val)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}

// GetEntityQueryTemplate implements the EntityQueryTemplateClassification interface for type EntityQueryTemplate.
func (e *EntityQueryTemplate) GetEntityQueryTemplate() *EntityQueryTemplate { return e }

// MarshalJSON implements the json.Marshaller interface for type EntityQueryTemplateList.
func (e EntityQueryTemplateList) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "nextLink", e.NextLink)
	populate(objectMap, "value", e.Value)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type EntityQueryTemplateList.
func (e *EntityQueryTemplateList) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "nextLink":
			err = unpopulate(val, &e.NextLink)
			delete(rawMsg, key)
		case "value":
			e.Value, err = unmarshalEntityQueryTemplateClassificationArray(val)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}

// GetEntityTimelineItem implements the EntityTimelineItemClassification interface for type EntityTimelineItem.
func (e *EntityTimelineItem) GetEntityTimelineItem() *EntityTimelineItem { return e }
// MarshalJSON implements the json.Marshaller interface for type EntityTimelineParameters.
func (e EntityTimelineParameters) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populateTimeRFC3339(objectMap, "endTime", e.EndTime)
populate(objectMap, "kinds", e.Kinds)
populate(objectMap, "numberOfBucket", e.NumberOfBucket)
populateTimeRFC3339(objectMap, "startTime", e.StartTime)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type EntityTimelineParameters.
func (e *EntityTimelineParameters) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
for key, val := range rawMsg {
var err error
switch key {
case "endTime":
err = unpopulateTimeRFC3339(val, &e.EndTime)
delete(rawMsg, key)
case "kinds":
err = unpopulate(val, &e.Kinds)
delete(rawMsg, key)
case "numberOfBucket":
err = unpopulate(val, &e.NumberOfBucket)
delete(rawMsg, key)
case "startTime":
err = unpopulateTimeRFC3339(val, &e.StartTime)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// MarshalJSON implements the json.Marshaller interface for type EntityTimelineResponse.
func (e EntityTimelineResponse) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "metaData", e.MetaData)
populate(objectMap, "value", e.Value)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type EntityTimelineResponse.
func (e *EntityTimelineResponse) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
for key, val := range rawMsg {
var err error
switch key {
case "metaData":
err = unpopulate(val, &e.MetaData)
delete(rawMsg, key)
case "value":
e.Value, err = unmarshalEntityTimelineItemClassificationArray(val)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// MarshalJSON implements the json.Marshaller interface for type ExpansionEntityQueriesProperties.
func (e ExpansionEntityQueriesProperties) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "dataSources", e.DataSources)
populate(objectMap, "displayName", e.DisplayName)
populate(objectMap, "inputEntityType", e.InputEntityType)
populate(objectMap, "inputFields", e.InputFields)
populate(objectMap, "outputEntityTypes", e.OutputEntityTypes)
populate(objectMap, "queryTemplate", e.QueryTemplate)
return json.Marshal(objectMap)
}
// GetEntityQuery implements the EntityQueryClassification interface for type ExpansionEntityQuery.
func (e *ExpansionEntityQuery) GetEntityQuery() *EntityQuery {
return &EntityQuery{
Kind: e.Kind,
Etag: e.Etag,
ID: e.ID,
Name: e.Name,
Type: e.Type,
SystemData: e.SystemData,
}
}
// MarshalJSON implements the json.Marshaller interface for type ExpansionEntityQuery.
func (e ExpansionEntityQuery) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "etag", e.Etag)
populate(objectMap, "id", e.ID)
objectMap["kind"] = EntityQueryKindExpansion
populate(objectMap, "name", e.Name)
populate(objectMap, "properties", e.Properties)
populate(objectMap, "systemData", e.SystemData)
populate(objectMap, "type", e.Type)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type ExpansionEntityQuery.
func (e *ExpansionEntityQuery) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
for key, val := range rawMsg {
var err error
switch key {
case "etag":
err = unpopulate(val, &e.Etag)
delete(rawMsg, key)
case "id":
err = unpopulate(val, &e.ID)
delete(rawMsg, key)
case "kind":
err = unpopulate(val, &e.Kind)
delete(rawMsg, key)
case "name":
err = unpopulate(val, &e.Name)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &e.Properties)
delete(rawMsg, key)
case "systemData":
err = unpopulate(val, &e.SystemData)
delete(rawMsg, key)
case "type":
err = unpopulate(val, &e.Type)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// MarshalJSON implements the json.Marshaller interface for type ExpansionResultsMetadata.
func (e ExpansionResultsMetadata) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "aggregations", e.Aggregations)
return json.Marshal(objectMap)
}
// GetSettings implements the SettingsClassification interface for type EyesOn.
func (e *EyesOn) GetSettings() *Settings {
return &Settings{
Kind: e.Kind,
Etag: e.Etag,
ID: e.ID,
Name: e.Name,
Type: e.Type,
SystemData: e.SystemData,
}
}
// MarshalJSON implements the json.Marshaller interface for type EyesOn.
func (e EyesOn) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "etag", e.Etag)
populate(objectMap, "id", e.ID)
objectMap["kind"] = SettingKindEyesOn
populate(objectMap, "name", e.Name)
populate(objectMap, "properties", e.Properties)
populate(objectMap, "systemData", e.SystemData)
populate(objectMap, "type", e.Type)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type EyesOn.
func (e *EyesOn) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
for key, val := range rawMsg {
var err error
switch key {
case "etag":
err = unpopulate(val, &e.Etag)
delete(rawMsg, key)
case "id":
err = unpopulate(val, &e.ID)
delete(rawMsg, key)
case "kind":
err = unpopulate(val, &e.Kind)
delete(rawMsg, key)
case "name":
err = unpopulate(val, &e.Name)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &e.Properties)
delete(rawMsg, key)
case "systemData":
err = unpopulate(val, &e.SystemData)
delete(rawMsg, key)
case "type":
err = unpopulate(val, &e.Type)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// GetEntity implements the EntityClassification interface for type FileEntity.
func (f *FileEntity) GetEntity() *Entity {
return &Entity{
Kind: f.Kind,
ID: f.ID,
Name: f.Name,
Type: f.Type,
SystemData: f.SystemData,
}
}
// MarshalJSON implements the json.Marshaller interface for type FileEntity.
func (f FileEntity) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "id", f.ID)
objectMap["kind"] = EntityKindFile
populate(objectMap, "name", f.Name)
populate(objectMap, "properties", f.Properties)
populate(objectMap, "systemData", f.SystemData)
populate(objectMap, "type", f.Type)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type FileEntity.
func (f *FileEntity) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
for key, val := range rawMsg {
var err error
switch key {
case "id":
err = unpopulate(val, &f.ID)
delete(rawMsg, key)
case "kind":
err = unpopulate(val, &f.Kind)
delete(rawMsg, key)
case "name":
err = unpopulate(val, &f.Name)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &f.Properties)
delete(rawMsg, key)
case "systemData":
err = unpopulate(val, &f.SystemData)
delete(rawMsg, key)
case "type":
err = unpopulate(val, &f.Type)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// MarshalJSON implements the json.Marshaller interface for type FileEntityProperties.
// Nil/empty fields are elided by populate rather than emitted as JSON null.
func (f FileEntityProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "additionalData", f.AdditionalData)
	populate(objectMap, "directory", f.Directory)
	populate(objectMap, "fileHashEntityIds", f.FileHashEntityIDs)
	populate(objectMap, "fileName", f.FileName)
	populate(objectMap, "friendlyName", f.FriendlyName)
	populate(objectMap, "hostEntityId", f.HostEntityID)
	return json.Marshal(objectMap)
}
// GetEntity implements the EntityClassification interface for type FileHashEntity.
// It copies the discriminated-union base fields into a fresh Entity value.
func (f *FileHashEntity) GetEntity() *Entity {
	return &Entity{
		Kind:       f.Kind,
		ID:         f.ID,
		Name:       f.Name,
		Type:       f.Type,
		SystemData: f.SystemData,
	}
}

// MarshalJSON implements the json.Marshaller interface for type FileHashEntity.
// The "kind" discriminator is always written as the fixed constant for this type.
func (f FileHashEntity) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "id", f.ID)
	objectMap["kind"] = EntityKindFileHash
	populate(objectMap, "name", f.Name)
	populate(objectMap, "properties", f.Properties)
	populate(objectMap, "systemData", f.SystemData)
	populate(objectMap, "type", f.Type)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type FileHashEntity.
// Consumed keys are deleted from rawMsg; unknown keys are ignored.
func (f *FileHashEntity) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "id":
			err = unpopulate(val, &f.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &f.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &f.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &f.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &f.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &f.Type)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type FileHashEntityProperties.
// Nil/empty fields are elided by populate rather than emitted as JSON null.
func (f FileHashEntityProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "additionalData", f.AdditionalData)
	populate(objectMap, "algorithm", f.Algorithm)
	populate(objectMap, "friendlyName", f.FriendlyName)
	populate(objectMap, "hashValue", f.HashValue)
	return json.Marshal(objectMap)
}
// GetAlertRule implements the AlertRuleClassification interface for type FusionAlertRule.
// It copies the discriminated-union base fields into a fresh AlertRule value.
func (f *FusionAlertRule) GetAlertRule() *AlertRule {
	return &AlertRule{
		Kind:       f.Kind,
		Etag:       f.Etag,
		ID:         f.ID,
		Name:       f.Name,
		Type:       f.Type,
		SystemData: f.SystemData,
	}
}

// MarshalJSON implements the json.Marshaller interface for type FusionAlertRule.
// The "kind" discriminator is always written as the fixed constant for this type.
func (f FusionAlertRule) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "etag", f.Etag)
	populate(objectMap, "id", f.ID)
	objectMap["kind"] = AlertRuleKindFusion
	populate(objectMap, "name", f.Name)
	populate(objectMap, "properties", f.Properties)
	populate(objectMap, "systemData", f.SystemData)
	populate(objectMap, "type", f.Type)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type FusionAlertRule.
// Consumed keys are deleted from rawMsg; unknown keys are ignored.
func (f *FusionAlertRule) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "etag":
			err = unpopulate(val, &f.Etag)
			delete(rawMsg, key)
		case "id":
			err = unpopulate(val, &f.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &f.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &f.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &f.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &f.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &f.Type)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type FusionAlertRuleProperties.
// Time fields are serialized in RFC 3339 format via populateTimeRFC3339.
func (f FusionAlertRuleProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "alertRuleTemplateName", f.AlertRuleTemplateName)
	populate(objectMap, "description", f.Description)
	populate(objectMap, "displayName", f.DisplayName)
	populate(objectMap, "enabled", f.Enabled)
	populateTimeRFC3339(objectMap, "lastModifiedUtc", f.LastModifiedUTC)
	populate(objectMap, "scenarioExclusionPatterns", f.ScenarioExclusionPatterns)
	populate(objectMap, "severity", f.Severity)
	populate(objectMap, "sourceSettings", f.SourceSettings)
	populate(objectMap, "tactics", f.Tactics)
	populate(objectMap, "techniques", f.Techniques)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type FusionAlertRuleProperties.
// Consumed keys are deleted from rawMsg; unknown keys are ignored. Time fields
// are parsed from RFC 3339 via unpopulateTimeRFC3339.
func (f *FusionAlertRuleProperties) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "alertRuleTemplateName":
			err = unpopulate(val, &f.AlertRuleTemplateName)
			delete(rawMsg, key)
		case "description":
			err = unpopulate(val, &f.Description)
			delete(rawMsg, key)
		case "displayName":
			err = unpopulate(val, &f.DisplayName)
			delete(rawMsg, key)
		case "enabled":
			err = unpopulate(val, &f.Enabled)
			delete(rawMsg, key)
		case "lastModifiedUtc":
			err = unpopulateTimeRFC3339(val, &f.LastModifiedUTC)
			delete(rawMsg, key)
		case "scenarioExclusionPatterns":
			err = unpopulate(val, &f.ScenarioExclusionPatterns)
			delete(rawMsg, key)
		case "severity":
			err = unpopulate(val, &f.Severity)
			delete(rawMsg, key)
		case "sourceSettings":
			err = unpopulate(val, &f.SourceSettings)
			delete(rawMsg, key)
		case "tactics":
			err = unpopulate(val, &f.Tactics)
			delete(rawMsg, key)
		case "techniques":
			err = unpopulate(val, &f.Techniques)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// GetAlertRuleTemplate implements the AlertRuleTemplateClassification interface for type FusionAlertRuleTemplate.
// It copies the discriminated-union base fields into a fresh AlertRuleTemplate value.
func (f *FusionAlertRuleTemplate) GetAlertRuleTemplate() *AlertRuleTemplate {
	return &AlertRuleTemplate{
		Kind:       f.Kind,
		ID:         f.ID,
		Name:       f.Name,
		Type:       f.Type,
		SystemData: f.SystemData,
	}
}

// MarshalJSON implements the json.Marshaller interface for type FusionAlertRuleTemplate.
// The "kind" discriminator is always written as the fixed constant for this type.
func (f FusionAlertRuleTemplate) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "id", f.ID)
	objectMap["kind"] = AlertRuleKindFusion
	populate(objectMap, "name", f.Name)
	populate(objectMap, "properties", f.Properties)
	populate(objectMap, "systemData", f.SystemData)
	populate(objectMap, "type", f.Type)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type FusionAlertRuleTemplate.
// Consumed keys are deleted from rawMsg; unknown keys are ignored.
func (f *FusionAlertRuleTemplate) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "id":
			err = unpopulate(val, &f.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &f.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &f.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &f.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &f.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &f.Type)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type FusionAlertRuleTemplateProperties.
// Note the mixed key casing ("createdDateUTC" vs. "lastModifiedUtc" elsewhere)
// comes from the service's REST API schema and must be preserved as-is.
func (f FusionAlertRuleTemplateProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "alertRulesCreatedByTemplateCount", f.AlertRulesCreatedByTemplateCount)
	populateTimeRFC3339(objectMap, "createdDateUTC", f.CreatedDateUTC)
	populate(objectMap, "description", f.Description)
	populate(objectMap, "displayName", f.DisplayName)
	populateTimeRFC3339(objectMap, "lastUpdatedDateUTC", f.LastUpdatedDateUTC)
	populate(objectMap, "requiredDataConnectors", f.RequiredDataConnectors)
	populate(objectMap, "severity", f.Severity)
	populate(objectMap, "sourceSettings", f.SourceSettings)
	populate(objectMap, "status", f.Status)
	populate(objectMap, "tactics", f.Tactics)
	populate(objectMap, "techniques", f.Techniques)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type FusionAlertRuleTemplateProperties.
// Consumed keys are deleted from rawMsg; unknown keys are ignored.
func (f *FusionAlertRuleTemplateProperties) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "alertRulesCreatedByTemplateCount":
			err = unpopulate(val, &f.AlertRulesCreatedByTemplateCount)
			delete(rawMsg, key)
		case "createdDateUTC":
			err = unpopulateTimeRFC3339(val, &f.CreatedDateUTC)
			delete(rawMsg, key)
		case "description":
			err = unpopulate(val, &f.Description)
			delete(rawMsg, key)
		case "displayName":
			err = unpopulate(val, &f.DisplayName)
			delete(rawMsg, key)
		case "lastUpdatedDateUTC":
			err = unpopulateTimeRFC3339(val, &f.LastUpdatedDateUTC)
			delete(rawMsg, key)
		case "requiredDataConnectors":
			err = unpopulate(val, &f.RequiredDataConnectors)
			delete(rawMsg, key)
		case "severity":
			err = unpopulate(val, &f.Severity)
			delete(rawMsg, key)
		case "sourceSettings":
			err = unpopulate(val, &f.SourceSettings)
			delete(rawMsg, key)
		case "status":
			err = unpopulate(val, &f.Status)
			delete(rawMsg, key)
		case "tactics":
			err = unpopulate(val, &f.Tactics)
			delete(rawMsg, key)
		case "techniques":
			err = unpopulate(val, &f.Techniques)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type FusionSourceSettings.
// Nil/empty fields are elided by populate rather than emitted as JSON null.
func (f FusionSourceSettings) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "enabled", f.Enabled)
	populate(objectMap, "sourceName", f.SourceName)
	populate(objectMap, "sourceSubTypes", f.SourceSubTypes)
	return json.Marshal(objectMap)
}

// MarshalJSON implements the json.Marshaller interface for type FusionSubTypeSeverityFilter.
func (f FusionSubTypeSeverityFilter) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "filters", f.Filters)
	populate(objectMap, "isSupported", f.IsSupported)
	return json.Marshal(objectMap)
}

// MarshalJSON implements the json.Marshaller interface for type FusionTemplateSourceSetting.
func (f FusionTemplateSourceSetting) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "sourceName", f.SourceName)
	populate(objectMap, "sourceSubTypes", f.SourceSubTypes)
	return json.Marshal(objectMap)
}

// MarshalJSON implements the json.Marshaller interface for type FusionTemplateSubTypeSeverityFilter.
func (f FusionTemplateSubTypeSeverityFilter) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "isSupported", f.IsSupported)
	populate(objectMap, "severityFilters", f.SeverityFilters)
	return json.Marshal(objectMap)
}
// MarshalJSON implements the json.Marshaller interface for type GetInsightsResultsMetadata.
func (g GetInsightsResultsMetadata) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "errors", g.Errors)
	populate(objectMap, "totalCount", g.TotalCount)
	return json.Marshal(objectMap)
}

// MarshalJSON implements the json.Marshaller interface for type GetQueriesResponse.
func (g GetQueriesResponse) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "value", g.Value)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type GetQueriesResponse.
// "value" holds a polymorphic array, so it is decoded through the
// kind-discriminated helper rather than plain json unmarshalling.
func (g *GetQueriesResponse) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "value":
			g.Value, err = unmarshalEntityQueryItemClassificationArray(val)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type GroupingConfiguration.
// Nil/empty fields are elided by populate rather than emitted as JSON null.
func (g GroupingConfiguration) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "enabled", g.Enabled)
	populate(objectMap, "groupByAlertDetails", g.GroupByAlertDetails)
	populate(objectMap, "groupByCustomDetails", g.GroupByCustomDetails)
	populate(objectMap, "groupByEntities", g.GroupByEntities)
	populate(objectMap, "lookbackDuration", g.LookbackDuration)
	populate(objectMap, "matchingMethod", g.MatchingMethod)
	populate(objectMap, "reopenClosedIncident", g.ReopenClosedIncident)
	return json.Marshal(objectMap)
}
// GetEntity implements the EntityClassification interface for type HostEntity.
// It copies the discriminated-union base fields into a fresh Entity value.
func (h *HostEntity) GetEntity() *Entity {
	return &Entity{
		Kind:       h.Kind,
		ID:         h.ID,
		Name:       h.Name,
		Type:       h.Type,
		SystemData: h.SystemData,
	}
}

// MarshalJSON implements the json.Marshaller interface for type HostEntity.
// The "kind" discriminator is always written as the fixed constant for this type.
func (h HostEntity) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "id", h.ID)
	objectMap["kind"] = EntityKindHost
	populate(objectMap, "name", h.Name)
	populate(objectMap, "properties", h.Properties)
	populate(objectMap, "systemData", h.SystemData)
	populate(objectMap, "type", h.Type)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type HostEntity.
// Consumed keys are deleted from rawMsg; unknown keys are ignored.
func (h *HostEntity) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "id":
			err = unpopulate(val, &h.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &h.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &h.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &h.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &h.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &h.Type)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type HostEntityProperties.
// JSON key casing ("azureID", "omsAgentID", "dnsDomain", …) mirrors the service
// schema and must be preserved exactly.
func (h HostEntityProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "additionalData", h.AdditionalData)
	populate(objectMap, "azureID", h.AzureID)
	populate(objectMap, "dnsDomain", h.DNSDomain)
	populate(objectMap, "friendlyName", h.FriendlyName)
	populate(objectMap, "hostName", h.HostName)
	populate(objectMap, "isDomainJoined", h.IsDomainJoined)
	populate(objectMap, "netBiosName", h.NetBiosName)
	populate(objectMap, "ntDomain", h.NtDomain)
	populate(objectMap, "osFamily", h.OSFamily)
	populate(objectMap, "osVersion", h.OSVersion)
	populate(objectMap, "omsAgentID", h.OmsAgentID)
	return json.Marshal(objectMap)
}
// GetEntity implements the EntityClassification interface for type HuntingBookmark.
// It copies the discriminated-union base fields into a fresh Entity value.
func (h *HuntingBookmark) GetEntity() *Entity {
	return &Entity{
		Kind:       h.Kind,
		ID:         h.ID,
		Name:       h.Name,
		Type:       h.Type,
		SystemData: h.SystemData,
	}
}

// MarshalJSON implements the json.Marshaller interface for type HuntingBookmark.
// The "kind" discriminator is always written as EntityKindBookmark.
func (h HuntingBookmark) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "id", h.ID)
	objectMap["kind"] = EntityKindBookmark
	populate(objectMap, "name", h.Name)
	populate(objectMap, "properties", h.Properties)
	populate(objectMap, "systemData", h.SystemData)
	populate(objectMap, "type", h.Type)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type HuntingBookmark.
// Consumed keys are deleted from rawMsg; unknown keys are ignored.
func (h *HuntingBookmark) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "id":
			err = unpopulate(val, &h.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &h.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &h.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &h.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &h.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &h.Type)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type HuntingBookmarkProperties.
// Time fields ("created", "eventTime", "updated") are serialized in RFC 3339.
func (h HuntingBookmarkProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "additionalData", h.AdditionalData)
	populateTimeRFC3339(objectMap, "created", h.Created)
	populate(objectMap, "createdBy", h.CreatedBy)
	populate(objectMap, "displayName", h.DisplayName)
	populateTimeRFC3339(objectMap, "eventTime", h.EventTime)
	populate(objectMap, "friendlyName", h.FriendlyName)
	populate(objectMap, "incidentInfo", h.IncidentInfo)
	populate(objectMap, "labels", h.Labels)
	populate(objectMap, "notes", h.Notes)
	populate(objectMap, "query", h.Query)
	populate(objectMap, "queryResult", h.QueryResult)
	populateTimeRFC3339(objectMap, "updated", h.Updated)
	populate(objectMap, "updatedBy", h.UpdatedBy)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type HuntingBookmarkProperties.
// Consumed keys are deleted from rawMsg; unknown keys are ignored.
func (h *HuntingBookmarkProperties) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "additionalData":
			err = unpopulate(val, &h.AdditionalData)
			delete(rawMsg, key)
		case "created":
			err = unpopulateTimeRFC3339(val, &h.Created)
			delete(rawMsg, key)
		case "createdBy":
			err = unpopulate(val, &h.CreatedBy)
			delete(rawMsg, key)
		case "displayName":
			err = unpopulate(val, &h.DisplayName)
			delete(rawMsg, key)
		case "eventTime":
			err = unpopulateTimeRFC3339(val, &h.EventTime)
			delete(rawMsg, key)
		case "friendlyName":
			err = unpopulate(val, &h.FriendlyName)
			delete(rawMsg, key)
		case "incidentInfo":
			err = unpopulate(val, &h.IncidentInfo)
			delete(rawMsg, key)
		case "labels":
			err = unpopulate(val, &h.Labels)
			delete(rawMsg, key)
		case "notes":
			err = unpopulate(val, &h.Notes)
			delete(rawMsg, key)
		case "query":
			err = unpopulate(val, &h.Query)
			delete(rawMsg, key)
		case "queryResult":
			err = unpopulate(val, &h.QueryResult)
			delete(rawMsg, key)
		case "updated":
			err = unpopulateTimeRFC3339(val, &h.Updated)
			delete(rawMsg, key)
		case "updatedBy":
			err = unpopulate(val, &h.UpdatedBy)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// GetEntity implements the EntityClassification interface for type IPEntity.
// It copies the discriminated-union base fields into a fresh Entity value.
func (i *IPEntity) GetEntity() *Entity {
	return &Entity{
		Kind:       i.Kind,
		ID:         i.ID,
		Name:       i.Name,
		Type:       i.Type,
		SystemData: i.SystemData,
	}
}

// MarshalJSON implements the json.Marshaller interface for type IPEntity.
// The "kind" discriminator is always written as EntityKindIP.
func (i IPEntity) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "id", i.ID)
	objectMap["kind"] = EntityKindIP
	populate(objectMap, "name", i.Name)
	populate(objectMap, "properties", i.Properties)
	populate(objectMap, "systemData", i.SystemData)
	populate(objectMap, "type", i.Type)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type IPEntity.
// Consumed keys are deleted from rawMsg; unknown keys are ignored.
func (i *IPEntity) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "id":
			err = unpopulate(val, &i.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &i.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &i.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &i.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &i.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &i.Type)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type IPEntityProperties.
// Nil/empty fields are elided by populate rather than emitted as JSON null.
func (i IPEntityProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "additionalData", i.AdditionalData)
	populate(objectMap, "address", i.Address)
	populate(objectMap, "friendlyName", i.FriendlyName)
	populate(objectMap, "location", i.Location)
	populate(objectMap, "threatIntelligence", i.ThreatIntelligence)
	return json.Marshal(objectMap)
}
// MarshalJSON implements the json.Marshaller interface for type IncidentAdditionalData.
func (i IncidentAdditionalData) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "alertProductNames", i.AlertProductNames)
	populate(objectMap, "alertsCount", i.AlertsCount)
	populate(objectMap, "bookmarksCount", i.BookmarksCount)
	populate(objectMap, "commentsCount", i.CommentsCount)
	populate(objectMap, "providerIncidentUrl", i.ProviderIncidentURL)
	populate(objectMap, "tactics", i.Tactics)
	populate(objectMap, "techniques", i.Techniques)
	return json.Marshal(objectMap)
}

// MarshalJSON implements the json.Marshaller interface for type IncidentAlertList.
func (i IncidentAlertList) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "value", i.Value)
	return json.Marshal(objectMap)
}

// MarshalJSON implements the json.Marshaller interface for type IncidentBookmarkList.
func (i IncidentBookmarkList) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "value", i.Value)
	return json.Marshal(objectMap)
}

// MarshalJSON implements the json.Marshaller interface for type IncidentCommentList.
// Paged list: "nextLink" points at the next page when more results exist.
func (i IncidentCommentList) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "nextLink", i.NextLink)
	populate(objectMap, "value", i.Value)
	return json.Marshal(objectMap)
}
// MarshalJSON implements the json.Marshaller interface for type IncidentCommentProperties.
// Time fields are serialized in RFC 3339 format.
func (i IncidentCommentProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "author", i.Author)
	populateTimeRFC3339(objectMap, "createdTimeUtc", i.CreatedTimeUTC)
	populateTimeRFC3339(objectMap, "lastModifiedTimeUtc", i.LastModifiedTimeUTC)
	populate(objectMap, "message", i.Message)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type IncidentCommentProperties.
// Consumed keys are deleted from rawMsg; unknown keys are ignored.
func (i *IncidentCommentProperties) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "author":
			err = unpopulate(val, &i.Author)
			delete(rawMsg, key)
		case "createdTimeUtc":
			err = unpopulateTimeRFC3339(val, &i.CreatedTimeUTC)
			delete(rawMsg, key)
		case "lastModifiedTimeUtc":
			err = unpopulateTimeRFC3339(val, &i.LastModifiedTimeUTC)
			delete(rawMsg, key)
		case "message":
			err = unpopulate(val, &i.Message)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type IncidentEntitiesResponse.
func (i IncidentEntitiesResponse) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "entities", i.Entities)
	populate(objectMap, "metaData", i.MetaData)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type IncidentEntitiesResponse.
// "entities" is a polymorphic array, so it is decoded through the
// kind-discriminated helper rather than plain json unmarshalling.
func (i *IncidentEntitiesResponse) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "entities":
			i.Entities, err = unmarshalEntityClassificationArray(val)
			delete(rawMsg, key)
		case "metaData":
			err = unpopulate(val, &i.MetaData)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}

// MarshalJSON implements the json.Marshaller interface for type IncidentList.
// Paged list: "nextLink" points at the next page when more results exist.
func (i IncidentList) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "nextLink", i.NextLink)
	populate(objectMap, "value", i.Value)
	return json.Marshal(objectMap)
}
// MarshalJSON implements the json.Marshaller interface for type IncidentProperties.
// Time fields are serialized in RFC 3339; nil/empty fields are elided.
func (i IncidentProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "additionalData", i.AdditionalData)
	populate(objectMap, "classification", i.Classification)
	populate(objectMap, "classificationComment", i.ClassificationComment)
	populate(objectMap, "classificationReason", i.ClassificationReason)
	populateTimeRFC3339(objectMap, "createdTimeUtc", i.CreatedTimeUTC)
	populate(objectMap, "description", i.Description)
	populateTimeRFC3339(objectMap, "firstActivityTimeUtc", i.FirstActivityTimeUTC)
	populate(objectMap, "incidentNumber", i.IncidentNumber)
	populate(objectMap, "incidentUrl", i.IncidentURL)
	populate(objectMap, "labels", i.Labels)
	populateTimeRFC3339(objectMap, "lastActivityTimeUtc", i.LastActivityTimeUTC)
	populateTimeRFC3339(objectMap, "lastModifiedTimeUtc", i.LastModifiedTimeUTC)
	populate(objectMap, "owner", i.Owner)
	populate(objectMap, "providerIncidentId", i.ProviderIncidentID)
	populate(objectMap, "providerName", i.ProviderName)
	populate(objectMap, "relatedAnalyticRuleIds", i.RelatedAnalyticRuleIDs)
	populate(objectMap, "severity", i.Severity)
	populate(objectMap, "status", i.Status)
	populate(objectMap, "teamInformation", i.TeamInformation)
	populate(objectMap, "title", i.Title)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type IncidentProperties.
// Consumed keys are deleted from rawMsg; unknown keys are ignored. Decoding
// stops and returns on the first field-level error.
func (i *IncidentProperties) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "additionalData":
			err = unpopulate(val, &i.AdditionalData)
			delete(rawMsg, key)
		case "classification":
			err = unpopulate(val, &i.Classification)
			delete(rawMsg, key)
		case "classificationComment":
			err = unpopulate(val, &i.ClassificationComment)
			delete(rawMsg, key)
		case "classificationReason":
			err = unpopulate(val, &i.ClassificationReason)
			delete(rawMsg, key)
		case "createdTimeUtc":
			err = unpopulateTimeRFC3339(val, &i.CreatedTimeUTC)
			delete(rawMsg, key)
		case "description":
			err = unpopulate(val, &i.Description)
			delete(rawMsg, key)
		case "firstActivityTimeUtc":
			err = unpopulateTimeRFC3339(val, &i.FirstActivityTimeUTC)
			delete(rawMsg, key)
		case "incidentNumber":
			err = unpopulate(val, &i.IncidentNumber)
			delete(rawMsg, key)
		case "incidentUrl":
			err = unpopulate(val, &i.IncidentURL)
			delete(rawMsg, key)
		case "labels":
			err = unpopulate(val, &i.Labels)
			delete(rawMsg, key)
		case "lastActivityTimeUtc":
			err = unpopulateTimeRFC3339(val, &i.LastActivityTimeUTC)
			delete(rawMsg, key)
		case "lastModifiedTimeUtc":
			err = unpopulateTimeRFC3339(val, &i.LastModifiedTimeUTC)
			delete(rawMsg, key)
		case "owner":
			err = unpopulate(val, &i.Owner)
			delete(rawMsg, key)
		case "providerIncidentId":
			err = unpopulate(val, &i.ProviderIncidentID)
			delete(rawMsg, key)
		case "providerName":
			err = unpopulate(val, &i.ProviderName)
			delete(rawMsg, key)
		case "relatedAnalyticRuleIds":
			err = unpopulate(val, &i.RelatedAnalyticRuleIDs)
			delete(rawMsg, key)
		case "severity":
			err = unpopulate(val, &i.Severity)
			delete(rawMsg, key)
		case "status":
			err = unpopulate(val, &i.Status)
			delete(rawMsg, key)
		case "teamInformation":
			err = unpopulate(val, &i.TeamInformation)
			delete(rawMsg, key)
		case "title":
			err = unpopulate(val, &i.Title)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type IncidentPropertiesAction.
// Nil/empty fields are elided by populate rather than emitted as JSON null.
func (i IncidentPropertiesAction) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "classification", i.Classification)
	populate(objectMap, "classificationComment", i.ClassificationComment)
	populate(objectMap, "classificationReason", i.ClassificationReason)
	populate(objectMap, "labels", i.Labels)
	populate(objectMap, "owner", i.Owner)
	populate(objectMap, "severity", i.Severity)
	populate(objectMap, "status", i.Status)
	return json.Marshal(objectMap)
}
// GetEntityQueryItem implements the EntityQueryItemClassification interface for type InsightQueryItem.
// It copies the discriminated-union base fields into a fresh EntityQueryItem value.
func (i *InsightQueryItem) GetEntityQueryItem() *EntityQueryItem {
	return &EntityQueryItem{
		ID:   i.ID,
		Name: i.Name,
		Type: i.Type,
		Kind: i.Kind,
	}
}

// MarshalJSON implements the json.Marshaller interface for type InsightQueryItem.
// The "kind" discriminator is always written as EntityQueryKindInsight. Unlike
// the entity types above, this type carries no systemData field.
func (i InsightQueryItem) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "id", i.ID)
	objectMap["kind"] = EntityQueryKindInsight
	populate(objectMap, "name", i.Name)
	populate(objectMap, "properties", i.Properties)
	populate(objectMap, "type", i.Type)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type InsightQueryItem.
// Consumed keys are deleted from rawMsg; unknown keys are ignored.
func (i *InsightQueryItem) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "id":
			err = unpopulate(val, &i.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &i.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &i.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &i.Properties)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &i.Type)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type InsightQueryItemProperties.
// NOTE(review): "chartQuery" and "entitiesFilter" pass &i.Field (address-of)
// where the other fields pass the value directly — this appears to be the code
// generator's handling for untyped (interface{}) fields; confirm against the
// populate helper before changing.
func (i InsightQueryItemProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "additionalQuery", i.AdditionalQuery)
	populate(objectMap, "baseQuery", i.BaseQuery)
	populate(objectMap, "chartQuery", &i.ChartQuery)
	populate(objectMap, "dataTypes", i.DataTypes)
	populate(objectMap, "defaultTimeRange", i.DefaultTimeRange)
	populate(objectMap, "description", i.Description)
	populate(objectMap, "displayName", i.DisplayName)
	populate(objectMap, "entitiesFilter", &i.EntitiesFilter)
	populate(objectMap, "inputEntityType", i.InputEntityType)
	populate(objectMap, "referenceTimeRange", i.ReferenceTimeRange)
	populate(objectMap, "requiredInputFieldsSets", i.RequiredInputFieldsSets)
	populate(objectMap, "tableQuery", i.TableQuery)
	return json.Marshal(objectMap)
}
// MarshalJSON implements the json.Marshaller interface for type InsightQueryItemPropertiesTableQuery.
func (i InsightQueryItemPropertiesTableQuery) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "columnsDefinitions", i.ColumnsDefinitions)
	populate(objectMap, "queriesDefinitions", i.QueriesDefinitions)
	return json.Marshal(objectMap)
}

// MarshalJSON implements the json.Marshaller interface for type InsightQueryItemPropertiesTableQueryQueriesDefinitionsItem.
func (i InsightQueryItemPropertiesTableQueryQueriesDefinitionsItem) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "filter", i.Filter)
	populate(objectMap, "linkColumnsDefinitions", i.LinkColumnsDefinitions)
	populate(objectMap, "project", i.Project)
	populate(objectMap, "summarize", i.Summarize)
	return json.Marshal(objectMap)
}

// MarshalJSON implements the json.Marshaller interface for type InsightsTableResult.
func (i InsightsTableResult) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "columns", i.Columns)
	populate(objectMap, "rows", i.Rows)
	return json.Marshal(objectMap)
}

// MarshalJSON implements the json.Marshaller interface for type InstructionSteps.
func (i InstructionSteps) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "description", i.Description)
	populate(objectMap, "instructions", i.Instructions)
	populate(objectMap, "title", i.Title)
	return json.Marshal(objectMap)
}
// GetDataConnectorsCheckRequirements implements the DataConnectorsCheckRequirementsClassification interface for type IoTCheckRequirements.
// It copies the shared base fields into a new base-type value.
func (i *IoTCheckRequirements) GetDataConnectorsCheckRequirements() *DataConnectorsCheckRequirements {
	return &DataConnectorsCheckRequirements{
		Kind: i.Kind,
	}
}

// MarshalJSON implements the json.Marshaller interface for type IoTCheckRequirements.
func (i IoTCheckRequirements) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	// "kind" is the fixed JSON discriminator for this polymorphic type.
	objectMap["kind"] = DataConnectorKindIOT
	populate(objectMap, "properties", i.Properties)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type IoTCheckRequirements.
func (i *IoTCheckRequirements) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	// Each recognized key is decoded into its field and removed from the raw map.
	for key, val := range rawMsg {
		var err error
		switch key {
		case "kind":
			err = unpopulate(val, &i.Kind)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &i.Properties)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// GetDataConnector implements the DataConnectorClassification interface for type IoTDataConnector.
// It copies the shared base fields into a new base-type value.
func (i *IoTDataConnector) GetDataConnector() *DataConnector {
	return &DataConnector{
		Kind: i.Kind,
		Etag: i.Etag,
		ID: i.ID,
		Name: i.Name,
		Type: i.Type,
		SystemData: i.SystemData,
	}
}

// MarshalJSON implements the json.Marshaller interface for type IoTDataConnector.
func (i IoTDataConnector) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "etag", i.Etag)
	populate(objectMap, "id", i.ID)
	// "kind" is the fixed JSON discriminator for this polymorphic type.
	objectMap["kind"] = DataConnectorKindIOT
	populate(objectMap, "name", i.Name)
	populate(objectMap, "properties", i.Properties)
	populate(objectMap, "systemData", i.SystemData)
	populate(objectMap, "type", i.Type)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type IoTDataConnector.
func (i *IoTDataConnector) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	// Each recognized key is decoded into its field and removed from the raw map.
	for key, val := range rawMsg {
		var err error
		switch key {
		case "etag":
			err = unpopulate(val, &i.Etag)
			delete(rawMsg, key)
		case "id":
			err = unpopulate(val, &i.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &i.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &i.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &i.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &i.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &i.Type)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// GetEntity implements the EntityClassification interface for type IoTDeviceEntity.
// It copies the shared base fields into a new base-type value.
func (i *IoTDeviceEntity) GetEntity() *Entity {
	return &Entity{
		Kind: i.Kind,
		ID: i.ID,
		Name: i.Name,
		Type: i.Type,
		SystemData: i.SystemData,
	}
}

// MarshalJSON implements the json.Marshaller interface for type IoTDeviceEntity.
func (i IoTDeviceEntity) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "id", i.ID)
	// "kind" is the fixed JSON discriminator for this polymorphic type.
	objectMap["kind"] = EntityKindIoTDevice
	populate(objectMap, "name", i.Name)
	populate(objectMap, "properties", i.Properties)
	populate(objectMap, "systemData", i.SystemData)
	populate(objectMap, "type", i.Type)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type IoTDeviceEntity.
func (i *IoTDeviceEntity) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	// Each recognized key is decoded into its field and removed from the raw map.
	for key, val := range rawMsg {
		var err error
		switch key {
		case "id":
			err = unpopulate(val, &i.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &i.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &i.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &i.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &i.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &i.Type)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type IoTDeviceEntityProperties.
// Fields are written into a generic map via the shared populate helper and then
// serialized with encoding/json.
func (i IoTDeviceEntityProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "additionalData", i.AdditionalData)
	populate(objectMap, "deviceId", i.DeviceID)
	populate(objectMap, "deviceName", i.DeviceName)
	populate(objectMap, "deviceType", i.DeviceType)
	populate(objectMap, "edgeId", i.EdgeID)
	populate(objectMap, "firmwareVersion", i.FirmwareVersion)
	populate(objectMap, "friendlyName", i.FriendlyName)
	populate(objectMap, "hostEntityId", i.HostEntityID)
	populate(objectMap, "ipAddressEntityId", i.IPAddressEntityID)
	populate(objectMap, "iotHubEntityId", i.IotHubEntityID)
	populate(objectMap, "iotSecurityAgentId", i.IotSecurityAgentID)
	populate(objectMap, "macAddress", i.MacAddress)
	populate(objectMap, "model", i.Model)
	populate(objectMap, "operatingSystem", i.OperatingSystem)
	populate(objectMap, "protocols", i.Protocols)
	populate(objectMap, "serialNumber", i.SerialNumber)
	populate(objectMap, "source", i.Source)
	populate(objectMap, "threatIntelligence", i.ThreatIntelligence)
	populate(objectMap, "vendor", i.Vendor)
	return json.Marshal(objectMap)
}
// GetDataConnectorsCheckRequirements implements the DataConnectorsCheckRequirementsClassification interface for type MCASCheckRequirements.
// It copies the shared base fields into a new base-type value.
func (m *MCASCheckRequirements) GetDataConnectorsCheckRequirements() *DataConnectorsCheckRequirements {
	return &DataConnectorsCheckRequirements{
		Kind: m.Kind,
	}
}

// MarshalJSON implements the json.Marshaller interface for type MCASCheckRequirements.
func (m MCASCheckRequirements) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	// "kind" is the fixed JSON discriminator for this polymorphic type.
	objectMap["kind"] = DataConnectorKindMicrosoftCloudAppSecurity
	populate(objectMap, "properties", m.Properties)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type MCASCheckRequirements.
func (m *MCASCheckRequirements) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	// Each recognized key is decoded into its field and removed from the raw map.
	for key, val := range rawMsg {
		var err error
		switch key {
		case "kind":
			err = unpopulate(val, &m.Kind)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &m.Properties)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// GetDataConnector implements the DataConnectorClassification interface for type MCASDataConnector.
// It copies the shared base fields into a new base-type value.
func (m *MCASDataConnector) GetDataConnector() *DataConnector {
	return &DataConnector{
		Kind: m.Kind,
		Etag: m.Etag,
		ID: m.ID,
		Name: m.Name,
		Type: m.Type,
		SystemData: m.SystemData,
	}
}

// MarshalJSON implements the json.Marshaller interface for type MCASDataConnector.
func (m MCASDataConnector) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "etag", m.Etag)
	populate(objectMap, "id", m.ID)
	// "kind" is the fixed JSON discriminator for this polymorphic type.
	objectMap["kind"] = DataConnectorKindMicrosoftCloudAppSecurity
	populate(objectMap, "name", m.Name)
	populate(objectMap, "properties", m.Properties)
	populate(objectMap, "systemData", m.SystemData)
	populate(objectMap, "type", m.Type)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type MCASDataConnector.
func (m *MCASDataConnector) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	// Each recognized key is decoded into its field and removed from the raw map.
	for key, val := range rawMsg {
		var err error
		switch key {
		case "etag":
			err = unpopulate(val, &m.Etag)
			delete(rawMsg, key)
		case "id":
			err = unpopulate(val, &m.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &m.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &m.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &m.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &m.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &m.Type)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// GetDataConnectorsCheckRequirements implements the DataConnectorsCheckRequirementsClassification interface for type MDATPCheckRequirements.
// It copies the shared base fields into a new base-type value.
func (m *MDATPCheckRequirements) GetDataConnectorsCheckRequirements() *DataConnectorsCheckRequirements {
	return &DataConnectorsCheckRequirements{
		Kind: m.Kind,
	}
}

// MarshalJSON implements the json.Marshaller interface for type MDATPCheckRequirements.
func (m MDATPCheckRequirements) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	// "kind" is the fixed JSON discriminator for this polymorphic type.
	objectMap["kind"] = DataConnectorKindMicrosoftDefenderAdvancedThreatProtection
	populate(objectMap, "properties", m.Properties)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type MDATPCheckRequirements.
func (m *MDATPCheckRequirements) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	// Each recognized key is decoded into its field and removed from the raw map.
	for key, val := range rawMsg {
		var err error
		switch key {
		case "kind":
			err = unpopulate(val, &m.Kind)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &m.Properties)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// GetDataConnector implements the DataConnectorClassification interface for type MDATPDataConnector.
// It copies the shared base fields into a new base-type value.
func (m *MDATPDataConnector) GetDataConnector() *DataConnector {
	return &DataConnector{
		Kind: m.Kind,
		Etag: m.Etag,
		ID: m.ID,
		Name: m.Name,
		Type: m.Type,
		SystemData: m.SystemData,
	}
}

// MarshalJSON implements the json.Marshaller interface for type MDATPDataConnector.
func (m MDATPDataConnector) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "etag", m.Etag)
	populate(objectMap, "id", m.ID)
	// "kind" is the fixed JSON discriminator for this polymorphic type.
	objectMap["kind"] = DataConnectorKindMicrosoftDefenderAdvancedThreatProtection
	populate(objectMap, "name", m.Name)
	populate(objectMap, "properties", m.Properties)
	populate(objectMap, "systemData", m.SystemData)
	populate(objectMap, "type", m.Type)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type MDATPDataConnector.
func (m *MDATPDataConnector) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	// Each recognized key is decoded into its field and removed from the raw map.
	for key, val := range rawMsg {
		var err error
		switch key {
		case "etag":
			err = unpopulate(val, &m.Etag)
			delete(rawMsg, key)
		case "id":
			err = unpopulate(val, &m.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &m.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &m.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &m.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &m.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &m.Type)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// GetAlertRule implements the AlertRuleClassification interface for type MLBehaviorAnalyticsAlertRule.
// It copies the shared base fields into a new base-type value.
func (m *MLBehaviorAnalyticsAlertRule) GetAlertRule() *AlertRule {
	return &AlertRule{
		Kind: m.Kind,
		Etag: m.Etag,
		ID: m.ID,
		Name: m.Name,
		Type: m.Type,
		SystemData: m.SystemData,
	}
}

// MarshalJSON implements the json.Marshaller interface for type MLBehaviorAnalyticsAlertRule.
func (m MLBehaviorAnalyticsAlertRule) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "etag", m.Etag)
	populate(objectMap, "id", m.ID)
	// "kind" is the fixed JSON discriminator for this polymorphic type.
	objectMap["kind"] = AlertRuleKindMLBehaviorAnalytics
	populate(objectMap, "name", m.Name)
	populate(objectMap, "properties", m.Properties)
	populate(objectMap, "systemData", m.SystemData)
	populate(objectMap, "type", m.Type)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type MLBehaviorAnalyticsAlertRule.
func (m *MLBehaviorAnalyticsAlertRule) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	// Each recognized key is decoded into its field and removed from the raw map.
	for key, val := range rawMsg {
		var err error
		switch key {
		case "etag":
			err = unpopulate(val, &m.Etag)
			delete(rawMsg, key)
		case "id":
			err = unpopulate(val, &m.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &m.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &m.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &m.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &m.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &m.Type)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type MLBehaviorAnalyticsAlertRuleProperties.
// Timestamp fields go through the RFC3339 helper; all others use the shared populate helper.
func (m MLBehaviorAnalyticsAlertRuleProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "alertRuleTemplateName", m.AlertRuleTemplateName)
	populate(objectMap, "description", m.Description)
	populate(objectMap, "displayName", m.DisplayName)
	populate(objectMap, "enabled", m.Enabled)
	populateTimeRFC3339(objectMap, "lastModifiedUtc", m.LastModifiedUTC)
	populate(objectMap, "severity", m.Severity)
	populate(objectMap, "tactics", m.Tactics)
	populate(objectMap, "techniques", m.Techniques)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type MLBehaviorAnalyticsAlertRuleProperties.
func (m *MLBehaviorAnalyticsAlertRuleProperties) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	// Each recognized key is decoded into its field and removed from the raw map.
	for key, val := range rawMsg {
		var err error
		switch key {
		case "alertRuleTemplateName":
			err = unpopulate(val, &m.AlertRuleTemplateName)
			delete(rawMsg, key)
		case "description":
			err = unpopulate(val, &m.Description)
			delete(rawMsg, key)
		case "displayName":
			err = unpopulate(val, &m.DisplayName)
			delete(rawMsg, key)
		case "enabled":
			err = unpopulate(val, &m.Enabled)
			delete(rawMsg, key)
		case "lastModifiedUtc":
			err = unpopulateTimeRFC3339(val, &m.LastModifiedUTC)
			delete(rawMsg, key)
		case "severity":
			err = unpopulate(val, &m.Severity)
			delete(rawMsg, key)
		case "tactics":
			err = unpopulate(val, &m.Tactics)
			delete(rawMsg, key)
		case "techniques":
			err = unpopulate(val, &m.Techniques)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// GetAlertRuleTemplate implements the AlertRuleTemplateClassification interface for type MLBehaviorAnalyticsAlertRuleTemplate.
// It copies the shared base fields into a new base-type value.
func (m *MLBehaviorAnalyticsAlertRuleTemplate) GetAlertRuleTemplate() *AlertRuleTemplate {
	return &AlertRuleTemplate{
		Kind: m.Kind,
		ID: m.ID,
		Name: m.Name,
		Type: m.Type,
		SystemData: m.SystemData,
	}
}

// MarshalJSON implements the json.Marshaller interface for type MLBehaviorAnalyticsAlertRuleTemplate.
func (m MLBehaviorAnalyticsAlertRuleTemplate) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "id", m.ID)
	// "kind" is the fixed JSON discriminator for this polymorphic type.
	objectMap["kind"] = AlertRuleKindMLBehaviorAnalytics
	populate(objectMap, "name", m.Name)
	populate(objectMap, "properties", m.Properties)
	populate(objectMap, "systemData", m.SystemData)
	populate(objectMap, "type", m.Type)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type MLBehaviorAnalyticsAlertRuleTemplate.
func (m *MLBehaviorAnalyticsAlertRuleTemplate) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	// Each recognized key is decoded into its field and removed from the raw map.
	for key, val := range rawMsg {
		var err error
		switch key {
		case "id":
			err = unpopulate(val, &m.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &m.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &m.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &m.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &m.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &m.Type)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type MLBehaviorAnalyticsAlertRuleTemplateProperties.
// Timestamp fields go through the RFC3339 helper; all others use the shared populate helper.
func (m MLBehaviorAnalyticsAlertRuleTemplateProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "alertRulesCreatedByTemplateCount", m.AlertRulesCreatedByTemplateCount)
	populateTimeRFC3339(objectMap, "createdDateUTC", m.CreatedDateUTC)
	populate(objectMap, "description", m.Description)
	populate(objectMap, "displayName", m.DisplayName)
	populateTimeRFC3339(objectMap, "lastUpdatedDateUTC", m.LastUpdatedDateUTC)
	populate(objectMap, "requiredDataConnectors", m.RequiredDataConnectors)
	populate(objectMap, "severity", m.Severity)
	populate(objectMap, "status", m.Status)
	populate(objectMap, "tactics", m.Tactics)
	populate(objectMap, "techniques", m.Techniques)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type MLBehaviorAnalyticsAlertRuleTemplateProperties.
func (m *MLBehaviorAnalyticsAlertRuleTemplateProperties) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	// Each recognized key is decoded into its field and removed from the raw map.
	for key, val := range rawMsg {
		var err error
		switch key {
		case "alertRulesCreatedByTemplateCount":
			err = unpopulate(val, &m.AlertRulesCreatedByTemplateCount)
			delete(rawMsg, key)
		case "createdDateUTC":
			err = unpopulateTimeRFC3339(val, &m.CreatedDateUTC)
			delete(rawMsg, key)
		case "description":
			err = unpopulate(val, &m.Description)
			delete(rawMsg, key)
		case "displayName":
			err = unpopulate(val, &m.DisplayName)
			delete(rawMsg, key)
		case "lastUpdatedDateUTC":
			err = unpopulateTimeRFC3339(val, &m.LastUpdatedDateUTC)
			delete(rawMsg, key)
		case "requiredDataConnectors":
			err = unpopulate(val, &m.RequiredDataConnectors)
			delete(rawMsg, key)
		case "severity":
			err = unpopulate(val, &m.Severity)
			delete(rawMsg, key)
		case "status":
			err = unpopulate(val, &m.Status)
			delete(rawMsg, key)
		case "tactics":
			err = unpopulate(val, &m.Tactics)
			delete(rawMsg, key)
		case "techniques":
			err = unpopulate(val, &m.Techniques)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// GetDataConnectorsCheckRequirements implements the DataConnectorsCheckRequirementsClassification interface for type MSTICheckRequirements.
// It copies the shared base fields into a new base-type value.
func (m *MSTICheckRequirements) GetDataConnectorsCheckRequirements() *DataConnectorsCheckRequirements {
	return &DataConnectorsCheckRequirements{
		Kind: m.Kind,
	}
}

// MarshalJSON implements the json.Marshaller interface for type MSTICheckRequirements.
func (m MSTICheckRequirements) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	// "kind" is the fixed JSON discriminator for this polymorphic type.
	objectMap["kind"] = DataConnectorKindMicrosoftThreatIntelligence
	populate(objectMap, "properties", m.Properties)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type MSTICheckRequirements.
func (m *MSTICheckRequirements) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	// Each recognized key is decoded into its field and removed from the raw map.
	for key, val := range rawMsg {
		var err error
		switch key {
		case "kind":
			err = unpopulate(val, &m.Kind)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &m.Properties)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// GetDataConnector implements the DataConnectorClassification interface for type MSTIDataConnector.
// It copies the shared base fields into a new base-type value.
func (m *MSTIDataConnector) GetDataConnector() *DataConnector {
	return &DataConnector{
		Kind: m.Kind,
		Etag: m.Etag,
		ID: m.ID,
		Name: m.Name,
		Type: m.Type,
		SystemData: m.SystemData,
	}
}

// MarshalJSON implements the json.Marshaller interface for type MSTIDataConnector.
func (m MSTIDataConnector) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "etag", m.Etag)
	populate(objectMap, "id", m.ID)
	// "kind" is the fixed JSON discriminator for this polymorphic type.
	objectMap["kind"] = DataConnectorKindMicrosoftThreatIntelligence
	populate(objectMap, "name", m.Name)
	populate(objectMap, "properties", m.Properties)
	populate(objectMap, "systemData", m.SystemData)
	populate(objectMap, "type", m.Type)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type MSTIDataConnector.
func (m *MSTIDataConnector) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	// Each recognized key is decoded into its field and removed from the raw map.
	for key, val := range rawMsg {
		var err error
		switch key {
		case "etag":
			err = unpopulate(val, &m.Etag)
			delete(rawMsg, key)
		case "id":
			err = unpopulate(val, &m.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &m.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &m.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &m.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &m.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &m.Type)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// GetDataConnector implements the DataConnectorClassification interface for type MTPDataConnector.
// It copies the shared base fields into a new base-type value.
func (m *MTPDataConnector) GetDataConnector() *DataConnector {
	return &DataConnector{
		Kind: m.Kind,
		Etag: m.Etag,
		ID: m.ID,
		Name: m.Name,
		Type: m.Type,
		SystemData: m.SystemData,
	}
}

// MarshalJSON implements the json.Marshaller interface for type MTPDataConnector.
func (m MTPDataConnector) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "etag", m.Etag)
	populate(objectMap, "id", m.ID)
	// "kind" is the fixed JSON discriminator for this polymorphic type.
	objectMap["kind"] = DataConnectorKindMicrosoftThreatProtection
	populate(objectMap, "name", m.Name)
	populate(objectMap, "properties", m.Properties)
	populate(objectMap, "systemData", m.SystemData)
	populate(objectMap, "type", m.Type)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type MTPDataConnector.
func (m *MTPDataConnector) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	// Each recognized key is decoded into its field and removed from the raw map.
	for key, val := range rawMsg {
		var err error
		switch key {
		case "etag":
			err = unpopulate(val, &m.Etag)
			delete(rawMsg, key)
		case "id":
			err = unpopulate(val, &m.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &m.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &m.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &m.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &m.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &m.Type)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// GetEntity implements the EntityClassification interface for type MailClusterEntity.
// It copies the shared base fields into a new base-type value.
func (m *MailClusterEntity) GetEntity() *Entity {
	return &Entity{
		Kind: m.Kind,
		ID: m.ID,
		Name: m.Name,
		Type: m.Type,
		SystemData: m.SystemData,
	}
}

// MarshalJSON implements the json.Marshaller interface for type MailClusterEntity.
func (m MailClusterEntity) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "id", m.ID)
	// "kind" is the fixed JSON discriminator for this polymorphic type.
	objectMap["kind"] = EntityKindMailCluster
	populate(objectMap, "name", m.Name)
	populate(objectMap, "properties", m.Properties)
	populate(objectMap, "systemData", m.SystemData)
	populate(objectMap, "type", m.Type)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type MailClusterEntity.
func (m *MailClusterEntity) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	// Each recognized key is decoded into its field and removed from the raw map.
	for key, val := range rawMsg {
		var err error
		switch key {
		case "id":
			err = unpopulate(val, &m.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &m.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &m.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &m.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &m.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &m.Type)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type MailClusterEntityProperties.
// Timestamp fields go through the RFC3339 helper. The countBy* fields are passed
// by address — presumably because they are interface-typed; verify against the
// populate helper before changing.
func (m MailClusterEntityProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "additionalData", m.AdditionalData)
	populate(objectMap, "clusterGroup", m.ClusterGroup)
	populateTimeRFC3339(objectMap, "clusterQueryEndTime", m.ClusterQueryEndTime)
	populateTimeRFC3339(objectMap, "clusterQueryStartTime", m.ClusterQueryStartTime)
	populate(objectMap, "clusterSourceIdentifier", m.ClusterSourceIdentifier)
	populate(objectMap, "clusterSourceType", m.ClusterSourceType)
	populate(objectMap, "countByDeliveryStatus", &m.CountByDeliveryStatus)
	populate(objectMap, "countByProtectionStatus", &m.CountByProtectionStatus)
	populate(objectMap, "countByThreatType", &m.CountByThreatType)
	populate(objectMap, "friendlyName", m.FriendlyName)
	populate(objectMap, "isVolumeAnomaly", m.IsVolumeAnomaly)
	populate(objectMap, "mailCount", m.MailCount)
	populate(objectMap, "networkMessageIds", m.NetworkMessageIDs)
	populate(objectMap, "query", m.Query)
	populateTimeRFC3339(objectMap, "queryTime", m.QueryTime)
	populate(objectMap, "source", m.Source)
	populate(objectMap, "threats", m.Threats)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type MailClusterEntityProperties.
func (m *MailClusterEntityProperties) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	// Each recognized key is decoded into its field and removed from the raw map.
	for key, val := range rawMsg {
		var err error
		switch key {
		case "additionalData":
			err = unpopulate(val, &m.AdditionalData)
			delete(rawMsg, key)
		case "clusterGroup":
			err = unpopulate(val, &m.ClusterGroup)
			delete(rawMsg, key)
		case "clusterQueryEndTime":
			err = unpopulateTimeRFC3339(val, &m.ClusterQueryEndTime)
			delete(rawMsg, key)
		case "clusterQueryStartTime":
			err = unpopulateTimeRFC3339(val, &m.ClusterQueryStartTime)
			delete(rawMsg, key)
		case "clusterSourceIdentifier":
			err = unpopulate(val, &m.ClusterSourceIdentifier)
			delete(rawMsg, key)
		case "clusterSourceType":
			err = unpopulate(val, &m.ClusterSourceType)
			delete(rawMsg, key)
		case "countByDeliveryStatus":
			err = unpopulate(val, &m.CountByDeliveryStatus)
			delete(rawMsg, key)
		case "countByProtectionStatus":
			err = unpopulate(val, &m.CountByProtectionStatus)
			delete(rawMsg, key)
		case "countByThreatType":
			err = unpopulate(val, &m.CountByThreatType)
			delete(rawMsg, key)
		case "friendlyName":
			err = unpopulate(val, &m.FriendlyName)
			delete(rawMsg, key)
		case "isVolumeAnomaly":
			err = unpopulate(val, &m.IsVolumeAnomaly)
			delete(rawMsg, key)
		case "mailCount":
			err = unpopulate(val, &m.MailCount)
			delete(rawMsg, key)
		case "networkMessageIds":
			err = unpopulate(val, &m.NetworkMessageIDs)
			delete(rawMsg, key)
		case "query":
			err = unpopulate(val, &m.Query)
			delete(rawMsg, key)
		case "queryTime":
			err = unpopulateTimeRFC3339(val, &m.QueryTime)
			delete(rawMsg, key)
		case "source":
			err = unpopulate(val, &m.Source)
			delete(rawMsg, key)
		case "threats":
			err = unpopulate(val, &m.Threats)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// GetEntity implements the EntityClassification interface for type MailMessageEntity.
// It copies the shared base fields into a new base-type value.
func (m *MailMessageEntity) GetEntity() *Entity {
	return &Entity{
		Kind: m.Kind,
		ID: m.ID,
		Name: m.Name,
		Type: m.Type,
		SystemData: m.SystemData,
	}
}

// MarshalJSON implements the json.Marshaller interface for type MailMessageEntity.
func (m MailMessageEntity) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "id", m.ID)
	// "kind" is the fixed JSON discriminator for this polymorphic type.
	objectMap["kind"] = EntityKindMailMessage
	populate(objectMap, "name", m.Name)
	populate(objectMap, "properties", m.Properties)
	populate(objectMap, "systemData", m.SystemData)
	populate(objectMap, "type", m.Type)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type MailMessageEntity.
func (m *MailMessageEntity) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	// Each recognized key is decoded into its field and removed from the raw map.
	for key, val := range rawMsg {
		var err error
		switch key {
		case "id":
			err = unpopulate(val, &m.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &m.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &m.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &m.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &m.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &m.Type)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type MailMessageEntityProperties.
// Timestamp fields go through the RFC3339 helper; all others use the shared populate helper.
func (m MailMessageEntityProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "additionalData", m.AdditionalData)
	populate(objectMap, "antispamDirection", m.AntispamDirection)
	populate(objectMap, "bodyFingerprintBin1", m.BodyFingerprintBin1)
	populate(objectMap, "bodyFingerprintBin2", m.BodyFingerprintBin2)
	populate(objectMap, "bodyFingerprintBin3", m.BodyFingerprintBin3)
	populate(objectMap, "bodyFingerprintBin4", m.BodyFingerprintBin4)
	populate(objectMap, "bodyFingerprintBin5", m.BodyFingerprintBin5)
	populate(objectMap, "deliveryAction", m.DeliveryAction)
	populate(objectMap, "deliveryLocation", m.DeliveryLocation)
	populate(objectMap, "fileEntityIds", m.FileEntityIDs)
	populate(objectMap, "friendlyName", m.FriendlyName)
	populate(objectMap, "internetMessageId", m.InternetMessageID)
	populate(objectMap, "language", m.Language)
	populate(objectMap, "networkMessageId", m.NetworkMessageID)
	populate(objectMap, "p1Sender", m.P1Sender)
	populate(objectMap, "p1SenderDisplayName", m.P1SenderDisplayName)
	populate(objectMap, "p1SenderDomain", m.P1SenderDomain)
	populate(objectMap, "p2Sender", m.P2Sender)
	populate(objectMap, "p2SenderDisplayName", m.P2SenderDisplayName)
	populate(objectMap, "p2SenderDomain", m.P2SenderDomain)
	populateTimeRFC3339(objectMap, "receiveDate", m.ReceiveDate)
	populate(objectMap, "recipient", m.Recipient)
	populate(objectMap, "senderIP", m.SenderIP)
	populate(objectMap, "subject", m.Subject)
	populate(objectMap, "threatDetectionMethods", m.ThreatDetectionMethods)
	populate(objectMap, "threats", m.Threats)
	populate(objectMap, "urls", m.Urls)
	return json.Marshal(objectMap)
}

// UnmarshalJSON implements the json.Unmarshaller interface for type MailMessageEntityProperties.
func (m *MailMessageEntityProperties) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	// Each recognized key is decoded into its field and removed from the raw map.
	for key, val := range rawMsg {
		var err error
		switch key {
		case "additionalData":
			err = unpopulate(val, &m.AdditionalData)
			delete(rawMsg, key)
		case "antispamDirection":
			err = unpopulate(val, &m.AntispamDirection)
			delete(rawMsg, key)
		case "bodyFingerprintBin1":
			err = unpopulate(val, &m.BodyFingerprintBin1)
			delete(rawMsg, key)
		case "bodyFingerprintBin2":
			err = unpopulate(val, &m.BodyFingerprintBin2)
			delete(rawMsg, key)
		case "bodyFingerprintBin3":
			err = unpopulate(val, &m.BodyFingerprintBin3)
			delete(rawMsg, key)
		case "bodyFingerprintBin4":
			err = unpopulate(val, &m.BodyFingerprintBin4)
			delete(rawMsg, key)
		case "bodyFingerprintBin5":
			err = unpopulate(val, &m.BodyFingerprintBin5)
			delete(rawMsg, key)
		case "deliveryAction":
			err = unpopulate(val, &m.DeliveryAction)
			delete(rawMsg, key)
		case "deliveryLocation":
			err = unpopulate(val, &m.DeliveryLocation)
			delete(rawMsg, key)
		case "fileEntityIds":
			err = unpopulate(val, &m.FileEntityIDs)
			delete(rawMsg, key)
		case "friendlyName":
			err = unpopulate(val, &m.FriendlyName)
			delete(rawMsg, key)
		case "internetMessageId":
			err = unpopulate(val, &m.InternetMessageID)
			delete(rawMsg, key)
		case "language":
			err = unpopulate(val, &m.Language)
			delete(rawMsg, key)
		case "networkMessageId":
			err = unpopulate(val, &m.NetworkMessageID)
			delete(rawMsg, key)
		case "p1Sender":
			err = unpopulate(val, &m.P1Sender)
			delete(rawMsg, key)
		case "p1SenderDisplayName":
			err = unpopulate(val, &m.P1SenderDisplayName)
			delete(rawMsg, key)
		case "p1SenderDomain":
			err = unpopulate(val, &m.P1SenderDomain)
			delete(rawMsg, key)
		case "p2Sender":
			err = unpopulate(val, &m.P2Sender)
			delete(rawMsg, key)
		case "p2SenderDisplayName":
			err = unpopulate(val, &m.P2SenderDisplayName)
			delete(rawMsg, key)
		case "p2SenderDomain":
			err = unpopulate(val, &m.P2SenderDomain)
			delete(rawMsg, key)
		case "receiveDate":
			err = unpopulateTimeRFC3339(val, &m.ReceiveDate)
			delete(rawMsg, key)
		case "recipient":
			err = unpopulate(val, &m.Recipient)
			delete(rawMsg, key)
		case "senderIP":
			err = unpopulate(val, &m.SenderIP)
			delete(rawMsg, key)
		case "subject":
			err = unpopulate(val, &m.Subject)
			delete(rawMsg, key)
		case "threatDetectionMethods":
			err = unpopulate(val, &m.ThreatDetectionMethods)
			delete(rawMsg, key)
		case "threats":
			err = unpopulate(val, &m.Threats)
			delete(rawMsg, key)
		case "urls":
			err = unpopulate(val, &m.Urls)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// GetEntity implements the EntityClassification interface for type MailboxEntity,
// exposing the embedded discriminated-entity fields as a base *Entity.
func (m *MailboxEntity) GetEntity() *Entity {
	base := &Entity{}
	base.Kind = m.Kind
	base.ID = m.ID
	base.Name = m.Name
	base.Type = m.Type
	base.SystemData = m.SystemData
	return base
}
// MarshalJSON implements the json.Marshaller interface for type MailboxEntity.
// The "kind" discriminator is always written as EntityKindMailbox; other fields
// go through the package-level populate helper.
func (m MailboxEntity) MarshalJSON() ([]byte, error) {
	out := map[string]interface{}{}
	populate(out, "id", m.ID)
	out["kind"] = EntityKindMailbox
	populate(out, "name", m.Name)
	populate(out, "properties", m.Properties)
	populate(out, "systemData", m.SystemData)
	populate(out, "type", m.Type)
	return json.Marshal(out)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type MailboxEntity.
// Recognized keys are decoded into the matching field via the package-level
// unpopulate helper; keys not listed fall through the switch and are ignored.
// Deleting the current key while ranging over rawMsg is well-defined in Go.
func (m *MailboxEntity) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "id":
			err = unpopulate(val, &m.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &m.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &m.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &m.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &m.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &m.Type)
			delete(rawMsg, key)
		}
		// Fail fast on the first field that cannot be decoded.
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type MailboxEntityProperties.
func (m MailboxEntityProperties) MarshalJSON() ([]byte, error) {
	out := map[string]interface{}{}
	populate(out, "additionalData", m.AdditionalData)
	populate(out, "displayName", m.DisplayName)
	populate(out, "externalDirectoryObjectId", m.ExternalDirectoryObjectID)
	populate(out, "friendlyName", m.FriendlyName)
	populate(out, "mailboxPrimaryAddress", m.MailboxPrimaryAddress)
	populate(out, "upn", m.Upn)
	return json.Marshal(out)
}
// GetEntity implements the EntityClassification interface for type MalwareEntity,
// exposing the embedded discriminated-entity fields as a base *Entity.
func (m *MalwareEntity) GetEntity() *Entity {
	base := &Entity{}
	base.Kind = m.Kind
	base.ID = m.ID
	base.Name = m.Name
	base.Type = m.Type
	base.SystemData = m.SystemData
	return base
}
// MarshalJSON implements the json.Marshaller interface for type MalwareEntity.
// The "kind" discriminator is always written as EntityKindMalware.
func (m MalwareEntity) MarshalJSON() ([]byte, error) {
	out := map[string]interface{}{}
	populate(out, "id", m.ID)
	out["kind"] = EntityKindMalware
	populate(out, "name", m.Name)
	populate(out, "properties", m.Properties)
	populate(out, "systemData", m.SystemData)
	populate(out, "type", m.Type)
	return json.Marshal(out)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type MalwareEntity.
// Recognized keys are decoded via the package-level unpopulate helper; unmatched
// keys are ignored. Deleting while ranging over rawMsg is well-defined in Go.
func (m *MalwareEntity) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "id":
			err = unpopulate(val, &m.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &m.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &m.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &m.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &m.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &m.Type)
			delete(rawMsg, key)
		}
		// Fail fast on the first field that cannot be decoded.
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type MalwareEntityProperties.
func (m MalwareEntityProperties) MarshalJSON() ([]byte, error) {
	out := map[string]interface{}{}
	populate(out, "additionalData", m.AdditionalData)
	populate(out, "category", m.Category)
	populate(out, "fileEntityIds", m.FileEntityIDs)
	populate(out, "friendlyName", m.FriendlyName)
	populate(out, "malwareName", m.MalwareName)
	populate(out, "processEntityIds", m.ProcessEntityIDs)
	return json.Marshal(out)
}
// MarshalJSON implements the json.Marshaller interface for type MetadataCategories.
func (m MetadataCategories) MarshalJSON() ([]byte, error) {
	out := map[string]interface{}{}
	populate(out, "domains", m.Domains)
	populate(out, "verticals", m.Verticals)
	return json.Marshal(out)
}
// MarshalJSON implements the json.Marshaller interface for type MetadataDependencies.
func (m MetadataDependencies) MarshalJSON() ([]byte, error) {
	out := map[string]interface{}{}
	populate(out, "contentId", m.ContentID)
	populate(out, "criteria", m.Criteria)
	populate(out, "kind", m.Kind)
	populate(out, "name", m.Name)
	populate(out, "operator", m.Operator)
	populate(out, "version", m.Version)
	return json.Marshal(out)
}
// MarshalJSON implements the json.Marshaller interface for type MetadataList.
func (m MetadataList) MarshalJSON() ([]byte, error) {
	out := map[string]interface{}{}
	populate(out, "nextLink", m.NextLink)
	populate(out, "value", m.Value)
	return json.Marshal(out)
}
// MarshalJSON implements the json.Marshaller interface for type MetadataPatch.
func (m MetadataPatch) MarshalJSON() ([]byte, error) {
	out := map[string]interface{}{}
	populate(out, "etag", m.Etag)
	populate(out, "id", m.ID)
	populate(out, "name", m.Name)
	populate(out, "properties", m.Properties)
	populate(out, "systemData", m.SystemData)
	populate(out, "type", m.Type)
	return json.Marshal(out)
}
// MarshalJSON implements the json.Marshaller interface for type MetadataProperties.
// Date-only fields go through populateDateType; everything else through populate.
func (m MetadataProperties) MarshalJSON() ([]byte, error) {
	out := map[string]interface{}{}
	populate(out, "author", m.Author)
	populate(out, "categories", m.Categories)
	populate(out, "contentId", m.ContentID)
	populate(out, "contentSchemaVersion", m.ContentSchemaVersion)
	populate(out, "customVersion", m.CustomVersion)
	populate(out, "dependencies", m.Dependencies)
	populateDateType(out, "firstPublishDate", m.FirstPublishDate)
	populate(out, "icon", m.Icon)
	populate(out, "kind", m.Kind)
	populateDateType(out, "lastPublishDate", m.LastPublishDate)
	populate(out, "parentId", m.ParentID)
	populate(out, "previewImages", m.PreviewImages)
	populate(out, "previewImagesDark", m.PreviewImagesDark)
	populate(out, "providers", m.Providers)
	populate(out, "source", m.Source)
	populate(out, "support", m.Support)
	populate(out, "threatAnalysisTactics", m.ThreatAnalysisTactics)
	populate(out, "threatAnalysisTechniques", m.ThreatAnalysisTechniques)
	populate(out, "version", m.Version)
	return json.Marshal(out)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type MetadataProperties.
// Date-only fields are decoded with unpopulateDateType; all other recognized keys
// with unpopulate. Unmatched keys fall through the switch and are ignored.
// Deleting the current key while ranging over rawMsg is well-defined in Go.
func (m *MetadataProperties) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "author":
			err = unpopulate(val, &m.Author)
			delete(rawMsg, key)
		case "categories":
			err = unpopulate(val, &m.Categories)
			delete(rawMsg, key)
		case "contentId":
			err = unpopulate(val, &m.ContentID)
			delete(rawMsg, key)
		case "contentSchemaVersion":
			err = unpopulate(val, &m.ContentSchemaVersion)
			delete(rawMsg, key)
		case "customVersion":
			err = unpopulate(val, &m.CustomVersion)
			delete(rawMsg, key)
		case "dependencies":
			err = unpopulate(val, &m.Dependencies)
			delete(rawMsg, key)
		case "firstPublishDate":
			err = unpopulateDateType(val, &m.FirstPublishDate)
			delete(rawMsg, key)
		case "icon":
			err = unpopulate(val, &m.Icon)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &m.Kind)
			delete(rawMsg, key)
		case "lastPublishDate":
			err = unpopulateDateType(val, &m.LastPublishDate)
			delete(rawMsg, key)
		case "parentId":
			err = unpopulate(val, &m.ParentID)
			delete(rawMsg, key)
		case "previewImages":
			err = unpopulate(val, &m.PreviewImages)
			delete(rawMsg, key)
		case "previewImagesDark":
			err = unpopulate(val, &m.PreviewImagesDark)
			delete(rawMsg, key)
		case "providers":
			err = unpopulate(val, &m.Providers)
			delete(rawMsg, key)
		case "source":
			err = unpopulate(val, &m.Source)
			delete(rawMsg, key)
		case "support":
			err = unpopulate(val, &m.Support)
			delete(rawMsg, key)
		case "threatAnalysisTactics":
			err = unpopulate(val, &m.ThreatAnalysisTactics)
			delete(rawMsg, key)
		case "threatAnalysisTechniques":
			err = unpopulate(val, &m.ThreatAnalysisTechniques)
			delete(rawMsg, key)
		case "version":
			err = unpopulate(val, &m.Version)
			delete(rawMsg, key)
		}
		// Fail fast on the first field that cannot be decoded.
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type MetadataPropertiesPatch.
// Date-only fields go through populateDateType; everything else through populate.
func (m MetadataPropertiesPatch) MarshalJSON() ([]byte, error) {
	out := map[string]interface{}{}
	populate(out, "author", m.Author)
	populate(out, "categories", m.Categories)
	populate(out, "contentId", m.ContentID)
	populate(out, "contentSchemaVersion", m.ContentSchemaVersion)
	populate(out, "customVersion", m.CustomVersion)
	populate(out, "dependencies", m.Dependencies)
	populateDateType(out, "firstPublishDate", m.FirstPublishDate)
	populate(out, "icon", m.Icon)
	populate(out, "kind", m.Kind)
	populateDateType(out, "lastPublishDate", m.LastPublishDate)
	populate(out, "parentId", m.ParentID)
	populate(out, "previewImages", m.PreviewImages)
	populate(out, "previewImagesDark", m.PreviewImagesDark)
	populate(out, "providers", m.Providers)
	populate(out, "source", m.Source)
	populate(out, "support", m.Support)
	populate(out, "threatAnalysisTactics", m.ThreatAnalysisTactics)
	populate(out, "threatAnalysisTechniques", m.ThreatAnalysisTechniques)
	populate(out, "version", m.Version)
	return json.Marshal(out)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type MetadataPropertiesPatch.
// Mirrors MetadataProperties.UnmarshalJSON: date-only fields are decoded with
// unpopulateDateType, other recognized keys with unpopulate, and unmatched keys
// are ignored. Deleting while ranging over rawMsg is well-defined in Go.
func (m *MetadataPropertiesPatch) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "author":
			err = unpopulate(val, &m.Author)
			delete(rawMsg, key)
		case "categories":
			err = unpopulate(val, &m.Categories)
			delete(rawMsg, key)
		case "contentId":
			err = unpopulate(val, &m.ContentID)
			delete(rawMsg, key)
		case "contentSchemaVersion":
			err = unpopulate(val, &m.ContentSchemaVersion)
			delete(rawMsg, key)
		case "customVersion":
			err = unpopulate(val, &m.CustomVersion)
			delete(rawMsg, key)
		case "dependencies":
			err = unpopulate(val, &m.Dependencies)
			delete(rawMsg, key)
		case "firstPublishDate":
			err = unpopulateDateType(val, &m.FirstPublishDate)
			delete(rawMsg, key)
		case "icon":
			err = unpopulate(val, &m.Icon)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &m.Kind)
			delete(rawMsg, key)
		case "lastPublishDate":
			err = unpopulateDateType(val, &m.LastPublishDate)
			delete(rawMsg, key)
		case "parentId":
			err = unpopulate(val, &m.ParentID)
			delete(rawMsg, key)
		case "previewImages":
			err = unpopulate(val, &m.PreviewImages)
			delete(rawMsg, key)
		case "previewImagesDark":
			err = unpopulate(val, &m.PreviewImagesDark)
			delete(rawMsg, key)
		case "providers":
			err = unpopulate(val, &m.Providers)
			delete(rawMsg, key)
		case "source":
			err = unpopulate(val, &m.Source)
			delete(rawMsg, key)
		case "support":
			err = unpopulate(val, &m.Support)
			delete(rawMsg, key)
		case "threatAnalysisTactics":
			err = unpopulate(val, &m.ThreatAnalysisTactics)
			delete(rawMsg, key)
		case "threatAnalysisTechniques":
			err = unpopulate(val, &m.ThreatAnalysisTechniques)
			delete(rawMsg, key)
		case "version":
			err = unpopulate(val, &m.Version)
			delete(rawMsg, key)
		}
		// Fail fast on the first field that cannot be decoded.
		if err != nil {
			return err
		}
	}
	return nil
}
// GetAlertRule implements the AlertRuleClassification interface for type
// MicrosoftSecurityIncidentCreationAlertRule.
func (m *MicrosoftSecurityIncidentCreationAlertRule) GetAlertRule() *AlertRule {
	base := &AlertRule{}
	base.Kind = m.Kind
	base.Etag = m.Etag
	base.ID = m.ID
	base.Name = m.Name
	base.Type = m.Type
	base.SystemData = m.SystemData
	return base
}
// MarshalJSON implements the json.Marshaller interface for type
// MicrosoftSecurityIncidentCreationAlertRule. The "kind" discriminator is always
// written as AlertRuleKindMicrosoftSecurityIncidentCreation.
func (m MicrosoftSecurityIncidentCreationAlertRule) MarshalJSON() ([]byte, error) {
	out := map[string]interface{}{}
	populate(out, "etag", m.Etag)
	populate(out, "id", m.ID)
	out["kind"] = AlertRuleKindMicrosoftSecurityIncidentCreation
	populate(out, "name", m.Name)
	populate(out, "properties", m.Properties)
	populate(out, "systemData", m.SystemData)
	populate(out, "type", m.Type)
	return json.Marshal(out)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type
// MicrosoftSecurityIncidentCreationAlertRule. Recognized keys are decoded via
// unpopulate; unmatched keys are ignored. Deleting while ranging over rawMsg is
// well-defined in Go.
func (m *MicrosoftSecurityIncidentCreationAlertRule) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "etag":
			err = unpopulate(val, &m.Etag)
			delete(rawMsg, key)
		case "id":
			err = unpopulate(val, &m.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &m.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &m.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &m.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &m.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &m.Type)
			delete(rawMsg, key)
		}
		// Fail fast on the first field that cannot be decoded.
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type
// MicrosoftSecurityIncidentCreationAlertRuleCommonProperties.
func (m MicrosoftSecurityIncidentCreationAlertRuleCommonProperties) MarshalJSON() ([]byte, error) {
	out := map[string]interface{}{}
	populate(out, "displayNamesExcludeFilter", m.DisplayNamesExcludeFilter)
	populate(out, "displayNamesFilter", m.DisplayNamesFilter)
	populate(out, "productFilter", m.ProductFilter)
	populate(out, "severitiesFilter", m.SeveritiesFilter)
	return json.Marshal(out)
}
// MarshalJSON implements the json.Marshaller interface for type
// MicrosoftSecurityIncidentCreationAlertRuleProperties. The timestamp field is
// serialized in RFC 3339 form via populateTimeRFC3339.
func (m MicrosoftSecurityIncidentCreationAlertRuleProperties) MarshalJSON() ([]byte, error) {
	out := map[string]interface{}{}
	populate(out, "alertRuleTemplateName", m.AlertRuleTemplateName)
	populate(out, "description", m.Description)
	populate(out, "displayName", m.DisplayName)
	populate(out, "displayNamesExcludeFilter", m.DisplayNamesExcludeFilter)
	populate(out, "displayNamesFilter", m.DisplayNamesFilter)
	populate(out, "enabled", m.Enabled)
	populateTimeRFC3339(out, "lastModifiedUtc", m.LastModifiedUTC)
	populate(out, "productFilter", m.ProductFilter)
	populate(out, "severitiesFilter", m.SeveritiesFilter)
	return json.Marshal(out)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type
// MicrosoftSecurityIncidentCreationAlertRuleProperties. The RFC 3339 timestamp is
// decoded with unpopulateTimeRFC3339; other recognized keys with unpopulate.
// Unmatched keys are ignored; deleting while ranging is well-defined in Go.
func (m *MicrosoftSecurityIncidentCreationAlertRuleProperties) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "alertRuleTemplateName":
			err = unpopulate(val, &m.AlertRuleTemplateName)
			delete(rawMsg, key)
		case "description":
			err = unpopulate(val, &m.Description)
			delete(rawMsg, key)
		case "displayName":
			err = unpopulate(val, &m.DisplayName)
			delete(rawMsg, key)
		case "displayNamesExcludeFilter":
			err = unpopulate(val, &m.DisplayNamesExcludeFilter)
			delete(rawMsg, key)
		case "displayNamesFilter":
			err = unpopulate(val, &m.DisplayNamesFilter)
			delete(rawMsg, key)
		case "enabled":
			err = unpopulate(val, &m.Enabled)
			delete(rawMsg, key)
		case "lastModifiedUtc":
			err = unpopulateTimeRFC3339(val, &m.LastModifiedUTC)
			delete(rawMsg, key)
		case "productFilter":
			err = unpopulate(val, &m.ProductFilter)
			delete(rawMsg, key)
		case "severitiesFilter":
			err = unpopulate(val, &m.SeveritiesFilter)
			delete(rawMsg, key)
		}
		// Fail fast on the first field that cannot be decoded.
		if err != nil {
			return err
		}
	}
	return nil
}
// GetAlertRuleTemplate implements the AlertRuleTemplateClassification interface
// for type MicrosoftSecurityIncidentCreationAlertRuleTemplate.
func (m *MicrosoftSecurityIncidentCreationAlertRuleTemplate) GetAlertRuleTemplate() *AlertRuleTemplate {
	base := &AlertRuleTemplate{}
	base.Kind = m.Kind
	base.ID = m.ID
	base.Name = m.Name
	base.Type = m.Type
	base.SystemData = m.SystemData
	return base
}
// MarshalJSON implements the json.Marshaller interface for type
// MicrosoftSecurityIncidentCreationAlertRuleTemplate. The "kind" discriminator is
// always written as AlertRuleKindMicrosoftSecurityIncidentCreation.
func (m MicrosoftSecurityIncidentCreationAlertRuleTemplate) MarshalJSON() ([]byte, error) {
	out := map[string]interface{}{}
	populate(out, "id", m.ID)
	out["kind"] = AlertRuleKindMicrosoftSecurityIncidentCreation
	populate(out, "name", m.Name)
	populate(out, "properties", m.Properties)
	populate(out, "systemData", m.SystemData)
	populate(out, "type", m.Type)
	return json.Marshal(out)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type
// MicrosoftSecurityIncidentCreationAlertRuleTemplate. Recognized keys are decoded
// via unpopulate; unmatched keys are ignored. Deleting while ranging over rawMsg
// is well-defined in Go.
func (m *MicrosoftSecurityIncidentCreationAlertRuleTemplate) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "id":
			err = unpopulate(val, &m.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &m.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &m.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &m.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &m.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &m.Type)
			delete(rawMsg, key)
		}
		// Fail fast on the first field that cannot be decoded.
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type
// MicrosoftSecurityIncidentCreationAlertRuleTemplateProperties. Timestamps are
// serialized in RFC 3339 form via populateTimeRFC3339.
func (m MicrosoftSecurityIncidentCreationAlertRuleTemplateProperties) MarshalJSON() ([]byte, error) {
	out := map[string]interface{}{}
	populate(out, "alertRulesCreatedByTemplateCount", m.AlertRulesCreatedByTemplateCount)
	populateTimeRFC3339(out, "createdDateUTC", m.CreatedDateUTC)
	populate(out, "description", m.Description)
	populate(out, "displayName", m.DisplayName)
	populate(out, "displayNamesExcludeFilter", m.DisplayNamesExcludeFilter)
	populate(out, "displayNamesFilter", m.DisplayNamesFilter)
	populateTimeRFC3339(out, "lastUpdatedDateUTC", m.LastUpdatedDateUTC)
	populate(out, "productFilter", m.ProductFilter)
	populate(out, "requiredDataConnectors", m.RequiredDataConnectors)
	populate(out, "severitiesFilter", m.SeveritiesFilter)
	populate(out, "status", m.Status)
	return json.Marshal(out)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type
// MicrosoftSecurityIncidentCreationAlertRuleTemplateProperties. RFC 3339
// timestamps are decoded with unpopulateTimeRFC3339; other recognized keys with
// unpopulate. Unmatched keys are ignored; deleting while ranging is well-defined.
func (m *MicrosoftSecurityIncidentCreationAlertRuleTemplateProperties) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "alertRulesCreatedByTemplateCount":
			err = unpopulate(val, &m.AlertRulesCreatedByTemplateCount)
			delete(rawMsg, key)
		case "createdDateUTC":
			err = unpopulateTimeRFC3339(val, &m.CreatedDateUTC)
			delete(rawMsg, key)
		case "description":
			err = unpopulate(val, &m.Description)
			delete(rawMsg, key)
		case "displayName":
			err = unpopulate(val, &m.DisplayName)
			delete(rawMsg, key)
		case "displayNamesExcludeFilter":
			err = unpopulate(val, &m.DisplayNamesExcludeFilter)
			delete(rawMsg, key)
		case "displayNamesFilter":
			err = unpopulate(val, &m.DisplayNamesFilter)
			delete(rawMsg, key)
		case "lastUpdatedDateUTC":
			err = unpopulateTimeRFC3339(val, &m.LastUpdatedDateUTC)
			delete(rawMsg, key)
		case "productFilter":
			err = unpopulate(val, &m.ProductFilter)
			delete(rawMsg, key)
		case "requiredDataConnectors":
			err = unpopulate(val, &m.RequiredDataConnectors)
			delete(rawMsg, key)
		case "severitiesFilter":
			err = unpopulate(val, &m.SeveritiesFilter)
			delete(rawMsg, key)
		case "status":
			err = unpopulate(val, &m.Status)
			delete(rawMsg, key)
		}
		// Fail fast on the first field that cannot be decoded.
		if err != nil {
			return err
		}
	}
	return nil
}
// GetDataConnectorsCheckRequirements implements the
// DataConnectorsCheckRequirementsClassification interface for type MtpCheckRequirements.
func (m *MtpCheckRequirements) GetDataConnectorsCheckRequirements() *DataConnectorsCheckRequirements {
	base := &DataConnectorsCheckRequirements{}
	base.Kind = m.Kind
	return base
}
// MarshalJSON implements the json.Marshaller interface for type MtpCheckRequirements.
// The "kind" discriminator is always written as DataConnectorKindMicrosoftThreatProtection.
func (m MtpCheckRequirements) MarshalJSON() ([]byte, error) {
	out := map[string]interface{}{}
	out["kind"] = DataConnectorKindMicrosoftThreatProtection
	populate(out, "properties", m.Properties)
	return json.Marshal(out)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type MtpCheckRequirements.
// Recognized keys are decoded via unpopulate; unmatched keys are ignored.
// Deleting while ranging over rawMsg is well-defined in Go.
func (m *MtpCheckRequirements) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "kind":
			err = unpopulate(val, &m.Kind)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &m.Properties)
			delete(rawMsg, key)
		}
		// Fail fast on the first field that cannot be decoded.
		if err != nil {
			return err
		}
	}
	return nil
}
// GetAlertRule implements the AlertRuleClassification interface for type NrtAlertRule.
func (n *NrtAlertRule) GetAlertRule() *AlertRule {
	base := &AlertRule{}
	base.Kind = n.Kind
	base.Etag = n.Etag
	base.ID = n.ID
	base.Name = n.Name
	base.Type = n.Type
	base.SystemData = n.SystemData
	return base
}
// MarshalJSON implements the json.Marshaller interface for type NrtAlertRule.
// The "kind" discriminator is always written as AlertRuleKindNRT.
func (n NrtAlertRule) MarshalJSON() ([]byte, error) {
	out := map[string]interface{}{}
	populate(out, "etag", n.Etag)
	populate(out, "id", n.ID)
	out["kind"] = AlertRuleKindNRT
	populate(out, "name", n.Name)
	populate(out, "properties", n.Properties)
	populate(out, "systemData", n.SystemData)
	populate(out, "type", n.Type)
	return json.Marshal(out)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type NrtAlertRule.
// Recognized keys are decoded via unpopulate; unmatched keys are ignored.
// Deleting while ranging over rawMsg is well-defined in Go.
func (n *NrtAlertRule) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "etag":
			err = unpopulate(val, &n.Etag)
			delete(rawMsg, key)
		case "id":
			err = unpopulate(val, &n.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &n.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &n.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &n.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &n.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &n.Type)
			delete(rawMsg, key)
		}
		// Fail fast on the first field that cannot be decoded.
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type NrtAlertRuleProperties.
// The timestamp field is serialized in RFC 3339 form via populateTimeRFC3339.
func (n NrtAlertRuleProperties) MarshalJSON() ([]byte, error) {
	out := map[string]interface{}{}
	populate(out, "alertDetailsOverride", n.AlertDetailsOverride)
	populate(out, "alertRuleTemplateName", n.AlertRuleTemplateName)
	populate(out, "customDetails", n.CustomDetails)
	populate(out, "description", n.Description)
	populate(out, "displayName", n.DisplayName)
	populate(out, "enabled", n.Enabled)
	populate(out, "entityMappings", n.EntityMappings)
	populate(out, "incidentConfiguration", n.IncidentConfiguration)
	populateTimeRFC3339(out, "lastModifiedUtc", n.LastModifiedUTC)
	populate(out, "query", n.Query)
	populate(out, "severity", n.Severity)
	populate(out, "suppressionDuration", n.SuppressionDuration)
	populate(out, "suppressionEnabled", n.SuppressionEnabled)
	populate(out, "tactics", n.Tactics)
	populate(out, "techniques", n.Techniques)
	populate(out, "templateVersion", n.TemplateVersion)
	return json.Marshal(out)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type NrtAlertRuleProperties.
// The RFC 3339 timestamp is decoded with unpopulateTimeRFC3339; other recognized
// keys with unpopulate. Unmatched keys are ignored; deleting while ranging over
// rawMsg is well-defined in Go.
func (n *NrtAlertRuleProperties) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "alertDetailsOverride":
			err = unpopulate(val, &n.AlertDetailsOverride)
			delete(rawMsg, key)
		case "alertRuleTemplateName":
			err = unpopulate(val, &n.AlertRuleTemplateName)
			delete(rawMsg, key)
		case "customDetails":
			err = unpopulate(val, &n.CustomDetails)
			delete(rawMsg, key)
		case "description":
			err = unpopulate(val, &n.Description)
			delete(rawMsg, key)
		case "displayName":
			err = unpopulate(val, &n.DisplayName)
			delete(rawMsg, key)
		case "enabled":
			err = unpopulate(val, &n.Enabled)
			delete(rawMsg, key)
		case "entityMappings":
			err = unpopulate(val, &n.EntityMappings)
			delete(rawMsg, key)
		case "incidentConfiguration":
			err = unpopulate(val, &n.IncidentConfiguration)
			delete(rawMsg, key)
		case "lastModifiedUtc":
			err = unpopulateTimeRFC3339(val, &n.LastModifiedUTC)
			delete(rawMsg, key)
		case "query":
			err = unpopulate(val, &n.Query)
			delete(rawMsg, key)
		case "severity":
			err = unpopulate(val, &n.Severity)
			delete(rawMsg, key)
		case "suppressionDuration":
			err = unpopulate(val, &n.SuppressionDuration)
			delete(rawMsg, key)
		case "suppressionEnabled":
			err = unpopulate(val, &n.SuppressionEnabled)
			delete(rawMsg, key)
		case "tactics":
			err = unpopulate(val, &n.Tactics)
			delete(rawMsg, key)
		case "techniques":
			err = unpopulate(val, &n.Techniques)
			delete(rawMsg, key)
		case "templateVersion":
			err = unpopulate(val, &n.TemplateVersion)
			delete(rawMsg, key)
		}
		// Fail fast on the first field that cannot be decoded.
		if err != nil {
			return err
		}
	}
	return nil
}
// GetAlertRuleTemplate implements the AlertRuleTemplateClassification interface
// for type NrtAlertRuleTemplate.
func (n *NrtAlertRuleTemplate) GetAlertRuleTemplate() *AlertRuleTemplate {
	base := &AlertRuleTemplate{}
	base.Kind = n.Kind
	base.ID = n.ID
	base.Name = n.Name
	base.Type = n.Type
	base.SystemData = n.SystemData
	return base
}
// MarshalJSON implements the json.Marshaller interface for type NrtAlertRuleTemplate.
// The "kind" discriminator is always written as AlertRuleKindNRT.
func (n NrtAlertRuleTemplate) MarshalJSON() ([]byte, error) {
	out := map[string]interface{}{}
	populate(out, "id", n.ID)
	out["kind"] = AlertRuleKindNRT
	populate(out, "name", n.Name)
	populate(out, "properties", n.Properties)
	populate(out, "systemData", n.SystemData)
	populate(out, "type", n.Type)
	return json.Marshal(out)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type NrtAlertRuleTemplate.
// Recognized keys are decoded via unpopulate; unmatched keys are ignored.
// Deleting while ranging over rawMsg is well-defined in Go.
func (n *NrtAlertRuleTemplate) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "id":
			err = unpopulate(val, &n.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &n.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &n.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &n.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &n.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &n.Type)
			delete(rawMsg, key)
		}
		// Fail fast on the first field that cannot be decoded.
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type NrtAlertRuleTemplateProperties.
// Timestamps are serialized in RFC 3339 form via populateTimeRFC3339.
func (n NrtAlertRuleTemplateProperties) MarshalJSON() ([]byte, error) {
	out := map[string]interface{}{}
	populate(out, "alertDetailsOverride", n.AlertDetailsOverride)
	populate(out, "alertRulesCreatedByTemplateCount", n.AlertRulesCreatedByTemplateCount)
	populateTimeRFC3339(out, "createdDateUTC", n.CreatedDateUTC)
	populate(out, "customDetails", n.CustomDetails)
	populate(out, "description", n.Description)
	populate(out, "displayName", n.DisplayName)
	populate(out, "entityMappings", n.EntityMappings)
	populateTimeRFC3339(out, "lastUpdatedDateUTC", n.LastUpdatedDateUTC)
	populate(out, "query", n.Query)
	populate(out, "requiredDataConnectors", n.RequiredDataConnectors)
	populate(out, "severity", n.Severity)
	populate(out, "status", n.Status)
	populate(out, "tactics", n.Tactics)
	populate(out, "techniques", n.Techniques)
	populate(out, "version", n.Version)
	return json.Marshal(out)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type NrtAlertRuleTemplateProperties.
// RFC 3339 timestamps are decoded with unpopulateTimeRFC3339; other recognized
// keys with unpopulate. Unmatched keys are ignored; deleting while ranging over
// rawMsg is well-defined in Go.
func (n *NrtAlertRuleTemplateProperties) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "alertDetailsOverride":
			err = unpopulate(val, &n.AlertDetailsOverride)
			delete(rawMsg, key)
		case "alertRulesCreatedByTemplateCount":
			err = unpopulate(val, &n.AlertRulesCreatedByTemplateCount)
			delete(rawMsg, key)
		case "createdDateUTC":
			err = unpopulateTimeRFC3339(val, &n.CreatedDateUTC)
			delete(rawMsg, key)
		case "customDetails":
			err = unpopulate(val, &n.CustomDetails)
			delete(rawMsg, key)
		case "description":
			err = unpopulate(val, &n.Description)
			delete(rawMsg, key)
		case "displayName":
			err = unpopulate(val, &n.DisplayName)
			delete(rawMsg, key)
		case "entityMappings":
			err = unpopulate(val, &n.EntityMappings)
			delete(rawMsg, key)
		case "lastUpdatedDateUTC":
			err = unpopulateTimeRFC3339(val, &n.LastUpdatedDateUTC)
			delete(rawMsg, key)
		case "query":
			err = unpopulate(val, &n.Query)
			delete(rawMsg, key)
		case "requiredDataConnectors":
			err = unpopulate(val, &n.RequiredDataConnectors)
			delete(rawMsg, key)
		case "severity":
			err = unpopulate(val, &n.Severity)
			delete(rawMsg, key)
		case "status":
			err = unpopulate(val, &n.Status)
			delete(rawMsg, key)
		case "tactics":
			err = unpopulate(val, &n.Tactics)
			delete(rawMsg, key)
		case "techniques":
			err = unpopulate(val, &n.Techniques)
			delete(rawMsg, key)
		case "version":
			err = unpopulate(val, &n.Version)
			delete(rawMsg, key)
		}
		// Fail fast on the first field that cannot be decoded.
		if err != nil {
			return err
		}
	}
	return nil
}
// GetDataConnectorsCheckRequirements implements the
// DataConnectorsCheckRequirementsClassification interface for type
// Office365ProjectCheckRequirements.
func (o *Office365ProjectCheckRequirements) GetDataConnectorsCheckRequirements() *DataConnectorsCheckRequirements {
	base := &DataConnectorsCheckRequirements{}
	base.Kind = o.Kind
	return base
}
// MarshalJSON implements the json.Marshaller interface for type Office365ProjectCheckRequirements.
// The "kind" discriminator is always written as DataConnectorKindOffice365Project.
func (o Office365ProjectCheckRequirements) MarshalJSON() ([]byte, error) {
	out := map[string]interface{}{}
	out["kind"] = DataConnectorKindOffice365Project
	populate(out, "properties", o.Properties)
	return json.Marshal(out)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type Office365ProjectCheckRequirements.
// Recognized keys are decoded via unpopulate; unmatched keys are ignored.
// Deleting while ranging over rawMsg is well-defined in Go.
func (o *Office365ProjectCheckRequirements) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "kind":
			err = unpopulate(val, &o.Kind)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &o.Properties)
			delete(rawMsg, key)
		}
		// Fail fast on the first field that cannot be decoded.
		if err != nil {
			return err
		}
	}
	return nil
}
// GetDataConnector implements the DataConnectorClassification interface for type
// Office365ProjectDataConnector.
func (o *Office365ProjectDataConnector) GetDataConnector() *DataConnector {
	base := &DataConnector{}
	base.Kind = o.Kind
	base.Etag = o.Etag
	base.ID = o.ID
	base.Name = o.Name
	base.Type = o.Type
	base.SystemData = o.SystemData
	return base
}
// MarshalJSON implements the json.Marshaller interface for type Office365ProjectDataConnector.
// The "kind" discriminator is always written as DataConnectorKindOffice365Project.
func (o Office365ProjectDataConnector) MarshalJSON() ([]byte, error) {
	out := map[string]interface{}{}
	populate(out, "etag", o.Etag)
	populate(out, "id", o.ID)
	out["kind"] = DataConnectorKindOffice365Project
	populate(out, "name", o.Name)
	populate(out, "properties", o.Properties)
	populate(out, "systemData", o.SystemData)
	populate(out, "type", o.Type)
	return json.Marshal(out)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type Office365ProjectDataConnector.
func (o *Office365ProjectDataConnector) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
for key, val := range rawMsg {
var err error
switch key {
case "etag":
err = unpopulate(val, &o.Etag)
delete(rawMsg, key)
case "id":
err = unpopulate(val, &o.ID)
delete(rawMsg, key)
case "kind":
err = unpopulate(val, &o.Kind)
delete(rawMsg, key)
case "name":
err = unpopulate(val, &o.Name)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &o.Properties)
delete(rawMsg, key)
case "systemData":
err = unpopulate(val, &o.SystemData)
delete(rawMsg, key)
case "type":
err = unpopulate(val, &o.Type)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// GetDataConnectorsCheckRequirements implements the DataConnectorsCheckRequirementsClassification interface for type OfficeATPCheckRequirements.
// Only the "kind" discriminator is shared with the base type.
func (o *OfficeATPCheckRequirements) GetDataConnectorsCheckRequirements() *DataConnectorsCheckRequirements {
return &DataConnectorsCheckRequirements{
Kind: o.Kind,
}
}
// MarshalJSON implements the json.Marshaller interface for type OfficeATPCheckRequirements.
func (o OfficeATPCheckRequirements) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
// The discriminator is emitted as a fixed constant, independent of the struct's Kind field.
objectMap["kind"] = DataConnectorKindOfficeATP
populate(objectMap, "properties", o.Properties)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type OfficeATPCheckRequirements.
func (o *OfficeATPCheckRequirements) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
// Decode each recognized top-level key into its field; consumed keys are removed from rawMsg.
for key, val := range rawMsg {
var err error
switch key {
case "kind":
err = unpopulate(val, &o.Kind)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &o.Properties)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// GetDataConnector implements the DataConnectorClassification interface for type OfficeATPDataConnector.
// Returns the embedded base-type fields shared across all data connector kinds.
func (o *OfficeATPDataConnector) GetDataConnector() *DataConnector {
return &DataConnector{
Kind: o.Kind,
Etag: o.Etag,
ID: o.ID,
Name: o.Name,
Type: o.Type,
SystemData: o.SystemData,
}
}
// MarshalJSON implements the json.Marshaller interface for type OfficeATPDataConnector.
func (o OfficeATPDataConnector) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "etag", o.Etag)
populate(objectMap, "id", o.ID)
// The discriminator is emitted as a fixed constant, independent of the struct's Kind field.
objectMap["kind"] = DataConnectorKindOfficeATP
populate(objectMap, "name", o.Name)
populate(objectMap, "properties", o.Properties)
populate(objectMap, "systemData", o.SystemData)
populate(objectMap, "type", o.Type)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type OfficeATPDataConnector.
func (o *OfficeATPDataConnector) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
// Decode each recognized top-level key into its field; consumed keys are removed from rawMsg.
for key, val := range rawMsg {
var err error
switch key {
case "etag":
err = unpopulate(val, &o.Etag)
delete(rawMsg, key)
case "id":
err = unpopulate(val, &o.ID)
delete(rawMsg, key)
case "kind":
err = unpopulate(val, &o.Kind)
delete(rawMsg, key)
case "name":
err = unpopulate(val, &o.Name)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &o.Properties)
delete(rawMsg, key)
case "systemData":
err = unpopulate(val, &o.SystemData)
delete(rawMsg, key)
case "type":
err = unpopulate(val, &o.Type)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// MarshalJSON implements the json.Marshaller interface for type OfficeConsentList.
// Emits the paging link and the page's items.
func (o OfficeConsentList) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "nextLink", o.NextLink)
populate(objectMap, "value", o.Value)
return json.Marshal(objectMap)
}
// GetDataConnector implements the DataConnectorClassification interface for type OfficeDataConnector.
// Returns the embedded base-type fields shared across all data connector kinds.
func (o *OfficeDataConnector) GetDataConnector() *DataConnector {
return &DataConnector{
Kind: o.Kind,
Etag: o.Etag,
ID: o.ID,
Name: o.Name,
Type: o.Type,
SystemData: o.SystemData,
}
}
// MarshalJSON implements the json.Marshaller interface for type OfficeDataConnector.
func (o OfficeDataConnector) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "etag", o.Etag)
populate(objectMap, "id", o.ID)
// The discriminator is emitted as a fixed constant, independent of the struct's Kind field.
objectMap["kind"] = DataConnectorKindOffice365
populate(objectMap, "name", o.Name)
populate(objectMap, "properties", o.Properties)
populate(objectMap, "systemData", o.SystemData)
populate(objectMap, "type", o.Type)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type OfficeDataConnector.
func (o *OfficeDataConnector) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
// Decode each recognized top-level key into its field; consumed keys are removed from rawMsg.
for key, val := range rawMsg {
var err error
switch key {
case "etag":
err = unpopulate(val, &o.Etag)
delete(rawMsg, key)
case "id":
err = unpopulate(val, &o.ID)
delete(rawMsg, key)
case "kind":
err = unpopulate(val, &o.Kind)
delete(rawMsg, key)
case "name":
err = unpopulate(val, &o.Name)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &o.Properties)
delete(rawMsg, key)
case "systemData":
err = unpopulate(val, &o.SystemData)
delete(rawMsg, key)
case "type":
err = unpopulate(val, &o.Type)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// GetDataConnectorsCheckRequirements implements the DataConnectorsCheckRequirementsClassification interface for type OfficeIRMCheckRequirements.
// Only the "kind" discriminator is shared with the base type.
func (o *OfficeIRMCheckRequirements) GetDataConnectorsCheckRequirements() *DataConnectorsCheckRequirements {
return &DataConnectorsCheckRequirements{
Kind: o.Kind,
}
}
// MarshalJSON implements the json.Marshaller interface for type OfficeIRMCheckRequirements.
func (o OfficeIRMCheckRequirements) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
// The discriminator is emitted as a fixed constant, independent of the struct's Kind field.
objectMap["kind"] = DataConnectorKindOfficeIRM
populate(objectMap, "properties", o.Properties)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type OfficeIRMCheckRequirements.
func (o *OfficeIRMCheckRequirements) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
// Decode each recognized top-level key into its field; consumed keys are removed from rawMsg.
for key, val := range rawMsg {
var err error
switch key {
case "kind":
err = unpopulate(val, &o.Kind)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &o.Properties)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// GetDataConnector implements the DataConnectorClassification interface for type OfficeIRMDataConnector.
// Returns the embedded base-type fields shared across all data connector kinds.
func (o *OfficeIRMDataConnector) GetDataConnector() *DataConnector {
return &DataConnector{
Kind: o.Kind,
Etag: o.Etag,
ID: o.ID,
Name: o.Name,
Type: o.Type,
SystemData: o.SystemData,
}
}
// MarshalJSON implements the json.Marshaller interface for type OfficeIRMDataConnector.
func (o OfficeIRMDataConnector) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "etag", o.Etag)
populate(objectMap, "id", o.ID)
// The discriminator is emitted as a fixed constant, independent of the struct's Kind field.
objectMap["kind"] = DataConnectorKindOfficeIRM
populate(objectMap, "name", o.Name)
populate(objectMap, "properties", o.Properties)
populate(objectMap, "systemData", o.SystemData)
populate(objectMap, "type", o.Type)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type OfficeIRMDataConnector.
func (o *OfficeIRMDataConnector) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
// Decode each recognized top-level key into its field; consumed keys are removed from rawMsg.
for key, val := range rawMsg {
var err error
switch key {
case "etag":
err = unpopulate(val, &o.Etag)
delete(rawMsg, key)
case "id":
err = unpopulate(val, &o.ID)
delete(rawMsg, key)
case "kind":
err = unpopulate(val, &o.Kind)
delete(rawMsg, key)
case "name":
err = unpopulate(val, &o.Name)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &o.Properties)
delete(rawMsg, key)
case "systemData":
err = unpopulate(val, &o.SystemData)
delete(rawMsg, key)
case "type":
err = unpopulate(val, &o.Type)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// GetDataConnectorsCheckRequirements implements the DataConnectorsCheckRequirementsClassification interface for type OfficePowerBICheckRequirements.
// Only the "kind" discriminator is shared with the base type.
func (o *OfficePowerBICheckRequirements) GetDataConnectorsCheckRequirements() *DataConnectorsCheckRequirements {
return &DataConnectorsCheckRequirements{
Kind: o.Kind,
}
}
// MarshalJSON implements the json.Marshaller interface for type OfficePowerBICheckRequirements.
func (o OfficePowerBICheckRequirements) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
// The discriminator is emitted as a fixed constant, independent of the struct's Kind field.
objectMap["kind"] = DataConnectorKindOfficePowerBI
populate(objectMap, "properties", o.Properties)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type OfficePowerBICheckRequirements.
func (o *OfficePowerBICheckRequirements) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
// Decode each recognized top-level key into its field; consumed keys are removed from rawMsg.
for key, val := range rawMsg {
var err error
switch key {
case "kind":
err = unpopulate(val, &o.Kind)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &o.Properties)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// GetDataConnector implements the DataConnectorClassification interface for type OfficePowerBIDataConnector.
// Returns the embedded base-type fields shared across all data connector kinds.
func (o *OfficePowerBIDataConnector) GetDataConnector() *DataConnector {
return &DataConnector{
Kind: o.Kind,
Etag: o.Etag,
ID: o.ID,
Name: o.Name,
Type: o.Type,
SystemData: o.SystemData,
}
}
// MarshalJSON implements the json.Marshaller interface for type OfficePowerBIDataConnector.
func (o OfficePowerBIDataConnector) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "etag", o.Etag)
populate(objectMap, "id", o.ID)
// The discriminator is emitted as a fixed constant, independent of the struct's Kind field.
objectMap["kind"] = DataConnectorKindOfficePowerBI
populate(objectMap, "name", o.Name)
populate(objectMap, "properties", o.Properties)
populate(objectMap, "systemData", o.SystemData)
populate(objectMap, "type", o.Type)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type OfficePowerBIDataConnector.
func (o *OfficePowerBIDataConnector) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
// Decode each recognized top-level key into its field; consumed keys are removed from rawMsg.
for key, val := range rawMsg {
var err error
switch key {
case "etag":
err = unpopulate(val, &o.Etag)
delete(rawMsg, key)
case "id":
err = unpopulate(val, &o.ID)
delete(rawMsg, key)
case "kind":
err = unpopulate(val, &o.Kind)
delete(rawMsg, key)
case "name":
err = unpopulate(val, &o.Name)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &o.Properties)
delete(rawMsg, key)
case "systemData":
err = unpopulate(val, &o.SystemData)
delete(rawMsg, key)
case "type":
err = unpopulate(val, &o.Type)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// MarshalJSON implements the json.Marshaller interface for type OperationsList.
// Emits the paging link and the page's items.
func (o OperationsList) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "nextLink", o.NextLink)
populate(objectMap, "value", o.Value)
return json.Marshal(objectMap)
}
// MarshalJSON implements the json.Marshaller interface for type Permissions.
// Emits the custom and resource-provider permission lists.
func (p Permissions) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "customs", p.Customs)
populate(objectMap, "resourceProvider", p.ResourceProvider)
return json.Marshal(objectMap)
}
// GetEntity implements the EntityClassification interface for type ProcessEntity.
// Returns the embedded base-type fields shared across all entity kinds.
func (p *ProcessEntity) GetEntity() *Entity {
return &Entity{
Kind: p.Kind,
ID: p.ID,
Name: p.Name,
Type: p.Type,
SystemData: p.SystemData,
}
}
// MarshalJSON implements the json.Marshaller interface for type ProcessEntity.
func (p ProcessEntity) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "id", p.ID)
// The discriminator is emitted as a fixed constant, independent of the struct's Kind field.
objectMap["kind"] = EntityKindProcess
populate(objectMap, "name", p.Name)
populate(objectMap, "properties", p.Properties)
populate(objectMap, "systemData", p.SystemData)
populate(objectMap, "type", p.Type)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type ProcessEntity.
func (p *ProcessEntity) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
// Decode each recognized top-level key into its field; consumed keys are removed from rawMsg.
for key, val := range rawMsg {
var err error
switch key {
case "id":
err = unpopulate(val, &p.ID)
delete(rawMsg, key)
case "kind":
err = unpopulate(val, &p.Kind)
delete(rawMsg, key)
case "name":
err = unpopulate(val, &p.Name)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &p.Properties)
delete(rawMsg, key)
case "systemData":
err = unpopulate(val, &p.SystemData)
delete(rawMsg, key)
case "type":
err = unpopulate(val, &p.Type)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// MarshalJSON implements the json.Marshaller interface for type ProcessEntityProperties.
func (p ProcessEntityProperties) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "accountEntityId", p.AccountEntityID)
populate(objectMap, "additionalData", p.AdditionalData)
populate(objectMap, "commandLine", p.CommandLine)
// Timestamps use the RFC 3339 helper so they round-trip with UnmarshalJSON below.
populateTimeRFC3339(objectMap, "creationTimeUtc", p.CreationTimeUTC)
populate(objectMap, "elevationToken", p.ElevationToken)
populate(objectMap, "friendlyName", p.FriendlyName)
populate(objectMap, "hostEntityId", p.HostEntityID)
populate(objectMap, "hostLogonSessionEntityId", p.HostLogonSessionEntityID)
populate(objectMap, "imageFileEntityId", p.ImageFileEntityID)
populate(objectMap, "parentProcessEntityId", p.ParentProcessEntityID)
populate(objectMap, "processId", p.ProcessID)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type ProcessEntityProperties.
func (p *ProcessEntityProperties) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
// Decode each recognized top-level key into its field; consumed keys are removed from rawMsg.
for key, val := range rawMsg {
var err error
switch key {
case "accountEntityId":
err = unpopulate(val, &p.AccountEntityID)
delete(rawMsg, key)
case "additionalData":
err = unpopulate(val, &p.AdditionalData)
delete(rawMsg, key)
case "commandLine":
err = unpopulate(val, &p.CommandLine)
delete(rawMsg, key)
case "creationTimeUtc":
err = unpopulateTimeRFC3339(val, &p.CreationTimeUTC)
delete(rawMsg, key)
case "elevationToken":
err = unpopulate(val, &p.ElevationToken)
delete(rawMsg, key)
case "friendlyName":
err = unpopulate(val, &p.FriendlyName)
delete(rawMsg, key)
case "hostEntityId":
err = unpopulate(val, &p.HostEntityID)
delete(rawMsg, key)
case "hostLogonSessionEntityId":
err = unpopulate(val, &p.HostLogonSessionEntityID)
delete(rawMsg, key)
case "imageFileEntityId":
err = unpopulate(val, &p.ImageFileEntityID)
delete(rawMsg, key)
case "parentProcessEntityId":
err = unpopulate(val, &p.ParentProcessEntityID)
delete(rawMsg, key)
case "processId":
err = unpopulate(val, &p.ProcessID)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// GetAutomationRuleCondition implements the AutomationRuleConditionClassification interface for type PropertyConditionProperties.
// Only the "conditionType" discriminator is shared with the base type.
func (p *PropertyConditionProperties) GetAutomationRuleCondition() *AutomationRuleCondition {
return &AutomationRuleCondition{
ConditionType: p.ConditionType,
}
}
// MarshalJSON implements the json.Marshaller interface for type PropertyConditionProperties.
func (p PropertyConditionProperties) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "conditionProperties", p.ConditionProperties)
// The discriminator is emitted as a fixed constant, independent of the struct's ConditionType field.
objectMap["conditionType"] = ConditionTypeProperty
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type PropertyConditionProperties.
func (p *PropertyConditionProperties) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
// Decode each recognized top-level key into its field; consumed keys are removed from rawMsg.
for key, val := range rawMsg {
var err error
switch key {
case "conditionProperties":
err = unpopulate(val, &p.ConditionProperties)
delete(rawMsg, key)
case "conditionType":
err = unpopulate(val, &p.ConditionType)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// MarshalJSON implements the json.Marshaller interface for type QueryBasedAlertRuleTemplateProperties.
// Emits the query-based rule template fields (query, severity, mappings, version).
func (q QueryBasedAlertRuleTemplateProperties) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "alertDetailsOverride", q.AlertDetailsOverride)
populate(objectMap, "customDetails", q.CustomDetails)
populate(objectMap, "entityMappings", q.EntityMappings)
populate(objectMap, "query", q.Query)
populate(objectMap, "severity", q.Severity)
populate(objectMap, "version", q.Version)
return json.Marshal(objectMap)
}
// GetEntity implements the EntityClassification interface for type RegistryKeyEntity.
// Returns the embedded base-type fields shared across all entity kinds.
func (r *RegistryKeyEntity) GetEntity() *Entity {
return &Entity{
Kind: r.Kind,
ID: r.ID,
Name: r.Name,
Type: r.Type,
SystemData: r.SystemData,
}
}
// MarshalJSON implements the json.Marshaller interface for type RegistryKeyEntity.
func (r RegistryKeyEntity) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "id", r.ID)
// The discriminator is emitted as a fixed constant, independent of the struct's Kind field.
objectMap["kind"] = EntityKindRegistryKey
populate(objectMap, "name", r.Name)
populate(objectMap, "properties", r.Properties)
populate(objectMap, "systemData", r.SystemData)
populate(objectMap, "type", r.Type)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type RegistryKeyEntity.
func (r *RegistryKeyEntity) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
// Decode each recognized top-level key into its field; consumed keys are removed from rawMsg.
for key, val := range rawMsg {
var err error
switch key {
case "id":
err = unpopulate(val, &r.ID)
delete(rawMsg, key)
case "kind":
err = unpopulate(val, &r.Kind)
delete(rawMsg, key)
case "name":
err = unpopulate(val, &r.Name)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &r.Properties)
delete(rawMsg, key)
case "systemData":
err = unpopulate(val, &r.SystemData)
delete(rawMsg, key)
case "type":
err = unpopulate(val, &r.Type)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// MarshalJSON implements the json.Marshaller interface for type RegistryKeyEntityProperties.
// Emits the registry key's hive, key path, and common entity metadata.
func (r RegistryKeyEntityProperties) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "additionalData", r.AdditionalData)
populate(objectMap, "friendlyName", r.FriendlyName)
populate(objectMap, "hive", r.Hive)
populate(objectMap, "key", r.Key)
return json.Marshal(objectMap)
}
// GetEntity implements the EntityClassification interface for type RegistryValueEntity.
// Returns the embedded base-type fields shared across all entity kinds.
func (r *RegistryValueEntity) GetEntity() *Entity {
return &Entity{
Kind: r.Kind,
ID: r.ID,
Name: r.Name,
Type: r.Type,
SystemData: r.SystemData,
}
}
// MarshalJSON implements the json.Marshaller interface for type RegistryValueEntity.
func (r RegistryValueEntity) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "id", r.ID)
// The discriminator is emitted as a fixed constant, independent of the struct's Kind field.
objectMap["kind"] = EntityKindRegistryValue
populate(objectMap, "name", r.Name)
populate(objectMap, "properties", r.Properties)
populate(objectMap, "systemData", r.SystemData)
populate(objectMap, "type", r.Type)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type RegistryValueEntity.
func (r *RegistryValueEntity) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
// Decode each recognized top-level key into its field; consumed keys are removed from rawMsg.
for key, val := range rawMsg {
var err error
switch key {
case "id":
err = unpopulate(val, &r.ID)
delete(rawMsg, key)
case "kind":
err = unpopulate(val, &r.Kind)
delete(rawMsg, key)
case "name":
err = unpopulate(val, &r.Name)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &r.Properties)
delete(rawMsg, key)
case "systemData":
err = unpopulate(val, &r.SystemData)
delete(rawMsg, key)
case "type":
err = unpopulate(val, &r.Type)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// MarshalJSON implements the json.Marshaller interface for type RegistryValueEntityProperties.
// Emits the registry value's owning-key reference, data, name, and type.
func (r RegistryValueEntityProperties) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "additionalData", r.AdditionalData)
populate(objectMap, "friendlyName", r.FriendlyName)
populate(objectMap, "keyEntityId", r.KeyEntityID)
populate(objectMap, "valueData", r.ValueData)
populate(objectMap, "valueName", r.ValueName)
populate(objectMap, "valueType", r.ValueType)
return json.Marshal(objectMap)
}
// MarshalJSON implements the json.Marshaller interface for type RelationList.
// Emits the paging link and the page's items.
func (r RelationList) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "nextLink", r.NextLink)
populate(objectMap, "value", r.Value)
return json.Marshal(objectMap)
}
// MarshalJSON implements the json.Marshaller interface for type Repo.
// Emits the repository's branches, full name, and URL.
func (r Repo) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "branches", r.Branches)
populate(objectMap, "fullName", r.FullName)
populate(objectMap, "url", r.URL)
return json.Marshal(objectMap)
}
// MarshalJSON implements the json.Marshaller interface for type RepoList.
// Emits the paging link and the page's items.
func (r RepoList) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "nextLink", r.NextLink)
populate(objectMap, "value", r.Value)
return json.Marshal(objectMap)
}
// MarshalJSON implements the json.Marshaller interface for type Repository.
// Emits the source-control repository's branch, URLs, and path mapping.
func (r Repository) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "branch", r.Branch)
populate(objectMap, "deploymentLogsUrl", r.DeploymentLogsURL)
populate(objectMap, "displayUrl", r.DisplayURL)
populate(objectMap, "pathMapping", r.PathMapping)
populate(objectMap, "url", r.URL)
return json.Marshal(objectMap)
}
// GetAlertRule implements the AlertRuleClassification interface for type ScheduledAlertRule.
// Returns the embedded base-type fields shared across all alert rule kinds.
func (s *ScheduledAlertRule) GetAlertRule() *AlertRule {
return &AlertRule{
Kind: s.Kind,
Etag: s.Etag,
ID: s.ID,
Name: s.Name,
Type: s.Type,
SystemData: s.SystemData,
}
}
// MarshalJSON implements the json.Marshaller interface for type ScheduledAlertRule.
func (s ScheduledAlertRule) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "etag", s.Etag)
populate(objectMap, "id", s.ID)
// The discriminator is emitted as a fixed constant, independent of the struct's Kind field.
objectMap["kind"] = AlertRuleKindScheduled
populate(objectMap, "name", s.Name)
populate(objectMap, "properties", s.Properties)
populate(objectMap, "systemData", s.SystemData)
populate(objectMap, "type", s.Type)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type ScheduledAlertRule.
func (s *ScheduledAlertRule) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
// Decode each recognized top-level key into its field; consumed keys are removed from rawMsg.
for key, val := range rawMsg {
var err error
switch key {
case "etag":
err = unpopulate(val, &s.Etag)
delete(rawMsg, key)
case "id":
err = unpopulate(val, &s.ID)
delete(rawMsg, key)
case "kind":
err = unpopulate(val, &s.Kind)
delete(rawMsg, key)
case "name":
err = unpopulate(val, &s.Name)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &s.Properties)
delete(rawMsg, key)
case "systemData":
err = unpopulate(val, &s.SystemData)
delete(rawMsg, key)
case "type":
err = unpopulate(val, &s.Type)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// MarshalJSON implements the json.Marshaller interface for type ScheduledAlertRuleCommonProperties.
// Emits the scheduling and trigger fields common to scheduled alert rules.
func (s ScheduledAlertRuleCommonProperties) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "alertDetailsOverride", s.AlertDetailsOverride)
populate(objectMap, "customDetails", s.CustomDetails)
populate(objectMap, "entityMappings", s.EntityMappings)
populate(objectMap, "eventGroupingSettings", s.EventGroupingSettings)
populate(objectMap, "query", s.Query)
populate(objectMap, "queryFrequency", s.QueryFrequency)
populate(objectMap, "queryPeriod", s.QueryPeriod)
populate(objectMap, "severity", s.Severity)
populate(objectMap, "triggerOperator", s.TriggerOperator)
populate(objectMap, "triggerThreshold", s.TriggerThreshold)
return json.Marshal(objectMap)
}
// MarshalJSON implements the json.Marshaller interface for type ScheduledAlertRuleProperties.
func (s ScheduledAlertRuleProperties) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "alertDetailsOverride", s.AlertDetailsOverride)
populate(objectMap, "alertRuleTemplateName", s.AlertRuleTemplateName)
populate(objectMap, "customDetails", s.CustomDetails)
populate(objectMap, "description", s.Description)
populate(objectMap, "displayName", s.DisplayName)
populate(objectMap, "enabled", s.Enabled)
populate(objectMap, "entityMappings", s.EntityMappings)
populate(objectMap, "eventGroupingSettings", s.EventGroupingSettings)
populate(objectMap, "incidentConfiguration", s.IncidentConfiguration)
// Timestamps use the RFC 3339 helper so they round-trip with UnmarshalJSON below.
populateTimeRFC3339(objectMap, "lastModifiedUtc", s.LastModifiedUTC)
populate(objectMap, "query", s.Query)
populate(objectMap, "queryFrequency", s.QueryFrequency)
populate(objectMap, "queryPeriod", s.QueryPeriod)
populate(objectMap, "severity", s.Severity)
populate(objectMap, "suppressionDuration", s.SuppressionDuration)
populate(objectMap, "suppressionEnabled", s.SuppressionEnabled)
populate(objectMap, "tactics", s.Tactics)
populate(objectMap, "techniques", s.Techniques)
populate(objectMap, "templateVersion", s.TemplateVersion)
populate(objectMap, "triggerOperator", s.TriggerOperator)
populate(objectMap, "triggerThreshold", s.TriggerThreshold)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type ScheduledAlertRuleProperties.
func (s *ScheduledAlertRuleProperties) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
// Decode each recognized top-level key into its field; consumed keys are removed from rawMsg.
for key, val := range rawMsg {
var err error
switch key {
case "alertDetailsOverride":
err = unpopulate(val, &s.AlertDetailsOverride)
delete(rawMsg, key)
case "alertRuleTemplateName":
err = unpopulate(val, &s.AlertRuleTemplateName)
delete(rawMsg, key)
case "customDetails":
err = unpopulate(val, &s.CustomDetails)
delete(rawMsg, key)
case "description":
err = unpopulate(val, &s.Description)
delete(rawMsg, key)
case "displayName":
err = unpopulate(val, &s.DisplayName)
delete(rawMsg, key)
case "enabled":
err = unpopulate(val, &s.Enabled)
delete(rawMsg, key)
case "entityMappings":
err = unpopulate(val, &s.EntityMappings)
delete(rawMsg, key)
case "eventGroupingSettings":
err = unpopulate(val, &s.EventGroupingSettings)
delete(rawMsg, key)
case "incidentConfiguration":
err = unpopulate(val, &s.IncidentConfiguration)
delete(rawMsg, key)
case "lastModifiedUtc":
err = unpopulateTimeRFC3339(val, &s.LastModifiedUTC)
delete(rawMsg, key)
case "query":
err = unpopulate(val, &s.Query)
delete(rawMsg, key)
case "queryFrequency":
err = unpopulate(val, &s.QueryFrequency)
delete(rawMsg, key)
case "queryPeriod":
err = unpopulate(val, &s.QueryPeriod)
delete(rawMsg, key)
case "severity":
err = unpopulate(val, &s.Severity)
delete(rawMsg, key)
case "suppressionDuration":
err = unpopulate(val, &s.SuppressionDuration)
delete(rawMsg, key)
case "suppressionEnabled":
err = unpopulate(val, &s.SuppressionEnabled)
delete(rawMsg, key)
case "tactics":
err = unpopulate(val, &s.Tactics)
delete(rawMsg, key)
case "techniques":
err = unpopulate(val, &s.Techniques)
delete(rawMsg, key)
case "templateVersion":
err = unpopulate(val, &s.TemplateVersion)
delete(rawMsg, key)
case "triggerOperator":
err = unpopulate(val, &s.TriggerOperator)
delete(rawMsg, key)
case "triggerThreshold":
err = unpopulate(val, &s.TriggerThreshold)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// GetAlertRuleTemplate implements the AlertRuleTemplateClassification interface for type ScheduledAlertRuleTemplate.
// Returns the embedded base-type fields shared across all alert rule template kinds.
func (s *ScheduledAlertRuleTemplate) GetAlertRuleTemplate() *AlertRuleTemplate {
return &AlertRuleTemplate{
Kind: s.Kind,
ID: s.ID,
Name: s.Name,
Type: s.Type,
SystemData: s.SystemData,
}
}
// MarshalJSON implements the json.Marshaller interface for type ScheduledAlertRuleTemplate.
func (s ScheduledAlertRuleTemplate) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "id", s.ID)
// The discriminator is emitted as a fixed constant, independent of the struct's Kind field.
objectMap["kind"] = AlertRuleKindScheduled
populate(objectMap, "name", s.Name)
populate(objectMap, "properties", s.Properties)
populate(objectMap, "systemData", s.SystemData)
populate(objectMap, "type", s.Type)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type ScheduledAlertRuleTemplate.
func (s *ScheduledAlertRuleTemplate) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
// Decode each recognized top-level key into its field; consumed keys are removed from rawMsg.
for key, val := range rawMsg {
var err error
switch key {
case "id":
err = unpopulate(val, &s.ID)
delete(rawMsg, key)
case "kind":
err = unpopulate(val, &s.Kind)
delete(rawMsg, key)
case "name":
err = unpopulate(val, &s.Name)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &s.Properties)
delete(rawMsg, key)
case "systemData":
err = unpopulate(val, &s.SystemData)
delete(rawMsg, key)
case "type":
err = unpopulate(val, &s.Type)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// MarshalJSON implements the json.Marshaller interface for type ScheduledAlertRuleTemplateProperties.
func (s ScheduledAlertRuleTemplateProperties) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "alertDetailsOverride", s.AlertDetailsOverride)
populate(objectMap, "alertRulesCreatedByTemplateCount", s.AlertRulesCreatedByTemplateCount)
// Timestamps use the RFC 3339 helper so they round-trip with UnmarshalJSON below.
populateTimeRFC3339(objectMap, "createdDateUTC", s.CreatedDateUTC)
populate(objectMap, "customDetails", s.CustomDetails)
populate(objectMap, "description", s.Description)
populate(objectMap, "displayName", s.DisplayName)
populate(objectMap, "entityMappings", s.EntityMappings)
populate(objectMap, "eventGroupingSettings", s.EventGroupingSettings)
populateTimeRFC3339(objectMap, "lastUpdatedDateUTC", s.LastUpdatedDateUTC)
populate(objectMap, "query", s.Query)
populate(objectMap, "queryFrequency", s.QueryFrequency)
populate(objectMap, "queryPeriod", s.QueryPeriod)
populate(objectMap, "requiredDataConnectors", s.RequiredDataConnectors)
populate(objectMap, "severity", s.Severity)
populate(objectMap, "status", s.Status)
populate(objectMap, "tactics", s.Tactics)
populate(objectMap, "techniques", s.Techniques)
populate(objectMap, "triggerOperator", s.TriggerOperator)
populate(objectMap, "triggerThreshold", s.TriggerThreshold)
populate(objectMap, "version", s.Version)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type ScheduledAlertRuleTemplateProperties.
func (s *ScheduledAlertRuleTemplateProperties) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
// Decode each recognized top-level key into its field; consumed keys are removed from rawMsg.
for key, val := range rawMsg {
var err error
switch key {
case "alertDetailsOverride":
err = unpopulate(val, &s.AlertDetailsOverride)
delete(rawMsg, key)
case "alertRulesCreatedByTemplateCount":
err = unpopulate(val, &s.AlertRulesCreatedByTemplateCount)
delete(rawMsg, key)
case "createdDateUTC":
err = unpopulateTimeRFC3339(val, &s.CreatedDateUTC)
delete(rawMsg, key)
case "customDetails":
err = unpopulate(val, &s.CustomDetails)
delete(rawMsg, key)
case "description":
err = unpopulate(val, &s.Description)
delete(rawMsg, key)
case "displayName":
err = unpopulate(val, &s.DisplayName)
delete(rawMsg, key)
case "entityMappings":
err = unpopulate(val, &s.EntityMappings)
delete(rawMsg, key)
case "eventGroupingSettings":
err = unpopulate(val, &s.EventGroupingSettings)
delete(rawMsg, key)
case "lastUpdatedDateUTC":
err = unpopulateTimeRFC3339(val, &s.LastUpdatedDateUTC)
delete(rawMsg, key)
case "query":
err = unpopulate(val, &s.Query)
delete(rawMsg, key)
case "queryFrequency":
err = unpopulate(val, &s.QueryFrequency)
delete(rawMsg, key)
case "queryPeriod":
err = unpopulate(val, &s.QueryPeriod)
delete(rawMsg, key)
case "requiredDataConnectors":
err = unpopulate(val, &s.RequiredDataConnectors)
delete(rawMsg, key)
case "severity":
err = unpopulate(val, &s.Severity)
delete(rawMsg, key)
case "status":
err = unpopulate(val, &s.Status)
delete(rawMsg, key)
case "tactics":
err = unpopulate(val, &s.Tactics)
delete(rawMsg, key)
case "techniques":
err = unpopulate(val, &s.Techniques)
delete(rawMsg, key)
case "triggerOperator":
err = unpopulate(val, &s.TriggerOperator)
delete(rawMsg, key)
case "triggerThreshold":
err = unpopulate(val, &s.TriggerThreshold)
delete(rawMsg, key)
case "version":
err = unpopulate(val, &s.Version)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// GetEntity implements the EntityClassification interface for type SecurityAlert,
// projecting the alert's common entity fields into a base *Entity value.
func (s *SecurityAlert) GetEntity() *Entity {
	entity := &Entity{
		ID:         s.ID,
		Kind:       s.Kind,
		Name:       s.Name,
		SystemData: s.SystemData,
		Type:       s.Type,
	}
	return entity
}
// MarshalJSON implements the json.Marshaller interface for type SecurityAlert.
// The "kind" discriminator is always emitted as the fixed constant EntityKindSecurityAlert.
func (s SecurityAlert) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "id", s.ID)
	objectMap["kind"] = EntityKindSecurityAlert
	populate(objectMap, "name", s.Name)
	populate(objectMap, "properties", s.Properties)
	populate(objectMap, "systemData", s.SystemData)
	populate(objectMap, "type", s.Type)
	return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type SecurityAlert.
// Recognized keys are removed from rawMsg as they are consumed; unrecognized keys are silently ignored.
func (s *SecurityAlert) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "id":
			err = unpopulate(val, &s.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &s.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &s.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &s.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &s.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &s.Type)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type SecurityAlertProperties.
// Nil fields are omitted by the populate helpers; time-valued fields use the RFC3339 helper.
func (s SecurityAlertProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "additionalData", s.AdditionalData)
	populate(objectMap, "alertDisplayName", s.AlertDisplayName)
	populate(objectMap, "alertLink", s.AlertLink)
	populate(objectMap, "alertType", s.AlertType)
	populate(objectMap, "compromisedEntity", s.CompromisedEntity)
	populate(objectMap, "confidenceLevel", s.ConfidenceLevel)
	populate(objectMap, "confidenceReasons", s.ConfidenceReasons)
	populate(objectMap, "confidenceScore", s.ConfidenceScore)
	populate(objectMap, "confidenceScoreStatus", s.ConfidenceScoreStatus)
	populate(objectMap, "description", s.Description)
	populateTimeRFC3339(objectMap, "endTimeUtc", s.EndTimeUTC)
	populate(objectMap, "friendlyName", s.FriendlyName)
	populate(objectMap, "intent", s.Intent)
	populateTimeRFC3339(objectMap, "processingEndTime", s.ProcessingEndTime)
	populate(objectMap, "productComponentName", s.ProductComponentName)
	populate(objectMap, "productName", s.ProductName)
	populate(objectMap, "productVersion", s.ProductVersion)
	populate(objectMap, "providerAlertId", s.ProviderAlertID)
	populate(objectMap, "remediationSteps", s.RemediationSteps)
	populate(objectMap, "resourceIdentifiers", s.ResourceIdentifiers)
	populate(objectMap, "severity", s.Severity)
	populateTimeRFC3339(objectMap, "startTimeUtc", s.StartTimeUTC)
	populate(objectMap, "status", s.Status)
	populate(objectMap, "systemAlertId", s.SystemAlertID)
	populate(objectMap, "tactics", s.Tactics)
	populateTimeRFC3339(objectMap, "timeGenerated", s.TimeGenerated)
	populate(objectMap, "vendorName", s.VendorName)
	return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type SecurityAlertProperties.
// Recognized keys are removed from rawMsg as they are consumed; unrecognized keys are silently ignored.
// The first helper error aborts decoding and is returned to the caller.
func (s *SecurityAlertProperties) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "additionalData":
			err = unpopulate(val, &s.AdditionalData)
			delete(rawMsg, key)
		case "alertDisplayName":
			err = unpopulate(val, &s.AlertDisplayName)
			delete(rawMsg, key)
		case "alertLink":
			err = unpopulate(val, &s.AlertLink)
			delete(rawMsg, key)
		case "alertType":
			err = unpopulate(val, &s.AlertType)
			delete(rawMsg, key)
		case "compromisedEntity":
			err = unpopulate(val, &s.CompromisedEntity)
			delete(rawMsg, key)
		case "confidenceLevel":
			err = unpopulate(val, &s.ConfidenceLevel)
			delete(rawMsg, key)
		case "confidenceReasons":
			err = unpopulate(val, &s.ConfidenceReasons)
			delete(rawMsg, key)
		case "confidenceScore":
			err = unpopulate(val, &s.ConfidenceScore)
			delete(rawMsg, key)
		case "confidenceScoreStatus":
			err = unpopulate(val, &s.ConfidenceScoreStatus)
			delete(rawMsg, key)
		case "description":
			err = unpopulate(val, &s.Description)
			delete(rawMsg, key)
		case "endTimeUtc":
			err = unpopulateTimeRFC3339(val, &s.EndTimeUTC)
			delete(rawMsg, key)
		case "friendlyName":
			err = unpopulate(val, &s.FriendlyName)
			delete(rawMsg, key)
		case "intent":
			err = unpopulate(val, &s.Intent)
			delete(rawMsg, key)
		case "processingEndTime":
			err = unpopulateTimeRFC3339(val, &s.ProcessingEndTime)
			delete(rawMsg, key)
		case "productComponentName":
			err = unpopulate(val, &s.ProductComponentName)
			delete(rawMsg, key)
		case "productName":
			err = unpopulate(val, &s.ProductName)
			delete(rawMsg, key)
		case "productVersion":
			err = unpopulate(val, &s.ProductVersion)
			delete(rawMsg, key)
		case "providerAlertId":
			err = unpopulate(val, &s.ProviderAlertID)
			delete(rawMsg, key)
		case "remediationSteps":
			err = unpopulate(val, &s.RemediationSteps)
			delete(rawMsg, key)
		case "resourceIdentifiers":
			err = unpopulate(val, &s.ResourceIdentifiers)
			delete(rawMsg, key)
		case "severity":
			err = unpopulate(val, &s.Severity)
			delete(rawMsg, key)
		case "startTimeUtc":
			err = unpopulateTimeRFC3339(val, &s.StartTimeUTC)
			delete(rawMsg, key)
		case "status":
			err = unpopulate(val, &s.Status)
			delete(rawMsg, key)
		case "systemAlertId":
			err = unpopulate(val, &s.SystemAlertID)
			delete(rawMsg, key)
		case "tactics":
			err = unpopulate(val, &s.Tactics)
			delete(rawMsg, key)
		case "timeGenerated":
			err = unpopulateTimeRFC3339(val, &s.TimeGenerated)
			delete(rawMsg, key)
		case "vendorName":
			err = unpopulate(val, &s.VendorName)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// GetEntityTimelineItem implements the EntityTimelineItemClassification interface for type
// SecurityAlertTimelineItem, exposing the discriminator via a base *EntityTimelineItem value.
func (s *SecurityAlertTimelineItem) GetEntityTimelineItem() *EntityTimelineItem {
	item := &EntityTimelineItem{Kind: s.Kind}
	return item
}
// MarshalJSON implements the json.Marshaller interface for type SecurityAlertTimelineItem.
// The "kind" discriminator is always emitted as the fixed constant EntityTimelineKindSecurityAlert.
func (s SecurityAlertTimelineItem) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "alertType", s.AlertType)
	populate(objectMap, "azureResourceId", s.AzureResourceID)
	populate(objectMap, "description", s.Description)
	populate(objectMap, "displayName", s.DisplayName)
	populateTimeRFC3339(objectMap, "endTimeUtc", s.EndTimeUTC)
	objectMap["kind"] = EntityTimelineKindSecurityAlert
	populate(objectMap, "productName", s.ProductName)
	populate(objectMap, "severity", s.Severity)
	populateTimeRFC3339(objectMap, "startTimeUtc", s.StartTimeUTC)
	populateTimeRFC3339(objectMap, "timeGenerated", s.TimeGenerated)
	return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type SecurityAlertTimelineItem.
// Recognized keys are removed from rawMsg as they are consumed; unrecognized keys are silently ignored.
func (s *SecurityAlertTimelineItem) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "alertType":
			err = unpopulate(val, &s.AlertType)
			delete(rawMsg, key)
		case "azureResourceId":
			err = unpopulate(val, &s.AzureResourceID)
			delete(rawMsg, key)
		case "description":
			err = unpopulate(val, &s.Description)
			delete(rawMsg, key)
		case "displayName":
			err = unpopulate(val, &s.DisplayName)
			delete(rawMsg, key)
		case "endTimeUtc":
			err = unpopulateTimeRFC3339(val, &s.EndTimeUTC)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &s.Kind)
			delete(rawMsg, key)
		case "productName":
			err = unpopulate(val, &s.ProductName)
			delete(rawMsg, key)
		case "severity":
			err = unpopulate(val, &s.Severity)
			delete(rawMsg, key)
		case "startTimeUtc":
			err = unpopulateTimeRFC3339(val, &s.StartTimeUTC)
			delete(rawMsg, key)
		case "timeGenerated":
			err = unpopulateTimeRFC3339(val, &s.TimeGenerated)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// GetEntity implements the EntityClassification interface for type SecurityGroupEntity,
// projecting the group's common entity fields into a base *Entity value.
func (s *SecurityGroupEntity) GetEntity() *Entity {
	entity := &Entity{
		ID:         s.ID,
		Kind:       s.Kind,
		Name:       s.Name,
		SystemData: s.SystemData,
		Type:       s.Type,
	}
	return entity
}
// MarshalJSON implements the json.Marshaller interface for type SecurityGroupEntity.
// The "kind" discriminator is always emitted as the fixed constant EntityKindSecurityGroup.
func (s SecurityGroupEntity) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "id", s.ID)
	objectMap["kind"] = EntityKindSecurityGroup
	populate(objectMap, "name", s.Name)
	populate(objectMap, "properties", s.Properties)
	populate(objectMap, "systemData", s.SystemData)
	populate(objectMap, "type", s.Type)
	return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type SecurityGroupEntity.
// Recognized keys are removed from rawMsg as they are consumed; unrecognized keys are silently ignored.
func (s *SecurityGroupEntity) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "id":
			err = unpopulate(val, &s.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &s.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &s.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &s.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &s.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &s.Type)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type SecurityGroupEntityProperties.
// Nil fields are omitted by the populate helper.
func (s SecurityGroupEntityProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "additionalData", s.AdditionalData)
	populate(objectMap, "distinguishedName", s.DistinguishedName)
	populate(objectMap, "friendlyName", s.FriendlyName)
	populate(objectMap, "objectGuid", s.ObjectGUID)
	populate(objectMap, "sid", s.Sid)
	return json.Marshal(objectMap)
}
// MarshalJSON implements the json.Marshaller interface for type SentinelOnboardingStatesList.
// Only the "value" collection is serialized; a nil slice is omitted by the populate helper.
func (s SentinelOnboardingStatesList) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "value", s.Value)
	return json.Marshal(objectMap)
}
// MarshalJSON implements the json.Marshaller interface for type SettingList.
// Only the "value" collection is serialized; a nil slice is omitted by the populate helper.
func (s SettingList) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "value", s.Value)
	return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type SettingList.
// The "value" array is polymorphic, so it is decoded through the
// SettingsClassification discriminator helper rather than unpopulate.
func (s *SettingList) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "value":
			s.Value, err = unmarshalSettingsClassificationArray(val)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// GetSettings implements the SettingsClassification interface for type Settings.
// The base type simply returns itself.
func (s *Settings) GetSettings() *Settings {
	return s
}
// MarshalJSON implements the json.Marshaller interface for type SourceControlList.
// Serializes the paging link alongside the current page of results; nil fields are omitted.
func (s SourceControlList) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "nextLink", s.NextLink)
	populate(objectMap, "value", s.Value)
	return json.Marshal(objectMap)
}
// MarshalJSON implements the json.Marshaller interface for type SourceControlProperties.
// Nil fields are omitted by the populate helper.
func (s SourceControlProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "contentTypes", s.ContentTypes)
	populate(objectMap, "description", s.Description)
	populate(objectMap, "displayName", s.DisplayName)
	populate(objectMap, "id", s.ID)
	populate(objectMap, "lastDeploymentInfo", s.LastDeploymentInfo)
	populate(objectMap, "repoType", s.RepoType)
	populate(objectMap, "repository", s.Repository)
	populate(objectMap, "repositoryResourceInfo", s.RepositoryResourceInfo)
	populate(objectMap, "version", s.Version)
	return json.Marshal(objectMap)
}
// GetEntity implements the EntityClassification interface for type SubmissionMailEntity,
// projecting the mail submission's common entity fields into a base *Entity value.
func (s *SubmissionMailEntity) GetEntity() *Entity {
	entity := &Entity{
		ID:         s.ID,
		Kind:       s.Kind,
		Name:       s.Name,
		SystemData: s.SystemData,
		Type:       s.Type,
	}
	return entity
}
// MarshalJSON implements the json.Marshaller interface for type SubmissionMailEntity.
// The "kind" discriminator is always emitted as the fixed constant EntityKindSubmissionMail.
func (s SubmissionMailEntity) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "id", s.ID)
	objectMap["kind"] = EntityKindSubmissionMail
	populate(objectMap, "name", s.Name)
	populate(objectMap, "properties", s.Properties)
	populate(objectMap, "systemData", s.SystemData)
	populate(objectMap, "type", s.Type)
	return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type SubmissionMailEntity.
// Recognized keys are removed from rawMsg as they are consumed; unrecognized keys are silently ignored.
func (s *SubmissionMailEntity) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "id":
			err = unpopulate(val, &s.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &s.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &s.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &s.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &s.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &s.Type)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type SubmissionMailEntityProperties.
// Nil fields are omitted by the populate helpers; time-valued fields use the RFC3339 helper.
func (s SubmissionMailEntityProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "additionalData", s.AdditionalData)
	populate(objectMap, "friendlyName", s.FriendlyName)
	populate(objectMap, "networkMessageId", s.NetworkMessageID)
	populate(objectMap, "recipient", s.Recipient)
	populate(objectMap, "reportType", s.ReportType)
	populate(objectMap, "sender", s.Sender)
	populate(objectMap, "senderIp", s.SenderIP)
	populate(objectMap, "subject", s.Subject)
	populateTimeRFC3339(objectMap, "submissionDate", s.SubmissionDate)
	populate(objectMap, "submissionId", s.SubmissionID)
	populate(objectMap, "submitter", s.Submitter)
	populateTimeRFC3339(objectMap, "timestamp", s.Timestamp)
	return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type SubmissionMailEntityProperties.
// Recognized keys are removed from rawMsg as they are consumed; unrecognized keys are silently ignored.
func (s *SubmissionMailEntityProperties) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "additionalData":
			err = unpopulate(val, &s.AdditionalData)
			delete(rawMsg, key)
		case "friendlyName":
			err = unpopulate(val, &s.FriendlyName)
			delete(rawMsg, key)
		case "networkMessageId":
			err = unpopulate(val, &s.NetworkMessageID)
			delete(rawMsg, key)
		case "recipient":
			err = unpopulate(val, &s.Recipient)
			delete(rawMsg, key)
		case "reportType":
			err = unpopulate(val, &s.ReportType)
			delete(rawMsg, key)
		case "sender":
			err = unpopulate(val, &s.Sender)
			delete(rawMsg, key)
		case "senderIp":
			err = unpopulate(val, &s.SenderIP)
			delete(rawMsg, key)
		case "subject":
			err = unpopulate(val, &s.Subject)
			delete(rawMsg, key)
		case "submissionDate":
			err = unpopulateTimeRFC3339(val, &s.SubmissionDate)
			delete(rawMsg, key)
		case "submissionId":
			err = unpopulate(val, &s.SubmissionID)
			delete(rawMsg, key)
		case "submitter":
			err = unpopulate(val, &s.Submitter)
			delete(rawMsg, key)
		case "timestamp":
			err = unpopulateTimeRFC3339(val, &s.Timestamp)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type SystemData.
// ARM resource audit metadata; timestamps are written via the RFC3339 helper.
func (s SystemData) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populateTimeRFC3339(objectMap, "createdAt", s.CreatedAt)
	populate(objectMap, "createdBy", s.CreatedBy)
	populate(objectMap, "createdByType", s.CreatedByType)
	populateTimeRFC3339(objectMap, "lastModifiedAt", s.LastModifiedAt)
	populate(objectMap, "lastModifiedBy", s.LastModifiedBy)
	populate(objectMap, "lastModifiedByType", s.LastModifiedByType)
	return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type SystemData.
// Recognized keys are removed from rawMsg as they are consumed; unrecognized keys are silently ignored.
func (s *SystemData) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "createdAt":
			err = unpopulateTimeRFC3339(val, &s.CreatedAt)
			delete(rawMsg, key)
		case "createdBy":
			err = unpopulate(val, &s.CreatedBy)
			delete(rawMsg, key)
		case "createdByType":
			err = unpopulate(val, &s.CreatedByType)
			delete(rawMsg, key)
		case "lastModifiedAt":
			err = unpopulateTimeRFC3339(val, &s.LastModifiedAt)
			delete(rawMsg, key)
		case "lastModifiedBy":
			err = unpopulate(val, &s.LastModifiedBy)
			delete(rawMsg, key)
		case "lastModifiedByType":
			err = unpopulate(val, &s.LastModifiedByType)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// GetDataConnectorsCheckRequirements implements the DataConnectorsCheckRequirementsClassification
// interface for type TICheckRequirements, exposing the discriminator on the base type.
func (t *TICheckRequirements) GetDataConnectorsCheckRequirements() *DataConnectorsCheckRequirements {
	base := &DataConnectorsCheckRequirements{Kind: t.Kind}
	return base
}
// MarshalJSON implements the json.Marshaller interface for type TICheckRequirements.
// The "kind" discriminator is always emitted as the fixed constant DataConnectorKindThreatIntelligence.
func (t TICheckRequirements) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	objectMap["kind"] = DataConnectorKindThreatIntelligence
	populate(objectMap, "properties", t.Properties)
	return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type TICheckRequirements.
// Recognized keys are removed from rawMsg as they are consumed; unrecognized keys are silently ignored.
func (t *TICheckRequirements) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "kind":
			err = unpopulate(val, &t.Kind)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &t.Properties)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// GetDataConnector implements the DataConnectorClassification interface for type TIDataConnector,
// projecting the connector's common resource fields into a base *DataConnector value.
func (t *TIDataConnector) GetDataConnector() *DataConnector {
	connector := &DataConnector{
		Etag:       t.Etag,
		ID:         t.ID,
		Kind:       t.Kind,
		Name:       t.Name,
		SystemData: t.SystemData,
		Type:       t.Type,
	}
	return connector
}
// MarshalJSON implements the json.Marshaller interface for type TIDataConnector.
// The "kind" discriminator is always emitted as the fixed constant DataConnectorKindThreatIntelligence.
func (t TIDataConnector) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "etag", t.Etag)
	populate(objectMap, "id", t.ID)
	objectMap["kind"] = DataConnectorKindThreatIntelligence
	populate(objectMap, "name", t.Name)
	populate(objectMap, "properties", t.Properties)
	populate(objectMap, "systemData", t.SystemData)
	populate(objectMap, "type", t.Type)
	return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type TIDataConnector.
// Recognized keys are removed from rawMsg as they are consumed; unrecognized keys are silently ignored.
func (t *TIDataConnector) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "etag":
			err = unpopulate(val, &t.Etag)
			delete(rawMsg, key)
		case "id":
			err = unpopulate(val, &t.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &t.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &t.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &t.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &t.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &t.Type)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type TIDataConnectorProperties.
// Nil fields are omitted; tipLookbackPeriod is written via the RFC3339 helper.
func (t TIDataConnectorProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "dataTypes", t.DataTypes)
	populate(objectMap, "tenantId", t.TenantID)
	populateTimeRFC3339(objectMap, "tipLookbackPeriod", t.TipLookbackPeriod)
	return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type TIDataConnectorProperties.
// Recognized keys are removed from rawMsg as they are consumed; unrecognized keys are silently ignored.
func (t *TIDataConnectorProperties) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "dataTypes":
			err = unpopulate(val, &t.DataTypes)
			delete(rawMsg, key)
		case "tenantId":
			err = unpopulate(val, &t.TenantID)
			delete(rawMsg, key)
		case "tipLookbackPeriod":
			err = unpopulateTimeRFC3339(val, &t.TipLookbackPeriod)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type TeamInformation.
// Nil fields are omitted; teamCreationTimeUtc is written via the RFC3339 helper.
func (t TeamInformation) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "description", t.Description)
	populate(objectMap, "name", t.Name)
	populate(objectMap, "primaryChannelUrl", t.PrimaryChannelURL)
	populateTimeRFC3339(objectMap, "teamCreationTimeUtc", t.TeamCreationTimeUTC)
	populate(objectMap, "teamId", t.TeamID)
	return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type TeamInformation.
// Recognized keys are removed from rawMsg as they are consumed; unrecognized keys are silently ignored.
func (t *TeamInformation) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "description":
			err = unpopulate(val, &t.Description)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &t.Name)
			delete(rawMsg, key)
		case "primaryChannelUrl":
			err = unpopulate(val, &t.PrimaryChannelURL)
			delete(rawMsg, key)
		case "teamCreationTimeUtc":
			err = unpopulateTimeRFC3339(val, &t.TeamCreationTimeUTC)
			delete(rawMsg, key)
		case "teamId":
			err = unpopulate(val, &t.TeamID)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type TeamProperties.
// Nil fields are omitted by the populate helper.
func (t TeamProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "groupIds", t.GroupIDs)
	populate(objectMap, "memberIds", t.MemberIDs)
	populate(objectMap, "teamDescription", t.TeamDescription)
	populate(objectMap, "teamName", t.TeamName)
	return json.Marshal(objectMap)
}
// GetAlertRule implements the AlertRuleClassification interface for type ThreatIntelligenceAlertRule,
// projecting the rule's common resource fields into a base *AlertRule value.
func (t *ThreatIntelligenceAlertRule) GetAlertRule() *AlertRule {
	rule := &AlertRule{
		Etag:       t.Etag,
		ID:         t.ID,
		Kind:       t.Kind,
		Name:       t.Name,
		SystemData: t.SystemData,
		Type:       t.Type,
	}
	return rule
}
// MarshalJSON implements the json.Marshaller interface for type ThreatIntelligenceAlertRule.
// The "kind" discriminator is always emitted as the fixed constant AlertRuleKindThreatIntelligence.
func (t ThreatIntelligenceAlertRule) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "etag", t.Etag)
	populate(objectMap, "id", t.ID)
	objectMap["kind"] = AlertRuleKindThreatIntelligence
	populate(objectMap, "name", t.Name)
	populate(objectMap, "properties", t.Properties)
	populate(objectMap, "systemData", t.SystemData)
	populate(objectMap, "type", t.Type)
	return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type ThreatIntelligenceAlertRule.
// Recognized keys are removed from rawMsg as they are consumed; unrecognized keys are silently ignored.
func (t *ThreatIntelligenceAlertRule) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "etag":
			err = unpopulate(val, &t.Etag)
			delete(rawMsg, key)
		case "id":
			err = unpopulate(val, &t.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &t.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &t.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &t.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &t.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &t.Type)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type ThreatIntelligenceAlertRuleProperties.
// Nil fields are omitted; lastModifiedUtc is written via the RFC3339 helper.
func (t ThreatIntelligenceAlertRuleProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "alertRuleTemplateName", t.AlertRuleTemplateName)
	populate(objectMap, "description", t.Description)
	populate(objectMap, "displayName", t.DisplayName)
	populate(objectMap, "enabled", t.Enabled)
	populateTimeRFC3339(objectMap, "lastModifiedUtc", t.LastModifiedUTC)
	populate(objectMap, "severity", t.Severity)
	populate(objectMap, "tactics", t.Tactics)
	populate(objectMap, "techniques", t.Techniques)
	return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type ThreatIntelligenceAlertRuleProperties.
// Recognized keys are removed from rawMsg as they are consumed; unrecognized keys are silently ignored.
func (t *ThreatIntelligenceAlertRuleProperties) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "alertRuleTemplateName":
			err = unpopulate(val, &t.AlertRuleTemplateName)
			delete(rawMsg, key)
		case "description":
			err = unpopulate(val, &t.Description)
			delete(rawMsg, key)
		case "displayName":
			err = unpopulate(val, &t.DisplayName)
			delete(rawMsg, key)
		case "enabled":
			err = unpopulate(val, &t.Enabled)
			delete(rawMsg, key)
		case "lastModifiedUtc":
			err = unpopulateTimeRFC3339(val, &t.LastModifiedUTC)
			delete(rawMsg, key)
		case "severity":
			err = unpopulate(val, &t.Severity)
			delete(rawMsg, key)
		case "tactics":
			err = unpopulate(val, &t.Tactics)
			delete(rawMsg, key)
		case "techniques":
			err = unpopulate(val, &t.Techniques)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// GetAlertRuleTemplate implements the AlertRuleTemplateClassification interface for type
// ThreatIntelligenceAlertRuleTemplate, projecting its common resource fields into a base value.
func (t *ThreatIntelligenceAlertRuleTemplate) GetAlertRuleTemplate() *AlertRuleTemplate {
	template := &AlertRuleTemplate{
		ID:         t.ID,
		Kind:       t.Kind,
		Name:       t.Name,
		SystemData: t.SystemData,
		Type:       t.Type,
	}
	return template
}
// MarshalJSON implements the json.Marshaller interface for type ThreatIntelligenceAlertRuleTemplate.
// The "kind" discriminator is always emitted as the fixed constant AlertRuleKindThreatIntelligence.
func (t ThreatIntelligenceAlertRuleTemplate) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "id", t.ID)
	objectMap["kind"] = AlertRuleKindThreatIntelligence
	populate(objectMap, "name", t.Name)
	populate(objectMap, "properties", t.Properties)
	populate(objectMap, "systemData", t.SystemData)
	populate(objectMap, "type", t.Type)
	return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type ThreatIntelligenceAlertRuleTemplate.
// Recognized keys are removed from rawMsg as they are consumed; unrecognized keys are silently ignored.
func (t *ThreatIntelligenceAlertRuleTemplate) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "id":
			err = unpopulate(val, &t.ID)
			delete(rawMsg, key)
		case "kind":
			err = unpopulate(val, &t.Kind)
			delete(rawMsg, key)
		case "name":
			err = unpopulate(val, &t.Name)
			delete(rawMsg, key)
		case "properties":
			err = unpopulate(val, &t.Properties)
			delete(rawMsg, key)
		case "systemData":
			err = unpopulate(val, &t.SystemData)
			delete(rawMsg, key)
		case "type":
			err = unpopulate(val, &t.Type)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type ThreatIntelligenceAlertRuleTemplateProperties.
// Nil fields are omitted; the *DateUTC timestamps are written via the RFC3339 helper.
func (t ThreatIntelligenceAlertRuleTemplateProperties) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "alertRulesCreatedByTemplateCount", t.AlertRulesCreatedByTemplateCount)
	populateTimeRFC3339(objectMap, "createdDateUTC", t.CreatedDateUTC)
	populate(objectMap, "description", t.Description)
	populate(objectMap, "displayName", t.DisplayName)
	populateTimeRFC3339(objectMap, "lastUpdatedDateUTC", t.LastUpdatedDateUTC)
	populate(objectMap, "requiredDataConnectors", t.RequiredDataConnectors)
	populate(objectMap, "severity", t.Severity)
	populate(objectMap, "status", t.Status)
	populate(objectMap, "tactics", t.Tactics)
	populate(objectMap, "techniques", t.Techniques)
	return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type ThreatIntelligenceAlertRuleTemplateProperties.
// Recognized keys are removed from rawMsg as they are consumed; unrecognized keys are silently ignored.
func (t *ThreatIntelligenceAlertRuleTemplateProperties) UnmarshalJSON(data []byte) error {
	var rawMsg map[string]json.RawMessage
	if err := json.Unmarshal(data, &rawMsg); err != nil {
		return err
	}
	for key, val := range rawMsg {
		var err error
		switch key {
		case "alertRulesCreatedByTemplateCount":
			err = unpopulate(val, &t.AlertRulesCreatedByTemplateCount)
			delete(rawMsg, key)
		case "createdDateUTC":
			err = unpopulateTimeRFC3339(val, &t.CreatedDateUTC)
			delete(rawMsg, key)
		case "description":
			err = unpopulate(val, &t.Description)
			delete(rawMsg, key)
		case "displayName":
			err = unpopulate(val, &t.DisplayName)
			delete(rawMsg, key)
		case "lastUpdatedDateUTC":
			err = unpopulateTimeRFC3339(val, &t.LastUpdatedDateUTC)
			delete(rawMsg, key)
		case "requiredDataConnectors":
			err = unpopulate(val, &t.RequiredDataConnectors)
			delete(rawMsg, key)
		case "severity":
			err = unpopulate(val, &t.Severity)
			delete(rawMsg, key)
		case "status":
			err = unpopulate(val, &t.Status)
			delete(rawMsg, key)
		case "tactics":
			err = unpopulate(val, &t.Tactics)
			delete(rawMsg, key)
		case "techniques":
			err = unpopulate(val, &t.Techniques)
			delete(rawMsg, key)
		}
		if err != nil {
			return err
		}
	}
	return nil
}
// MarshalJSON implements the json.Marshaller interface for type ThreatIntelligenceAppendTags.
// Only the tag list is serialized; a nil slice is omitted by the populate helper.
func (t ThreatIntelligenceAppendTags) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "threatIntelligenceTags", t.ThreatIntelligenceTags)
	return json.Marshal(objectMap)
}
// MarshalJSON implements the json.Marshaller interface for type ThreatIntelligenceExternalReference.
// Nil fields are omitted by the populate helper.
func (t ThreatIntelligenceExternalReference) MarshalJSON() ([]byte, error) {
	objectMap := make(map[string]interface{})
	populate(objectMap, "description", t.Description)
	populate(objectMap, "externalId", t.ExternalID)
	populate(objectMap, "hashes", t.Hashes)
	populate(objectMap, "sourceName", t.SourceName)
	populate(objectMap, "url", t.URL)
	return json.Marshal(objectMap)
}
// MarshalJSON implements the json.Marshaller interface for type ThreatIntelligenceFilteringCriteria.
func (t ThreatIntelligenceFilteringCriteria) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "ids", t.IDs)
populate(objectMap, "includeDisabled", t.IncludeDisabled)
populate(objectMap, "keywords", t.Keywords)
populate(objectMap, "maxConfidence", t.MaxConfidence)
populate(objectMap, "maxValidUntil", t.MaxValidUntil)
populate(objectMap, "minConfidence", t.MinConfidence)
populate(objectMap, "minValidUntil", t.MinValidUntil)
populate(objectMap, "pageSize", t.PageSize)
populate(objectMap, "patternTypes", t.PatternTypes)
populate(objectMap, "skipToken", t.SkipToken)
populate(objectMap, "sortBy", t.SortBy)
populate(objectMap, "sources", t.Sources)
populate(objectMap, "threatTypes", t.ThreatTypes)
return json.Marshal(objectMap)
}
// MarshalJSON implements the json.Marshaller interface for type ThreatIntelligenceGranularMarkingModel.
func (t ThreatIntelligenceGranularMarkingModel) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "language", t.Language)
populate(objectMap, "markingRef", t.MarkingRef)
populate(objectMap, "selectors", t.Selectors)
return json.Marshal(objectMap)
}
// GetThreatIntelligenceInformation implements the ThreatIntelligenceInformationClassification interface for type ThreatIntelligenceIndicatorModel.
// It copies the common envelope fields of this discriminated type into the base type.
func (t *ThreatIntelligenceIndicatorModel) GetThreatIntelligenceInformation() *ThreatIntelligenceInformation {
return &ThreatIntelligenceInformation{
Kind: t.Kind,
Etag: t.Etag,
ID: t.ID,
Name: t.Name,
Type: t.Type,
SystemData: t.SystemData,
}
}
// MarshalJSON implements the json.Marshaller interface for type ThreatIntelligenceIndicatorModel.
func (t ThreatIntelligenceIndicatorModel) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "etag", t.Etag)
populate(objectMap, "id", t.ID)
// fixed polymorphic discriminator for this concrete type
objectMap["kind"] = ThreatIntelligenceResourceKindEnumIndicator
populate(objectMap, "name", t.Name)
populate(objectMap, "properties", t.Properties)
populate(objectMap, "systemData", t.SystemData)
populate(objectMap, "type", t.Type)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type ThreatIntelligenceIndicatorModel.
// Unknown JSON keys are ignored.
func (t *ThreatIntelligenceIndicatorModel) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
for key, val := range rawMsg {
var err error
switch key {
case "etag":
err = unpopulate(val, &t.Etag)
delete(rawMsg, key)
case "id":
err = unpopulate(val, &t.ID)
delete(rawMsg, key)
case "kind":
err = unpopulate(val, &t.Kind)
delete(rawMsg, key)
case "name":
err = unpopulate(val, &t.Name)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &t.Properties)
delete(rawMsg, key)
case "systemData":
err = unpopulate(val, &t.SystemData)
delete(rawMsg, key)
case "type":
err = unpopulate(val, &t.Type)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// MarshalJSON implements the json.Marshaller interface for type ThreatIntelligenceIndicatorProperties.
// Nil fields are omitted by populate, so only set STIX indicator attributes are serialized.
func (t ThreatIntelligenceIndicatorProperties) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "additionalData", t.AdditionalData)
populate(objectMap, "confidence", t.Confidence)
populate(objectMap, "created", t.Created)
populate(objectMap, "createdByRef", t.CreatedByRef)
populate(objectMap, "defanged", t.Defanged)
populate(objectMap, "description", t.Description)
populate(objectMap, "displayName", t.DisplayName)
populate(objectMap, "extensions", t.Extensions)
populate(objectMap, "externalId", t.ExternalID)
populate(objectMap, "externalLastUpdatedTimeUtc", t.ExternalLastUpdatedTimeUTC)
populate(objectMap, "externalReferences", t.ExternalReferences)
populate(objectMap, "friendlyName", t.FriendlyName)
populate(objectMap, "granularMarkings", t.GranularMarkings)
populate(objectMap, "indicatorTypes", t.IndicatorTypes)
populate(objectMap, "killChainPhases", t.KillChainPhases)
populate(objectMap, "labels", t.Labels)
populate(objectMap, "language", t.Language)
populate(objectMap, "lastUpdatedTimeUtc", t.LastUpdatedTimeUTC)
populate(objectMap, "modified", t.Modified)
populate(objectMap, "objectMarkingRefs", t.ObjectMarkingRefs)
populate(objectMap, "parsedPattern", t.ParsedPattern)
populate(objectMap, "pattern", t.Pattern)
populate(objectMap, "patternType", t.PatternType)
populate(objectMap, "patternVersion", t.PatternVersion)
populate(objectMap, "revoked", t.Revoked)
populate(objectMap, "source", t.Source)
populate(objectMap, "threatIntelligenceTags", t.ThreatIntelligenceTags)
populate(objectMap, "threatTypes", t.ThreatTypes)
populate(objectMap, "validFrom", t.ValidFrom)
populate(objectMap, "validUntil", t.ValidUntil)
return json.Marshal(objectMap)
}
// GetThreatIntelligenceInformation implements the ThreatIntelligenceInformationClassification interface for type ThreatIntelligenceInformation.
// The base type simply returns itself.
func (t *ThreatIntelligenceInformation) GetThreatIntelligenceInformation() *ThreatIntelligenceInformation {
return t
}
// MarshalJSON implements the json.Marshaller interface for type ThreatIntelligenceInformationList.
func (t ThreatIntelligenceInformationList) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "nextLink", t.NextLink)
populate(objectMap, "value", t.Value)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type ThreatIntelligenceInformationList.
func (t *ThreatIntelligenceInformationList) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
for key, val := range rawMsg {
var err error
switch key {
case "nextLink":
err = unpopulate(val, &t.NextLink)
delete(rawMsg, key)
case "value":
// polymorphic element array: routed through the classification unmarshaller
t.Value, err = unmarshalThreatIntelligenceInformationClassificationArray(val)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// MarshalJSON implements the json.Marshaller interface for type ThreatIntelligenceMetric.
func (t ThreatIntelligenceMetric) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "lastUpdatedTimeUtc", t.LastUpdatedTimeUTC)
populate(objectMap, "patternTypeMetrics", t.PatternTypeMetrics)
populate(objectMap, "sourceMetrics", t.SourceMetrics)
populate(objectMap, "threatTypeMetrics", t.ThreatTypeMetrics)
return json.Marshal(objectMap)
}
// MarshalJSON implements the json.Marshaller interface for type ThreatIntelligenceMetricsList.
func (t ThreatIntelligenceMetricsList) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "value", t.Value)
return json.Marshal(objectMap)
}
// MarshalJSON implements the json.Marshaller interface for type ThreatIntelligenceParsedPattern.
func (t ThreatIntelligenceParsedPattern) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "patternTypeKey", t.PatternTypeKey)
populate(objectMap, "patternTypeValues", t.PatternTypeValues)
return json.Marshal(objectMap)
}
// GetDataConnectorsCheckRequirements implements the DataConnectorsCheckRequirementsClassification interface for type TiTaxiiCheckRequirements.
func (t *TiTaxiiCheckRequirements) GetDataConnectorsCheckRequirements() *DataConnectorsCheckRequirements {
return &DataConnectorsCheckRequirements{
Kind: t.Kind,
}
}
// MarshalJSON implements the json.Marshaller interface for type TiTaxiiCheckRequirements.
func (t TiTaxiiCheckRequirements) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
// fixed polymorphic discriminator for this concrete type
objectMap["kind"] = DataConnectorKindThreatIntelligenceTaxii
populate(objectMap, "properties", t.Properties)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type TiTaxiiCheckRequirements.
// Unknown JSON keys are ignored.
func (t *TiTaxiiCheckRequirements) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
for key, val := range rawMsg {
var err error
switch key {
case "kind":
err = unpopulate(val, &t.Kind)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &t.Properties)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// GetDataConnector implements the DataConnectorClassification interface for type TiTaxiiDataConnector.
// It copies the common envelope fields of this discriminated type into the base type.
func (t *TiTaxiiDataConnector) GetDataConnector() *DataConnector {
return &DataConnector{
Kind: t.Kind,
Etag: t.Etag,
ID: t.ID,
Name: t.Name,
Type: t.Type,
SystemData: t.SystemData,
}
}
// MarshalJSON implements the json.Marshaller interface for type TiTaxiiDataConnector.
func (t TiTaxiiDataConnector) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "etag", t.Etag)
populate(objectMap, "id", t.ID)
// fixed polymorphic discriminator for this concrete type
objectMap["kind"] = DataConnectorKindThreatIntelligenceTaxii
populate(objectMap, "name", t.Name)
populate(objectMap, "properties", t.Properties)
populate(objectMap, "systemData", t.SystemData)
populate(objectMap, "type", t.Type)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type TiTaxiiDataConnector.
// Unknown JSON keys are ignored.
func (t *TiTaxiiDataConnector) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
for key, val := range rawMsg {
var err error
switch key {
case "etag":
err = unpopulate(val, &t.Etag)
delete(rawMsg, key)
case "id":
err = unpopulate(val, &t.ID)
delete(rawMsg, key)
case "kind":
err = unpopulate(val, &t.Kind)
delete(rawMsg, key)
case "name":
err = unpopulate(val, &t.Name)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &t.Properties)
delete(rawMsg, key)
case "systemData":
err = unpopulate(val, &t.SystemData)
delete(rawMsg, key)
case "type":
err = unpopulate(val, &t.Type)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// MarshalJSON implements the json.Marshaller interface for type TiTaxiiDataConnectorProperties.
func (t TiTaxiiDataConnectorProperties) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "collectionId", t.CollectionID)
populate(objectMap, "dataTypes", t.DataTypes)
populate(objectMap, "friendlyName", t.FriendlyName)
populate(objectMap, "password", <PASSWORD>)
populate(objectMap, "pollingFrequency", t.PollingFrequency)
populateTimeRFC3339(objectMap, "taxiiLookbackPeriod", t.TaxiiLookbackPeriod)
populate(objectMap, "taxiiServer", t.TaxiiServer)
populate(objectMap, "tenantId", t.TenantID)
populate(objectMap, "userName", t.UserName)
populate(objectMap, "workspaceId", t.WorkspaceID)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type TiTaxiiDataConnectorProperties.
// Unknown JSON keys are ignored; handled keys are deleted from rawMsg as they are consumed.
func (t *TiTaxiiDataConnectorProperties) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
for key, val := range rawMsg {
var err error
switch key {
case "collectionId":
err = unpopulate(val, &t.CollectionID)
delete(rawMsg, key)
case "dataTypes":
err = unpopulate(val, &t.DataTypes)
delete(rawMsg, key)
case "friendlyName":
err = unpopulate(val, &t.FriendlyName)
delete(rawMsg, key)
case "password":
err = unpopulate(val, &t.Password)
delete(rawMsg, key)
case "pollingFrequency":
err = unpopulate(val, &t.PollingFrequency)
delete(rawMsg, key)
case "taxiiLookbackPeriod":
// timestamps use the RFC3339 wire format
err = unpopulateTimeRFC3339(val, &t.TaxiiLookbackPeriod)
delete(rawMsg, key)
case "taxiiServer":
err = unpopulate(val, &t.TaxiiServer)
delete(rawMsg, key)
case "tenantId":
err = unpopulate(val, &t.TenantID)
delete(rawMsg, key)
case "userName":
err = unpopulate(val, &t.UserName)
delete(rawMsg, key)
case "workspaceId":
err = unpopulate(val, &t.WorkspaceID)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// MarshalJSON implements the json.Marshaller interface for type TimelineResultsMetadata.
func (t TimelineResultsMetadata) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "aggregations", t.Aggregations)
populate(objectMap, "errors", t.Errors)
populate(objectMap, "totalCount", t.TotalCount)
return json.Marshal(objectMap)
}
// GetEntity implements the EntityClassification interface for type URLEntity.
// It copies the common envelope fields of this discriminated type into the base type.
func (u *URLEntity) GetEntity() *Entity {
return &Entity{
Kind: u.Kind,
ID: u.ID,
Name: u.Name,
Type: u.Type,
SystemData: u.SystemData,
}
}
// MarshalJSON implements the json.Marshaller interface for type URLEntity.
func (u URLEntity) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "id", u.ID)
// fixed polymorphic discriminator for this concrete type
objectMap["kind"] = EntityKindURL
populate(objectMap, "name", u.Name)
populate(objectMap, "properties", u.Properties)
populate(objectMap, "systemData", u.SystemData)
populate(objectMap, "type", u.Type)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type URLEntity.
// Unknown JSON keys are ignored.
func (u *URLEntity) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
for key, val := range rawMsg {
var err error
switch key {
case "id":
err = unpopulate(val, &u.ID)
delete(rawMsg, key)
case "kind":
err = unpopulate(val, &u.Kind)
delete(rawMsg, key)
case "name":
err = unpopulate(val, &u.Name)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &u.Properties)
delete(rawMsg, key)
case "systemData":
err = unpopulate(val, &u.SystemData)
delete(rawMsg, key)
case "type":
err = unpopulate(val, &u.Type)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// MarshalJSON implements the json.Marshaller interface for type URLEntityProperties.
func (u URLEntityProperties) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "additionalData", u.AdditionalData)
populate(objectMap, "friendlyName", u.FriendlyName)
populate(objectMap, "url", u.URL)
return json.Marshal(objectMap)
}
// GetSettings implements the SettingsClassification interface for type Ueba.
// It copies the common envelope fields of this discriminated type into the base type.
func (u *Ueba) GetSettings() *Settings {
return &Settings{
Kind: u.Kind,
Etag: u.Etag,
ID: u.ID,
Name: u.Name,
Type: u.Type,
SystemData: u.SystemData,
}
}
// MarshalJSON implements the json.Marshaller interface for type Ueba.
func (u Ueba) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "etag", u.Etag)
populate(objectMap, "id", u.ID)
// fixed polymorphic discriminator for this concrete type
objectMap["kind"] = SettingKindUeba
populate(objectMap, "name", u.Name)
populate(objectMap, "properties", u.Properties)
populate(objectMap, "systemData", u.SystemData)
populate(objectMap, "type", u.Type)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type Ueba.
// Unknown JSON keys are ignored.
func (u *Ueba) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
for key, val := range rawMsg {
var err error
switch key {
case "etag":
err = unpopulate(val, &u.Etag)
delete(rawMsg, key)
case "id":
err = unpopulate(val, &u.ID)
delete(rawMsg, key)
case "kind":
err = unpopulate(val, &u.Kind)
delete(rawMsg, key)
case "name":
err = unpopulate(val, &u.Name)
delete(rawMsg, key)
case "properties":
err = unpopulate(val, &u.Properties)
delete(rawMsg, key)
case "systemData":
err = unpopulate(val, &u.SystemData)
delete(rawMsg, key)
case "type":
err = unpopulate(val, &u.Type)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// MarshalJSON implements the json.Marshaller interface for type UebaProperties.
func (u UebaProperties) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "dataSources", u.DataSources)
return json.Marshal(objectMap)
}
// MarshalJSON implements the json.Marshaller interface for type WatchlistItemList.
func (w WatchlistItemList) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "nextLink", w.NextLink)
populate(objectMap, "value", w.Value)
return json.Marshal(objectMap)
}
// MarshalJSON implements the json.Marshaller interface for type WatchlistItemProperties.
// Nil fields are omitted by populate; timestamps are serialized in RFC3339 format.
func (w WatchlistItemProperties) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populateTimeRFC3339(objectMap, "created", w.Created)
populate(objectMap, "createdBy", w.CreatedBy)
populate(objectMap, "entityMapping", w.EntityMapping)
populate(objectMap, "isDeleted", w.IsDeleted)
populate(objectMap, "itemsKeyValue", w.ItemsKeyValue)
populate(objectMap, "tenantId", w.TenantID)
populateTimeRFC3339(objectMap, "updated", w.Updated)
populate(objectMap, "updatedBy", w.UpdatedBy)
populate(objectMap, "watchlistItemId", w.WatchlistItemID)
populate(objectMap, "watchlistItemType", w.WatchlistItemType)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type WatchlistItemProperties.
// Unknown JSON keys are ignored.
func (w *WatchlistItemProperties) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
for key, val := range rawMsg {
var err error
switch key {
case "created":
err = unpopulateTimeRFC3339(val, &w.Created)
delete(rawMsg, key)
case "createdBy":
err = unpopulate(val, &w.CreatedBy)
delete(rawMsg, key)
case "entityMapping":
err = unpopulate(val, &w.EntityMapping)
delete(rawMsg, key)
case "isDeleted":
err = unpopulate(val, &w.IsDeleted)
delete(rawMsg, key)
case "itemsKeyValue":
err = unpopulate(val, &w.ItemsKeyValue)
delete(rawMsg, key)
case "tenantId":
err = unpopulate(val, &w.TenantID)
delete(rawMsg, key)
case "updated":
err = unpopulateTimeRFC3339(val, &w.Updated)
delete(rawMsg, key)
case "updatedBy":
err = unpopulate(val, &w.UpdatedBy)
delete(rawMsg, key)
case "watchlistItemId":
err = unpopulate(val, &w.WatchlistItemID)
delete(rawMsg, key)
case "watchlistItemType":
err = unpopulate(val, &w.WatchlistItemType)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// MarshalJSON implements the json.Marshaller interface for type WatchlistList.
func (w WatchlistList) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "nextLink", w.NextLink)
populate(objectMap, "value", w.Value)
return json.Marshal(objectMap)
}
// MarshalJSON implements the json.Marshaller interface for type WatchlistProperties.
// Nil fields are omitted by populate; timestamps are serialized in RFC3339 format.
func (w WatchlistProperties) MarshalJSON() ([]byte, error) {
objectMap := make(map[string]interface{})
populate(objectMap, "contentType", w.ContentType)
populateTimeRFC3339(objectMap, "created", w.Created)
populate(objectMap, "createdBy", w.CreatedBy)
populate(objectMap, "defaultDuration", w.DefaultDuration)
populate(objectMap, "description", w.Description)
populate(objectMap, "displayName", w.DisplayName)
populate(objectMap, "isDeleted", w.IsDeleted)
populate(objectMap, "itemsSearchKey", w.ItemsSearchKey)
populate(objectMap, "labels", w.Labels)
populate(objectMap, "numberOfLinesToSkip", w.NumberOfLinesToSkip)
populate(objectMap, "provider", w.Provider)
populate(objectMap, "rawContent", w.RawContent)
populate(objectMap, "source", w.Source)
populate(objectMap, "sourceType", w.SourceType)
populate(objectMap, "tenantId", w.TenantID)
populateTimeRFC3339(objectMap, "updated", w.Updated)
populate(objectMap, "updatedBy", w.UpdatedBy)
populate(objectMap, "uploadStatus", w.UploadStatus)
populate(objectMap, "watchlistAlias", w.WatchlistAlias)
populate(objectMap, "watchlistId", w.WatchlistID)
populate(objectMap, "watchlistType", w.WatchlistType)
return json.Marshal(objectMap)
}
// UnmarshalJSON implements the json.Unmarshaller interface for type WatchlistProperties.
// Unknown JSON keys are ignored; handled keys are deleted from rawMsg as they are consumed.
func (w *WatchlistProperties) UnmarshalJSON(data []byte) error {
var rawMsg map[string]json.RawMessage
if err := json.Unmarshal(data, &rawMsg); err != nil {
return err
}
for key, val := range rawMsg {
var err error
switch key {
case "contentType":
err = unpopulate(val, &w.ContentType)
delete(rawMsg, key)
case "created":
err = unpopulateTimeRFC3339(val, &w.Created)
delete(rawMsg, key)
case "createdBy":
err = unpopulate(val, &w.CreatedBy)
delete(rawMsg, key)
case "defaultDuration":
err = unpopulate(val, &w.DefaultDuration)
delete(rawMsg, key)
case "description":
err = unpopulate(val, &w.Description)
delete(rawMsg, key)
case "displayName":
err = unpopulate(val, &w.DisplayName)
delete(rawMsg, key)
case "isDeleted":
err = unpopulate(val, &w.IsDeleted)
delete(rawMsg, key)
case "itemsSearchKey":
err = unpopulate(val, &w.ItemsSearchKey)
delete(rawMsg, key)
case "labels":
err = unpopulate(val, &w.Labels)
delete(rawMsg, key)
case "numberOfLinesToSkip":
err = unpopulate(val, &w.NumberOfLinesToSkip)
delete(rawMsg, key)
case "provider":
err = unpopulate(val, &w.Provider)
delete(rawMsg, key)
case "rawContent":
err = unpopulate(val, &w.RawContent)
delete(rawMsg, key)
case "source":
err = unpopulate(val, &w.Source)
delete(rawMsg, key)
case "sourceType":
err = unpopulate(val, &w.SourceType)
delete(rawMsg, key)
case "tenantId":
err = unpopulate(val, &w.TenantID)
delete(rawMsg, key)
case "updated":
err = unpopulateTimeRFC3339(val, &w.Updated)
delete(rawMsg, key)
case "updatedBy":
err = unpopulate(val, &w.UpdatedBy)
delete(rawMsg, key)
case "uploadStatus":
err = unpopulate(val, &w.UploadStatus)
delete(rawMsg, key)
case "watchlistAlias":
err = unpopulate(val, &w.WatchlistAlias)
delete(rawMsg, key)
case "watchlistId":
err = unpopulate(val, &w.WatchlistID)
delete(rawMsg, key)
case "watchlistType":
err = unpopulate(val, &w.WatchlistType)
delete(rawMsg, key)
}
if err != nil {
return err
}
}
return nil
}
// populate stores v under key k in m. Untyped nils are omitted entirely,
// azcore null-values become an explicit JSON null, and typed-nil
// pointers/slices/maps are skipped so they do not appear in the payload.
func populate(m map[string]interface{}, k string, v interface{}) {
	switch {
	case v == nil:
		// untyped nil: leave the key out of the map
	case azcore.IsNullValue(v):
		// caller explicitly asked for a JSON null
		m[k] = nil
	case !reflect.ValueOf(v).IsNil():
		m[k] = v
	}
}
func unpopulate(data json.RawMessage, v interface{}) error {
if data == nil {
return nil
}
return json.Unmarshal(data, v)
}
|
webdevhub42/Lambda | WEEKS/CD_Sata-Structures/_RESOURCES/python-prac/Intro_to_Python/assignment_answers/A17_deployment.py | """ Assignment 17
For this assignment you should design, build and deploy a Python3 package
and answer the following:
1. What is the fundamental difference between a python package and a module?
2. What changes do we need to make in the `setup.py` script in relation to
developing a package vs a module?
"""
# Answer
"""
1. A module is a single Python file, whereas a package is a directory of one or
   more Python files. Packages are marked by the existence of the
   `__init__.py` file. Modules do not require this file.
2. The setup file for a module uses the py_modules=['my_module'] keyword argument,
   whereas a package requires this to be changed to the packages=['my_package'] keyword argument.
"""
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.