repo_name stringlengths 6 101 | path stringlengths 4 300 | text stringlengths 7 1.31M |
|---|---|---|
psmware-ltd/nautobot | nautobot/dcim/api/urls.py | <filename>nautobot/dcim/api/urls.py<gh_stars>100-1000
from nautobot.core.api import OrderedDefaultRouter

from . import views

router = OrderedDefaultRouter()
router.APIRootView = views.DCIMRootView

# (url prefix, viewset, explicit basename or None) — registration order is
# significant: the router presents endpoints in the order they are registered.
_REGISTRATIONS = (
    # Sites
    ("regions", views.RegionViewSet, None),
    ("sites", views.SiteViewSet, None),
    # Racks
    ("rack-groups", views.RackGroupViewSet, None),
    ("rack-roles", views.RackRoleViewSet, None),
    ("racks", views.RackViewSet, None),
    ("rack-reservations", views.RackReservationViewSet, None),
    # Device types
    ("manufacturers", views.ManufacturerViewSet, None),
    ("device-types", views.DeviceTypeViewSet, None),
    # Device type components
    ("console-port-templates", views.ConsolePortTemplateViewSet, None),
    ("console-server-port-templates", views.ConsoleServerPortTemplateViewSet, None),
    ("power-port-templates", views.PowerPortTemplateViewSet, None),
    ("power-outlet-templates", views.PowerOutletTemplateViewSet, None),
    ("interface-templates", views.InterfaceTemplateViewSet, None),
    ("front-port-templates", views.FrontPortTemplateViewSet, None),
    ("rear-port-templates", views.RearPortTemplateViewSet, None),
    ("device-bay-templates", views.DeviceBayTemplateViewSet, None),
    # Devices
    ("device-roles", views.DeviceRoleViewSet, None),
    ("platforms", views.PlatformViewSet, None),
    ("devices", views.DeviceViewSet, None),
    # Device components
    ("console-ports", views.ConsolePortViewSet, None),
    ("console-server-ports", views.ConsoleServerPortViewSet, None),
    ("power-ports", views.PowerPortViewSet, None),
    ("power-outlets", views.PowerOutletViewSet, None),
    ("interfaces", views.InterfaceViewSet, None),
    ("front-ports", views.FrontPortViewSet, None),
    ("rear-ports", views.RearPortViewSet, None),
    ("device-bays", views.DeviceBayViewSet, None),
    ("inventory-items", views.InventoryItemViewSet, None),
    # Connections — these registrations carry an explicit basename
    # (presumably the viewsets have no queryset to derive one from — confirm)
    ("console-connections", views.ConsoleConnectionViewSet, "consoleconnections"),
    ("power-connections", views.PowerConnectionViewSet, "powerconnections"),
    ("interface-connections", views.InterfaceConnectionViewSet, "interfaceconnections"),
    # Cables
    ("cables", views.CableViewSet, None),
    # Virtual chassis
    ("virtual-chassis", views.VirtualChassisViewSet, None),
    # Power
    ("power-panels", views.PowerPanelViewSet, None),
    ("power-feeds", views.PowerFeedViewSet, None),
    # Miscellaneous
    ("connected-device", views.ConnectedDeviceViewSet, "connected-device"),
)

for _prefix, _viewset, _basename in _REGISTRATIONS:
    if _basename is None:
        router.register(_prefix, _viewset)
    else:
        router.register(_prefix, _viewset, basename=_basename)

app_name = "dcim-api"
urlpatterns = router.urls
|
CallumNZ/fits | internal/valid/valid.go | <filename>internal/valid/valid.go
package valid
import (
"errors"
"fmt"
"net/http"
"net/url"
"regexp"
"strconv"
"strings"
"time"
)
//srsName e.g., EPSG:4326
//within e.g., POLYGON((177.18+-37.52,177.19+-37.52,177.20+-37.53,177.18+-37.52))

// Package-level regular expressions used by the query parameter validators.
// Compile errors are captured alongside each pattern (rather than panicking
// via MustCompile) and are surfaced by the individual validator functions.
var (
	// textRE matches simple values built from alphanumerics, hyphens,
	// commas, and periods (siteID, typeID, methodID, sites, ...).
	textRE, textErr = regexp.Compile(`^[0-9a-zA-Z\-\,\.]+$`)
	// srsRE matches spatial reference system names, e.g. EPSG:4326.
	srsRE, srsErr = regexp.Compile(`^EPSG:[0-9]+$`)
	// withinRE matches a WKT-style POLYGON((...)) coordinate list.
	withinRE, withinErr = regexp.Compile(`^POLYGON\(\([0-9\-\, \.\+]+\)\)$`)
	// bboxRE matches a raw comma-separated bounding box coordinate list.
	bboxRE, bboxErr = regexp.Compile(`^[0-9\-\, \.\+]+$`)
)
// validator is the signature shared by all query parameter validators;
// it returns nil when the value is acceptable.
type validator func(string) error

// Error couples an HTTP status code with an underlying error.
// implements weft.Error
type Error struct {
	Code int
	Err  error
}

// Error implements the error interface by delegating to the wrapped error.
func (s Error) Error() string {
	if s.Err == nil {
		return "<nil>"
	}
	return s.Err.Error()
}

// Status returns the HTTP status code associated with the error.
func (s Error) Status() int {
	return s.Code
}
// valid maps query parameter names to their validator function.
// Query and Parameter reject any parameter name that is not listed here.
var valid = map[string]validator{
	"days":       days,
	"start":      start,
	"siteID":     text,
	"networkID":  text, // networkID has been dropped from the API but is still allowed in the query for backward compatibility.
	"typeID":     text,
	"methodID":   text,
	"sites":      text,
	"srsName":    srsName,
	"within":     within,
	"width":      width,
	"type":       validType,
	"stddev":     stddev,
	"showMethod": showMethod,
	"scheme":     scheme,
	"label":      label,
	"yrange":     yRange,
	"bbox":       bbox,
	"insetBbox":  bbox,
}
//bbox
//days
//insetBbox
//label
//methodID
//networkID
//scheme
//showMethod
//siteID
//sites
//start
//stddev
//srsName
//typeID
//width
//within
//yrange
//type
//
// Query checks every query parameter in values against its registered
// validator, requiring exactly one value per key.
// Implements weft.QueryValidator
func Query(values url.Values) error {
	for key, vals := range values {
		if len(vals) != 1 {
			return Error{Code: http.StatusBadRequest, Err: fmt.Errorf("expected 1 value for %s got %d", key, len(vals))}
		}
		check, ok := valid[key]
		if !ok {
			// an unknown parameter reaching here is a server-side routing
			// mistake, hence 500 rather than 400
			return Error{Code: http.StatusInternalServerError, Err: fmt.Errorf("no validator for %s", key)}
		}
		if err := check(vals[0]); err != nil {
			return err
		}
	}
	return nil
}
// Parameter validates the same parameters as Query without the need to create url.Values.
func Parameter(key, value string) error {
	check, ok := valid[key]
	if !ok {
		return Error{Code: http.StatusInternalServerError, Err: fmt.Errorf("no validator for %s", key)}
	}
	return check(value)
}
// bbox accepts either one of the named bounding boxes or a raw
// comma-separated coordinate string matching bboxRE.
func bbox(s string) error {
	if bboxErr != nil {
		return bboxErr
	}
	switch s {
	case "LakeTaupo", "WhiteIsland", "RaoulIsland", "ChathamIsland",
		"NewZealand", "NewZealandChathamIsland", "NewZealandRegion":
		return nil
	}
	if bboxRE.MatchString(s) {
		return nil
	}
	return Error{Code: http.StatusBadRequest, Err: fmt.Errorf("invalid bbox: %s", s)}
}
// label accepts only the supported plot label modes.
func label(s string) error {
	if s == `none` || s == `latest` || s == `all` {
		return nil
	}
	return Error{Code: http.StatusBadRequest, Err: fmt.Errorf("invalid label: %s", s)}
}
// scheme accepts only the supported display schemes.
func scheme(s string) error {
	if s == `web` || s == `projector` {
		return nil
	}
	return Error{Code: http.StatusBadRequest, Err: fmt.Errorf("invalid scheme: %s", s)}
}
// within checks that s is a WKT-style POLYGON((...)) coordinate string.
func within(s string) error {
	if withinErr != nil {
		return withinErr
	}
	if !withinRE.MatchString(s) {
		return Error{Code: http.StatusBadRequest, Err: fmt.Errorf("invalid string: %s", s)}
	}
	return nil
}
// ParseWidth converts s to an integer width. The empty string is treated as
// unset and parses to 0; anything non-numeric yields a 400 Error.
func ParseWidth(s string) (int, error) {
	if s == "" {
		return 0, nil
	}
	n, err := strconv.Atoi(s)
	if err != nil {
		return 0, Error{Code: http.StatusBadRequest, Err: err}
	}
	return n, nil
}

// width validates a width query parameter, discarding the parsed value.
func width(s string) error {
	_, err := ParseWidth(s)
	return err
}
// text checks that s contains only alphanumerics, hyphens, commas, and periods.
func text(s string) error {
	if textErr != nil {
		return textErr
	}
	if !textRE.MatchString(s) {
		return Error{Code: http.StatusBadRequest, Err: fmt.Errorf("invalid string: %s", s)}
	}
	return nil
}
// ParseDays converts s to an integer number of days. The empty string is
// treated as unset and parses to 0; values above 365000 are rejected.
func ParseDays(s string) (int, error) {
	if s == "" {
		return 0, nil
	}
	n, err := strconv.Atoi(s)
	switch {
	case err != nil:
		return 0, Error{Code: http.StatusBadRequest, Err: err}
	case n > 365000:
		return 0, Error{Code: http.StatusBadRequest, Err: errors.New("invalid days query param")}
	}
	return n, nil
}

// days validates a days query parameter, discarding the parsed value.
func days(s string) error {
	_, err := ParseDays(s)
	return err
}
// ParseStart converts s (RFC3339) to a time.Time. The empty string is
// treated as unset and parses to the zero time.
func ParseStart(s string) (time.Time, error) {
	if s == "" {
		return time.Time{}, nil
	}
	t, err := time.Parse(time.RFC3339, s)
	if err != nil {
		return time.Time{}, Error{Code: http.StatusBadRequest, Err: fmt.Errorf("invalid date: %s", s)}
	}
	return t, nil
}

// start validates a start query parameter, discarding the parsed value.
func start(s string) error {
	_, err := ParseStart(s)
	return err
}
// srsName checks that s is a spatial reference system name, e.g. EPSG:4326.
func srsName(s string) error {
	if srsErr != nil {
		return srsErr
	}
	if !srsRE.MatchString(s) {
		return Error{Code: http.StatusBadRequest, Err: fmt.Errorf("invalid srsName: %s", s)}
	}
	return nil
}
// validType accepts only the supported type values.
func validType(s string) error {
	if s == `line` || s == `scatter` {
		return nil
	}
	return Error{Code: http.StatusBadRequest, Err: fmt.Errorf("invalid type: %s", s)}
}
// stddev accepts only the allowed stddev value (currently just "pop").
func stddev(s string) error {
	if s == `pop` {
		return nil
	}
	return Error{Code: http.StatusBadRequest, Err: fmt.Errorf("invalid stddev: %s", s)}
}
// ParseShowMethod converts s to a boolean; the empty string parses to false.
// Only "", "true", and "false" are accepted.
func ParseShowMethod(s string) (bool, error) {
	if s == `` || s == `false` {
		return false, nil
	}
	if s == `true` {
		return true, nil
	}
	return false, Error{Code: http.StatusBadRequest, Err: fmt.Errorf("invalid showMethod value: %s", s)}
}

// showMethod validates a showMethod query parameter, discarding the parsed value.
func showMethod(s string) error {
	_, err := ParseShowMethod(s)
	return err
}
// yRange validates a yrange query parameter, discarding the parsed values.
func yRange(s string) error {
	_, _, err := ParseYrange(s)
	return err
}

// ParseYrange converts s to a (ymin, ymax) pair. Three forms are accepted:
// the empty string (unset, parses to 0,0), "min,max", and a single positive
// value v, which parses to (v, v).
func ParseYrange(s string) (float64, float64, error) {
	bad := Error{Code: http.StatusBadRequest, Err: errors.New("invalid yrange query param")}

	if s == "" {
		return 0.0, 0.0, nil
	}

	if strings.Contains(s, `,`) {
		parts := strings.Split(s, `,`)
		if len(parts) != 2 {
			return 0.0, 0.0, bad
		}
		ymin, err := strconv.ParseFloat(parts[0], 64)
		if err != nil {
			return 0.0, 0.0, bad
		}
		ymax, err := strconv.ParseFloat(parts[1], 64)
		if err != nil {
			return 0.0, 0.0, bad
		}
		return ymin, ymax, nil
	}

	// single value: must be strictly positive, mirrored into both bounds
	y, err := strconv.ParseFloat(s, 64)
	if err != nil || y <= 0 {
		return 0.0, 0.0, bad
	}
	return y, y, nil
}
|
RFSH/chaise | record/record.utils.js | <filename>record/record.utils.js<gh_stars>0
(function() {
'use strict';
angular.module('chaise.record')
.factory('constants', [function(){
    return {
        // default number of rows requested for a table
        defaultPageSize: 25,
        // upper bound on simultaneously outstanding requests (flow control)
        MAX_CONCURENT_REQUEST: 6
    };
}])
.factory('recordAppUtils',
['constants', 'DataUtils', 'Errors', 'ErrorService', '$log', 'logService', 'messageMap', 'modalBox', '$q', 'recordsetDisplayModes', 'recordTableUtils', '$rootScope', 'UriUtils',
function (constants, DataUtils, Errors, ErrorService, $log, logService, messageMap, modalBox, $q, recordsetDisplayModes, recordTableUtils, $rootScope, UriUtils) {
/**
 * Whether the flow-control still has capacity for another outstanding
 * request; logs a debug message when all slots are taken.
 * @private
 */
function _haveFreeSlot() {
    var fc = $rootScope.recordFlowControl;
    var available = fc.occupiedSlots < fc.maxRequests;
    if (!available) {
        $log.debug("No free slot available.");
    }
    return available;
}
/**
 * @private
 * Flow-control logic for record app.
 * This will go through different sections of the page and will update them
 * if it's needed to.
 * @param {Boolean} isUpdate indicates that the function has been triggered for update and not load.
 */
function _processRequests(isUpdate) {
    // bail out when the page is paused or all request slots are busy;
    // the callbacks of in-flight requests re-enter this function later.
    if (!_haveFreeSlot() || $rootScope.pauseRequests) return;
    isUpdate = (typeof isUpdate === "boolean") ? isUpdate : false;

    // the main entity always goes first: everything below depends on its tuple
    if ($rootScope.isMainDirty) {
        readMainEntity(isUpdate).then(function (tuple) {
            $rootScope.isMainDirty = false;
            _processRequests(isUpdate);
        }).catch(genericErrorCatch);
        return;
    }

    var i = 0, model, reqModel, activeListModel;
    // requests (inline, aggregates, entityset, related)
    // please refer to Reference.activeList documentation for the order of requests
    for (i = 0; i < $rootScope.requestModels.length; i++) {
        if (!_haveFreeSlot()) return;
        reqModel = $rootScope.requestModels[i];
        activeListModel = reqModel.activeListModel;
        if (reqModel.processed) continue;
        // mark processed up front; the request callbacks reset it if the
        // response turns out not to be final
        reqModel.processed = true;

        // inline
        if (activeListModel.inline) {
            model = $rootScope.columnModels[activeListModel.index];
            if (model.tableModel.dirtyResult) {
                // will take care of adding to occupied slots
                recordTableUtils.updateMainEntity(model.tableModel, _processRequests, !isUpdate, true, _afterUpdateRelatedEntity(model));
            }
            continue;
        }

        // related
        if (activeListModel.related) {
            model = $rootScope.relatedTableModels[activeListModel.index];
            if (model.tableModel.dirtyResult) {
                // will take care of adding to occupied slots
                recordTableUtils.updateMainEntity(model.tableModel, _processRequests, !isUpdate, true, _afterUpdateRelatedEntity(model));
            }
            continue;
        }

        // entityset or aggregate
        _updatePseudoColumn(reqModel, isUpdate, $rootScope.recordFlowControl.counter);
    }

    // aggregates in inline (only once the table data itself is no longer dirty)
    for (i = 0; i < $rootScope.columnModels.length && $rootScope.hasInline; i++) {
        model = $rootScope.columnModels[i];
        if (!model.isInline || model.tableModel.dirtyResult) continue;
        if (!_haveFreeSlot()) return;
        recordTableUtils.updateColumnAggregates(model.tableModel, _processRequests, !isUpdate);
    }

    // aggregates in related (only once the table data itself is no longer dirty)
    for (i = 0; i < $rootScope.relatedTableModels.length; i++) {
        model = $rootScope.relatedTableModels[i];
        if (model.tableModel.dirtyResult) continue;
        if (!_haveFreeSlot()) return;
        recordTableUtils.updateColumnAggregates(model.tableModel, _processRequests, !isUpdate);
    }
}
/**
 * When the data for inline or related entities are loaded,
 * - if there's no wait for, or waitfor is loaded: sets the tableMarkdownContent value.
 * - otherwise it will not do anything.
 * Returns the callback that is handed to recordTableUtils.updateMainEntity.
 */
function _afterUpdateRelatedEntity(model) {
    return function (tableModel, res) {
        // a falsy `res` means the response is not final; flag the request
        // model so the flow control processes it again
        model.processed = !res;
        /*
         * the returned `res` boolean indicates whether we should consider this response final or not.
         * it doesn't necessarily mean that the response was successful, so we should not use the page blindly.
         * If the request errored out (timeout or other types of error) tableModel.page will be undefined.
         */
        if (res && tableModel.page && (!model.hasWaitFor || model.waitForDataLoaded)) {
            model.tableMarkdownContentInitialized = true;
            model.tableMarkdownContent = tableModel.page.getContent($rootScope.templateVariables);
        }
    };
}
/**
 * Read data for the main entity
 * @param {boolean} isUpdate whether this is update request or load
 * @param {Object} logObj the extra information that we want to log with the main request
 * @returns {Promise} It will be resolved with Page object. Rejects with
 *          noRecordError (zero rows) or multipleRecordError (more than one
 *          row) so the caller can present the recordset alternative.
 */
function readMainEntity(isUpdate, logObj) {
    var defer = $q.defer();

    // clear the value of citation, so we can fetch it again.
    if (DataUtils.isObjectAndNotNull($rootScope.reference.citation)) {
        $rootScope.citationReady = false;
    } else {
        $rootScope.citationReady = true;
        $rootScope.citation = null;
    }

    logObj = logObj || {};
    var action = isUpdate ? logService.logActions.RELOAD : logService.logActions.LOAD;
    logObj.action = logService.getActionString(action);
    logObj.stack = logService.getStackObject();
    // attach any accumulated reload causes (and the time the page went dirty)
    var causes = (Array.isArray($rootScope.reloadCauses) && $rootScope.reloadCauses.length > 0) ? $rootScope.reloadCauses : [];
    if (causes.length > 0) {
        logObj.stack = logService.addCausesToStack(logObj.stack, causes, $rootScope.reloadStartTime);
    }

    // record app expects exactly one row: read 1 row and use the paging
    // flags below to detect the "more than one match" case
    $rootScope.reference.read(1, logObj).then(function (page) {
        $log.info("Page: ", page);

        var recordSetLink;
        var tableDisplayName = page.reference.displayname.value;

        if (page.tuples.length < 1) {
            // recordSetLink should be used to present user with an option in case of no data found
            recordSetLink = page.reference.unfilteredReference.contextualize.compact.appLink;
            throw new Errors.noRecordError({}, tableDisplayName, recordSetLink);
        }
        else if(page.hasNext || page.hasPrevious){
            recordSetLink = page.reference.contextualize.compact.appLink;
            throw new Errors.multipleRecordError(tableDisplayName, recordSetLink);
        }

        $rootScope.page = page;
        var tuple = $rootScope.tuple = page.tuples[0];

        // Used directly in the record-display directive
        $rootScope.recordDisplayname = tuple.displayname;

        // Collate tuple.isHTML and tuple.values into an array of objects
        // i.e. {isHTML: false, value: 'sample'}
        $rootScope.recordValues = [];
        tuple.values.forEach(function(value, index) {
            $rootScope.recordValues.push({
                isHTML: tuple.isHTML[index],
                value: value
            });
        });

        // the initial values for the templateVariables
        $rootScope.templateVariables = tuple.templateVariables.values;
        // the aggregate values
        $rootScope.aggregateResults = {};
        // indicator that the entityset values are fetched
        $rootScope.entitySetResults = {};

        //whether citation is waiting for other data or we can show it on load
        var citation = $rootScope.reference.citation;
        if (DataUtils.isObjectAndNotNull(citation)) {
            $rootScope.citationReady = !citation.hasWaitFor;
            if ($rootScope.citationReady) {
                $rootScope.citation = citation.compute(tuple, $rootScope.templateVariables);
            }
        } else {
            $rootScope.citationReady = true;
            $rootScope.citation = null;
        }

        $rootScope.displayReady = true;

        // reset the main-entity reload bookkeeping now that the read succeeded
        $rootScope.reloadCauses = [];
        $rootScope.reloadStartTime = -1;

        defer.resolve(page);
    }).catch(function (err) {
        defer.reject(err);
    });

    return defer.promise;
}
/**
 * @private
 * _processRequests calls this to fetch the value of pseudo-columns (aggregate and entityset)
 */
function _updatePseudoColumn(reqModel, isUpdate, counter) {
    // NOTE(review): the `counter` parameter is ignored and the live
    // $rootScope.recordFlowControl.counter is passed instead — confirm intended.
    _readPseudoColumn(reqModel, isUpdate, $rootScope.recordFlowControl.counter).then(function (res) {
        // release the slot and let the flow control pick up the next request
        $rootScope.recordFlowControl.occupiedSlots--;
        reqModel.processed = res;
        _processRequests(isUpdate);
    }).catch(function (err) {
        // NOTE(review): rethrowing inside .catch yields an unconsumed rejected
        // promise, and the occupied slot is not released on error — verify.
        throw err;
    });
}
/**
 * @private
 * Generate request for each individual aggregate or entityset column.
 * Returns a promise. The resolved value denotes the success or failure:
 * false signals the response belongs to a superseded flow-control cycle
 * and the request must be processed again.
 */
function _readPseudoColumn(recordModel, isUpdate, current) {
    var defer = $q.defer();
    var activeListModel = recordModel.activeListModel;

    // show spinner for all the dependent columns
    activeListModel.objects.forEach(function (obj) {
        if (obj.column || obj.inline) {
            $rootScope.columnModels[obj.index].isLoading = true;
        } else if (obj.related) {
            $rootScope.relatedTableModels[obj.index].isLoading = true;
        }
    });

    var action = isUpdate ? logService.logActions.RELOAD : logService.logActions.LOAD;
    var stack = recordModel.logStack;
    if (Array.isArray(recordModel.reloadCauses) && recordModel.reloadCauses.length > 0) {
        stack = logService.addCausesToStack(stack, recordModel.reloadCauses, recordModel.reloadStartTime);
    }
    var logObj = {
        action: logService.getActionString(action, recordModel.logStackPath),
        // BUGFIX: log the stack with the reload causes attached; previously the
        // raw recordModel.logStack was logged here, silently dropping the
        // causes computed just above.
        stack: stack
    };

    // entitysets are fetched with a page read; aggregates via getAggregatedValue
    var cb;
    if (activeListModel.entityset) {
        cb = recordModel.reference.read(getPageSize(recordModel.reference), logObj);
    } else {
        cb = activeListModel.column.getAggregatedValue($rootScope.page, logObj);
    }

    cb.then(function (values) {
        // a newer flow-control cycle superseded this request; discard the result
        if ($rootScope.recordFlowControl.counter !== current) {
            return defer.resolve(false), defer.promise;
        }

        // remove the column error (they might retry)
        activeListModel.objects.forEach(function (obj) {
            if (obj.column) {
                $rootScope.columnModels[obj.index].columnError = false;
            }
        });

        //update the templateVariables
        var sourceDefinitions = $rootScope.reference.table.sourceDefinitions;
        var sm = sourceDefinitions.sourceMapping[activeListModel.column.name];

        if (activeListModel.entityset) { // entitysets
            // this check is unnecessary, otherwise ermrestjs wouldn't add them to the active list
            // but for consistency I left this check here
            // entitysets are fetched to be used in waitfor, so we don't need to do anything else with
            // the returned object apart from updating the templateVariables
            if (activeListModel.objects.length > 0 && Array.isArray(sm)) {
                sm.forEach(function (k) {
                    // the returned values is a page object in this case
                    $rootScope.templateVariables[k] = values.templateVariables;
                });
            }
            // update the entitySetResults (we're just using this to make sure it's done)
            $rootScope.entitySetResults[activeListModel.column.name] = true;
        } else { // aggregates
            // use the returned value (assumption is that values is an array of 0)
            var val = values[0];
            if (activeListModel.objects.length > 0 && Array.isArray(sm)) {
                sm.forEach(function (k) {
                    if (val.templateVariables["$self"]) {
                        $rootScope.templateVariables[k] = val.templateVariables["$self"];
                    }
                    if (val.templateVariables["$_self"]) {
                        $rootScope.templateVariables["_" + k] = val.templateVariables["$_self"];
                    }
                });
            }
            //update the aggregateResults
            $rootScope.aggregateResults[activeListModel.column.name] = val;
        }

        // attach the value if all has been returned
        _attachPseudoColumnValue(activeListModel);

        // clear the causes
        recordModel.reloadCauses = [];
        recordModel.reloadStartTime = -1;

        return defer.resolve(true), defer.promise;
    }).catch(function (err) {
        // a newer flow-control cycle superseded this request; discard the error
        if ($rootScope.recordFlowControl.counter !== current) {
            return defer.resolve(false), defer.promise;
        }
        activeListModel.objects.forEach(function (obj) {
            //remove the spinner from the dependent columns
            if (obj.column || obj.inline) {
                $rootScope.columnModels[obj.index].isLoading = false;
            } else if (obj.related) {
                $rootScope.relatedTableModels[obj.index].isLoading = false;
            }
            if (!obj.column) return;
            // show the timeout error in dependent models
            if (err instanceof ERMrest.QueryTimeoutError) {
                // TODO what about inline and related ones that timed out?
                $rootScope.columnModels[obj.index].columnError = true;
                // resolving here wins over the reject below (a promise settles once)
                return defer.resolve(true), defer.promise;
            }
        });
        // no-op when the deferred was already resolved in the timeout case above
        defer.reject(err);
    });
    return defer.promise;
}
/**
 * @private
 * This function is called inside `_readPseudoColumn`, after
 * the value is attached to the appropriate objects.
 * The purpose of this function is to show value of a model,
 * if all its dependencies are available.
 * @param {Object} activeListModel - the model that ermrestjs returns
 */
function _attachPseudoColumnValue(activeListModel) {
    activeListModel.objects.forEach(function (obj) {
        var hasAll;
        if (obj.citation) {
            // we don't need to validate the .citation here because obj.citation means that the citation is available and not null
            hasAll = $rootScope.reference.citation.waitFor.every(function (c) {
                return c.isUnique || c.name in $rootScope.aggregateResults || c.name in $rootScope.entitySetResults;
            });
            // if all the waitfor values are fetched, we can change the citation value
            if (hasAll) {
                $rootScope.citationReady = true;
                $rootScope.citation = $rootScope.reference.citation.compute($rootScope.tuple, $rootScope.templateVariables);
            }
            return;
        } else if (obj.column) {
            var cmodel = $rootScope.columnModels[obj.index];
            // NOTE(review): `hasAll` is redeclared with var here and in the
            // branch below; harmless with hoisting but worth cleaning up.
            var hasAll = cmodel.column.waitFor.every(function (col) {
                return col.isUnique || col.name in $rootScope.aggregateResults || col.name in $rootScope.entitySetResults;
            });
            // we need the second check because ermrestjs is not adding the current column,
            // NOTE I might be able to improve ermrestjs for this purpose
            if (!(hasAll && (cmodel.column.name in $rootScope.aggregateResults || cmodel.column.name in $rootScope.entitySetResults || cmodel.column.isUnique))) return;
            var displayValue = cmodel.column.sourceFormatPresentation(
                $rootScope.templateVariables,
                $rootScope.aggregateResults[cmodel.column.name],
                $rootScope.tuple
            );
            cmodel.isLoading = false;

            // attach the computed value where the record-display directive reads it
            $rootScope.recordValues[obj.index] = displayValue;
        } else if (obj.inline || obj.related) {
            var model = obj.inline ? $rootScope.columnModels[obj.index] : $rootScope.relatedTableModels[obj.index];
            var ref = model.tableModel.reference;
            var hasAll = ref.display.sourceWaitFor.every(function (col) {
                return col.isUnique || col.name in $rootScope.aggregateResults || col.name in $rootScope.entitySetResults;
            });
            if (!hasAll) return;

            model.isLoading = false;
            model.waitForDataLoaded = true;

            // if the page data is already fetched, we can just populate the tableMarkdownContent value.
            // otherwise we should just wait for the related/inline table data to get back to populate the tableMarkdownContent
            if (model.tableModel.page && !model.tableModel.dirtyResult) {
                model.tableMarkdownContent = model.tableModel.page.getContent($rootScope.templateVariables);
                model.tableMarkdownContentInitialized = true;
            }
        }
    });
}
/**
 * Given an object and cause string, adds the cause to the object's
 * reloadCauses list (ignoring duplicates) and, if not already set, stamps
 * reloadStartTime — the moment the model became dirty.
 */
function _addCauseToModel(obj, cause) {
    // the time that will be logged with the request
    var unset = !Number.isInteger(obj.reloadStartTime) || obj.reloadStartTime === -1;
    if (unset) {
        obj.reloadStartTime = ERMrest.getElapsedTime();
    }
    if (!cause) return;
    if (obj.reloadCauses.indexOf(cause) === -1) {
        obj.reloadCauses.push(cause);
    }
}
/**
 * sets the flag and calls the flow-control function to update the record page.
 * @param {Boolean} isUpdate indicates that the function has been triggered for update and not load.
 * @param {String} cause the cause of this update (if it's update and not load)
 * @param {Array} changedContainers If this function is called because of multiple
 * changes on the page, then we cannot use a single "cause" and instead
 * this attribute will return the different parts of the page that have caused this.
 * Each array is an object with `cause`, `index`, and `isInline` attributes.
 */
function updateRecordPage(isUpdate, cause, changedContainers) {
    if (!isUpdate) {
        // fresh load: reset the flow-control bookkeeping
        $rootScope.recordFlowControl.occupiedSlots = 0;
        $rootScope.recordFlowControl.counter = 0;
    } else {
        // we want to update the main entity on update
        $rootScope.isMainDirty = true;
        _addCauseToModel($rootScope, cause);
    }
    // bump the cycle counter so stale responses can be detected and discarded
    $rootScope.recordFlowControl.counter++;

    // mark inline tables dirty (and their waitfor state stale)
    $rootScope.columnModels.forEach(function (m) {
        if (m.isInline) {
            m.tableModel.dirtyResult = true;
            if (m.hasWaitFor) {
                m.isLoading = true;
                m.waitForDataLoaded = false;
            }
            _addCauseToModel(m.tableModel, cause);
        } else if (m.hasWaitForOrNotUnique) {
            m.isLoading = true;
        }
    })

    // mark related tables dirty
    $rootScope.relatedTableModels.forEach(function (m) {
        m.tableModel.dirtyResult = true;
        if (m.hasWaitFor) {
            m.isLoading = true;
            m.waitForDataLoaded = false;
        }
        _addCauseToModel(m.tableModel, cause);
    });

    // mark aggregate/entityset requests as unprocessed
    $rootScope.requestModels.forEach(function (m) {
        m.processed = false;

        // the cause for related and inline are handled by columnModels and relatedTableModels
        if (m.activeListModel.entityset || m.activeListModel.aggregate) {
            _addCauseToModel(m, cause);
        }
    });

    // update the cause list
    // selfCause maps a related-section cause onto the equivalent cause from
    // the changed container's own point of view (e.g. a create in a related
    // table is an entity create as seen by that table itself)
    var uc = logService.reloadCauses;
    var selfCause = {};
    selfCause[uc.RELATED_CREATE] = selfCause[uc.RELATED_INLINE_CREATE] = uc.ENTITY_CREATE;
    selfCause[uc.RELATED_DELETE] = selfCause[uc.RELATED_INLINE_DELETE] = uc.ENTITY_DELETE;
    selfCause[uc.RELATED_UPDATE] = selfCause[uc.RELATED_INLINE_UPDATE] = uc.ENTITY_UPDATE;
    if (Array.isArray(changedContainers)) {
        changedContainers.forEach(function (container) {
            var c;

            // add it to main causes
            _addCauseToModel($rootScope, container.cause);

            // add it to inline related
            $rootScope.columnModels.forEach(function (m, index) {
                if (!m.isInline) return;
                c = container.cause;
                if (container.isInline && container.index === index) {
                    c = selfCause[c];
                }
                _addCauseToModel(m.tableModel, c);
            });

            // add it to related
            $rootScope.relatedTableModels.forEach(function (m, index) {
                var c = container.cause;
                if (!container.isInline && container.index === index) {
                    c = selfCause[c];
                }
                _addCauseToModel(m.tableModel, c);
            });

            // add it to request models for aggregate and entity set
            // the cause for related and inline are handled by columnModels and relatedTableModels
            $rootScope.requestModels.forEach(function (m) {
                if (m.activeListModel.entityset || m.activeListModel.aggregate) {
                    _addCauseToModel(m, container.cause);
                }
            });
        });
    }

    $rootScope.pauseRequests = false;
    _processRequests(isUpdate);
}
/**
 * will pause the requests that are pending for updating the page.
 * Currently it's only setting a variable, but we might want to add
 * more logic later.
 * _processRequests checks this flag on entry and bails out while it is set.
 */
function pauseUpdateRecordPage() {
    $rootScope.pauseRequests = true;
}
/**
 * Resume the update requests that were paused by pauseUpdateRecordPage;
 * a no-op when the page is not currently paused.
 */
function resumeUpdateRecordPage() {
    if ($rootScope.pauseRequests) {
        $rootScope.pauseRequests = false;
        _processRequests(true);
    }
}
/**
 * The generic error catch for record app.
 * @param {object} exception error object
 */
function genericErrorCatch(exception) {
    // 400 Query Timeout errors get a modal with record-specific text
    if (exception instanceof ERMrest.QueryTimeoutError) {
        exception.subMessage = exception.message;
        exception.message = "The main entity cannot be retrieved. Refresh the page later to try again.";
        ErrorService.handleException(exception, true);
        return;
    }
    // rewrite server-provided redirects from record to recordset before rethrowing
    if (DataUtils.isObjectAndKeyDefined(exception.errorData, 'redirectPath')) {
        var redirectLink = UriUtils.createRedirectLinkFromPath(exception.errorData.redirectPath);
        exception.errorData.redirectUrl = redirectLink.replace('record', 'recordset');
    }
    throw exception;
}
/**
 * Given reference of related or inline, will create appropriate table model.
 * (Original JSDoc documented a nonexistent `context` parameter and missed `index`.)
 * @param {ERMrest.Reference} reference Reference object.
 * @param {number} index index of this container among the page's inline/related sections
 * @param {boolean} isInline whether the table is inline or not
 */
function getTableModel (reference, index, isInline) {
    // log stack node/path so requests from this table are attributed to the
    // related (or related-inline) section
    var stackNode = logService.getStackNode(
        logService.logStackTypes.RELATED,
        reference.table,
        {source: reference.compressedDataSource, entity: true}
    );
    var currentStackPath = isInline ? logService.logStackPaths.RELATED_INLINE : logService.logStackPaths.RELATED;
    var logStackPath = logService.getStackPath("", currentStackPath);

    return {
        parentReference: $rootScope.reference,
        parentTuple: $rootScope.tuple,
        reference: reference,
        pageLimit: getPageSize(reference),
        isTableDisplay: reference.display.type == 'table',
        enableSort: true,
        rowValues: [],
        selectedRows: [],//TODO might not be needed
        dirtyResult: true,
        isLoading: true,
        tableError: false,
        config: {
            viewable: true,
            editable: $rootScope.modifyRecord,
            deletable: $rootScope.modifyRecord && $rootScope.showDeleteButton,
            selectMode: modalBox.noSelect,
            displayMode: (isInline ? recordsetDisplayModes.inline : recordsetDisplayModes.related),
            containerIndex: index // TODO (could be optimized) can this be done in a better way?
        },
        logStack: logService.getStackObject(stackNode),
        logStackPath: logStackPath,
        reloadCauses: [], // might not be needed
        reloadStartTime: -1,
        flowControlObject: $rootScope.recordFlowControl,
        queryTimeoutTooltip: messageMap.queryTimeoutTooltip
    };
}
/**
 * @private
 * Returns the page size configured on the reference's display settings,
 * falling back to constants.defaultPageSize.
 * @param {ERMrest.Reference} reference Object reference that has the display attribute
 */
function getPageSize(reference){
    if (!angular.isUndefined(reference) && reference.display.defaultPageSize) {
        return reference.display.defaultPageSize;
    }
    return constants.defaultPageSize;
}
/**
 * Constructor for the record app flow-control state. Defaults the maximum
 * number of concurrent requests to constants.MAX_CONCURENT_REQUEST and
 * delegates initialization to recordTableUtils.FlowControlObject.
 */
function FlowControlObject (maxRequests) {
    maxRequests = maxRequests || constants.MAX_CONCURENT_REQUEST;
    recordTableUtils.FlowControlObject.call(this, maxRequests);
}
return {
updateRecordPage: updateRecordPage,
genericErrorCatch: genericErrorCatch,
readMainEntity: readMainEntity,
getTableModel: getTableModel,
FlowControlObject: FlowControlObject,
pauseUpdateRecordPage: pauseUpdateRecordPage,
resumeUpdateRecordPage: resumeUpdateRecordPage
};
}]);
})();
|
yjhexy/simple-report | src/main/java/com/orange/commons/cache/HessianJedisClusterMemcachedTemplate.java | <gh_stars>1-10
package com.orange.commons.cache;
import com.orange.commons.utils.Hessian2SerializerUtils;
import redis.clients.jedis.JedisCluster;
/**
 * Jedis cluster cache template that serializes values using the hessian2 protocol.
 *
 * @author 小天
 * @date 2019/2/13 9:31
 */
public class HessianJedisClusterMemcachedTemplate extends JedisClusterMemcachedTemplate {

    public HessianJedisClusterMemcachedTemplate(JedisCluster jedisCluster) {
        super(jedisCluster);
    }

    /**
     * Deserializes the given bytes with hessian2. Returns {@code null} for a
     * {@code null} input (e.g. a missing cache entry).
     */
    @Override
    protected Object deserialize(byte[] valueBytes) {
        if (valueBytes == null) {
            return null;
        }
        return Hessian2SerializerUtils.deserialize(valueBytes);
    }

    /**
     * Serializes the given value with hessian2.
     */
    @Override
    protected byte[] serialize(Object value) {
        return Hessian2SerializerUtils.serialize(value);
    }
}
|
CruGlobal/common_engine | app/models/ccc/mpd_letter_image.rb | <reponame>CruGlobal/common_engine
# ActiveRecord model for MPD letter images. No custom behavior is defined —
# everything is inherited from ActiveRecord::Base (table name derived by Rails
# convention; presumably mpd_letter_images — confirm against the schema).
class Ccc::MpdLetterImage < ActiveRecord::Base
end
|
acidburn0zzz/llvm-project | clang/test/CodeGen/RISCV/rvv-intrinsics/vredor.c | // NOTE: Assertions have been autogenerated by utils/update_cc_test_checks.py
// REQUIRES: riscv-registered-target
// RUN: %clang_cc1 -triple riscv64 -target-feature +experimental-v -disable-O0-optnone -emit-llvm %s -o - | opt -S -mem2reg | FileCheck --check-prefix=CHECK-RV64 %s
#include <riscv_vector.h>
/* Each test_vredor_vs_* wrapper below exercises one LMUL variant of the
 * vredor (vector reduction OR) intrinsic, and the CHECK-RV64 lines pin the
 * exact LLVM IR each call must lower to. NOTE(review): these assertions are
 * autogenerated — regenerate with update_cc_test_checks.py rather than
 * editing them by hand. */
//
// CHECK-RV64-LABEL: @test_vredor_vs_i8mf8_i8m1(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vredor.nxv8i8.nxv1i8.i64(<vscale x 8 x i8> [[DST:%.*]], <vscale x 1 x i8> [[VECTOR:%.*]], <vscale x 8 x i8> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
//
vint8m1_t test_vredor_vs_i8mf8_i8m1(vint8m1_t dst, vint8mf8_t vector,
                                    vint8m1_t scalar, size_t vl) {
  return vredor_vs_i8mf8_i8m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i8mf4_i8m1(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vredor.nxv8i8.nxv2i8.i64(<vscale x 8 x i8> [[DST:%.*]], <vscale x 2 x i8> [[VECTOR:%.*]], <vscale x 8 x i8> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
//
vint8m1_t test_vredor_vs_i8mf4_i8m1(vint8m1_t dst, vint8mf4_t vector,
                                    vint8m1_t scalar, size_t vl) {
  return vredor_vs_i8mf4_i8m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i8mf2_i8m1(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vredor.nxv8i8.nxv4i8.i64(<vscale x 8 x i8> [[DST:%.*]], <vscale x 4 x i8> [[VECTOR:%.*]], <vscale x 8 x i8> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
//
vint8m1_t test_vredor_vs_i8mf2_i8m1(vint8m1_t dst, vint8mf2_t vector,
                                    vint8m1_t scalar, size_t vl) {
  return vredor_vs_i8mf2_i8m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i8m1_i8m1(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vredor.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[DST:%.*]], <vscale x 8 x i8> [[VECTOR:%.*]], <vscale x 8 x i8> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
//
vint8m1_t test_vredor_vs_i8m1_i8m1(vint8m1_t dst, vint8m1_t vector,
                                   vint8m1_t scalar, size_t vl) {
  return vredor_vs_i8m1_i8m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i8m2_i8m1(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vredor.nxv8i8.nxv16i8.i64(<vscale x 8 x i8> [[DST:%.*]], <vscale x 16 x i8> [[VECTOR:%.*]], <vscale x 8 x i8> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
//
vint8m1_t test_vredor_vs_i8m2_i8m1(vint8m1_t dst, vint8m2_t vector,
                                   vint8m1_t scalar, size_t vl) {
  return vredor_vs_i8m2_i8m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i8m4_i8m1(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vredor.nxv8i8.nxv32i8.i64(<vscale x 8 x i8> [[DST:%.*]], <vscale x 32 x i8> [[VECTOR:%.*]], <vscale x 8 x i8> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
//
vint8m1_t test_vredor_vs_i8m4_i8m1(vint8m1_t dst, vint8m4_t vector,
                                   vint8m1_t scalar, size_t vl) {
  return vredor_vs_i8m4_i8m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i8m8_i8m1(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vredor.nxv8i8.nxv64i8.i64(<vscale x 8 x i8> [[DST:%.*]], <vscale x 64 x i8> [[VECTOR:%.*]], <vscale x 8 x i8> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
//
vint8m1_t test_vredor_vs_i8m8_i8m1(vint8m1_t dst, vint8m8_t vector,
vint8m1_t scalar, size_t vl) {
return vredor_vs_i8m8_i8m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i16mf4_i16m1(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vredor.nxv4i16.nxv1i16.i64(<vscale x 4 x i16> [[DST:%.*]], <vscale x 1 x i16> [[VECTOR:%.*]], <vscale x 4 x i16> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP0]]
//
vint16m1_t test_vredor_vs_i16mf4_i16m1(vint16m1_t dst, vint16mf4_t vector,
vint16m1_t scalar, size_t vl) {
return vredor_vs_i16mf4_i16m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i16mf2_i16m1(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vredor.nxv4i16.nxv2i16.i64(<vscale x 4 x i16> [[DST:%.*]], <vscale x 2 x i16> [[VECTOR:%.*]], <vscale x 4 x i16> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP0]]
//
vint16m1_t test_vredor_vs_i16mf2_i16m1(vint16m1_t dst, vint16mf2_t vector,
vint16m1_t scalar, size_t vl) {
return vredor_vs_i16mf2_i16m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i16m1_i16m1(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vredor.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[DST:%.*]], <vscale x 4 x i16> [[VECTOR:%.*]], <vscale x 4 x i16> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP0]]
//
vint16m1_t test_vredor_vs_i16m1_i16m1(vint16m1_t dst, vint16m1_t vector,
vint16m1_t scalar, size_t vl) {
return vredor_vs_i16m1_i16m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i16m2_i16m1(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vredor.nxv4i16.nxv8i16.i64(<vscale x 4 x i16> [[DST:%.*]], <vscale x 8 x i16> [[VECTOR:%.*]], <vscale x 4 x i16> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP0]]
//
vint16m1_t test_vredor_vs_i16m2_i16m1(vint16m1_t dst, vint16m2_t vector,
vint16m1_t scalar, size_t vl) {
return vredor_vs_i16m2_i16m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i16m4_i16m1(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vredor.nxv4i16.nxv16i16.i64(<vscale x 4 x i16> [[DST:%.*]], <vscale x 16 x i16> [[VECTOR:%.*]], <vscale x 4 x i16> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP0]]
//
vint16m1_t test_vredor_vs_i16m4_i16m1(vint16m1_t dst, vint16m4_t vector,
vint16m1_t scalar, size_t vl) {
return vredor_vs_i16m4_i16m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i16m8_i16m1(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vredor.nxv4i16.nxv32i16.i64(<vscale x 4 x i16> [[DST:%.*]], <vscale x 32 x i16> [[VECTOR:%.*]], <vscale x 4 x i16> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP0]]
//
vint16m1_t test_vredor_vs_i16m8_i16m1(vint16m1_t dst, vint16m8_t vector,
vint16m1_t scalar, size_t vl) {
return vredor_vs_i16m8_i16m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i32mf2_i32m1(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vredor.nxv2i32.nxv1i32.i64(<vscale x 2 x i32> [[DST:%.*]], <vscale x 1 x i32> [[VECTOR:%.*]], <vscale x 2 x i32> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
//
vint32m1_t test_vredor_vs_i32mf2_i32m1(vint32m1_t dst, vint32mf2_t vector,
vint32m1_t scalar, size_t vl) {
return vredor_vs_i32mf2_i32m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i32m1_i32m1(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vredor.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[DST:%.*]], <vscale x 2 x i32> [[VECTOR:%.*]], <vscale x 2 x i32> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
//
vint32m1_t test_vredor_vs_i32m1_i32m1(vint32m1_t dst, vint32m1_t vector,
vint32m1_t scalar, size_t vl) {
return vredor_vs_i32m1_i32m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i32m2_i32m1(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vredor.nxv2i32.nxv4i32.i64(<vscale x 2 x i32> [[DST:%.*]], <vscale x 4 x i32> [[VECTOR:%.*]], <vscale x 2 x i32> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
//
vint32m1_t test_vredor_vs_i32m2_i32m1(vint32m1_t dst, vint32m2_t vector,
vint32m1_t scalar, size_t vl) {
return vredor_vs_i32m2_i32m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i32m4_i32m1(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vredor.nxv2i32.nxv8i32.i64(<vscale x 2 x i32> [[DST:%.*]], <vscale x 8 x i32> [[VECTOR:%.*]], <vscale x 2 x i32> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
//
vint32m1_t test_vredor_vs_i32m4_i32m1(vint32m1_t dst, vint32m4_t vector,
vint32m1_t scalar, size_t vl) {
return vredor_vs_i32m4_i32m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i32m8_i32m1(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vredor.nxv2i32.nxv16i32.i64(<vscale x 2 x i32> [[DST:%.*]], <vscale x 16 x i32> [[VECTOR:%.*]], <vscale x 2 x i32> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
//
vint32m1_t test_vredor_vs_i32m8_i32m1(vint32m1_t dst, vint32m8_t vector,
vint32m1_t scalar, size_t vl) {
return vredor_vs_i32m8_i32m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i64m1_i64m1(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vredor.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[DST:%.*]], <vscale x 1 x i64> [[VECTOR:%.*]], <vscale x 1 x i64> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
//
vint64m1_t test_vredor_vs_i64m1_i64m1(vint64m1_t dst, vint64m1_t vector,
vint64m1_t scalar, size_t vl) {
return vredor_vs_i64m1_i64m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i64m2_i64m1(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vredor.nxv1i64.nxv2i64.i64(<vscale x 1 x i64> [[DST:%.*]], <vscale x 2 x i64> [[VECTOR:%.*]], <vscale x 1 x i64> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
//
vint64m1_t test_vredor_vs_i64m2_i64m1(vint64m1_t dst, vint64m2_t vector,
vint64m1_t scalar, size_t vl) {
return vredor_vs_i64m2_i64m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i64m4_i64m1(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vredor.nxv1i64.nxv4i64.i64(<vscale x 1 x i64> [[DST:%.*]], <vscale x 4 x i64> [[VECTOR:%.*]], <vscale x 1 x i64> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
//
vint64m1_t test_vredor_vs_i64m4_i64m1(vint64m1_t dst, vint64m4_t vector,
vint64m1_t scalar, size_t vl) {
return vredor_vs_i64m4_i64m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i64m8_i64m1(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vredor.nxv1i64.nxv8i64.i64(<vscale x 1 x i64> [[DST:%.*]], <vscale x 8 x i64> [[VECTOR:%.*]], <vscale x 1 x i64> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
//
vint64m1_t test_vredor_vs_i64m8_i64m1(vint64m1_t dst, vint64m8_t vector,
vint64m1_t scalar, size_t vl) {
return vredor_vs_i64m8_i64m1(dst, vector, scalar, vl);
}
// ---------------------------------------------------------------------------
// Unmasked vredor_vs tests for all unsigned element widths (u8/u16/u32/u64)
// across their legal LMUL values. These mirror the signed tests exactly; the
// CHECK-RV64 lines are autogenerated by update_cc_test_checks.py and must
// stay in sync with the code, so the function bodies are intentionally left
// byte-for-byte unchanged.
// ---------------------------------------------------------------------------
//
// CHECK-RV64-LABEL: @test_vredor_vs_u8mf8_u8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vredor.nxv8i8.nxv1i8.i64(<vscale x 8 x i8> [[DST:%.*]], <vscale x 1 x i8> [[VECTOR:%.*]], <vscale x 8 x i8> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
//
vuint8m1_t test_vredor_vs_u8mf8_u8m1(vuint8m1_t dst, vuint8mf8_t vector,
                                     vuint8m1_t scalar, size_t vl) {
  return vredor_vs_u8mf8_u8m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u8mf4_u8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vredor.nxv8i8.nxv2i8.i64(<vscale x 8 x i8> [[DST:%.*]], <vscale x 2 x i8> [[VECTOR:%.*]], <vscale x 8 x i8> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
//
vuint8m1_t test_vredor_vs_u8mf4_u8m1(vuint8m1_t dst, vuint8mf4_t vector,
                                     vuint8m1_t scalar, size_t vl) {
  return vredor_vs_u8mf4_u8m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u8mf2_u8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vredor.nxv8i8.nxv4i8.i64(<vscale x 8 x i8> [[DST:%.*]], <vscale x 4 x i8> [[VECTOR:%.*]], <vscale x 8 x i8> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
//
vuint8m1_t test_vredor_vs_u8mf2_u8m1(vuint8m1_t dst, vuint8mf2_t vector,
                                     vuint8m1_t scalar, size_t vl) {
  return vredor_vs_u8mf2_u8m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u8m1_u8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vredor.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[DST:%.*]], <vscale x 8 x i8> [[VECTOR:%.*]], <vscale x 8 x i8> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
//
vuint8m1_t test_vredor_vs_u8m1_u8m1(vuint8m1_t dst, vuint8m1_t vector,
                                    vuint8m1_t scalar, size_t vl) {
  return vredor_vs_u8m1_u8m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u8m2_u8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vredor.nxv8i8.nxv16i8.i64(<vscale x 8 x i8> [[DST:%.*]], <vscale x 16 x i8> [[VECTOR:%.*]], <vscale x 8 x i8> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
//
vuint8m1_t test_vredor_vs_u8m2_u8m1(vuint8m1_t dst, vuint8m2_t vector,
                                    vuint8m1_t scalar, size_t vl) {
  return vredor_vs_u8m2_u8m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u8m4_u8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vredor.nxv8i8.nxv32i8.i64(<vscale x 8 x i8> [[DST:%.*]], <vscale x 32 x i8> [[VECTOR:%.*]], <vscale x 8 x i8> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
//
vuint8m1_t test_vredor_vs_u8m4_u8m1(vuint8m1_t dst, vuint8m4_t vector,
                                    vuint8m1_t scalar, size_t vl) {
  return vredor_vs_u8m4_u8m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u8m8_u8m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vredor.nxv8i8.nxv64i8.i64(<vscale x 8 x i8> [[DST:%.*]], <vscale x 64 x i8> [[VECTOR:%.*]], <vscale x 8 x i8> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
//
vuint8m1_t test_vredor_vs_u8m8_u8m1(vuint8m1_t dst, vuint8m8_t vector,
                                    vuint8m1_t scalar, size_t vl) {
  return vredor_vs_u8m8_u8m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u16mf4_u16m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vredor.nxv4i16.nxv1i16.i64(<vscale x 4 x i16> [[DST:%.*]], <vscale x 1 x i16> [[VECTOR:%.*]], <vscale x 4 x i16> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
//
vuint16m1_t test_vredor_vs_u16mf4_u16m1(vuint16m1_t dst, vuint16mf4_t vector,
                                        vuint16m1_t scalar, size_t vl) {
  return vredor_vs_u16mf4_u16m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u16mf2_u16m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vredor.nxv4i16.nxv2i16.i64(<vscale x 4 x i16> [[DST:%.*]], <vscale x 2 x i16> [[VECTOR:%.*]], <vscale x 4 x i16> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
//
vuint16m1_t test_vredor_vs_u16mf2_u16m1(vuint16m1_t dst, vuint16mf2_t vector,
                                        vuint16m1_t scalar, size_t vl) {
  return vredor_vs_u16mf2_u16m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u16m1_u16m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vredor.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[DST:%.*]], <vscale x 4 x i16> [[VECTOR:%.*]], <vscale x 4 x i16> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
//
vuint16m1_t test_vredor_vs_u16m1_u16m1(vuint16m1_t dst, vuint16m1_t vector,
                                       vuint16m1_t scalar, size_t vl) {
  return vredor_vs_u16m1_u16m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u16m2_u16m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vredor.nxv4i16.nxv8i16.i64(<vscale x 4 x i16> [[DST:%.*]], <vscale x 8 x i16> [[VECTOR:%.*]], <vscale x 4 x i16> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
//
vuint16m1_t test_vredor_vs_u16m2_u16m1(vuint16m1_t dst, vuint16m2_t vector,
                                       vuint16m1_t scalar, size_t vl) {
  return vredor_vs_u16m2_u16m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u16m4_u16m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vredor.nxv4i16.nxv16i16.i64(<vscale x 4 x i16> [[DST:%.*]], <vscale x 16 x i16> [[VECTOR:%.*]], <vscale x 4 x i16> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
//
vuint16m1_t test_vredor_vs_u16m4_u16m1(vuint16m1_t dst, vuint16m4_t vector,
                                       vuint16m1_t scalar, size_t vl) {
  return vredor_vs_u16m4_u16m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u16m8_u16m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vredor.nxv4i16.nxv32i16.i64(<vscale x 4 x i16> [[DST:%.*]], <vscale x 32 x i16> [[VECTOR:%.*]], <vscale x 4 x i16> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
//
vuint16m1_t test_vredor_vs_u16m8_u16m1(vuint16m1_t dst, vuint16m8_t vector,
                                       vuint16m1_t scalar, size_t vl) {
  return vredor_vs_u16m8_u16m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u32mf2_u32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vredor.nxv2i32.nxv1i32.i64(<vscale x 2 x i32> [[DST:%.*]], <vscale x 1 x i32> [[VECTOR:%.*]], <vscale x 2 x i32> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
//
vuint32m1_t test_vredor_vs_u32mf2_u32m1(vuint32m1_t dst, vuint32mf2_t vector,
                                        vuint32m1_t scalar, size_t vl) {
  return vredor_vs_u32mf2_u32m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u32m1_u32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vredor.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[DST:%.*]], <vscale x 2 x i32> [[VECTOR:%.*]], <vscale x 2 x i32> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
//
vuint32m1_t test_vredor_vs_u32m1_u32m1(vuint32m1_t dst, vuint32m1_t vector,
                                       vuint32m1_t scalar, size_t vl) {
  return vredor_vs_u32m1_u32m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u32m2_u32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vredor.nxv2i32.nxv4i32.i64(<vscale x 2 x i32> [[DST:%.*]], <vscale x 4 x i32> [[VECTOR:%.*]], <vscale x 2 x i32> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
//
vuint32m1_t test_vredor_vs_u32m2_u32m1(vuint32m1_t dst, vuint32m2_t vector,
                                       vuint32m1_t scalar, size_t vl) {
  return vredor_vs_u32m2_u32m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u32m4_u32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vredor.nxv2i32.nxv8i32.i64(<vscale x 2 x i32> [[DST:%.*]], <vscale x 8 x i32> [[VECTOR:%.*]], <vscale x 2 x i32> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
//
vuint32m1_t test_vredor_vs_u32m4_u32m1(vuint32m1_t dst, vuint32m4_t vector,
                                       vuint32m1_t scalar, size_t vl) {
  return vredor_vs_u32m4_u32m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u32m8_u32m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vredor.nxv2i32.nxv16i32.i64(<vscale x 2 x i32> [[DST:%.*]], <vscale x 16 x i32> [[VECTOR:%.*]], <vscale x 2 x i32> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
//
vuint32m1_t test_vredor_vs_u32m8_u32m1(vuint32m1_t dst, vuint32m8_t vector,
                                       vuint32m1_t scalar, size_t vl) {
  return vredor_vs_u32m8_u32m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u64m1_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vredor.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[DST:%.*]], <vscale x 1 x i64> [[VECTOR:%.*]], <vscale x 1 x i64> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vredor_vs_u64m1_u64m1(vuint64m1_t dst, vuint64m1_t vector,
                                       vuint64m1_t scalar, size_t vl) {
  return vredor_vs_u64m1_u64m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u64m2_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vredor.nxv1i64.nxv2i64.i64(<vscale x 1 x i64> [[DST:%.*]], <vscale x 2 x i64> [[VECTOR:%.*]], <vscale x 1 x i64> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vredor_vs_u64m2_u64m1(vuint64m1_t dst, vuint64m2_t vector,
                                       vuint64m1_t scalar, size_t vl) {
  return vredor_vs_u64m2_u64m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u64m4_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vredor.nxv1i64.nxv4i64.i64(<vscale x 1 x i64> [[DST:%.*]], <vscale x 4 x i64> [[VECTOR:%.*]], <vscale x 1 x i64> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vredor_vs_u64m4_u64m1(vuint64m1_t dst, vuint64m4_t vector,
                                       vuint64m1_t scalar, size_t vl) {
  return vredor_vs_u64m4_u64m1(dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u64m8_u64m1(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vredor.nxv1i64.nxv8i64.i64(<vscale x 1 x i64> [[DST:%.*]], <vscale x 8 x i64> [[VECTOR:%.*]], <vscale x 1 x i64> [[SCALAR:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vredor_vs_u64m8_u64m1(vuint64m1_t dst, vuint64m8_t vector,
                                       vuint64m1_t scalar, size_t vl) {
  return vredor_vs_u64m8_u64m1(dst, vector, scalar, vl);
}
// ---------------------------------------------------------------------------
// Masked vredor_vs tests (_m suffix) for all signed element widths. Each
// variant adds a leading vboolN_t mask operand whose ratio N matches the
// source operand's SEW/LMUL combination (e.g. i8mf8 pairs with vbool64_t),
// and lowers to the llvm.riscv.vredor.mask.* intrinsic with a
// <vscale x V x i1> mask argument. The CHECK-RV64 lines are autogenerated by
// update_cc_test_checks.py and must stay in sync with the code, so the
// function bodies are intentionally left byte-for-byte unchanged.
// ---------------------------------------------------------------------------
//
// CHECK-RV64-LABEL: @test_vredor_vs_i8mf8_i8m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vredor.mask.nxv8i8.nxv1i8.i64(<vscale x 8 x i8> [[DST:%.*]], <vscale x 1 x i8> [[VECTOR:%.*]], <vscale x 8 x i8> [[SCALAR:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
//
vint8m1_t test_vredor_vs_i8mf8_i8m1_m(vbool64_t mask, vint8m1_t dst,
                                      vint8mf8_t vector, vint8m1_t scalar,
                                      size_t vl) {
  return vredor_vs_i8mf8_i8m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i8mf4_i8m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vredor.mask.nxv8i8.nxv2i8.i64(<vscale x 8 x i8> [[DST:%.*]], <vscale x 2 x i8> [[VECTOR:%.*]], <vscale x 8 x i8> [[SCALAR:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
//
vint8m1_t test_vredor_vs_i8mf4_i8m1_m(vbool32_t mask, vint8m1_t dst,
                                      vint8mf4_t vector, vint8m1_t scalar,
                                      size_t vl) {
  return vredor_vs_i8mf4_i8m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i8mf2_i8m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vredor.mask.nxv8i8.nxv4i8.i64(<vscale x 8 x i8> [[DST:%.*]], <vscale x 4 x i8> [[VECTOR:%.*]], <vscale x 8 x i8> [[SCALAR:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
//
vint8m1_t test_vredor_vs_i8mf2_i8m1_m(vbool16_t mask, vint8m1_t dst,
                                      vint8mf2_t vector, vint8m1_t scalar,
                                      size_t vl) {
  return vredor_vs_i8mf2_i8m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i8m1_i8m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vredor.mask.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[DST:%.*]], <vscale x 8 x i8> [[VECTOR:%.*]], <vscale x 8 x i8> [[SCALAR:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
//
vint8m1_t test_vredor_vs_i8m1_i8m1_m(vbool8_t mask, vint8m1_t dst,
                                     vint8m1_t vector, vint8m1_t scalar,
                                     size_t vl) {
  return vredor_vs_i8m1_i8m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i8m2_i8m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vredor.mask.nxv8i8.nxv16i8.i64(<vscale x 8 x i8> [[DST:%.*]], <vscale x 16 x i8> [[VECTOR:%.*]], <vscale x 8 x i8> [[SCALAR:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
//
vint8m1_t test_vredor_vs_i8m2_i8m1_m(vbool4_t mask, vint8m1_t dst,
                                     vint8m2_t vector, vint8m1_t scalar,
                                     size_t vl) {
  return vredor_vs_i8m2_i8m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i8m4_i8m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vredor.mask.nxv8i8.nxv32i8.i64(<vscale x 8 x i8> [[DST:%.*]], <vscale x 32 x i8> [[VECTOR:%.*]], <vscale x 8 x i8> [[SCALAR:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
//
vint8m1_t test_vredor_vs_i8m4_i8m1_m(vbool2_t mask, vint8m1_t dst,
                                     vint8m4_t vector, vint8m1_t scalar,
                                     size_t vl) {
  return vredor_vs_i8m4_i8m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i8m8_i8m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vredor.mask.nxv8i8.nxv64i8.i64(<vscale x 8 x i8> [[DST:%.*]], <vscale x 64 x i8> [[VECTOR:%.*]], <vscale x 8 x i8> [[SCALAR:%.*]], <vscale x 64 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 8 x i8> [[TMP0]]
//
vint8m1_t test_vredor_vs_i8m8_i8m1_m(vbool1_t mask, vint8m1_t dst,
                                     vint8m8_t vector, vint8m1_t scalar,
                                     size_t vl) {
  return vredor_vs_i8m8_i8m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i16mf4_i16m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vredor.mask.nxv4i16.nxv1i16.i64(<vscale x 4 x i16> [[DST:%.*]], <vscale x 1 x i16> [[VECTOR:%.*]], <vscale x 4 x i16> [[SCALAR:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
//
vint16m1_t test_vredor_vs_i16mf4_i16m1_m(vbool64_t mask, vint16m1_t dst,
                                         vint16mf4_t vector, vint16m1_t scalar,
                                         size_t vl) {
  return vredor_vs_i16mf4_i16m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i16mf2_i16m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vredor.mask.nxv4i16.nxv2i16.i64(<vscale x 4 x i16> [[DST:%.*]], <vscale x 2 x i16> [[VECTOR:%.*]], <vscale x 4 x i16> [[SCALAR:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
//
vint16m1_t test_vredor_vs_i16mf2_i16m1_m(vbool32_t mask, vint16m1_t dst,
                                         vint16mf2_t vector, vint16m1_t scalar,
                                         size_t vl) {
  return vredor_vs_i16mf2_i16m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i16m1_i16m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vredor.mask.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[DST:%.*]], <vscale x 4 x i16> [[VECTOR:%.*]], <vscale x 4 x i16> [[SCALAR:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
//
vint16m1_t test_vredor_vs_i16m1_i16m1_m(vbool16_t mask, vint16m1_t dst,
                                        vint16m1_t vector, vint16m1_t scalar,
                                        size_t vl) {
  return vredor_vs_i16m1_i16m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i16m2_i16m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vredor.mask.nxv4i16.nxv8i16.i64(<vscale x 4 x i16> [[DST:%.*]], <vscale x 8 x i16> [[VECTOR:%.*]], <vscale x 4 x i16> [[SCALAR:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
//
vint16m1_t test_vredor_vs_i16m2_i16m1_m(vbool8_t mask, vint16m1_t dst,
                                        vint16m2_t vector, vint16m1_t scalar,
                                        size_t vl) {
  return vredor_vs_i16m2_i16m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i16m4_i16m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vredor.mask.nxv4i16.nxv16i16.i64(<vscale x 4 x i16> [[DST:%.*]], <vscale x 16 x i16> [[VECTOR:%.*]], <vscale x 4 x i16> [[SCALAR:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
//
vint16m1_t test_vredor_vs_i16m4_i16m1_m(vbool4_t mask, vint16m1_t dst,
                                        vint16m4_t vector, vint16m1_t scalar,
                                        size_t vl) {
  return vredor_vs_i16m4_i16m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i16m8_i16m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vredor.mask.nxv4i16.nxv32i16.i64(<vscale x 4 x i16> [[DST:%.*]], <vscale x 32 x i16> [[VECTOR:%.*]], <vscale x 4 x i16> [[SCALAR:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 4 x i16> [[TMP0]]
//
vint16m1_t test_vredor_vs_i16m8_i16m1_m(vbool2_t mask, vint16m1_t dst,
                                        vint16m8_t vector, vint16m1_t scalar,
                                        size_t vl) {
  return vredor_vs_i16m8_i16m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i32mf2_i32m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vredor.mask.nxv2i32.nxv1i32.i64(<vscale x 2 x i32> [[DST:%.*]], <vscale x 1 x i32> [[VECTOR:%.*]], <vscale x 2 x i32> [[SCALAR:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
//
vint32m1_t test_vredor_vs_i32mf2_i32m1_m(vbool64_t mask, vint32m1_t dst,
                                         vint32mf2_t vector, vint32m1_t scalar,
                                         size_t vl) {
  return vredor_vs_i32mf2_i32m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i32m1_i32m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vredor.mask.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[DST:%.*]], <vscale x 2 x i32> [[VECTOR:%.*]], <vscale x 2 x i32> [[SCALAR:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
//
vint32m1_t test_vredor_vs_i32m1_i32m1_m(vbool32_t mask, vint32m1_t dst,
                                        vint32m1_t vector, vint32m1_t scalar,
                                        size_t vl) {
  return vredor_vs_i32m1_i32m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i32m2_i32m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vredor.mask.nxv2i32.nxv4i32.i64(<vscale x 2 x i32> [[DST:%.*]], <vscale x 4 x i32> [[VECTOR:%.*]], <vscale x 2 x i32> [[SCALAR:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
//
vint32m1_t test_vredor_vs_i32m2_i32m1_m(vbool16_t mask, vint32m1_t dst,
                                        vint32m2_t vector, vint32m1_t scalar,
                                        size_t vl) {
  return vredor_vs_i32m2_i32m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i32m4_i32m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vredor.mask.nxv2i32.nxv8i32.i64(<vscale x 2 x i32> [[DST:%.*]], <vscale x 8 x i32> [[VECTOR:%.*]], <vscale x 2 x i32> [[SCALAR:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
//
vint32m1_t test_vredor_vs_i32m4_i32m1_m(vbool8_t mask, vint32m1_t dst,
                                        vint32m4_t vector, vint32m1_t scalar,
                                        size_t vl) {
  return vredor_vs_i32m4_i32m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i32m8_i32m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vredor.mask.nxv2i32.nxv16i32.i64(<vscale x 2 x i32> [[DST:%.*]], <vscale x 16 x i32> [[VECTOR:%.*]], <vscale x 2 x i32> [[SCALAR:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 2 x i32> [[TMP0]]
//
vint32m1_t test_vredor_vs_i32m8_i32m1_m(vbool4_t mask, vint32m1_t dst,
                                        vint32m8_t vector, vint32m1_t scalar,
                                        size_t vl) {
  return vredor_vs_i32m8_i32m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i64m1_i64m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vredor.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[DST:%.*]], <vscale x 1 x i64> [[VECTOR:%.*]], <vscale x 1 x i64> [[SCALAR:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vint64m1_t test_vredor_vs_i64m1_i64m1_m(vbool64_t mask, vint64m1_t dst,
                                        vint64m1_t vector, vint64m1_t scalar,
                                        size_t vl) {
  return vredor_vs_i64m1_i64m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i64m2_i64m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vredor.mask.nxv1i64.nxv2i64.i64(<vscale x 1 x i64> [[DST:%.*]], <vscale x 2 x i64> [[VECTOR:%.*]], <vscale x 1 x i64> [[SCALAR:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vint64m1_t test_vredor_vs_i64m2_i64m1_m(vbool32_t mask, vint64m1_t dst,
                                        vint64m2_t vector, vint64m1_t scalar,
                                        size_t vl) {
  return vredor_vs_i64m2_i64m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i64m4_i64m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vredor.mask.nxv1i64.nxv4i64.i64(<vscale x 1 x i64> [[DST:%.*]], <vscale x 4 x i64> [[VECTOR:%.*]], <vscale x 1 x i64> [[SCALAR:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vint64m1_t test_vredor_vs_i64m4_i64m1_m(vbool16_t mask, vint64m1_t dst,
                                        vint64m4_t vector, vint64m1_t scalar,
                                        size_t vl) {
  return vredor_vs_i64m4_i64m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_i64m8_i64m1_m(
// CHECK-RV64-NEXT:  entry:
// CHECK-RV64-NEXT:    [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vredor.mask.nxv1i64.nxv8i64.i64(<vscale x 1 x i64> [[DST:%.*]], <vscale x 8 x i64> [[VECTOR:%.*]], <vscale x 1 x i64> [[SCALAR:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT:    ret <vscale x 1 x i64> [[TMP0]]
//
vint64m1_t test_vredor_vs_i64m8_i64m1_m(vbool8_t mask, vint64m1_t dst,
                                        vint64m8_t vector, vint64m1_t scalar,
                                        size_t vl) {
  return vredor_vs_i64m8_i64m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u8mf8_u8m1_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vredor.mask.nxv8i8.nxv1i8.i64(<vscale x 8 x i8> [[DST:%.*]], <vscale x 1 x i8> [[VECTOR:%.*]], <vscale x 8 x i8> [[SCALAR:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
//
vuint8m1_t test_vredor_vs_u8mf8_u8m1_m(vbool64_t mask, vuint8m1_t dst,
vuint8mf8_t vector, vuint8m1_t scalar,
size_t vl) {
return vredor_vs_u8mf8_u8m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u8mf4_u8m1_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vredor.mask.nxv8i8.nxv2i8.i64(<vscale x 8 x i8> [[DST:%.*]], <vscale x 2 x i8> [[VECTOR:%.*]], <vscale x 8 x i8> [[SCALAR:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
//
vuint8m1_t test_vredor_vs_u8mf4_u8m1_m(vbool32_t mask, vuint8m1_t dst,
vuint8mf4_t vector, vuint8m1_t scalar,
size_t vl) {
return vredor_vs_u8mf4_u8m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u8mf2_u8m1_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vredor.mask.nxv8i8.nxv4i8.i64(<vscale x 8 x i8> [[DST:%.*]], <vscale x 4 x i8> [[VECTOR:%.*]], <vscale x 8 x i8> [[SCALAR:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
//
vuint8m1_t test_vredor_vs_u8mf2_u8m1_m(vbool16_t mask, vuint8m1_t dst,
vuint8mf2_t vector, vuint8m1_t scalar,
size_t vl) {
return vredor_vs_u8mf2_u8m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u8m1_u8m1_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vredor.mask.nxv8i8.nxv8i8.i64(<vscale x 8 x i8> [[DST:%.*]], <vscale x 8 x i8> [[VECTOR:%.*]], <vscale x 8 x i8> [[SCALAR:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
//
vuint8m1_t test_vredor_vs_u8m1_u8m1_m(vbool8_t mask, vuint8m1_t dst,
vuint8m1_t vector, vuint8m1_t scalar,
size_t vl) {
return vredor_vs_u8m1_u8m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u8m2_u8m1_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vredor.mask.nxv8i8.nxv16i8.i64(<vscale x 8 x i8> [[DST:%.*]], <vscale x 16 x i8> [[VECTOR:%.*]], <vscale x 8 x i8> [[SCALAR:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
//
vuint8m1_t test_vredor_vs_u8m2_u8m1_m(vbool4_t mask, vuint8m1_t dst,
vuint8m2_t vector, vuint8m1_t scalar,
size_t vl) {
return vredor_vs_u8m2_u8m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u8m4_u8m1_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vredor.mask.nxv8i8.nxv32i8.i64(<vscale x 8 x i8> [[DST:%.*]], <vscale x 32 x i8> [[VECTOR:%.*]], <vscale x 8 x i8> [[SCALAR:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
//
vuint8m1_t test_vredor_vs_u8m4_u8m1_m(vbool2_t mask, vuint8m1_t dst,
vuint8m4_t vector, vuint8m1_t scalar,
size_t vl) {
return vredor_vs_u8m4_u8m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u8m8_u8m1_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 8 x i8> @llvm.riscv.vredor.mask.nxv8i8.nxv64i8.i64(<vscale x 8 x i8> [[DST:%.*]], <vscale x 64 x i8> [[VECTOR:%.*]], <vscale x 8 x i8> [[SCALAR:%.*]], <vscale x 64 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 8 x i8> [[TMP0]]
//
vuint8m1_t test_vredor_vs_u8m8_u8m1_m(vbool1_t mask, vuint8m1_t dst,
vuint8m8_t vector, vuint8m1_t scalar,
size_t vl) {
return vredor_vs_u8m8_u8m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u16mf4_u16m1_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vredor.mask.nxv4i16.nxv1i16.i64(<vscale x 4 x i16> [[DST:%.*]], <vscale x 1 x i16> [[VECTOR:%.*]], <vscale x 4 x i16> [[SCALAR:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP0]]
//
vuint16m1_t test_vredor_vs_u16mf4_u16m1_m(vbool64_t mask, vuint16m1_t dst,
vuint16mf4_t vector,
vuint16m1_t scalar, size_t vl) {
return vredor_vs_u16mf4_u16m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u16mf2_u16m1_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vredor.mask.nxv4i16.nxv2i16.i64(<vscale x 4 x i16> [[DST:%.*]], <vscale x 2 x i16> [[VECTOR:%.*]], <vscale x 4 x i16> [[SCALAR:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP0]]
//
vuint16m1_t test_vredor_vs_u16mf2_u16m1_m(vbool32_t mask, vuint16m1_t dst,
vuint16mf2_t vector,
vuint16m1_t scalar, size_t vl) {
return vredor_vs_u16mf2_u16m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u16m1_u16m1_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vredor.mask.nxv4i16.nxv4i16.i64(<vscale x 4 x i16> [[DST:%.*]], <vscale x 4 x i16> [[VECTOR:%.*]], <vscale x 4 x i16> [[SCALAR:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP0]]
//
vuint16m1_t test_vredor_vs_u16m1_u16m1_m(vbool16_t mask, vuint16m1_t dst,
vuint16m1_t vector, vuint16m1_t scalar,
size_t vl) {
return vredor_vs_u16m1_u16m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u16m2_u16m1_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vredor.mask.nxv4i16.nxv8i16.i64(<vscale x 4 x i16> [[DST:%.*]], <vscale x 8 x i16> [[VECTOR:%.*]], <vscale x 4 x i16> [[SCALAR:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP0]]
//
vuint16m1_t test_vredor_vs_u16m2_u16m1_m(vbool8_t mask, vuint16m1_t dst,
vuint16m2_t vector, vuint16m1_t scalar,
size_t vl) {
return vredor_vs_u16m2_u16m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u16m4_u16m1_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vredor.mask.nxv4i16.nxv16i16.i64(<vscale x 4 x i16> [[DST:%.*]], <vscale x 16 x i16> [[VECTOR:%.*]], <vscale x 4 x i16> [[SCALAR:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP0]]
//
vuint16m1_t test_vredor_vs_u16m4_u16m1_m(vbool4_t mask, vuint16m1_t dst,
vuint16m4_t vector, vuint16m1_t scalar,
size_t vl) {
return vredor_vs_u16m4_u16m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u16m8_u16m1_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 4 x i16> @llvm.riscv.vredor.mask.nxv4i16.nxv32i16.i64(<vscale x 4 x i16> [[DST:%.*]], <vscale x 32 x i16> [[VECTOR:%.*]], <vscale x 4 x i16> [[SCALAR:%.*]], <vscale x 32 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 4 x i16> [[TMP0]]
//
vuint16m1_t test_vredor_vs_u16m8_u16m1_m(vbool2_t mask, vuint16m1_t dst,
vuint16m8_t vector, vuint16m1_t scalar,
size_t vl) {
return vredor_vs_u16m8_u16m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u32mf2_u32m1_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vredor.mask.nxv2i32.nxv1i32.i64(<vscale x 2 x i32> [[DST:%.*]], <vscale x 1 x i32> [[VECTOR:%.*]], <vscale x 2 x i32> [[SCALAR:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
//
vuint32m1_t test_vredor_vs_u32mf2_u32m1_m(vbool64_t mask, vuint32m1_t dst,
vuint32mf2_t vector,
vuint32m1_t scalar, size_t vl) {
return vredor_vs_u32mf2_u32m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u32m1_u32m1_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vredor.mask.nxv2i32.nxv2i32.i64(<vscale x 2 x i32> [[DST:%.*]], <vscale x 2 x i32> [[VECTOR:%.*]], <vscale x 2 x i32> [[SCALAR:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
//
vuint32m1_t test_vredor_vs_u32m1_u32m1_m(vbool32_t mask, vuint32m1_t dst,
vuint32m1_t vector, vuint32m1_t scalar,
size_t vl) {
return vredor_vs_u32m1_u32m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u32m2_u32m1_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vredor.mask.nxv2i32.nxv4i32.i64(<vscale x 2 x i32> [[DST:%.*]], <vscale x 4 x i32> [[VECTOR:%.*]], <vscale x 2 x i32> [[SCALAR:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
//
vuint32m1_t test_vredor_vs_u32m2_u32m1_m(vbool16_t mask, vuint32m1_t dst,
vuint32m2_t vector, vuint32m1_t scalar,
size_t vl) {
return vredor_vs_u32m2_u32m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u32m4_u32m1_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vredor.mask.nxv2i32.nxv8i32.i64(<vscale x 2 x i32> [[DST:%.*]], <vscale x 8 x i32> [[VECTOR:%.*]], <vscale x 2 x i32> [[SCALAR:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
//
vuint32m1_t test_vredor_vs_u32m4_u32m1_m(vbool8_t mask, vuint32m1_t dst,
vuint32m4_t vector, vuint32m1_t scalar,
size_t vl) {
return vredor_vs_u32m4_u32m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u32m8_u32m1_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 2 x i32> @llvm.riscv.vredor.mask.nxv2i32.nxv16i32.i64(<vscale x 2 x i32> [[DST:%.*]], <vscale x 16 x i32> [[VECTOR:%.*]], <vscale x 2 x i32> [[SCALAR:%.*]], <vscale x 16 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 2 x i32> [[TMP0]]
//
vuint32m1_t test_vredor_vs_u32m8_u32m1_m(vbool4_t mask, vuint32m1_t dst,
vuint32m8_t vector, vuint32m1_t scalar,
size_t vl) {
return vredor_vs_u32m8_u32m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u64m1_u64m1_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vredor.mask.nxv1i64.nxv1i64.i64(<vscale x 1 x i64> [[DST:%.*]], <vscale x 1 x i64> [[VECTOR:%.*]], <vscale x 1 x i64> [[SCALAR:%.*]], <vscale x 1 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vredor_vs_u64m1_u64m1_m(vbool64_t mask, vuint64m1_t dst,
vuint64m1_t vector, vuint64m1_t scalar,
size_t vl) {
return vredor_vs_u64m1_u64m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u64m2_u64m1_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vredor.mask.nxv1i64.nxv2i64.i64(<vscale x 1 x i64> [[DST:%.*]], <vscale x 2 x i64> [[VECTOR:%.*]], <vscale x 1 x i64> [[SCALAR:%.*]], <vscale x 2 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vredor_vs_u64m2_u64m1_m(vbool32_t mask, vuint64m1_t dst,
vuint64m2_t vector, vuint64m1_t scalar,
size_t vl) {
return vredor_vs_u64m2_u64m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u64m4_u64m1_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vredor.mask.nxv1i64.nxv4i64.i64(<vscale x 1 x i64> [[DST:%.*]], <vscale x 4 x i64> [[VECTOR:%.*]], <vscale x 1 x i64> [[SCALAR:%.*]], <vscale x 4 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vredor_vs_u64m4_u64m1_m(vbool16_t mask, vuint64m1_t dst,
vuint64m4_t vector, vuint64m1_t scalar,
size_t vl) {
return vredor_vs_u64m4_u64m1_m(mask, dst, vector, scalar, vl);
}
//
// CHECK-RV64-LABEL: @test_vredor_vs_u64m8_u64m1_m(
// CHECK-RV64-NEXT: entry:
// CHECK-RV64-NEXT: [[TMP0:%.*]] = call <vscale x 1 x i64> @llvm.riscv.vredor.mask.nxv1i64.nxv8i64.i64(<vscale x 1 x i64> [[DST:%.*]], <vscale x 8 x i64> [[VECTOR:%.*]], <vscale x 1 x i64> [[SCALAR:%.*]], <vscale x 8 x i1> [[MASK:%.*]], i64 [[VL:%.*]])
// CHECK-RV64-NEXT: ret <vscale x 1 x i64> [[TMP0]]
//
vuint64m1_t test_vredor_vs_u64m8_u64m1_m(vbool8_t mask, vuint64m1_t dst,
vuint64m8_t vector, vuint64m1_t scalar,
size_t vl) {
return vredor_vs_u64m8_u64m1_m(mask, dst, vector, scalar, vl);
}
|
lel352/Curso-Python | aulaspythonbasico/aula13/aula13.py | # Documentação e funções built-in úteis
# python não converte um dado para outro tipo dinamicamente o programador tem que mandar ele fazer isso
num1 = input('Número 1: ')
num2 = input('Número 2: ')
#isnumeric isdigit isdecimal
# Verificando se só tem numeros e positivos
print(num1.isnumeric())
# não preocupado com negativo e ponto flutuante
if num1.isdigit() and num2.isdigit():
num1 = int(num1)
num2 = int(num2)
print(num1 + num2)
else:
print('Não só pode números !!!')
|
livehybrid/addonfactory-ucc-generator | splunk_add_on_ucc_framework/UCC-UI-lib/bower_components/SplunkWebCore/build_tools/web_loaders/splunk-es6-polyfill-loader.js | var coreJSRequire = 'require(\'core-js/es6\');\n';
var useStrictRegExp = /^['"]use strict['"];?/;
/**
* A webpack loader that prepends a require statement importing
* the core-js ES6 polyfill. Handles 'use strict' statements if
* they appear at the very beginning of the resource.
*
* @param content - module content
* @returns - polyfilled or unmodified module content
*/
module.exports = function (content) {
this.cacheable();
var result = useStrictRegExp.exec(content);
if (result) {
return '\'use strict\';\n' + coreJSRequire + content.slice(result[0].length);
}
return coreJSRequire + content;
}; |
StGrail/AlleyCat-bot | handlers/command_info.py | <gh_stars>0
from aiogram import types
from aiogram.dispatcher import FSMContext
from aiogram.dispatcher.filters.builtin import Command
from FSM.Registation_states import Registration_form
from constants.text_messages import START_INFO
from keyboards.inline_kb import are_you_ready, gender
from utils.config import admins
from utils.loader import dp, db
@dp.message_handler(Command('send_all'), user_id=admins)
async def send_all(message: types.Message):
racers = await db.select_all_racers()
mess = message.text[9:]
if len(mess) < 1:
await message.answer("Слишком короткое сообщение.")
else:
if len(racers) > 0:
for racer in racers:
try:
await dp.bot.send_message(racer['id'], mess)
except:
pass
else:
await message.answer("Пока никто не зарегистрировался.")
@dp.message_handler(Command('start_race'), user_id=admins)
async def start_race(message: types.Message):
racers = await db.select_all_racers()
if len(racers) > 0:
for racer in racers:
try:
await dp.bot.send_message(racer['id'], 'Мы начинаем!\nТы готов к гонке?', reply_markup=are_you_ready)
except:
pass
else:
await message.answer("Пока никто не зарегистрировался.")
@dp.message_handler(Command('info'))
async def send_info(message: types.Message):
await message.answer(START_INFO)
@dp.message_handler(Command('change'))
async def change_reg_info(message: types.Message, state: FSMContext):
await state.reset_data()
await state.reset_state()
await message.answer('Укажи еще раз свой пол:', reply_markup=gender)
await Registration_form.Sex.set()
@dp.message_handler(Command('delete_time'), user_id=412112889)
async def delete_time_from_db(message: types.Message):
await db.delete_time()
await message.answer('Время почищено в бд')
|
domydev/Dark-Basic-Pro | Dark Basic Public Shared/Official Plugins/Enhancement Pack/Code/ogg vorbis/Core.cpp | <filename>Dark Basic Public Shared/Official Plugins/Enhancement Pack/Code/ogg vorbis/Core.cpp
////////////////////////////////////////////////////////////////////
// INFORMATION /////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////
/*
CORE SET UP COMMANDS
*/
////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////
// SOUND DLL
// LPDIRECTSOUND8 GetSoundInterface ( void )
/*
*/
////////////////////////////////////////////////////////////////////
// DEFINES AND INCLUDES ////////////////////////////////////////////
////////////////////////////////////////////////////////////////////
#define DARKSDK __declspec ( dllexport )
#define WIN32_LEAN_AND_MEAN
#include <windows.h>
#include "globstruct.h"
#include <mmsystem.h>
#include <mmreg.h>
#include <dsound.h>
#include "core.h"
#include <stdio.h>
#define SAFE_DELETE( p ) { if ( p ) { delete ( p ); ( p ) = NULL; } }
#define SAFE_RELEASE( p ) { if ( p ) { ( p )->Release ( ); ( p ) = NULL; } }
#define SAFE_DELETE_ARRAY( p ) { if ( p ) { delete [ ] ( p ); ( p ) = NULL; } }
////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////
// GLOBALS /////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////
char g_szErrorList [ 256 ] [ 256 ];
sSound g_SoundList [ 256 ];
bool g_bErrorFile = false;
GlobStruct* g_pGlobal = NULL;
LPDIRECTSOUND8 g_pSound = NULL;
////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////
// FUNCTIONS ///////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////
DARKSDK void ReceiveCoreDataPtr ( LPVOID pCore );
DARKSDK int GetAssociatedDLLs ( void );
DARKSDK void Destructor ( void );
void LoadSystemDLL ( void );
void LoadSoundDLL ( void );
void SetupErrorCodes ( void );
void ReceiveCoreDataPtr ( LPVOID pCore )
{
// get the global structure pointer from DB Pro
// store the pointer
g_pGlobal = ( GlobStruct* ) pCore;
// set up error codes
SetupErrorCodes ( );
// load the sound dll
LoadSoundDLL ( );
}
int GetAssociatedDLLs ( void )
{
// return associated DLLs - in this case we depend on the sound DLL
return 2;
}
void Destructor ( void )
{
// clean up
}
void LoadSoundDLL ( void )
{
// load the sound DLL and extract the sound interface function
// set up the function pointer type definition
typedef LPDIRECTSOUND8 ( *GetSoundPFN ) ( void );
// declare the function pointer
GetSoundPFN pGetSound = NULL;
// display error message if there is a problem with the global structure
if ( !g_pGlobal )
{
Error ( 1 );
return;
}
// attempt to set the sound function pointer
pGetSound = ( GetSoundPFN ) GetProcAddress ( g_pGlobal->g_Sound, "?GetSoundInterface@@YAPAUIDirectSound8@@XZ" );
// fail if pointer is invalid
if ( !pGetSound )
{
Error ( 3 );
return;
}
// store the sound interface
g_pSound = pGetSound ( );
// make sure all is okay
if ( !g_pSound )
{
Error ( 2 );
return;
}
// clear the sound list
memset ( g_SoundList, 0, sizeof ( g_SoundList ) );
// go through all items in list and reset values
for ( int i = 0; i < 256; i++ )
{
g_SoundList [ i ].pBuffer = NULL;
g_SoundList [ i ].bPlaying = false;
g_SoundList [ i ].bPaused = false;
g_SoundList [ i ].bStopped = false;
g_SoundList [ i ].bLooping = false;
g_SoundList [ i ].dwPosition = 0;
}
}
void SetupErrorCodes ( void )
{
// set up error codes for when things fail
char szPath [ 255 ] = "";
char szDir [ 255 ] = "";
HKEY hKey;
DWORD dwBufLen;
// open the registry key
RegOpenKeyEx ( HKEY_LOCAL_MACHINE, "SOFTWARE\\Dark Basic\\Dark Basic Pro", 0, KEY_QUERY_VALUE, &hKey );
RegQueryValueEx ( hKey, "install-path", NULL, NULL, ( LPBYTE ) szPath, &dwBufLen );
RegCloseKey ( hKey );
// append our path
strcat ( szPath, "\\compiler\\plugins-licensed\\" );
GetCurrentDirectory ( 255, szDir );
SetCurrentDirectory ( szPath );
// clear out the error table
memset ( g_szErrorList, 0, sizeof ( g_szErrorList ) );
// attempt to open the "loc" file
FILE* fp = fopen ( "EnhancementsOV.loc", "rt" );
// if the file is valid we should be running from DB Pro
if ( fp )
{
// read in each line of the file
char szTemp [ 256 ];
int iIndex = 0;
while ( fgets ( szTemp, sizeof ( szTemp ), fp ) )
{
char* token = strtok ( szTemp, "\n" );
while ( token )
{
strcpy ( g_szErrorList [ iIndex++ ], token );
token = strtok ( 0, "\n" );
}
}
// close the file
fclose ( fp );
// save state as we have the error file loaded
g_bErrorFile = true;
}
// restory directory
SetCurrentDirectory ( szDir );
}
void Error ( int iID )
{
if ( g_bErrorFile )
MessageBox ( NULL, g_szErrorList [ iID ], g_szErrorList [ 0 ], MB_ICONERROR | MB_OK );
else
{
char szNum [ 3 ];
itoa ( iID, szNum, 10 );
MessageBox ( NULL, szNum, "Enhancements - Ogg Vorbis - Runtime Error", MB_ICONERROR | MB_OK );
}
PostQuitMessage ( 0 );
}
////////////////////////////////////////////////////////////////////
////////////////////////////////////////////////////////////////////
|
llwantedl/Bank-currency-test | src/main/java/com/test/privat/currency/models/dtolayer/services/UserDetailsDTOServiceImpl.java | package com.test.privat.currency.models.dtolayer.services;
import com.test.privat.currency.models.dtolayer.converter.DTOConverter;
import com.test.privat.currency.models.dtolayer.wrappers.UserDetailsWrapper;
import com.test.privat.currency.models.entities.User;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.stream.Collectors;
@Service
public class UserDetailsDTOServiceImpl implements UserDetailsDTOService {
private final DTOConverter<User, UserDetailsWrapper> userConverter;
@Autowired
public UserDetailsDTOServiceImpl(DTOConverter<User, UserDetailsWrapper> converter) {
this.userConverter = converter;
}
@Override
public UserDetailsWrapper getUserDetails(User user) {
return userConverter.backward(user);
}
@Override
public List<UserDetailsWrapper> convertListToDTO(List<User> entities) {
return entities
.stream()
.map(userConverter::backward)
.collect(Collectors.toList());
}
}
|
rtortajada/osgjs | sources/osgDB/Registry.js | <reponame>rtortajada/osgjs
'use strict';
var Notify = require( 'osg/notify' );
/**
* This is a very simplistic version of the OSG registry, we could
* expand/improve it in the future
*/
var Registry = {
instance: function () {
if ( !Registry._instance ) {
Registry._instance = Registry;
Registry._instance.plugins = new window.Map();
}
return Registry._instance;
},
// We register directly a plugin for a extension.
addReaderWriter: function ( extension, plugin ) {
if ( Registry.instance().plugins.get( extension ) !== undefined )
Notify.warn( 'the \'' + extension + '\' plugin already exists' );
Registry.instance().plugins.set( extension, plugin );
},
getReaderWriterForExtension: function ( name ) {
return Registry.instance().plugins.get( name );
}
};
module.exports = Registry;
|
paullewallencom/spring-978-1-7858-8827-4 | _src/Chapter 06/Ch06/src/main/java/org/packt/hotel/portal/controller/ThymeleafController.java | package org.packt.hotel.portal.controller;
import org.packt.hotel.portal.exception.GuestFoundException;
import org.packt.hotel.portal.model.data.HmsUserProfile;
import org.packt.hotel.portal.model.form.ProfileForm;
import org.packt.hotel.portal.service.HotelService;
import org.packt.hotel.portal.service.UserService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.ModelAttribute;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.servlet.ModelAndView;
@Controller
public class ThymeleafController {
@Autowired
private HotelService hotelService;
@Autowired
private UserService userService;
@RequestMapping(value = "/hms/thyme/users", method = RequestMethod.GET)
public String showUsers(Model model) {
model.addAttribute("users", hotelService.getUserProfiles());
return "result";
}
@RequestMapping(value = "/hms/thyme/addprofile", method = RequestMethod.GET)
public String showHome(Model model) {
ProfileForm user = new ProfileForm();
model.addAttribute("user", user);
return "home";
}
@RequestMapping(value = "/hms/guests/insert", method = RequestMethod.POST)
public String insertGuest(Model model, @ModelAttribute ProfileForm user) {
userService.addUser(user);
model.addAttribute("user", user);
return "home";
}
@ExceptionHandler({GuestFoundException.class})
public ModelAndView handleDatabaseError(GuestFoundException e) {
ModelAndView modelAndView = new ModelAndView();
modelAndView.setViewName("home");
modelAndView.addObject("errorMessage", "error.user.exist");
return modelAndView;
}
}
|
mepsd/CLAC | user_account/tests/test_views.py | <reponame>mepsd/CLAC<gh_stars>100-1000
from model_mommy import mommy
from calc.tests.common import ProtectedViewTestCase
from data_capture.models import SubmittedPriceList
class AccountTests(ProtectedViewTestCase):
url = '/account/'
def test_get_is_ok(self):
self.login()
res = self.client.get(self.url)
self.assertEqual(res.status_code, 200)
def test_has_context_vars(self):
self.login()
res = self.client.get(self.url)
ctx = res.context
self.assertIn('total_approved', ctx)
self.assertIn('total_unreviewed', ctx)
self.assertIn('total_rejected', ctx)
self.assertIn('total_submitted', ctx)
self.assertIn('recently_approved_price_lists', ctx)
self.assertIn('recently_submitted_price_lists', ctx)
def test_context_var_values_are_correct(self):
user = self.login()
mommy.make(SubmittedPriceList,
submitter=user,
status=SubmittedPriceList.STATUS_UNREVIEWED,
_quantity=6)
mommy.make(SubmittedPriceList,
submitter=user,
status=SubmittedPriceList.STATUS_APPROVED,
_quantity=6)
mommy.make(SubmittedPriceList,
submitter=user,
status=SubmittedPriceList.STATUS_REJECTED,
_quantity=1)
res = self.client.get(self.url)
ctx = res.context
self.assertEqual(ctx['total_approved'], 6)
self.assertEqual(ctx['total_unreviewed'], 6)
self.assertEqual(ctx['total_rejected'], 1)
self.assertEqual(ctx['total_submitted'], 13)
# The view should only show the 5 most recent price lists
self.assertEqual(len(ctx['recently_submitted_price_lists']), 5)
self.assertEqual(len(ctx['recently_approved_price_lists']), 5)
|
figment-networks/indexing-engine | worker/process/ranged/ranged.go | <gh_stars>10-100
package ranged
import (
"context"
"sync"
"github.com/figment-networks/indexing-engine/structs"
)
type hBTx struct {
Height uint64
Last bool
}
type OutH struct {
Height uint64
Block structs.BlockWithMeta
Error error
}
type BTX interface {
BlockAndTx(ctx context.Context, height uint64) (blockWM structs.BlockWithMeta, txsWM []structs.TransactionWithMeta, err error)
}
type RangeRequester struct {
BTX BTX
workers int
}
func NewRangeRequester(btx BTX, workers int) *RangeRequester {
return &RangeRequester{BTX: btx, workers: workers}
}
// getRange gets given range of blocks and transactions
func (rr *RangeRequester) GetRange(ctx context.Context, hr structs.HeightRange) (h structs.Heights, err error) {
errored := make(chan struct{})
fin := make(chan struct{}, rr.workers)
outH := make(chan OutH, rr.workers)
chH := make(chan hBTx, 10) //oHBTxPool.Get()
lock := &sync.Mutex{}
isErr := false
wg := &sync.WaitGroup{}
for i := 0; i < rr.workers; i++ {
wg.Add(1)
go rr.asyncBlockAndTx(ctx, wg, chH, outH, errored, lock, &isErr, fin)
}
go rr.populateRange(chH, hr, errored)
outHeight := &structs.Heights{}
var count int
RANGE_LOOP:
for {
select {
case <-fin:
count++
if count == rr.workers {
l := len(outH)
if l > 0 {
DRAIN:
for h := range outH {
l--
// DRAIN
if h.Error != nil {
err = h.Error
outHeight.ErrorAt = append(outHeight.ErrorAt, h.Height)
if l == 0 {
break DRAIN
}
continue
}
assign(outHeight, h)
if l == 0 {
break DRAIN
}
}
}
break RANGE_LOOP
}
case h, ok := <-outH:
if !ok {
break RANGE_LOOP
}
if h.Error != nil {
err = h.Error
outHeight.ErrorAt = append(outHeight.ErrorAt, h.Height)
continue
}
assign(outHeight, h)
case <-ctx.Done():
break RANGE_LOOP
}
}
wg.Wait()
close(outH)
if !isErr {
close(errored)
}
return *outHeight, err
}
func assign(outHeight *structs.Heights, h OutH) {
outHeight.Heights = append(outHeight.Heights, h.Height)
outHeight.NumberOfHeights++
outHeight.NumberOfTx += h.Block.Block.NumberOfTransactions
if outHeight.LatestData.LastTime.IsZero() || outHeight.LatestData.LastHeight <= h.Height {
outHeight.LatestData.LastEpoch = h.Block.Block.Epoch
outHeight.LatestData.LastHash = h.Block.Block.Hash
outHeight.LatestData.LastHeight = h.Height
outHeight.LatestData.LastTime = h.Block.Block.Time
}
}
func (rr *RangeRequester) asyncBlockAndTx(ctx context.Context, wg *sync.WaitGroup, cinn chan hBTx, out chan OutH, er chan struct{}, l *sync.Mutex, isErr *bool, fin chan struct{}) {
defer wg.Done()
for in := range cinn {
if in.Last {
fin <- struct{}{}
return
}
b, _, err := rr.BTX.BlockAndTx(ctx, in.Height)
l.Lock() // (lukanus): this lock is for errors from other asyncBlockAndTx
if !*isErr {
select {
case _, ok := <-er:
if !ok {
l.Unlock()
fin <- struct{}{}
return
}
case out <- OutH{Height: in.Height, Block: b, Error: err}:
}
if err != nil {
*isErr = true
close(er)
}
}
l.Unlock()
}
fin <- struct{}{}
}
func (rr *RangeRequester) populateRange(out chan hBTx, hr structs.HeightRange, er chan struct{}) {
height := hr.StartHeight
POPULATE:
for {
select {
case out <- hBTx{Height: height}:
case <-er:
break POPULATE
}
height++
if height > hr.EndHeight {
break POPULATE
}
}
out <- hBTx{Last: true}
close(out)
}
|
lszzy/FWFramework | Example/Mediator/Test/Test/Classes/Private/Framework/TestNotificationViewController.h | //
// TestNotificationViewController.h
// Example
//
// Created by wuyong on 2019/9/3.
// Copyright © 2019 wuyong.site. All rights reserved.
//
#import "TestViewController.h"
NS_ASSUME_NONNULL_BEGIN
/// Test view controller; per its name it exercises notification scenarios
/// (see the corresponding .m file for the concrete behavior).
@interface TestNotificationViewController : TestViewController
@end
NS_ASSUME_NONNULL_END
|
eaglesjava/kissy | src/tabs/src/tabs/panel.js | <filename>src/tabs/src/tabs/panel.js<gh_stars>1-10
/**
* @ignore
* single tab panel.
* @author <EMAIL>
*/
var Container = require('component/container');
/**
* KISSY.Tabs.Panel.xclass: 'tabs-panel'.
* @class KISSY.Tabs.Panel
* @extends KISSY.Component.Container
*/
module.exports = Container.extend({
isTabsPanel: 1,
beforeCreateDom: function (renderData) {
var self = this;
renderData.elAttrs.role = 'tabpanel';
if (renderData.selected) {
renderData.elCls.push(self.getBaseCssClasses('selected'));
} else {
renderData.elAttrs['aria-hidden'] = false;
}
},
_onSetSelected: function (v) {
var el = this.$el;
var selectedCls = this.getBaseCssClasses('selected');
el[v ? 'addClass' : 'removeClass'](selectedCls)
.attr('aria-hidden', !v);
}
}, {
ATTRS: {
allowTextSelection: {
value: true
},
focusable: {
value: false
},
handleGestureEvents: {
value: false
},
/**
* whether selected
* @cfg {Boolean} selected
*/
/**
* @ignore
*/
selected: {
render: 1,
sync: 0,
parse: function (el) {
return el.hasClass(this.getBaseCssClass('selected'));
}
}
},
xclass: 'tabs-panel'
}); |
ScalablyTyped/SlinkyTyped | o/office-js-preview/src/main/scala/typingsSlinky/officeJsPreview/Word/Interfaces/SettingCollectionLoadOptions.scala | <reponame>ScalablyTyped/SlinkyTyped
package typingsSlinky.officeJsPreview.Word.Interfaces
import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}
/**
*
* Contains the collection of {@link Word.Setting} objects.
*
* [Api set: WordApi BETA (PREVIEW ONLY)]
* @beta
*/
@js.native
trait SettingCollectionLoadOptions extends StObject {

  /**
    Specifying `$all` for the LoadOptions loads all the scalar properties (e.g.: `Range.address`) but not the navigational properties (e.g.: `Range.format.fill.color`).
    Each boolean flag below selects whether the corresponding property is
    fetched by `load()`.
    */
  @JSName("$all")
  var $all: js.UndefOr[Boolean] = js.native

  /**
    *
    * For EACH ITEM in the collection: Gets the key of the setting. Read only.
    *
    * [Api set: WordApi BETA (PREVIEW ONLY)]
    * @beta
    */
  var key: js.UndefOr[Boolean] = js.native

  /**
    *
    * For EACH ITEM in the collection: Gets or sets the value of the setting.
    *
    * [Api set: WordApi BETA (PREVIEW ONLY)]
    * @beta
    */
  var value: js.UndefOr[Boolean] = js.native
}
object SettingCollectionLoadOptions {

  /** Creates an empty load-options literal (no properties selected). */
  @scala.inline
  def apply(): SettingCollectionLoadOptions =
    js.Dynamic.literal().asInstanceOf[SettingCollectionLoadOptions]

  /** Fluent mutators for building up a load-options object in place. */
  @scala.inline
  implicit class SettingCollectionLoadOptionsMutableBuilder[Self <: SettingCollectionLoadOptions] (val x: Self) extends AnyVal {

    @scala.inline
    def set$all(value: Boolean): Self = StObject.set(x, "$all", value.asInstanceOf[js.Any])

    @scala.inline
    def set$allUndefined: Self = StObject.set(x, "$all", js.undefined)

    @scala.inline
    def setKey(value: Boolean): Self = StObject.set(x, "key", value.asInstanceOf[js.Any])

    @scala.inline
    def setKeyUndefined: Self = StObject.set(x, "key", js.undefined)

    @scala.inline
    def setValue(value: Boolean): Self = StObject.set(x, "value", value.asInstanceOf[js.Any])

    @scala.inline
    def setValueUndefined: Self = StObject.set(x, "value", js.undefined)
  }
}
|
davepallot/OSKAR | oskar/telescope/station/element/oskar_evaluate_dipole_pattern_cuda.h | <gh_stars>1-10
/*
* Copyright (c) 2014, The University of Oxford
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. Neither the name of the University of Oxford nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef OSKAR_EVALUATE_DIPOLE_PATTERN_CUDA_H_
#define OSKAR_EVALUATE_DIPOLE_PATTERN_CUDA_H_
/**
* @file oskar_evaluate_dipole_pattern_cuda.h
*/
#include <oskar_global.h>
#include <utility/oskar_vector_types.h>
#ifdef __cplusplus
extern "C" {
#endif
/**
* @brief
* Evaluates pattern of a perfect dipole at source positions using CUDA
* (single precision).
*
* @details
* This function evaluates the pattern of a perfect dipole antenna
* at the supplied source positions using CUDA.
*
* The magnitude of the dipole response is given by
*
* \f[
* E_{\theta^{'}} =
* \frac{\cos(\frac{kL}{2}\cos\phi\sin\theta) - \cos(\frac{kL}{2})}
* {\sqrt{1 - \cos^2\phi \sin^2\theta}};
* \f]
*
* where phi and theta are the angles measured from x to y and from z to xy,
* respectively.
*
* The supplied theta and phi positions of the sources are the <b>modified</b>
* source positions. They must be adjusted relative to a dipole with its axis
* oriented along the x-direction.
*
* Note that all pointers refer to device memory.
*
* @param[in] num_points Number of points.
* @param[in] d_theta Point position (modified) theta values in rad.
* @param[in] d_phi Point position (modified) phi values in rad.
* @param[in] freq_hz Observing frequency in Hz.
* @param[in] dipole_length_m Length of dipole in metres.
* @param[in] stride Stride into output arrays.
* @param[out] d_E_theta Response per point in E_theta.
* @param[out] d_E_phi Response per point in E_phi.
*/
OSKAR_EXPORT
void oskar_evaluate_dipole_pattern_cuda_f(int num_points,
const float* d_theta, const float* d_phi, float freq_hz,
float dipole_length_m, int stride,
float2* d_E_theta, float2* d_E_phi);
/**
* @brief
* Evaluates pattern of a perfect dipole at source positions using CUDA
* (scalar version, single precision).
*
* @details
* This function evaluates the scalar pattern of a perfect dipole antenna
* at the supplied source positions using CUDA.
*
* The supplied theta and phi positions of the sources are the <b>modified</b>
* source positions. They must be adjusted relative to a dipole with its axis
* oriented along the x-direction.
*
* Note that all pointers refer to device memory.
*
* @param[in] num_points Number of points.
* @param[in] d_theta Point position (modified) theta values in rad.
* @param[in] d_phi Point position (modified) phi values in rad.
* @param[in] freq_hz Observing frequency in Hz.
* @param[in] dipole_length_m Length of dipole in metres.
* @param[in] stride Stride into output array (normally 1).
* @param[out] d_pattern Response per point.
*/
OSKAR_EXPORT
void oskar_evaluate_dipole_pattern_scalar_cuda_f(int num_points,
const float* d_theta, const float* d_phi, float freq_hz,
float dipole_length_m, int stride, float2* d_pattern);
/**
* @brief
* Evaluates pattern of a perfect dipole at source positions using CUDA
* (double precision).
*
* @details
* This function evaluates the pattern of a perfect dipole antenna
* at the supplied source positions using CUDA.
*
* The magnitude of the dipole response is given by
*
* \f[
* E_{\theta^{'}} =
* \frac{\cos(\frac{kL}{2}\cos\phi\sin\theta) - \cos(\frac{kL}{2})}
* {\sqrt{1 - \cos^2\phi \sin^2\theta}};
* \f]
*
* where phi and theta are the angles measured from x to y and from z to xy,
* respectively.
*
* The supplied theta and phi positions of the sources are the <b>modified</b>
* source positions. They must be adjusted relative to a dipole with its axis
* oriented along the x-direction.
*
* Note that all pointers refer to device memory.
*
* @param[in] num_points Number of points.
* @param[in] d_theta Point position (modified) theta values in rad.
* @param[in] d_phi Point position (modified) phi values in rad.
* @param[in] freq_hz Observing frequency in Hz.
* @param[in] dipole_length_m Length of dipole in metres.
* @param[in] stride Stride into output arrays.
* @param[out] d_E_theta Response per point in E_theta.
* @param[out] d_E_phi Response per point in E_phi.
*/
OSKAR_EXPORT
void oskar_evaluate_dipole_pattern_cuda_d(int num_points,
const double* d_theta, const double* d_phi, double freq_hz,
double dipole_length_m, int stride,
double2* d_E_theta, double2* d_E_phi);
/**
* @brief
* Evaluates pattern of a perfect dipole at source positions using CUDA
* (scalar version, double precision).
*
* @details
* This function evaluates the scalar pattern of a perfect dipole antenna
* at the supplied source positions using CUDA.
*
* The supplied theta and phi positions of the sources are the <b>modified</b>
* source positions. They must be adjusted relative to a dipole with its axis
* oriented along the x-direction.
*
* Note that all pointers refer to device memory.
*
* @param[in] num_points Number of points.
* @param[in] d_theta Point position (modified) theta values in rad.
* @param[in] d_phi Point position (modified) phi values in rad.
* @param[in] freq_hz Observing frequency in Hz.
* @param[in] dipole_length_m Length of dipole in metres.
* @param[in] stride Stride into output array (normally 1).
* @param[out] d_pattern Response per point.
*/
OSKAR_EXPORT
void oskar_evaluate_dipole_pattern_scalar_cuda_d(int num_points,
const double* d_theta, const double* d_phi, double freq_hz,
double dipole_length_m, int stride, double2* d_pattern);
#ifdef __CUDACC__

/* Kernels. */

/* Polarised dipole response per point, single precision.
 * kL and cos_kL are presumably k*L/2 and cos(k*L/2) precomputed on the
 * host (the host wrappers above take freq_hz and dipole_length_m) --
 * confirm against the .cu implementation. Outputs are strided by
 * 'stride'. */
__global__
void oskar_evaluate_dipole_pattern_cudak_f(const int num_points,
    const float* restrict theta, const float* restrict phi,
    const float kL, const float cos_kL, const int stride,
    float2* E_theta, float2* E_phi);

/* Scalar dipole response per point, single precision. */
__global__
void oskar_evaluate_dipole_pattern_scalar_cudak_f(const int num_points,
    const float* restrict theta, const float* restrict phi,
    const float kL, const float cos_kL, const int stride,
    float2* restrict pattern);

/* Polarised dipole response per point, double precision. */
__global__
void oskar_evaluate_dipole_pattern_cudak_d(const int num_points,
    const double* restrict theta, const double* restrict phi,
    const double kL, const double cos_kL, const int stride,
    double2* E_theta, double2* E_phi);

/* Scalar dipole response per point, double precision. */
__global__
void oskar_evaluate_dipole_pattern_scalar_cudak_d(const int num_points,
    const double* restrict theta, const double* restrict phi,
    const double kL, const double cos_kL, const int stride,
    double2* restrict pattern);

#endif /* __CUDACC__ */
#ifdef __cplusplus
}
#endif
#endif /* OSKAR_EVALUATE_DIPOLE_PATTERN_CUDA_H_ */
|
Benefit-Zebra/ballistica | src/ballistica/input/device/test_input.h | <reponame>Benefit-Zebra/ballistica<gh_stars>100-1000
// Released under the MIT License. See LICENSE for details.
#ifndef BALLISTICA_INPUT_DEVICE_TEST_INPUT_H_
#define BALLISTICA_INPUT_DEVICE_TEST_INPUT_H_
#include "ballistica/ballistica.h"
namespace ballistica {
// Synthetic input device used for testing: feeds scripted joystick-style
// events (directional movement and button presses) into the game.
class TestInput {
 public:
  TestInput();
  virtual ~TestInput();

  // Advances the scripted input stream; called with the current time.
  void Process(millisecs_t time);

  // Returns the generator to its initial (pre-join) state.
  void Reset();

 private:
  void HandleAlreadyPressedTwice();

  // Directional axes: left/right and up/down.
  int lr_{};
  int ud_{};
  // Current button states.
  bool jump_pressed_{};
  bool bomb_pressed_{};
  bool pickup_pressed_{};
  bool punch_pressed_{};
  // Scheduling of the next synthetic event.
  millisecs_t next_event_time_{};
  // Window during which join presses are issued; end defaults far out.
  millisecs_t join_start_time_{};
  millisecs_t join_end_time_{9999};
  int join_press_count_{};
  bool reset_{true};
  // Backing joystick this test input drives; lifetime managed elsewhere
  // (raw pointer) -- TODO confirm ownership.
  Joystick* joystick_{};
  // One-shot logging flags.
  bool print_non_join_{};
  bool print_already_did2_{};
};
} // namespace ballistica
#endif // BALLISTICA_INPUT_DEVICE_TEST_INPUT_H_
|
arturodrigues/carbon | .storybook/theme-selectors.js | <filename>.storybook/theme-selectors.js
// Selector for the default (DLS) theme: no override flags.
export const dlsThemeSelector = {};

// Selector for the classic theme.
export const classicThemeSelector = {
  isClassic: true
};
|
zakharchenkoAndrii/expo | android/versioned-abis/expoview-abi43_0_0/src/main/java/abi43_0_0/host/exp/exponent/modules/api/reanimated/ReanimatedJSIModulePackage.java | <gh_stars>1000+
package abi43_0_0.host.exp.exponent.modules.api.reanimated;
import abi43_0_0.com.facebook.react.bridge.JSIModulePackage;
import abi43_0_0.com.facebook.react.bridge.JSIModuleProvider;
import abi43_0_0.com.facebook.react.bridge.JSIModuleSpec;
import abi43_0_0.com.facebook.react.bridge.JSIModuleType;
import abi43_0_0.com.facebook.react.bridge.JavaScriptContextHolder;
import abi43_0_0.com.facebook.react.bridge.ReactApplicationContext;
import java.util.Arrays;
import java.util.List;
public class ReanimatedJSIModulePackage implements JSIModulePackage {

  /**
   * Initializes Reanimated's NodesManager against the React context.
   * No JSI module specs are registered here; the returned list is
   * always empty.
   */
  @Override
  public List<JSIModuleSpec> getJSIModules(ReactApplicationContext reactApplicationContext, JavaScriptContextHolder jsContext) {
    reactApplicationContext
        .getNativeModule(ReanimatedModule.class)
        .getNodesManager()
        .initWithContext(reactApplicationContext);
    return Arrays.<JSIModuleSpec>asList();
  }
}
|
The-True-Hooha/datafaker | src/main/java/net/datafaker/NatoPhoneticAlphabet.java | package net.datafaker;
/** Faker provider for NATO phonetic alphabet code words (Alpha, Bravo, ...). */
public class NatoPhoneticAlphabet {

    private final Faker faker;

    protected NatoPhoneticAlphabet(Faker faker) {
        this.faker = faker;
    }

    /** Returns a random code word resolved from the YAML key
     *  {@code nato_phonetic_alphabet.code_word}. */
    public String codeWord() {
        return faker.fakeValuesService().resolve("nato_phonetic_alphabet.code_word", this, faker);
    }
}
0xlay/Distance | Client/Module/include/ProcessManagerModule.hpp | #pragma once
#include "Module.hpp"
#include <Tlhelp32.h>
#include <vector>
#include <variant>
namespace Distance::Module
{
/*
 * @brief The ProcessManager class is the interface for controlling the
 * ProcessManager module.
 */
class ProcessManager : public IModule
{
    using ProcessList = std::vector<PROCESSENTRY32>;
    using ErrorCode = DWORD;
    using ProcessTerminateFunc = ErrorCode(*)(DWORD);
    using GetProcessListFunc = std::variant<ProcessList, ErrorCode>(*)();

public:
    ~ProcessManager();

    // Terminates the process identified by pid.
    // Fix: removed [[nodiscard]] -- it is meaningless on a function
    // returning void and triggers a compiler warning.
    void terminate(DWORD pid);

    // Returns a snapshot of the running processes.
    [[nodiscard]] ProcessList processList();

    void run() override;
    void stop() override;

private:
    // Fix: zero-initialize the function pointers so a call made before
    // run() loads the module fails predictably (null) rather than
    // jumping through an indeterminate pointer.
    HMODULE hModule{};
    ProcessTerminateFunc ProcessTerminate{};
    GetProcessListFunc GetProcessList{};
};
} // Distance::Module
|
kinshuk4/algorithm-cpp | src/compete/hackerrank/closest-number.cpp | //closest-number.cpp
//Closest Number
//Weekly Challenges - Week 5
//Author: derekhh
#include<iostream>
#include<algorithm>
#include<climits>
#include <cmath>
using namespace std;
// For each query (a, b, x): print the multiple of x closest to a^b.
// Candidates are scanned in increasing order with a strict '<', so on an
// exact tie the smaller multiple is kept.
int main()
{
	int t;
	cin >> t;
	while (t--)
	{
		int a, b, x;
		cin >> a >> b >> x;
		if (b < 0)
		{
			//Now a ^ b \in [0,1)
			// Fractional value: the nearest multiple of x is 0, except
			// when a == 1 (so a^b == 1 exactly) and x == 1.
			if (x != 1) cout << 0 << endl;
			else
			{
				if (a == 1) cout << 1 << endl;
				else cout << 0 << endl;
			}
		}
		else if (b == 0)
		{
			// a^0 == 1: nearest multiple is 0 unless x == 1.
			if (x != 1) cout << 0 << endl;
			else cout << 1 << endl;
		}
		else
		{
			// General case: evaluate a^b in floating point, then probe
			// multiples of x around tmp/x to absorb rounding error.
			double tmp = pow(a, b);
			int lo = (int)(1.0 * tmp / x);
			double best = INT_MAX;
			int ans = 0;
			for (int i = lo - 5; i <= lo + 5; i++)
			{
				if (abs(1.0*i*x - tmp) < best)
					best = abs(1.0*i*x - tmp), ans = i;
			}
			cout << ans * x << endl;
		}
	}
	return 0;
}
nmahoney-pivotal/bosh-bootloader | vendor/github.com/genevieve/leftovers/aws/iam/users.go | package iam
import (
"fmt"
"strings"
awsiam "github.com/aws/aws-sdk-go/service/iam"
"github.com/genevieve/leftovers/aws/common"
)
// usersClient is the subset of the AWS IAM API needed to list and delete
// users; satisfied by the real IAM client and fakeable in tests.
type usersClient interface {
	ListUsers(*awsiam.ListUsersInput) (*awsiam.ListUsersOutput, error)
	DeleteUser(*awsiam.DeleteUserInput) (*awsiam.DeleteUserOutput, error)
}

// Users lists IAM users and prepares them (together with their user
// policies and access keys) for deletion.
type Users struct {
	client     usersClient
	logger     logger
	policies   userPolicies
	accessKeys accessKeys
}

// NewUsers wires a Users value with its AWS client, logger, and the
// collaborators that clean up user policies and access keys.
func NewUsers(client usersClient, logger logger, policies userPolicies, accessKeys accessKeys) Users {
	return Users{
		client:     client,
		logger:     logger,
		policies:   policies,
		accessKeys: accessKeys,
	}
}
// ListAll returns every deletable user matching filter, without prompting.
func (u Users) ListAll(filter string) ([]common.Deletable, error) {
	return u.getUsers(filter)
}
// List returns the deletable users matching filter, keeping only those
// the operator confirms at the interactive prompt.
func (u Users) List(filter string) ([]common.Deletable, error) {
	resources, err := u.getUsers(filter)
	if err != nil {
		return nil, err
	}

	// Renamed from "delete", which shadowed Go's built-in delete function.
	var toDelete []common.Deletable
	for _, r := range resources {
		proceed := u.logger.PromptWithDetails(r.Type(), r.Name())
		if !proceed {
			continue
		}
		toDelete = append(toDelete, r)
	}

	return toDelete, nil
}
func (u Users) getUsers(filter string) ([]common.Deletable, error) {
users, err := u.client.ListUsers(&awsiam.ListUsersInput{})
if err != nil {
return nil, fmt.Errorf("Listing users: %s", err)
}
var resources []common.Deletable
for _, r := range users.Users {
resource := NewUser(u.client, u.policies, u.accessKeys, r.UserName)
if !strings.Contains(resource.identifier, filter) {
continue
}
resources = append(resources, resource)
}
return resources, nil
}
|
zenOSmosis/reshell | src/hooks/useAppRegistrationLink.js | import { useCallback, useMemo } from "react";
// TODO: Refactor
import useAppOrchestrationContext from "./useAppOrchestrationContext";
// TODO: Move to @core/hooks
// FIXME: (jh) Is there any reason to not just use the appDescriptor itself,
// and not the ID?
// TODO: Document
/**
 * React hook resolving an app registration (by descriptor id) into its
 * display title plus a stable `link` callback that activates that
 * registration via the app-orchestration context.
 *
 * @param {string} appDescriptorID - ID of the app descriptor to resolve.
 * @return {{title: *, link: Function}} Memoized title and activation
 *   callback (both re-created only when the id or context changes).
 */
export default function useAppRegistrationLink(appDescriptorID) {
  const { getAppRegistrationTitleWithID, activateAppRegistrationWithID } =
    useAppOrchestrationContext();

  const title = useMemo(
    () => getAppRegistrationTitleWithID(appDescriptorID),
    [getAppRegistrationTitleWithID, appDescriptorID]
  );

  const link = useCallback(
    () => activateAppRegistrationWithID(appDescriptorID),
    [activateAppRegistrationWithID, appDescriptorID]
  );

  return {
    title,
    link,
  };
}
|
miguelsorianod/kas-fleet-manager | internal/kafka/internal/handlers/data_plane_kafka.go | <filename>internal/kafka/internal/handlers/data_plane_kafka.go
package handlers
import (
"github.com/bf2fc6cc711aee1a0c2a/kas-fleet-manager/internal/kafka/internal/api/private"
"github.com/bf2fc6cc711aee1a0c2a/kas-fleet-manager/internal/kafka/internal/presenters"
"github.com/bf2fc6cc711aee1a0c2a/kas-fleet-manager/internal/kafka/internal/services"
"github.com/bf2fc6cc711aee1a0c2a/kas-fleet-manager/pkg/handlers"
"net/http"
"github.com/bf2fc6cc711aee1a0c2a/kas-fleet-manager/pkg/errors"
"github.com/gorilla/mux"
)
// dataPlaneKafkaHandler serves the data-plane (agent-facing) Kafka
// endpoints: status reports from clusters and the list of ManagedKafkas
// assigned to a cluster.
type dataPlaneKafkaHandler struct {
	service      services.DataPlaneKafkaService
	kafkaService services.KafkaService
}

// NewDataPlaneKafkaHandler constructs the handler with its two service
// dependencies.
func NewDataPlaneKafkaHandler(service services.DataPlaneKafkaService, kafkaService services.KafkaService) *dataPlaneKafkaHandler {
	return &dataPlaneKafkaHandler{
		service:      service,
		kafkaService: kafkaService,
	}
}
// UpdateKafkaStatuses handles a data-plane agent's status report for the
// Kafka instances on cluster {id}: the request body is unmarshalled into
// a map of statuses (presumably keyed by kafka instance id -- confirm
// against the agent), converted to the internal representation, and
// applied via the DataPlaneKafkaService. Responds 200 on success.
func (h *dataPlaneKafkaHandler) UpdateKafkaStatuses(w http.ResponseWriter, r *http.Request) {
	clusterId := mux.Vars(r)["id"]
	var data = map[string]private.DataPlaneKafkaStatus{}

	cfg := &handlers.HandlerConfig{
		MarshalInto: &data,
		Validate:    []handlers.Validate{},
		Action: func() (interface{}, *errors.ServiceError) {
			ctx := r.Context()
			dataPlaneKafkaStatus := presenters.ConvertDataPlaneKafkaStatus(data)
			err := h.service.UpdateDataPlaneKafkaService(ctx, clusterId, dataPlaneKafkaStatus)
			return nil, err
		},
	}

	handlers.Handle(w, r, cfg, http.StatusOK)
}
// GetAll returns the ManagedKafka resources scheduled onto the given
// data-plane cluster, wrapped in a ManagedKafkaList envelope.
func (h *dataPlaneKafkaHandler) GetAll(w http.ResponseWriter, r *http.Request) {
	clusterID := mux.Vars(r)["id"]
	cfg := &handlers.HandlerConfig{
		Validate: []handlers.Validate{
			handlers.ValidateLength(&clusterID, "id", handlers.MinRequiredFieldLength, nil),
		},
		Action: func() (interface{}, *errors.ServiceError) {
			managedKafkas, err := h.kafkaService.GetManagedKafkaByClusterID(clusterID)
			if err != nil {
				return nil, err
			}

			managedKafkaList := private.ManagedKafkaList{
				Kind: "ManagedKafkaList",
				// Preallocate: the final length is known up front.
				Items: make([]private.ManagedKafka, 0, len(managedKafkas)),
			}
			// Index the slice instead of taking the address of a range
			// copy: avoids copying each element per iteration and the
			// pre-Go1.22 loop-variable aliasing hazard.
			for i := range managedKafkas {
				converted := presenters.PresentManagedKafka(&managedKafkas[i])
				managedKafkaList.Items = append(managedKafkaList.Items, converted)
			}
			return managedKafkaList, nil
		},
	}
	handlers.HandleGet(w, r, cfg)
}
|
labcart/micro-services-nutshell | order-service/src/main/java/io/github/devbhuwan/microservices/nutshell/order/service/api/RESTOrderCatalogService.java | <filename>order-service/src/main/java/io/github/devbhuwan/microservices/nutshell/order/service/api/RESTOrderCatalogService.java
package io.github.devbhuwan.microservices.nutshell.order.service.api;
import io.github.devbhuwan.microservices.nutshell.order.domain.Order;
import io.github.devbhuwan.microservices.nutshell.order.service.OrderApiService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
/**
* @author <NAME>
* @date 2017/05/19
*/
@RestController
@RequestMapping("/order")
public class RESTOrderCatalogService {

    @Autowired
    private OrderApiService orderApiService;

    /** GET /order/all — returns every order via the order API service. */
    @GetMapping("/all")
    public List<Order> orders() {
        return orderApiService.orders();
    }
}
|
ornata/llvm-project | libcxx/include/__algorithm/in_out_result.h | // -*- C++ -*-
//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#ifndef _LIBCPP___ALGORITHM_IN_OUT_RESULT_H
#define _LIBCPP___ALGORITHM_IN_OUT_RESULT_H
#include <__concepts/convertible_to.h>
#include <__config>
#include <__utility/move.h>
#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
# pragma GCC system_header
#endif
_LIBCPP_BEGIN_NAMESPACE_STD
#if _LIBCPP_STD_VER > 17 && !defined(_LIBCPP_HAS_NO_INCOMPLETE_RANGES)
namespace ranges {
// ranges::in_out_result holds the (input iterator, output iterator) pair
// returned by single-input/single-output range algorithms. The converting
// operators allow binding the result to one with compatible iterator
// types: the const& overload copies, the && overload moves.
template<class _InIter1, class _OutIter1>
struct in_out_result {
  _LIBCPP_NO_UNIQUE_ADDRESS _InIter1 in;
  _LIBCPP_NO_UNIQUE_ADDRESS _OutIter1 out;

  // Converting copy: requires both iterators convertible from const&.
  template <class _InIter2, class _OutIter2>
    requires convertible_to<const _InIter1&, _InIter2> && convertible_to<const _OutIter1&, _OutIter2>
  _LIBCPP_HIDE_FROM_ABI
  constexpr operator in_out_result<_InIter2, _OutIter2>() const & {
    return {in, out};
  }

  // Converting move.
  template <class _InIter2, class _OutIter2>
    requires convertible_to<_InIter1, _InIter2> && convertible_to<_OutIter1, _OutIter2>
  _LIBCPP_HIDE_FROM_ABI
  constexpr operator in_out_result<_InIter2, _OutIter2>() && {
    return {std::move(in), std::move(out)};
  }
};
} // namespace ranges
#endif // _LIBCPP_STD_VER > 17 && !defined(_LIBCPP_HAS_NO_INCOMPLETE_RANGES)
_LIBCPP_END_NAMESPACE_STD
#endif // _LIBCPP___ALGORITHM_IN_OUT_RESULT_H
|
PSP-Archive/System-Interface-Library | external/libwebmdec/test/seek.c | /*
* libwebmdec: a decoder library for WebM audio/video streams
* Copyright (c) 2014-2019 <NAME> <<EMAIL>>
*
* This software may be copied and redistributed under certain conditions;
* see the file "COPYING" in the source code distribution for details.
* NO WARRANTY is provided with this software.
*/
#include "test/test.h"
#include <stdlib.h>
#include <string.h>
/*************************************************************************/
/*********************** Individual test routines ************************/
/*************************************************************************/
/* Exercises basic seek behavior: reads several frames, seeks back to
 * just before a remembered frame, and checks the reported position.
 * Returns 1 on success (the assert_* macros return 0 on failure). */
static int test_seek_basic(void)
{
    webmdec_t *handle;
    assert_true(handle = open_test_file("test/data/no-audio.webm"));

    void *sixth_frame;
    int sixth_frame_length;
    double first_frame_time, sixth_frame_time;
    const void *video_data;
    int video_length;
    double video_time;

    /* Capture the timestamp of the very first frame. */
    assert_true(webmdec_read_frame(handle, &video_data, &video_length,
                                   &video_time, NULL, NULL, NULL));
    first_frame_time = video_time;

    /* Skip a few frames so we can seek to a keyframe. */
    assert_true(webmdec_read_frame(handle, &video_data, &video_length, NULL,
                                   NULL, NULL, NULL));
    assert_true(webmdec_read_frame(handle, &video_data, &video_length, NULL,
                                   NULL, NULL, NULL));
    assert_true(webmdec_read_frame(handle, &video_data, &video_length, NULL,
                                   NULL, NULL, NULL));
    assert_true(webmdec_read_frame(handle, &video_data, &video_length, NULL,
                                   NULL, NULL, NULL));

    /* Remember the sixth frame's data, length and timestamp.
     * NOTE(review): sixth_frame is never freed -- acceptable for a short
     * test process. */
    assert_true(webmdec_read_frame(handle, &video_data, &video_length,
                                   &video_time, NULL, NULL, NULL));
    assert_not_near(video_time, first_frame_time, 0.001);
    assert_true(sixth_frame = malloc(video_length));
    memcpy(sixth_frame, video_data, video_length);
    sixth_frame_length = video_length;
    sixth_frame_time = video_time;

    /* The seventh frame must differ from the sixth in time and content. */
    assert_true(webmdec_read_frame(handle, &video_data, &video_length,
                                   &video_time, NULL, NULL, NULL));
    assert_not_near(video_time, sixth_frame_time, 0.001);
    assert_true(video_length != sixth_frame_length
                || memcmp(video_data, sixth_frame, video_length) != 0);

    /* Seek back to just before the sixth frame and verify the position
     * reported by webmdec_tell(). */
    assert_true(webmdec_seek(handle, sixth_frame_time - 0.002));
    assert_near(webmdec_tell(handle), sixth_frame_time - 0.002, 0.001);
#if 0 /* FIXME: nestegg can't seem to seek to this keyframe */
    assert_true(webmdec_read_frame(handle, &video_data, &video_length,
                                   &video_time, NULL, NULL, NULL));
    assert_near(video_time, sixth_frame_time, 0.001);
    assert_true(video_length == sixth_frame_length);
    assert_true(memcmp(video_data, sixth_frame, video_length) == 0);
#endif

    webmdec_close(handle);
    return 1;
}
/*************************************************************************/
/****************************** Test runner ******************************/
/*************************************************************************/
/* Runs every seek test, accumulating results so all tests execute even
 * if an earlier one fails; returns 1 only when all passed. */
int test_seek(void)
{
    int all_passed = 1;
    all_passed &= test_seek_basic();
    return all_passed;
}
/*************************************************************************/
/*************************************************************************/
|
cloudfoundry-incubator/cf-networking | src/code.cloudfoundry.org/garden-external-networker/adapter/namespace_adapter.go | <reponame>cloudfoundry-incubator/cf-networking
package adapter
import "github.com/containernetworking/plugins/pkg/ns"
// NamespaceAdapter is a thin adapter over the containernetworking ns
// package, giving callers a receiver they can substitute in tests.
type NamespaceAdapter struct{}

// GetNS opens the network namespace at the given path (delegates to ns.GetNS).
func (n *NamespaceAdapter) GetNS(path string) (ns.NetNS, error) {
	return ns.GetNS(path)
}

// GetCurrentNS returns the currently active network namespace
// (delegates to ns.GetCurrentNS).
func (n *NamespaceAdapter) GetCurrentNS() (ns.NetNS, error) {
	return ns.GetCurrentNS()
}
|
victorovejero/ejemplos-2022 | tema-8/fizzbuzz/src/test/java/ActualFizzBuzzAppTest.java | import org.junit.Test;
import uk.co.compendiumdev.fizzbuzz.FizzBuzzConverter;
public class ActualFizzBuzzAppTest {

    /**
     * Smoke test: prints the FizzBuzz conversion of 1..100. There are no
     * assertions; it passes as long as convert() does not throw.
     */
    @Test
    public void outputTheHundredFizzBuzzes(){
        FizzBuzzConverter fizzBuzz = new FizzBuzzConverter();
        for(int i=1; i<=100; i++){
            System.out.println(fizzBuzz.convert(i));
        }
    }
}
|
ordinary-developer/education | books/tech/cpp/qt/m_shlee-qt_5_10/ch_14-events/02-qmouse_event/MouseObserver.hpp | <reponame>ordinary-developer/education
#pragma once
#include <QtWidgets>
// QLabel subclass that observes mouse interaction: press, release and
// move events are intercepted and reported via dump_event().
class MouseObserver : public QLabel {
public:
    MouseObserver(QWidget * pWgt = nullptr);

protected:
    // Qt mouse event handlers, overridden (final) to log the events.
    virtual void mousePressEvent(QMouseEvent* pEvent) final override;
    virtual void mouseReleaseEvent(QMouseEvent* pEvent) final override;
    virtual void mouseMoveEvent(QMouseEvent* pEvent) final override;

private:
    // Formats and reports pEvent, prefixed with strMsg.
    void dump_event(const QMouseEvent* const pEvent, const QString & strMsg);
    // Human-readable description of the active keyboard modifiers.
    QString modifier_info(const QMouseEvent * const pEvent);
    // Human-readable description of the mouse button(s) involved.
    QString button_info(const QMouseEvent* const pEvent);
};
|
cjoakim/azure-cosmosdb | code/java/async/src/main/java/org/cjoakim/cosmos/App.java | package org.cjoakim.cosmos;
import com.azure.cosmos.ConsistencyLevel;
import com.azure.cosmos.CosmosAsyncClient;
import com.azure.cosmos.CosmosAsyncContainer;
import com.azure.cosmos.CosmosAsyncDatabase;
import com.azure.cosmos.CosmosClientBuilder;
import com.azure.cosmos.CosmosException;
import com.azure.cosmos.models.CosmosContainerProperties;
import com.azure.cosmos.models.CosmosContainerRequestOptions;
import com.azure.cosmos.models.CosmosContainerResponse;
import com.azure.cosmos.models.CosmosDatabaseResponse;
import com.azure.cosmos.models.CosmosItemResponse;
import com.azure.cosmos.models.CosmosQueryRequestOptions;
import com.azure.cosmos.models.PartitionKey;
import com.azure.cosmos.models.ThroughputProperties;
import com.azure.cosmos.util.CosmosPagedFlux;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;
import java.time.Duration;
import java.util.ArrayList;
import java.util.stream.Collectors;
/**
* SELECT DISTINCT VALUE a.pk FROM airports a
* See https://github.com/Azure-Samples/azure-cosmos-java-sql-api-samples/blob/main/src/main/java/com/azure/cosmos/examples/crudquickstart/async/SampleCRUDQuickstartAsync.java
* https://docs.microsoft.com/en-us/azure/cosmos-db/create-sql-api-java?tabs=sync#clone-the-sample-application
*/
public class App
{
    // Constants:
    private static final String databaseName = "dev";
    private static final String containerName = "airports";

    // Class variables:
    private static CosmosAsyncClient client;
    private static CosmosAsyncDatabase database;
    private static CosmosAsyncContainer container;

    //protected static Logger logger = LoggerFactory.getLogger(App.class);

    /**
     * Entry point: builds the async client, resolves the database and
     * container references, and always closes the client afterwards.
     */
    public static void main( String[] args )
    {
        System.out.println( "start of main" );
        try {
            createClient();
            getDatabaseReference();
            getContainerReference();
        }
        catch (Exception e) {
            e.printStackTrace();
        }
        finally {
            close();
        }
        System.out.println( "end of main" );
    }

    /**
     * Builds the async Cosmos client from the AZURE_COSMOSDB_SQLDB_URI
     * and AZURE_COSMOSDB_SQLDB_KEY environment variables, using EVENTUAL
     * consistency and a (currently hard-coded) preferred region.
     */
    private static void createClient() throws Exception {
        String uri = envVar("AZURE_COSMOSDB_SQLDB_URI");
        String key = envVar("AZURE_COSMOSDB_SQLDB_KEY");
        String region = "East US"; //envVar("AZURE_COSMOSDB_SQLDB_PREF_REGION");
        ArrayList<String> prefRegions = new ArrayList<String>();
        prefRegions.add(region);

        client = new CosmosClientBuilder()
                .endpoint(uri)
                .key(key)
                .preferredRegions(prefRegions)
                .contentResponseOnWriteEnabled(true)
                .consistencyLevel(ConsistencyLevel.EVENTUAL)
                .buildAsyncClient();
        System.out.println("client created: " + client);
    }

    /**
     * Creates the database if absent and stores a reference to it;
     * block() makes the reactive call synchronous.
     */
    private static void getDatabaseReference() {
        Mono<CosmosDatabaseResponse> databaseResponseMono =
                client.createDatabaseIfNotExists(databaseName);
        databaseResponseMono.flatMap(databaseResponse -> {
            database = client.getDatabase(databaseResponse.getProperties().getId());
            System.out.println("getDatabaseReference completed: " + database.getId());
            return Mono.empty();
        }).block();
        System.out.println("database: " + database);
    }

    /**
     * Creates the container if absent (partition key /pk, 400 RU manual
     * throughput) and stores a reference to it.
     */
    private static void getContainerReference() {
        CosmosContainerProperties containerProperties =
                new CosmosContainerProperties(containerName, "/pk");
        Mono<CosmosContainerResponse> containerResponseMono =
                database.createContainerIfNotExists(
                        containerProperties, ThroughputProperties.createManualThroughput(400));
        containerResponseMono.flatMap(containerResponse -> {
            container = database.getContainer(containerResponse.getProperties().getId());
            System.out.println("getContainerReference completed; " + container.getId());
            return Mono.empty();
        }).block();
        System.out.println("container: " + container);
    }

    /**
     * Reads an environment variable, or null when unset.
     * Improvement: uses System.getenv(name) directly instead of
     * materializing the entire environment map with
     * System.getenv().get(name).
     */
    private static synchronized String envVar(String name) {
        return System.getenv(name);
    }

    /** Closes the Cosmos client if one was created. */
    private static void close() {
        if (client != null) {
            System.out.println("closing client");
            client.close();
        }
    }
}
|
criteo/berilla | src/main/scala/com/criteo/dev/cluster/copy/FullCopyTableAction.scala | <gh_stars>10-100
package com.criteo.dev.cluster.copy
import com.criteo.dev.cluster._
import com.criteo.dev.cluster.config.GlobalConfig
import org.slf4j.LoggerFactory
/**
* Copy over all table data.
*/
class FullCopyTableAction(config: GlobalConfig, conf: Map[String, String], source: Node, target: Node) {

  private val logger = LoggerFactory.getLogger(classOf[FullCopyTableAction])

  /**
   * Copies the table's data files (every partition, or the table
   * directory itself when unpartitioned) from source to target, and
   * returns the unchanged metadata.
   */
  def copy(tableInfo: TableInfo): TableInfo = {
    val database = tableInfo.database
    val table = tableInfo.ddl.table
    // assumes the DDL always carries a location -- TODO confirm upstream
    val location = tableInfo.ddl.location.get
    val partitions = tableInfo.partitions
    logger.info("Copying " + partitions.length + " partitions from " +
      database + "." + table)

    //for now, only support table location as the common location.
    val sourceCommon = CopyUtilities.getCommonLocation(location, partitions)

    //handle case of no partitions, and some partitions.
    val sourceLocations: Array[String] = {
      if (partitions.isEmpty) Array(location)
      else partitions.map(_.location)
    }
    val copyFileAction = CopyFileActionFactory.getCopyFileAction(config, source, target)
    copyFileAction(sourceLocations, sourceCommon, CopyUtilities.toRelative(sourceCommon))
    tableInfo
  }

  // Presumably superseded by CopyUtilities.getCommonLocation; kept for reference.
//  def getCommonLocation(partLocation: Array[String]): String = {
//    val location = partLocation.reduce[String] { case (prefix, cur) =>
//      prefix.zip(cur).takeWhile { case (a, b) => a == b }.map(_._1).mkString
//    }
//
//    CopyUtilities.getParent(location)
//  }
}
AudithSoftworks/LodgeOfSorceresses-Event-Planner | cypress/fixtures/xhr-operations/skills.js | <reponame>AudithSoftworks/LodgeOfSorceresses-Event-Planner
// Loads the skills fixture (aliased as @skills) and stubs the
// GET /api/skills endpoint to respond with it (aliased as @loadSkills).
export const skills = cy => {
    const fixtureChain = cy.fixture('.skills.json');
    fixtureChain.as('skills');

    const routeChain = cy.route({
        method: 'GET',
        url: '/api/skills',
        response: '@skills',
    });
    routeChain.as('loadSkills');
};
|
lucasccordeiro/cbmc | src/goto-programs/class_identifier.cpp | /*******************************************************************\
Module: Extract class identifier
Author: <NAME>, <EMAIL>
\*******************************************************************/
/// \file
/// Extract class identifier
#include "class_identifier.h"
#include <util/std_expr.h>
#include <util/c_types.h>
#include <util/namespace.h>
/// \par parameters: Struct expression
/// \return Member expression giving the clsid field of the input, or its
/// parent, grandparent, etc.
static exprt build_class_identifier(
  const exprt &src,
  const namespacet &ns)
{
  // the class identifier is in the root class
  exprt e=src;
  while(1)
  {
    // Resolve any symbol type to the concrete struct definition.
    const typet &type=ns.follow(e.type());
    const struct_typet &struct_type=to_struct_type(type);
    const struct_typet::componentst &components=struct_type.components();
    assert(!components.empty());
    // The first component is either the clsid field itself or the
    // base-class subobject.
    const auto &first_member_name=components.front().get_name();
    member_exprt member_expr(
      e,
      first_member_name,
      components.front().type());
    if(first_member_name=="@class_identifier")
    {
      // found it
      return member_expr;
    }
    else
    {
      // Descend: after the swap, e is the member expression selecting the
      // base-class component (member_expr already wraps the previous e).
      e.swap(member_expr);
    }
  }
}
/// \par parameters: Pointer expression of any pointer type, including void*,
/// and a recommended access type if the pointer is void-typed.
/// \return Member expression to access a class identifier, as above.
/// \par parameters: Pointer expression of any pointer type, including void*,
/// and a recommended access type if the pointer is void-typed.
/// \return Member expression to access a class identifier, as above.
exprt get_class_identifier_field(
  const exprt &this_expr_in,
  const symbol_typet &suggested_type,
  const namespacet &ns)
{
  assert(this_expr_in.type().id()==ID_pointer &&
         "Non-pointer this-arg in remove-virtuals?");

  // Use the pointer as-is when it already points at an object type;
  // a void* is first cast to a pointer to the suggested type.
  exprt pointer=this_expr_in;
  if(pointer.type().subtype()==empty_typet())
    pointer=typecast_exprt(pointer, pointer_type(suggested_type));

  const exprt object=dereference_exprt(pointer, pointer.type().subtype());
  return build_class_identifier(object, ns);
}
|
14ms/Minecraft-Disclosed-Source-Modifications | Catware/me/earth/phobos/features/modules/render/Ranges.java | <gh_stars>1-10
package me.earth.phobos.features.modules.render;
import java.awt.Color;
import java.util.ArrayList;
import me.earth.phobos.Phobos;
import me.earth.phobos.event.events.Render3DEvent;
import me.earth.phobos.features.modules.Module;
import me.earth.phobos.features.setting.Setting;
import me.earth.phobos.util.EntityUtil;
import me.earth.phobos.util.RenderUtil;
import net.minecraft.client.renderer.GlStateManager;
import net.minecraft.client.renderer.entity.RenderManager;
import net.minecraft.entity.Entity;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.util.math.RayTraceResult;
import net.minecraft.util.math.Vec3d;
import org.lwjgl.opengl.GL11;
/**
 * Render module: draws a colored circle of configurable radius around the
 * local player, and optionally "hit spheres" around other players.
 */
public class Ranges extends Module {
  // Draw spheres around other players (and optionally the local player).
  private final Setting<Boolean> hitSpheres = register(new Setting("HitSpheres", Boolean.valueOf(false)));
  // Draw the range circle around the local player.
  private final Setting<Boolean> circle = register(new Setting("Circle", Boolean.valueOf(true)));
  // Include the local player when drawing hit spheres.
  private final Setting<Boolean> ownSphere = register(new Setting("OwnSphere", Boolean.valueOf(false), v -> ((Boolean)this.hitSpheres.getValue()).booleanValue()));
  // Clamp circle segments against blocks via ray tracing.
  private final Setting<Boolean> raytrace = register(new Setting("RayTrace", Boolean.valueOf(false), v -> ((Boolean)this.circle.getValue()).booleanValue()));
  private final Setting<Float> lineWidth = register(new Setting("LineWidth", Float.valueOf(1.5F), Float.valueOf(0.1F), Float.valueOf(5.0F)));
  private final Setting<Double> radius = register(new Setting("Radius", Double.valueOf(4.5D), Double.valueOf(0.1D), Double.valueOf(8.0D)));
  public Ranges() {
    super("Ranges", "Draws a circle around the player.", Module.Category.RENDER, false, false, false);
  }
  public void onUpdate() {}
  // GL state changes below are order-dependent: push/enable on entry must be
  // mirrored by the disable/pop sequence at the end of the circle branch.
  public void onRender3D(Render3DEvent event) {
    if (((Boolean)this.circle.getValue()).booleanValue()) {
      GlStateManager.pushMatrix();
      GlStateManager.enableBlend();
      GlStateManager.disableTexture2D();
      GlStateManager.enableDepth();
      GlStateManager.tryBlendFuncSeparate(GlStateManager.SourceFactor.SRC_ALPHA, GlStateManager.DestFactor.ONE_MINUS_SRC_ALPHA, GlStateManager.SourceFactor.ONE, GlStateManager.DestFactor.ZERO);
      RenderManager renderManager = mc.getRenderManager();
      // Rainbow hue cycles once every 7.2 seconds.
      float hue = (float)(System.currentTimeMillis() % 7200L) / 7200.0F;
      Color color = new Color(Color.HSBtoRGB(hue, 1.0F, 1.0F));
      ArrayList<Vec3d> hVectors = new ArrayList<>();
      // Interpolated, render-relative player position for smooth drawing.
      double x = mc.player.lastTickPosX + (mc.player.posX - mc.player.lastTickPosX) * event.getPartialTicks() - renderManager.renderPosX;
      double y = mc.player.lastTickPosY + (mc.player.posY - mc.player.lastTickPosY) * event.getPartialTicks() - renderManager.renderPosY;
      double z = mc.player.lastTickPosZ + (mc.player.posZ - mc.player.lastTickPosZ) * event.getPartialTicks() - renderManager.renderPosZ;
      GL11.glLineWidth(((Float)this.lineWidth.getValue()).floatValue());
      // glBegin(1) == GL_LINES: each consecutive vertex pair is a segment.
      GL11.glBegin(1);
      // Sample 361 points around the circle; when RayTrace is on, clamp each
      // point to the first block hit between the player's eyes and the point.
      for (int i = 0; i <= 360; i++) {
        Vec3d vec = new Vec3d(x + Math.sin(i * Math.PI / 180.0D) * ((Double)this.radius.getValue()).doubleValue(), y + 0.1D, z + Math.cos(i * Math.PI / 180.0D) * ((Double)this.radius.getValue()).doubleValue());
        RayTraceResult result = mc.world.rayTraceBlocks(new Vec3d(mc.player.posX, mc.player.posY + mc.player.getEyeHeight(), mc.player.posZ), vec, false, false, true);
        if (result != null && ((Boolean)this.raytrace.getValue()).booleanValue()) {
          // NOTE(review): this logs once per segment per frame while
          // RayTrace is enabled -- looks like leftover debug output.
          Phobos.LOGGER.info("raytrace was not null");
          hVectors.add(result.hitVec);
        } else {
          hVectors.add(vec);
        }
      }
      // Emit the segments, advancing the hue slightly per segment
      // (0.0027778 ~= 1/360) for a rainbow gradient around the circle.
      for (int j = 0; j < hVectors.size() - 1; j++) {
        GL11.glColor4f(color.getRed() / 255.0F, color.getGreen() / 255.0F, color.getBlue() / 255.0F, color.getAlpha() / 255.0F);
        GL11.glVertex3d(((Vec3d)hVectors.get(j)).x, ((Vec3d)hVectors.get(j)).y, ((Vec3d)hVectors.get(j)).z);
        GL11.glVertex3d(((Vec3d)hVectors.get(j + 1)).x, ((Vec3d)hVectors.get(j + 1)).y, ((Vec3d)hVectors.get(j + 1)).z);
        color = new Color(Color.HSBtoRGB(hue += 0.0027777778F, 1.0F, 1.0F));
      }
      GL11.glEnd();
      GlStateManager.resetColor();
      GlStateManager.disableDepth();
      GlStateManager.enableTexture2D();
      GlStateManager.disableBlend();
      GlStateManager.popMatrix();
    }
    if (((Boolean)this.hitSpheres.getValue()).booleanValue())
      // Sphere color: blue for friends, green beyond 64 blocks,
      // otherwise a red shade that varies with distance.
      for (EntityPlayer player : mc.world.playerEntities) {
        if (player == null || (player.equals(mc.player) && !((Boolean)this.ownSphere.getValue()).booleanValue()))
          continue;
        Vec3d interpolated = EntityUtil.interpolateEntity((Entity)player, event.getPartialTicks());
        if (Phobos.friendManager.isFriend(player.getName())) {
          GL11.glColor4f(0.15F, 0.15F, 1.0F, 1.0F);
        } else if (mc.player.getDistance((Entity)player) >= 64.0F) {
          GL11.glColor4f(0.0F, 1.0F, 0.0F, 1.0F);
        } else {
          GL11.glColor4f(1.0F, mc.player.getDistance((Entity)player) / 150.0F, 0.0F, 1.0F);
        }
        RenderUtil.drawSphere(interpolated.x, interpolated.y, interpolated.z, ((Double)this.radius.getValue()).floatValue(), 20, 15);
      }
  }
}
|
GeneralZero/CS-577-Final-Project | attitude_adjustment/target/linux/s3c24xx/files-2.6.30/drivers/ar6000/include/dbglog_id.h | /*
*
* Copyright (c) 2004-2007 Atheros Communications Inc.
* All rights reserved.
*
* $ATH_LICENSE_HOSTSDK0_C$
*
* This file contains the definitions of the debug identifiers for different
* modules.
*
*/
#ifndef _DBGLOG_ID_H_
#define _DBGLOG_ID_H_
#ifdef __cplusplus
extern "C" {
#endif
/*
* The nomenclature for the debug identifiers is MODULE_DESCRIPTION.
* Please ensure that the definition of any new debugid introduced is captured
* between the <MODULE>_DBGID_DEFINITION_START and
* <MODULE>_DBGID_DEFINITION_END defines. The structure is required for the
* parser to correctly pick up the values for different debug identifiers.
*/
/* INF debug identifier definitions */
#define INF_DBGID_DEFINITION_START
#define INF_ASSERTION_FAILED 1
#define INF_TARGET_ID 2
#define INF_DBGID_DEFINITION_END
/* WMI debug identifier definitions */
#define WMI_DBGID_DEFINITION_START
#define WMI_CMD_RX_XTND_PKT_TOO_SHORT 1
#define WMI_EXTENDED_CMD_NOT_HANDLED 2
#define WMI_CMD_RX_PKT_TOO_SHORT 3
#define WMI_CALLING_WMI_EXTENSION_FN 4
#define WMI_CMD_NOT_HANDLED 5
#define WMI_IN_SYNC 6
#define WMI_TARGET_WMI_SYNC_CMD 7
#define WMI_SET_SNR_THRESHOLD_PARAMS 8
#define WMI_SET_RSSI_THRESHOLD_PARAMS 9
#define WMI_SET_LQ_TRESHOLD_PARAMS 10
#define WMI_TARGET_CREATE_PSTREAM_CMD 11
#define WMI_WI_DTM_INUSE 12
#define WMI_TARGET_DELETE_PSTREAM_CMD 13
#define WMI_TARGET_IMPLICIT_DELETE_PSTREAM_CMD 14
#define WMI_TARGET_GET_BIT_RATE_CMD 15
#define WMI_GET_RATE_MASK_CMD_FIX_RATE_MASK_IS 16
#define WMI_TARGET_GET_AVAILABLE_CHANNELS_CMD 17
#define WMI_TARGET_GET_TX_PWR_CMD 18
#define WMI_FREE_EVBUF_WMIBUF 19
#define WMI_FREE_EVBUF_DATABUF 20
#define WMI_FREE_EVBUF_BADFLAG 21
#define WMI_HTC_RX_ERROR_DATA_PACKET 22
#define WMI_HTC_RX_SYNC_PAUSING_FOR_MBOX 23
#define WMI_INCORRECT_WMI_DATA_HDR_DROPPING_PKT 24
#define WMI_SENDING_READY_EVENT 25
#define WMI_SETPOWER_MDOE_TO_MAXPERF 26
#define WMI_SETPOWER_MDOE_TO_REC 27
#define WMI_BSSINFO_EVENT_FROM 28
#define WMI_TARGET_GET_STATS_CMD 29
#define WMI_SENDING_SCAN_COMPLETE_EVENT 30
#define WMI_SENDING_RSSI_INDB_THRESHOLD_EVENT 31
#define WMI_SENDING_RSSI_INDBM_THRESHOLD_EVENT 32
#define WMI_SENDING_LINK_QUALITY_THRESHOLD_EVENT 33
#define WMI_SENDING_ERROR_REPORT_EVENT 34
#define WMI_SENDING_CAC_EVENT 35
#define WMI_TARGET_GET_ROAM_TABLE_CMD 36
#define WMI_TARGET_GET_ROAM_DATA_CMD 37
#define WMI_SENDING_GPIO_INTR_EVENT 38
#define WMI_SENDING_GPIO_ACK_EVENT 39
#define WMI_SENDING_GPIO_DATA_EVENT 40
#define WMI_CMD_RX 41
#define WMI_CMD_RX_XTND 42
#define WMI_EVENT_SEND 43
#define WMI_EVENT_SEND_XTND 44
#define WMI_DBGID_DEFINITION_END
/* CSERV debug identifier definitions */
#define CSERV_DBGID_DEFINITION_START
#define CSERV_BEGIN_SCAN1 1
#define CSERV_BEGIN_SCAN2 2
#define CSERV_END_SCAN1 3
#define CSERV_END_SCAN2 4
#define CSERV_CHAN_SCAN_START 5
#define CSERV_CHAN_SCAN_STOP 6
#define CSERV_CHANNEL_OPPPORTUNITY 7
#define CSERV_NC_TIMEOUT 8
#define CSERV_BACK_HOME 10
#define CSERV_CHMGR_CH_CALLBACK1 11
#define CSERV_CHMGR_CH_CALLBACK2 12
#define CSERV_CHMGR_CH_CALLBACK3 13
#define CSERV_SET_SCAN_PARAMS1 14
#define CSERV_SET_SCAN_PARAMS2 15
#define CSERV_SET_SCAN_PARAMS3 16
#define CSERV_SET_SCAN_PARAMS4 17
#define CSERV_ABORT_SCAN 18
#define CSERV_NEWSTATE 19
#define CSERV_MINCHMGR_OP_END 20
#define CSERV_CHMGR_OP_END 21
#define CSERV_DISCONNECT_TIMEOUT 22
#define CSERV_ROAM_TIMEOUT 23
#define CSERV_FORCE_SCAN1 24
#define CSERV_FORCE_SCAN2 25
#define CSERV_FORCE_SCAN3 26
#define CSERV_UTIL_TIMEOUT 27
#define CSERV_RSSIPOLLER 28
#define CSERV_RETRY_CONNECT_TIMEOUT 29
#define CSERV_RSSIINDBMPOLLER 30
#define CSERV_BGSCAN_ENABLE 31
#define CSERV_BGSCAN_DISABLE 32
#define CSERV_WLAN_START_SCAN_CMD1 33
#define CSERV_WLAN_START_SCAN_CMD2 34
#define CSERV_WLAN_START_SCAN_CMD3 35
#define CSERV_START_SCAN_CMD 36
#define CSERV_START_FORCE_SCAN 37
#define CSERV_NEXT_CHAN 38
#define CSERV_SET_REGCODE 39
#define CSERV_START_ADHOC 40
#define CSERV_ADHOC_AT_HOME 41
#define CSERV_OPT_AT_HOME 42
#define CSERV_WLAN_CONNECT_CMD 43
#define CSERV_WLAN_RECONNECT_CMD 44
#define CSERV_WLAN_DISCONNECT_CMD 45
#define CSERV_BSS_CHANGE_CHANNEL 46
#define CSERV_BEACON_RX 47
#define CSERV_KEEPALIVE_CHECK 48
#define CSERV_RC_BEGIN_SCAN 49
#define CSERV_RC_SCAN_START 50
#define CSERV_RC_SCAN_STOP 51
#define CSERV_RC_NEXT 52
#define CSERV_RC_SCAN_END 53
#define CSERV_PROBE_CALLBACK 54
#define CSERV_ROAM1 55
#define CSERV_ROAM2 56
#define CSERV_ROAM3 57
#define CSERV_CONNECT_EVENT 58
#define CSERV_DISCONNECT_EVENT 59
#define CSERV_BMISS_HANDLER1 60
#define CSERV_BMISS_HANDLER2 61
#define CSERV_BMISS_HANDLER3 62
#define CSERV_LOWRSSI_HANDLER 63
#define CSERV_WLAN_SET_PMKID_CMD 64
#define CSERV_RECONNECT_REQUEST 65
#define CSERV_KEYSPLUMBED_EVENT 66
#define CSERV_NEW_REG 67
#define CSERV_SET_RSSI_THOLD 68
#define CSERV_RSSITHRESHOLDCHECK 69
#define CSERV_RSSIINDBMTHRESHOLDCHECK 70
#define CSERV_WLAN_SET_OPT_CMD1 71
#define CSERV_WLAN_SET_OPT_CMD2 72
#define CSERV_WLAN_SET_OPT_CMD3 73
#define CSERV_WLAN_SET_OPT_CMD4 74
#define CSERV_SCAN_CONNECT_STOP 75
#define CSERV_BMISS_HANDLER4 76
#define CSERV_INITIALIZE_TIMER 77
#define CSERV_ARM_TIMER 78
#define CSERV_DISARM_TIMER 79
#define CSERV_UNINITIALIZE_TIMER 80
#define CSERV_DISCONNECT_EVENT2 81
#define CSERV_SCAN_CONNECT_START 82
#define CSERV_BSSINFO_MEMORY_ALLOC_FAILED 83
#define CSERV_SET_SCAN_PARAMS5 84
#define CSERV_DBGID_DEFINITION_END
/* TXRX debug identifier definitions */
#define TXRX_TXBUF_DBGID_DEFINITION_START
#define TXRX_TXBUF_ALLOCATE_BUF 1
#define TXRX_TXBUF_QUEUE_BUF_TO_MBOX 2
#define TXRX_TXBUF_QUEUE_BUF_TO_TXQ 3
#define TXRX_TXBUF_TXQ_DEPTH 4
#define TXRX_TXBUF_IBSS_QUEUE_TO_SFQ 5
#define TXRX_TXBUF_IBSS_QUEUE_TO_TXQ_FRM_SFQ 6
#define TXRX_TXBUF_INITIALIZE_TIMER 7
#define TXRX_TXBUF_ARM_TIMER 8
#define TXRX_TXBUF_DISARM_TIMER 9
#define TXRX_TXBUF_UNINITIALIZE_TIMER 10
#define TXRX_TXBUF_DBGID_DEFINITION_END
#define TXRX_RXBUF_DBGID_DEFINITION_START
#define TXRX_RXBUF_ALLOCATE_BUF 1
#define TXRX_RXBUF_QUEUE_TO_HOST 2
#define TXRX_RXBUF_QUEUE_TO_WLAN 3
#define TXRX_RXBUF_ZERO_LEN_BUF 4
#define TXRX_RXBUF_QUEUE_TO_HOST_LASTBUF_IN_RXCHAIN 5
#define TXRX_RXBUF_LASTBUF_IN_RXCHAIN_ZEROBUF 6
#define TXRX_RXBUF_QUEUE_EMPTY_QUEUE_TO_WLAN 7
#define TXRX_RXBUF_SEND_TO_RECV_MGMT 8
#define TXRX_RXBUF_SEND_TO_IEEE_LAYER 9
#define TXRX_RXBUF_DBGID_DEFINITION_END
#define TXRX_MGMTBUF_DBGID_DEFINITION_START
#define TXRX_MGMTBUF_ALLOCATE_BUF 1
#define TXRX_MGMTBUF_ALLOCATE_SM_BUF 2
#define TXRX_MGMTBUF_ALLOCATE_RMBUF 3
#define TXRX_MGMTBUF_GET_BUF 4
#define TXRX_MGMTBUF_GET_SM_BUF 5
#define TXRX_MGMTBUF_QUEUE_BUF_TO_TXQ 6
#define TXRX_MGMTBUF_REAPED_BUF 7
#define TXRX_MGMTBUF_REAPED_SM_BUF 8
#define TXRX_MGMTBUF_WAIT_FOR_TXQ_DRAIN 9
#define TXRX_MGMTBUF_WAIT_FOR_TXQ_SFQ_DRAIN 10
#define TXRX_MGMTBUF_ENQUEUE_INTO_SFQ 11
#define TXRX_MGMTBUF_DEQUEUE_FROM_SFQ 12
#define TXRX_MGMTBUF_PAUSE_TXQ 13
#define TXRX_MGMTBUF_RESUME_TXQ 14
#define TXRX_MGMTBUF_WAIT_FORTXQ_DRAIN_TIMEOUT 15
#define TXRX_MGMTBUF_DRAINQ 16
#define TXRX_MGMTBUF_INDICATE_Q_DRAINED 17
#define TXRX_MGMTBUF_DBGID_DEFINITION_END
/* PM (Power Module) debug identifier definitions */
#define PM_DBGID_DEFINITION_START
#define PM_INIT 1
#define PM_ENABLE 2
#define PM_SET_STATE 3
#define PM_SET_POWERMODE 4
#define PM_CONN_NOTIFY 5
#define PM_REF_COUNT_NEGATIVE 6
#define PM_APSD_ENABLE 7
#define PM_UPDATE_APSD_STATE 8
#define PM_CHAN_OP_REQ 9
#define PM_SET_MY_BEACON_POLICY 10
#define PM_SET_ALL_BEACON_POLICY 11
#define PM_SET_PM_PARAMS1 12
#define PM_SET_PM_PARAMS2 13
#define PM_ADHOC_SET_PM_CAPS_FAIL 14
#define PM_ADHOC_UNKNOWN_IBSS_ATTRIB_ID 15
#define PM_DBGID_DEFINITION_END
/* Wake on Wireless debug identifier definitions */
#define WOW_DBGID_DEFINITION_START
#define WOW_INIT 1
#define WOW_GET_CONFIG_DSET 2
#define WOW_NO_CONFIG_DSET 3
#define WOW_INVALID_CONFIG_DSET 4
#define WOW_USE_DEFAULT_CONFIG 5
#define WOW_SETUP_GPIO 6
#define WOW_INIT_DONE 7
#define WOW_SET_GPIO_PIN 8
#define WOW_CLEAR_GPIO_PIN 9
#define WOW_SET_WOW_MODE_CMD 10
#define WOW_SET_HOST_MODE_CMD 11
#define WOW_ADD_WOW_PATTERN_CMD 12
#define WOW_NEW_WOW_PATTERN_AT_INDEX 13
#define WOW_DEL_WOW_PATTERN_CMD 14
#define WOW_LIST_CONTAINS_PATTERNS 15
#define WOW_GET_WOW_LIST_CMD 16
#define WOW_INVALID_FILTER_ID 17
#define WOW_INVALID_FILTER_LISTID 18
#define WOW_NO_VALID_FILTER_AT_ID 19
#define WOW_NO_VALID_LIST_AT_ID 20
#define WOW_NUM_PATTERNS_EXCEEDED 21
#define WOW_NUM_LISTS_EXCEEDED 22
#define WOW_GET_WOW_STATS 23
#define WOW_CLEAR_WOW_STATS 24
#define WOW_WAKEUP_HOST 25
#define WOW_EVENT_WAKEUP_HOST 26
#define WOW_EVENT_DISCARD 27
#define WOW_PATTERN_MATCH 28
#define WOW_PATTERN_NOT_MATCH 29
#define WOW_PATTERN_NOT_MATCH_OFFSET 30
#define WOW_DISABLED_HOST_ASLEEP 31
#define WOW_ENABLED_HOST_ASLEEP_NO_PATTERNS 32
#define WOW_ENABLED_HOST_ASLEEP_NO_MATCH_FOUND 33
#define WOW_DBGID_DEFINITION_END
/* WHAL debug identifier definitions */
#define WHAL_DBGID_DEFINITION_START
#define WHAL_ERROR_ANI_CONTROL 1
#define WHAL_ERROR_CHIP_TEST1 2
#define WHAL_ERROR_CHIP_TEST2 3
#define WHAL_ERROR_EEPROM_CHECKSUM 4
#define WHAL_ERROR_EEPROM_MACADDR 5
#define WHAL_ERROR_INTERRUPT_HIU 6
#define WHAL_ERROR_KEYCACHE_RESET 7
#define WHAL_ERROR_KEYCACHE_SET 8
#define WHAL_ERROR_KEYCACHE_TYPE 9
#define WHAL_ERROR_KEYCACHE_TKIPENTRY 10
#define WHAL_ERROR_KEYCACHE_WEPLENGTH 11
#define WHAL_ERROR_PHY_INVALID_CHANNEL 12
#define WHAL_ERROR_POWER_AWAKE 13
#define WHAL_ERROR_POWER_SET 14
#define WHAL_ERROR_RECV_STOPDMA 15
#define WHAL_ERROR_RECV_STOPPCU 16
#define WHAL_ERROR_RESET_CHANNF1 17
#define WHAL_ERROR_RESET_CHANNF2 18
#define WHAL_ERROR_RESET_PM 19
#define WHAL_ERROR_RESET_OFFSETCAL 20
#define WHAL_ERROR_RESET_RFGRANT 21
#define WHAL_ERROR_RESET_RXFRAME 22
#define WHAL_ERROR_RESET_STOPDMA 23
#define WHAL_ERROR_RESET_RECOVER 24
#define WHAL_ERROR_XMIT_COMPUTE 25
#define WHAL_ERROR_XMIT_NOQUEUE 26
#define WHAL_ERROR_XMIT_ACTIVEQUEUE 27
#define WHAL_ERROR_XMIT_BADTYPE 28
#define WHAL_DBGID_DEFINITION_END
#ifdef __cplusplus
}
#endif
#endif /* _DBGLOG_ID_H_ */
|
ysBach/irafdocgen | iraf.v2161/pkg/cl/scan.c | /* Copyright(c) 1986 Association of Universities for Research in Astronomy Inc.
*/
#define import_spp
#define import_libc
#define import_stdio
#include <iraf.h>
#include "config.h"
#include "operand.h"
#include "param.h"
#include "grammar.h"
#include "task.h"
#include "errs.h"
#include "proto.h"
/*
* SCAN -- free-format and formatted scan functions.
*/
extern int cldebug;
extern char *nullstr;
extern char *eofstr;
extern char *indefstr;
extern char *indeflc;
#define MAXARGS 32
static int nscan_val=0; /* value returned by NSCAN intrinsic */
/* SCAN -- Perform the bulk of the scan,fscan intrinsic functions to do
* free-formatted reads into nargs params. Formatting is done by makeop()
* according to the type of the corresponding destination param.
* Destination may be "stdout".
*
* Nargs is the number of operands on the stack we need to deal with.
* They are all strings. The scan procedure is actually called to
* process calls to both the SCAN and FSCAN intrinsics. If scan was
* called, the argument "source" will be the string "stdin". If source
* is null, the source is given by the first operand on the stack; it
* may be the special string "stdin". Thereafter, there are exactly
* nargs-1 string operands each of which is the name of a destination
* parameter to be assigned. The operand order must be such that the
* first one popped is the name of the parameter to which the first field
* of the scan line is to be assigned.
*
* EOF or OK is returned as the function value. The number of items
* successfully scanned is returned by a subsequent call to NSCAN().
*
 * A query is issued if reading the list parameter yields an undefined value.
* error() may be called on various conditions.
*/
void
cl_scan (
    int nargs,
    char *source
)
{
	char	buf[SZ_LINE];
	char	*bp, *start, c;
	char	*pk, *t, *p, *f;
	char	field;
	struct	operand o;
	struct	param *pp;
	int	eoftst;

	eoftst = 0;

	/* Fill buf with the line to be scanned.
	 */
	if (strcmp (source, "stdin") == 0) {
	    /* Read from the standard input (SCAN call).
	     */
	    if (fgets (buf, SZ_LINE, currentask->t_stdin) == NULL)
		eoftst++;
	    else
		lentst (buf);

	    /* First arg is an output param, not source, so increment
	     * nargs.
	     */
	    nargs++;

	} else {
	    /* Get source name from first operand (FSCAN call)
	     */
	    o = popop();
	    if (!strcmp (o.o_val.v_s, "stdin") ||
		!strcmp (o.o_val.v_s, "STDIN")) {
		if (fgets (buf, SZ_LINE, currentask->t_stdin) == NULL)
		    eoftst++;
		else
		    lentst (buf);
	    } else {
		breakout (o.o_val.v_s, &pk, &t, &p, &f);
		pp = paramsrch (pk, t, p);
		paramget (pp, *f);
		opcast (OT_STRING);
		o = popop();

		if (pp->p_flags & P_LEOF)
		    eoftst++;
		else {
		    if (opundef (&o)) {
			query (pp);		/* pushes op */
			opcast (OT_STRING);
			o = popop();
		    }
		    strncpy (buf, o.o_val.v_s, SZ_LINE);
		    /* strncpy() does not NUL-terminate when the source is
		     * SZ_LINE chars or longer; terminate explicitly.
		     */
		    buf[SZ_LINE-1] = '\0';
		}
	    }
	}

	/* On EOF, flush the destination operands and return CL_EOF. */
	if (eoftst) {
	    o.o_type = OT_INT;
	    o.o_val.v_i = CL_EOF;
	    while (nargs-- > 0)
		popop();		/* flush op stack */
	    pushop (&o);
	    return;
	}

	/* Take each portion of buf and assign to the given parameter.
	 */
	bp = buf;
	nscan_val = 0;

	while (nargs-- > 0) {		/* get each destination name */
	    o = popop();
	    if (!strcmp (o.o_val.v_s, "stdout") ||
		!strcmp (o.o_val.v_s, "STDOUT")) {
		pp = NULL;
	    } else {
		breakout (o.o_val.v_s, &pk, &t, &p, &f);
		field = *f;
		pp = paramsrch (pk, t, p);	/* never returns NULL */
	    }

	    /* Assign rest of line if struct type parameter. For simple
	     * string or filename type params, the next whitespace delimited
	     * word is broken out (see below).
	     */
	    if (pp != NULL &&
		((pp->p_type & (PT_STRUCT|PT_IMCUR|PT_GCUR|PT_UKEY)) &&
		!(pp->p_type & (PT_FILNAM|PT_PSET|PT_LIST)))) {
		if (nargs != 0)
		    cl_error (E_UERR,
			"Struct type param must be final Scan argument");
		start = bp;
	    } else {
		/* Skip leading whitespace, then break out the next
		 * whitespace-delimited word in place.
		 */
		while (*bp == ' ' || *bp == '\t')
		    bp++;

		/* It is not an error if not all params can be filled by scan.
		 * Simply break off scan, pop the unused args off the stack,
		 * and return as the function value the number of items
		 * successfully scanned.
		 */
		if (*bp == '\0')
		    break;

		start = bp;
		for (c = *bp;  c != ' ' && c != '\t' && c != '\0';  c = *bp)
		    bp++;
		if (c != '\0')
		    *bp++ = '\0';
	    }

	    if (pp == NULL)
		fputs (start, currentask->t_stdout);
	    else {
		/* Convert the field to the parameter's datatype and
		 * assign it.
		 */
		o = makeop (start, pp->p_type & OT_BASIC);
		if (opundef (&o))
		    break;		/* cannot convert as basic type */
		pushop (&o);
		paramset (pp, field);
	    }

	    nscan_val++;
	}

	/* If we broke out of the above loop because of an unsuccessful
	 * conversion, we must pop the remaining unused operands off the stack.
	 */
	while (--nargs >= 0)
	    popop();

	o.o_type = OT_INT;
	o.o_val.v_i = nscan_val;
	pushop (&o);
}
/* CL_SCANF -- Formatted scan. Like SCAN except that a C-scanf like format
* statement is used to decode the input text.
*/
void
cl_scanf (
char *format,
int nargs,
char *input
)
{
int nscan_val, eoftst, n;
char *pk, *t, *p, *f;
struct operand o;
char buf[SZ_LINE];
char *v[MAXARGS];
struct param *pp;
eoftst = 0;
/* Fill buf with the line to be scanned.
*/
if (strcmp (input, "stdin") == 0) {
/* Read from the standard input (SCANF).
*/
if (fgets (buf, SZ_LINE, currentask->t_stdin) == NULL)
eoftst++;
else
lentst (buf);
/* First arg is an output param, not source, so increment nargs. */
nargs++;
} else {
/* Get source name from first operand (FSCANF).
*/
o = popop();
if (!strcmp (o.o_val.v_s, "stdin") ||
!strcmp (o.o_val.v_s, "STDIN")) {
if (fgets (buf, SZ_LINE, currentask->t_stdin) == NULL)
eoftst++;
else
lentst (buf);
} else {
breakout (o.o_val.v_s, &pk, &t, &p, &f);
pp = paramsrch (pk, t, p);
paramget (pp, *f);
opcast (OT_STRING);
o = popop();
if (pp->p_flags & P_LEOF)
eoftst++;
else {
if (opundef (&o)) {
query (pp); /* pushes op */
opcast (OT_STRING);
o = popop();
}
strncpy (buf, o.o_val.v_s, SZ_LINE);
}
}
}
/* Check for EOF. */
if (eoftst) {
o.o_type = OT_INT;
o.o_val.v_i = CL_EOF;
while (nargs-- > 0)
popop(); /* flush op stack */
pushop (&o);
return;
}
/* Process the stacked operands and build the argument list for
* the scanf call. Each argument pointer points directly to the
* stored parameter value in the parameter descriptor.
*/
for (n=0; --nargs >= 0; n++) {
/* Stacked operand is parameter name. */
o = popop();
breakout (o.o_val.v_s, &pk, &t, &p, &f);
pp = paramsrch (pk, t, p);
/* Add address of parameter value to argument list. First set
* the value with PARAMSET, to make sure that the pset knows
* that the value has been modified.
*/
switch (pp->p_valo.o_type & OT_BASIC) {
case OT_BOOL:
o = makeop ("yes", OT_BOOL); pushop (&o);
paramset (pp, FN_VALUE);
v[n] = (char *) &pp->p_valo.o_val.v_i;
break;
case OT_INT:
o = makeop ("0", OT_INT); pushop (&o);
paramset (pp, FN_VALUE);
v[n] = (char *) &pp->p_valo.o_val.v_i;
break;
case OT_REAL:
o = makeop ("0", OT_REAL); pushop (&o);
paramset (pp, FN_VALUE);
v[n] = (char *) &pp->p_valo.o_val.v_r;
break;
case OT_STRING:
o = makeop ("", OT_STRING); pushop (&o);
paramset (pp, FN_VALUE);
v[n] = (char *) pp->p_valo.o_val.v_s;
break;
default:
cl_error (E_UERR, "scanf: cannot scan into %s\n", o.o_val.v_s);
}
}
/* Perform the scan. */
nscan_val = sscanf (buf, format,
v[ 0], v[ 1], v[ 2], v[ 3], v[ 4], v[ 5], v[ 6], v[ 7],
v[ 8], v[ 9], v[10], v[11], v[12], v[13], v[14], v[15],
v[16], v[17], v[18], v[19], v[20], v[21], v[22], v[23],
v[24], v[25], v[26], v[27], v[28], v[29], v[30], v[31]);
o.o_type = OT_INT;
o.o_val.v_i = nscan_val;
pushop (&o);
}
/* GET_NSCANVAL -- Return the number of items successfully scanned in the
* last call to SCAN.
*/
/* GET_NSCANVAL -- Report how many fields were successfully assigned by the
 * most recent scan-family call (the NSCAN intrinsic).
 */
int
get_nscanval (void)
{
	return nscan_val;
}
/* LENTST -- Test that the scan line just read did not overflow the line
* buffer.
*/
/* LENTST -- Verify that a freshly read scan line fit within the line buffer:
 * strip the trailing newline when present, otherwise abort with an error
 * since the line must have been truncated.
 */
void
lentst (
    char *buf
)
{
	char	*index();
	char	*nl;

	nl = index (buf, '\n');
	if (nl == NULL)
	    cl_error (E_UERR, "scan limited to %d char lines", SZ_LINE-1);
	else
	    *nl = '\0';
}
|
zhouhaifeng/vpe | src/frr/bfdd/bfdd.c | <gh_stars>0
/*
* BFD daemon code
* Copyright (C) 2018 Network Device Education Foundation, Inc. ("NetDEF")
*
* FRR is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2, or (at your option) any
* later version.
*
* FRR is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with FRR; see the file COPYING. If not, write to the Free
* Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
* 02111-1307, USA.
*/
#include <zebra.h>
#include <arpa/inet.h>
#include <netinet/in.h>
#include <sys/socket.h>
#include <sys/un.h>
#include <err.h>
#include "filter.h"
#include "if.h"
#include "vrf.h"
#include "bfd.h"
#include "bfdd_nb.h"
#include "bfddp_packet.h"
#include "lib/version.h"
#include "lib/command.h"
/*
* FRR related code.
*/
/* Memory group/type tags used to account bfdd's allocations. */
DEFINE_MGROUP(BFDD, "Bidirectional Forwarding Detection Daemon");
DEFINE_MTYPE(BFDD, BFDD_CONTROL, "long-lived control socket memory");
DEFINE_MTYPE(BFDD, BFDD_NOTIFICATION, "short-lived control notification data");
/* Master of threads. */
struct thread_master *master;
/* BFDd privileges */
static zebra_capabilities_t _caps_p[] = {ZCAP_BIND, ZCAP_SYS_ADMIN, ZCAP_NET_RAW};
/* BFD daemon information. */
static struct frr_daemon_info bfdd_di;
/* Close the descriptor pointed to by `s` (if it is open) and mark it
 * closed by storing -1.  A close(2) failure is logged but not fatal. */
void socket_close(int *s)
{
	int fd = *s;

	if (fd <= 0)
		return;

	if (close(fd) != 0)
		zlog_err("%s: close(%d): (%d) %s", __func__, fd, errno,
			 strerror(errno));

	*s = -1;
}
/* SIGUSR1: rotate (reopen) the log files. */
static void sigusr1_handler(void)
{
	zlog_rotate();
}
/* SIGTERM/SIGINT: orderly shutdown.  Inputs are stopped first (zebra,
 * control socket), then protocol state is freed, then the FRR
 * infrastructure is torn down before exiting. */
static void sigterm_handler(void)
{
	bglobal.bg_shutdown = true;
	/* Signalize shutdown. */
	frr_early_fini();
	/* Stop receiving message from zebra. */
	bfdd_zclient_stop();
	/* Shutdown controller to avoid receiving any more commands. */
	control_shutdown();
	/* Shutdown and free all protocol related memory. */
	bfd_shutdown();
	bfd_vrf_terminate();
	/* Terminate and free() FRR related memory. */
	frr_fini();
	exit(0);
}
/* SIGHUP: re-read the daemon's configuration file. */
static void sighup_handler(void)
{
	zlog_info("SIGHUP received");
	/* Reload config file. */
	vty_read_config(NULL, bfdd_di.config_file, config_default);
}
/* Signal dispatch table handed to the FRR signal framework via
 * FRR_DAEMON_INFO below. */
static struct quagga_signal_t bfd_signals[] = {
	{
		.signal = SIGUSR1,
		.handler = &sigusr1_handler,
	},
	{
		.signal = SIGTERM,
		.handler = &sigterm_handler,
	},
	{
		.signal = SIGINT,
		.handler = &sigterm_handler,
	},
	{
		.signal = SIGHUP,
		.handler = &sighup_handler,
	},
};
/* Northbound YANG modules implemented by bfdd. */
static const struct frr_yang_module_info *const bfdd_yang_modules[] = {
	&frr_filter_info,
	&frr_interface_info,
	&frr_bfdd_info,
	&frr_vrf_info,
};
FRR_DAEMON_INFO(bfdd, BFD, .vty_port = 2617,
		.proghelp = "Implementation of the BFD protocol.",
		.signals = bfd_signals, .n_signals = array_size(bfd_signals),
		.privs = &bglobal.bfdd_privs,
		.yang_modules = bfdd_yang_modules,
		.n_yang_modules = array_size(bfdd_yang_modules),
);
/* Long-option identifiers with no short-option equivalent. */
#define OPTION_CTLSOCK 1001
#define OPTION_DPLANEADDR 2000
static const struct option longopts[] = {
	{"bfdctl", required_argument, NULL, OPTION_CTLSOCK},
	{"dplaneaddr", required_argument, NULL, OPTION_DPLANEADDR},
	{0}
};
/*
 * BFD daemon related code.
 */
/* Global daemon state shared across the BFD modules. */
struct bfd_global bglobal;
/* String names for the BFD diagnostic codes. */
const struct bfd_diag_str_list diag_list[] = {
	{.str = "control-expired", .type = BD_CONTROL_EXPIRED},
	{.str = "echo-failed", .type = BD_ECHO_FAILED},
	{.str = "neighbor-down", .type = BD_NEIGHBOR_DOWN},
	{.str = "forwarding-reset", .type = BD_FORWARDING_RESET},
	{.str = "path-down", .type = BD_PATH_DOWN},
	{.str = "concatenated-path-down", .type = BD_CONCATPATH_DOWN},
	{.str = "administratively-down", .type = BD_ADMIN_DOWN},
	{.str = "reverse-concat-path-down", .type = BD_REVCONCATPATH_DOWN},
	{.str = NULL},
};
/* String names for the BFD session states. */
const struct bfd_state_str_list state_list[] = {
	{.str = "admin-down", .type = PTM_BFD_ADM_DOWN},
	{.str = "down", .type = PTM_BFD_DOWN},
	{.str = "init", .type = PTM_BFD_INIT},
	{.str = "up", .type = PTM_BFD_UP},
	{.str = NULL},
};
/* Parse a decimal TCP/UDP port number from `str`.
 *
 * Exits the process with a non-zero status (consistent with the other
 * fatal-parse paths in this file, which use exit(1)/errx(1)) when the
 * string is not a number, is out of the 1..65535 port range, or has
 * trailing garbage.  The previous version exited with status 0 (success)
 * on these errors and rejected the valid port 65535.
 */
static uint16_t
parse_port(const char *str)
{
	char *nulbyte;
	long rv;

	errno = 0;
	rv = strtol(str, &nulbyte, 10);
	/* No conversion performed (no digits consumed). */
	if (nulbyte == str || (rv == 0 && errno == EINVAL)) {
		fprintf(stderr, "invalid BFD data plane address port: %s\n",
			str);
		exit(1);
	}
	/* Invalid number range: ports are 1..65535. */
	if ((rv <= 0 || rv > 65535) || errno == ERANGE) {
		fprintf(stderr, "invalid BFD data plane port range: %s\n",
			str);
		exit(1);
	}
	/* There was garbage at the end of the string. */
	if (*nulbyte != 0) {
		fprintf(stderr, "invalid BFD data plane port: %s\n",
			str);
		exit(1);
	}

	return (uint16_t)rv;
}
/* Parse a "<type>:<address>" BFD data plane specification and open the
 * distributed data plane socket.  Supported types are unix, ipv4 and ipv6;
 * a trailing "c" (unixc/ipv4c/ipv6c) selects client mode.  Exits the
 * process on any parse error. */
static void
distributed_bfd_init(const char *arg)
{
	char *sptr, *saux;
	bool is_client = false;
	size_t slen;
	socklen_t salen;
	char addr[64];
	char type[64];
	union {
		struct sockaddr_in sin;
		struct sockaddr_in6 sin6;
		struct sockaddr_un sun;
	} sa;
	/* Basic parsing: find ':' to figure out type part and address part. */
	sptr = strchr(arg, ':');
	if (sptr == NULL) {
		fprintf(stderr, "invalid BFD data plane socket: %s\n", arg);
		exit(1);
	}
	/* Calculate type string length. */
	slen = (size_t)(sptr - arg);
	/* Copy the address part. */
	sptr++;
	strlcpy(addr, sptr, sizeof(addr));
	/* Copy type part. */
	strlcpy(type, arg, slen + 1);
	/* Reset address data. */
	memset(&sa, 0, sizeof(sa));
	/* Fill the address information. */
	if (strcmp(type, "unix") == 0 || strcmp(type, "unixc") == 0) {
		if (strcmp(type, "unixc") == 0)
			is_client = true;
		salen = sizeof(sa.sun);
		sa.sun.sun_family = AF_UNIX;
		strlcpy(sa.sun.sun_path, addr, sizeof(sa.sun.sun_path));
	} else if (strcmp(type, "ipv4") == 0 || strcmp(type, "ipv4c") == 0) {
		if (strcmp(type, "ipv4c") == 0)
			is_client = true;
		salen = sizeof(sa.sin);
		sa.sin.sin_family = AF_INET;
		/* Parse port if any. */
		sptr = strchr(addr, ':');
		if (sptr == NULL) {
			sa.sin.sin_port = htons(BFD_DATA_PLANE_DEFAULT_PORT);
		} else {
			*sptr = 0;
			sa.sin.sin_port = htons(parse_port(sptr + 1));
		}
		if (inet_pton(AF_INET, addr, &sa.sin.sin_addr) != 1)
			errx(1, "%s: inet_pton: invalid address %s", __func__,
			     addr);
	} else if (strcmp(type, "ipv6") == 0 || strcmp(type, "ipv6c") == 0) {
		if (strcmp(type, "ipv6c") == 0)
			is_client = true;
		salen = sizeof(sa.sin6);
		sa.sin6.sin6_family = AF_INET6;
		/* Check for IPv6 enclosures '[]' */
		sptr = &addr[0];
		if (*sptr != '[')
			errx(1, "%s: invalid IPv6 address format: %s", __func__,
			     addr);
		saux = strrchr(addr, ']');
		if (saux == NULL)
			errx(1, "%s: invalid IPv6 address format: %s", __func__,
			     addr);
		/* Consume the '[]:' part.  The brackets are stripped in
		 * place, so `addr` is left holding just the bare address. */
		slen = saux - sptr;
		memmove(addr, addr + 1, slen);
		addr[slen - 1] = 0;
		/* Parse port if any. */
		saux++;
		sptr = strrchr(saux, ':');
		if (sptr == NULL) {
			sa.sin6.sin6_port = htons(BFD_DATA_PLANE_DEFAULT_PORT);
		} else {
			*sptr = 0;
			sa.sin6.sin6_port = htons(parse_port(sptr + 1));
		}
		if (inet_pton(AF_INET6, addr, &sa.sin6.sin6_addr) != 1)
			errx(1, "%s: inet_pton: invalid address %s", __func__,
			     addr);
	} else {
		fprintf(stderr, "invalid BFD data plane socket type: %s\n",
			type);
		exit(1);
	}
	/* Initialize BFD data plane listening socket. */
	bfd_dplane_init((struct sockaddr *)&sa, salen, is_client);
}
/*
 * Initialize the BFD daemon's global state.
 *
 * Builds the privilege-drop descriptor (user/group/vty group are compile-time
 * options, capabilities come from the file-scope _caps_p table), initializes
 * the global control-socket and observer lists, and keeps a copy of the
 * privilege descriptor in bglobal for later use (e.g. the zclient init in
 * main()).
 */
static void bg_init(void)
{
	struct zebra_privs_t bfdd_privs = {
#if defined(FRR_USER) && defined(FRR_GROUP)
		.user = FRR_USER,
		.group = FRR_GROUP,
#endif
#if defined(VTY_GROUP)
		.vty_group = VTY_GROUP,
#endif
		.caps_p = _caps_p,
		.cap_num_p = array_size(_caps_p),
		.cap_num_i = 0,
	};

	/* Empty the control-socket client list and the observer list. */
	TAILQ_INIT(&bglobal.bg_bcslist);
	TAILQ_INIT(&bglobal.bg_obslist);

	/* bfdd_privs is a stack local; persist it in the global state. */
	memcpy(&bglobal.bfdd_privs, &bfdd_privs,
	       sizeof(bfdd_privs));
}
/*
 * bfdd entry point: parse daemon options, bring up FRR infrastructure, the
 * control socket, BFD state, the zebra connection and (optionally) the BFD
 * distributed data plane, then enter the event loop.
 */
int main(int argc, char *argv[])
{
	char ctl_path[512], dplane_addr[512];
	bool ctlsockused = false;
	int opt;

	/* Initialize system sockets. */
	bg_init();

	frr_preinit(&bfdd_di, argc, argv);
	frr_opt_add("", longopts,
		    " --bfdctl Specify bfdd control socket\n"
		    " --dplaneaddr Specify BFD data plane address\n");

	/* Default control socket path (no pathspace prefix/suffix). */
	snprintf(ctl_path, sizeof(ctl_path), BFDD_CONTROL_SOCKET,
		 "", "");

	while (true) {
		opt = frr_getopt(argc, argv, NULL);
		if (opt == EOF)
			break;

		switch (opt) {
		case OPTION_CTLSOCK:
			strlcpy(ctl_path, optarg, sizeof(ctl_path));
			ctlsockused = true;
			break;
		case OPTION_DPLANEADDR:
			/* dplane_addr is only read below when bg_use_dplane
			 * is set here, so the buffer is never used
			 * uninitialized. */
			strlcpy(dplane_addr, optarg, sizeof(dplane_addr));
			bglobal.bg_use_dplane = true;
			break;

		default:
			frr_help_exit(1);
		}
	}

	/* A network pathspace was requested and the user did not override the
	 * control socket: derive the socket path from the pathspace name. */
	if (bfdd_di.pathspace && !ctlsockused)
		snprintf(ctl_path, sizeof(ctl_path), BFDD_CONTROL_SOCKET,
			 "/", bfdd_di.pathspace);

	/* Initialize FRR infrastructure. */
	master = frr_init();

	/* Initialize control socket. */
	control_init(ctl_path);

	/* Initialize BFD data structures. */
	bfd_initialize();
	bfd_vrf_init();
	access_list_init();

	/* Initialize zebra connection (uses the privileges set up in bg_init). */
	bfdd_zclient_init(&bglobal.bfdd_privs);

	/* Schedule read events on the control socket's accept fd. */
	thread_add_read(master, control_accept, NULL, bglobal.bg_csock,
			&bglobal.bg_csockev);

	/* Install commands. */
	bfdd_vty_init();

	/* read configuration file and daemonize */
	frr_config_fork();

	/* Initialize BFD data plane listening socket. */
	if (bglobal.bg_use_dplane)
		distributed_bfd_init(dplane_addr);

	frr_run(master);

	/* NOTREACHED */
	return 0;
}
|
fullirondesign/strapi-heroku | web-client/.cache/async-requires.js | // prefer default export if available
// Gatsby build artifact (.cache/async-requires.js): maps page component chunk
// names to lazily-imported modules. Do not edit by hand; regenerated on build.

// Unwrap a module namespace: use its default export when present,
// otherwise return the namespace object itself.
const preferDefault = m => m && m.default || m

// One dynamic import per page component; the webpackChunkName magic comment
// fixes the emitted chunk's name so it matches the key.
exports.components = {
  "component---src-pages-index-js": () => import("C:\\Users\\ilyafefelov\\Dropbox\\Code\\strapi-heroku\\good-ua\\web-client\\src\\pages\\index.js" /* webpackChunkName: "component---src-pages-index-js" */),
  "component---src-pages-posts-1-js": () => import("C:\\Users\\ilyafefelov\\Dropbox\\Code\\strapi-heroku\\good-ua\\web-client\\src\\pages\\posts.1.js" /* webpackChunkName: "component---src-pages-posts-1-js" */),
  "component---src-pages-posts-3-js": () => import("C:\\Users\\ilyafefelov\\Dropbox\\Code\\strapi-heroku\\good-ua\\web-client\\src\\pages\\posts3.js" /* webpackChunkName: "component---src-pages-posts-3-js" */),
  "component---src-pages-test-js": () => import("C:\\Users\\ilyafefelov\\Dropbox\\Code\\strapi-heroku\\good-ua\\web-client\\src\\pages\\test.js" /* webpackChunkName: "component---src-pages-test-js" */)
}

// Static query/page data bundle produced by the build.
exports.data = () => import("C:\\Users\\ilyafefelov\\Dropbox\\Code\\strapi-heroku\\good-ua\\web-client\\.cache\\data.json")
|
praetorian-thendrickson/msgraph-sdk-go | groups/item/team/operations/item/teams_async_operation_item_request_builder.go | <filename>groups/item/team/operations/item/teams_async_operation_item_request_builder.go<gh_stars>0
package item
import (
ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9 "github.com/microsoft/kiota/abstractions/go"
i4a838ef194e4c99e9f2c63ba10dab9cb120a89367c1d4ab0daa63bb424e20d87 "github.com/microsoftgraph/msgraph-sdk-go/models/microsoft/graph"
i7df4e557a1198b9abe14a17b40c7ac7db49b0d3050c749c3169541cb6f012b8b "github.com/microsoftgraph/msgraph-sdk-go/models/microsoft/graph/odataerrors"
)
// TeamsAsyncOperationItemRequestBuilder provides operations to manage the operations property of the microsoft.graph.team entity.
// Kiota-generated: holds everything needed to build request URLs for a single
// teamsAsyncOperation under /groups/{group_id}/team/operations.
type TeamsAsyncOperationItemRequestBuilder struct {
    // Path parameters for the request
    pathParameters map[string]string;
    // The request adapter to use to execute the requests.
    requestAdapter ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestAdapter;
    // Url template to use to build the URL for the current request builder
    urlTemplate string;
}
// TeamsAsyncOperationItemRequestBuilderDeleteOptions options for Delete.
// All fields are optional; a nil options struct is accepted by the builder.
type TeamsAsyncOperationItemRequestBuilderDeleteOptions struct {
    // Request headers
    H map[string]string;
    // Request options
    O []ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestOption;
    // Response handler to use in place of the default response handling provided by the core service
    ResponseHandler ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.ResponseHandler;
}
// TeamsAsyncOperationItemRequestBuilderGetOptions options for Get.
// All fields are optional; a nil options struct is accepted by the builder.
type TeamsAsyncOperationItemRequestBuilderGetOptions struct {
    // Request headers
    H map[string]string;
    // Request options
    O []ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestOption;
    // Request query parameters
    Q *TeamsAsyncOperationItemRequestBuilderGetQueryParameters;
    // Response handler to use in place of the default response handling provided by the core service
    ResponseHandler ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.ResponseHandler;
}
// TeamsAsyncOperationItemRequestBuilderGetQueryParameters the async operations that ran or are running on this team.
// These map onto the OData $select/$expand query options.
type TeamsAsyncOperationItemRequestBuilderGetQueryParameters struct {
    // Expand related entities
    Expand []string;
    // Select properties to be returned
    Select []string;
}
// TeamsAsyncOperationItemRequestBuilderPatchOptions options for Patch.
// Body carries the serializable entity to send; the other fields are optional.
type TeamsAsyncOperationItemRequestBuilderPatchOptions struct {
    // The request body to serialize as JSON.
    Body i4a838ef194e4c99e9f2c63ba10dab9cb120a89367c1d4ab0daa63bb424e20d87.TeamsAsyncOperationable;
    // Request headers
    H map[string]string;
    // Request options
    O []ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestOption;
    // Response handler to use in place of the default response handling provided by the core service
    ResponseHandler ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.ResponseHandler;
}
// NewTeamsAsyncOperationItemRequestBuilderInternal instantiates a new TeamsAsyncOperationItemRequestBuilder and sets the default values.
// The supplied path parameters are copied defensively so later mutation of the
// caller's map cannot affect the builder.
func NewTeamsAsyncOperationItemRequestBuilderInternal(pathParameters map[string]string, requestAdapter ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestAdapter)(*TeamsAsyncOperationItemRequestBuilder) {
    builder := &TeamsAsyncOperationItemRequestBuilder{
        urlTemplate: "{+baseurl}/groups/{group_id}/team/operations/{teamsAsyncOperation_id}{?select,expand}",
    }
    params := make(map[string]string, len(pathParameters))
    for key, value := range pathParameters {
        params[key] = value
    }
    builder.pathParameters = params
    builder.requestAdapter = requestAdapter
    return builder
}
// NewTeamsAsyncOperationItemRequestBuilder instantiates a new TeamsAsyncOperationItemRequestBuilder and sets the default values.
// The raw URL is stashed under the well-known "request-raw-url" key that the
// request adapter recognizes.
func NewTeamsAsyncOperationItemRequestBuilder(rawUrl string, requestAdapter ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestAdapter)(*TeamsAsyncOperationItemRequestBuilder) {
    return NewTeamsAsyncOperationItemRequestBuilderInternal(
        map[string]string{"request-raw-url": rawUrl},
        requestAdapter,
    )
}
// CreateDeleteRequestInformation delete navigation property operations for groups
// Builds (but does not send) the DELETE request for this operation item.
// options may be nil; headers and request options are applied only when supplied.
func (m *TeamsAsyncOperationItemRequestBuilder) CreateDeleteRequestInformation(options *TeamsAsyncOperationItemRequestBuilderDeleteOptions)(*ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestInformation, error) {
    requestInfo := ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.NewRequestInformation()
    // The adapter resolves the final URL from the template + path parameters.
    requestInfo.UrlTemplate = m.urlTemplate
    requestInfo.PathParameters = m.pathParameters
    requestInfo.Method = ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.DELETE
    if options != nil && options.H != nil {
        requestInfo.Headers = options.H
    }
    if options != nil && len(options.O) != 0 {
        err := requestInfo.AddRequestOptions(options.O...)
        if err != nil {
            return nil, err
        }
    }
    return requestInfo, nil
}
// CreateGetRequestInformation the async operations that ran or are running on this team.
// Builds (but does not send) the GET request for this operation item.
// options may be nil; query parameters, headers and request options are
// applied only when supplied.
func (m *TeamsAsyncOperationItemRequestBuilder) CreateGetRequestInformation(options *TeamsAsyncOperationItemRequestBuilderGetOptions)(*ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestInformation, error) {
    requestInfo := ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.NewRequestInformation()
    requestInfo.UrlTemplate = m.urlTemplate
    requestInfo.PathParameters = m.pathParameters
    requestInfo.Method = ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.GET
    // $select/$expand query parameters, if the caller requested any.
    if options != nil && options.Q != nil {
        requestInfo.AddQueryParameters(*(options.Q))
    }
    if options != nil && options.H != nil {
        requestInfo.Headers = options.H
    }
    if options != nil && len(options.O) != 0 {
        err := requestInfo.AddRequestOptions(options.O...)
        if err != nil {
            return nil, err
        }
    }
    return requestInfo, nil
}
// CreatePatchRequestInformation update the navigation property operations in groups
// Builds (but does not send) the PATCH request for this operation item.
// options may be nil, matching the contract of the sibling Create* methods;
// the request body is serialized as JSON only when options are supplied.
func (m *TeamsAsyncOperationItemRequestBuilder) CreatePatchRequestInformation(options *TeamsAsyncOperationItemRequestBuilderPatchOptions)(*ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.RequestInformation, error) {
    requestInfo := ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.NewRequestInformation()
    requestInfo.UrlTemplate = m.urlTemplate
    requestInfo.PathParameters = m.pathParameters
    requestInfo.Method = ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.PATCH
    // Fix: the original dereferenced options.Body unconditionally while every
    // other access below guards on options != nil, so a nil options struct
    // panicked here instead of producing a body-less request.
    if options != nil {
        requestInfo.SetContentFromParsable(m.requestAdapter, "application/json", options.Body)
    }
    if options != nil && options.H != nil {
        requestInfo.Headers = options.H
    }
    if options != nil && len(options.O) != 0 {
        err := requestInfo.AddRequestOptions(options.O...)
        if err != nil {
            return nil, err
        }
    }
    return requestInfo, nil
}
// Delete delete navigation property operations for groups
// Executes the DELETE request built by CreateDeleteRequestInformation.
// 4xx/5xx responses are deserialized into the generated OData error type.
func (m *TeamsAsyncOperationItemRequestBuilder) Delete(options *TeamsAsyncOperationItemRequestBuilderDeleteOptions)(error) {
    requestInfo, err := m.CreateDeleteRequestInformation(options);
    if err != nil {
        return err
    }
    errorMapping := ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.ErrorMappings {
        "4XX": i7df4e557a1198b9abe14a17b40c7ac7db49b0d3050c749c3169541cb6f012b8b.CreateODataErrorFromDiscriminatorValue,
        "5XX": i7df4e557a1198b9abe14a17b40c7ac7db49b0d3050c749c3169541cb6f012b8b.CreateODataErrorFromDiscriminatorValue,
    }
    // DELETE returns no content on success.
    err = m.requestAdapter.SendNoContentAsync(requestInfo, nil, errorMapping)
    if err != nil {
        return err
    }
    return nil
}
// Get the async operations that ran or are running on this team.
// Executes the GET request and deserializes the response into a
// TeamsAsyncOperationable. 4xx/5xx responses are surfaced as OData errors.
func (m *TeamsAsyncOperationItemRequestBuilder) Get(options *TeamsAsyncOperationItemRequestBuilderGetOptions)(i4a838ef194e4c99e9f2c63ba10dab9cb120a89367c1d4ab0daa63bb424e20d87.TeamsAsyncOperationable, error) {
    requestInfo, err := m.CreateGetRequestInformation(options);
    if err != nil {
        return nil, err
    }
    errorMapping := ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.ErrorMappings {
        "4XX": i7df4e557a1198b9abe14a17b40c7ac7db49b0d3050c749c3169541cb6f012b8b.CreateODataErrorFromDiscriminatorValue,
        "5XX": i7df4e557a1198b9abe14a17b40c7ac7db49b0d3050c749c3169541cb6f012b8b.CreateODataErrorFromDiscriminatorValue,
    }
    res, err := m.requestAdapter.SendAsync(requestInfo, i4a838ef194e4c99e9f2c63ba10dab9cb120a89367c1d4ab0daa63bb424e20d87.CreateTeamsAsyncOperationFromDiscriminatorValue, nil, errorMapping)
    if err != nil {
        return nil, err
    }
    // Fix: type-asserting a nil interface panics; the adapter may legitimately
    // return a nil result (e.g. 204 No Content), so guard before asserting.
    if res == nil {
        return nil, nil
    }
    return res.(i4a838ef194e4c99e9f2c63ba10dab9cb120a89367c1d4ab0daa63bb424e20d87.TeamsAsyncOperationable), nil
}
// Patch update the navigation property operations in groups
// Executes the PATCH request built from the given options; server errors
// (4xx/5xx) are deserialized into the generated OData error type.
func (m *TeamsAsyncOperationItemRequestBuilder) Patch(options *TeamsAsyncOperationItemRequestBuilderPatchOptions)(error) {
    requestInfo, err := m.CreatePatchRequestInformation(options)
    if err != nil {
        return err
    }
    mapping := ida96af0f171bb75f894a4013a6b3146a4397c58f11adb81a2b7cbea9314783a9.ErrorMappings{
        "4XX": i7df4e557a1198b9abe14a17b40c7ac7db49b0d3050c749c3169541cb6f012b8b.CreateODataErrorFromDiscriminatorValue,
        "5XX": i7df4e557a1198b9abe14a17b40c7ac7db49b0d3050c749c3169541cb6f012b8b.CreateODataErrorFromDiscriminatorValue,
    }
    // PATCH returns no content on success, so the adapter's error (or nil)
    // is the entire result.
    return m.requestAdapter.SendNoContentAsync(requestInfo, nil, mapping)
}
|
Superomeg4/pyleecan | Methods/Machine/WindingDW2L/get_dim_wind.py | <reponame>Superomeg4/pyleecan<filename>Methods/Machine/WindingDW2L/get_dim_wind.py
# -*- coding: utf-8 -*-
"""@package Methods.Machine.Winding.get_dim_wind
Compute the Winding Matrix Dimension Method
@date Created on Fri Jan 15 11:03:49 2016
@copyright (C) 2015-2016 EOMYS ENGINEERING.
@author pierre_b
@todo unittest it
"""
from pyleecan.Methods import NotImplementedYetError
def get_dim_wind(self):
    """Return the winding matrix dimensions of a double-layer winding.

    Parameters
    ----------
    self : Winding
        A Winding object

    Returns
    -------
    (Nrad, Ntan): tuple
        Number of layers in the radial and tangential directions
    """
    # A distributed double-layer winding always has two radial layers
    # and a single tangential layer.
    return 2, 1
|
vmayoral/Vitis_Libraries | dsp/L1/tests/aie/inc/fir_sr_sym_ref.hpp | /*
* Copyright 2021 Xilinx, Inc.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef _DSPLIB_fir_sr_sym_REF_HPP_
#define _DSPLIB_fir_sr_sym_REF_HPP_
/*
Single Rate Symmetric FIR Kernel Reference model.
This file holds the declaration of the reference model class. The reference model
is functionally equivalent to the kernel class with intrinics. The reference model
does not use intrinsics or vector operations. The reference mode, once validated
acts as the golden reference to verify the AIE-targetting kernel class.
*/
#include <adf.h>
#include <limits>
namespace xf {
namespace dsp {
namespace aie {
namespace fir {
namespace sr_sym {
//-----------------------------------------------------------------------------------------------------
// Single Rate class - no coefficient reload, single output
// Scalar (golden) reference model of the single-rate symmetric FIR. The
// caller supplies only the first (TP_FIR_LEN+1)/2 taps; the second half of
// the impulse response is mirrored from them.
template <typename TT_DATA, // type of data input and output
          typename TT_COEFF, // type of coefficients (e.g. int16, cint32)
          unsigned int TP_FIR_LEN,
          unsigned int TP_SHIFT,
          unsigned int TP_RND,
          unsigned int TP_INPUT_WINDOW_VSIZE,
          unsigned int TP_USE_COEFF_RELOAD = 0, // 1 = use coeff reload, 0 = don't use coeff reload
          unsigned int TP_NUM_OUTPUTS = 1>
class fir_sr_sym_ref {
   private:
    // This array holds the coefficient values for the reference model.
    // Strictly speaking the alignment is not required when targetting
    // the x86 or scalar processors.
    TT_COEFF chess_storage(% chess_alignof(v8cint16)) m_internalTaps[TP_FIR_LEN];

   public:
    // Constructor
    // Expand the half-length symmetric tap array into the full internal
    // array: taps[i] is written to positions i and TP_FIR_LEN-1-i.
    fir_sr_sym_ref(const TT_COEFF (&taps)[(TP_FIR_LEN + 1) / 2]) {
        for (int i = 0; i < (TP_FIR_LEN + 1) / 2; i++) {
            m_internalTaps[i] = taps[i];
            m_internalTaps[TP_FIR_LEN - 1 - i] = taps[i];
        }
    }
    // Register Kernel Class
    static void registerKernelClass() { REGISTER_FUNCTION(fir_sr_sym_ref::filter); }
    // FIR
    // Reference filter: consumes one input window, produces one output window.
    void filter(input_window<TT_DATA>* inWindow, output_window<TT_DATA>* outWindow);
};
// Specialization - no coefficient reload, dual outputs
// Same reference model as the primary template, but filter() fans the result
// out to two output windows.
template <typename TT_DATA, // type of data input and output
          typename TT_COEFF, // type of coefficients (e.g. int16, cint32)
          unsigned int TP_FIR_LEN,
          unsigned int TP_SHIFT,
          unsigned int TP_RND,
          unsigned int TP_INPUT_WINDOW_VSIZE>
class fir_sr_sym_ref<TT_DATA,
                     TT_COEFF,
                     TP_FIR_LEN,
                     TP_SHIFT,
                     TP_RND,
                     TP_INPUT_WINDOW_VSIZE,
                     0 /*USE_COEFF_RELOAD_FALSE*/,
                     2> {
   private:
    // This array holds the coefficient values for the reference model.
    // Strictly speaking the alignment is not required when targetting
    // the x86 or scalar processors.
    TT_COEFF chess_storage(% chess_alignof(v8cint16)) m_internalTaps[TP_FIR_LEN];

   public:
    // Constructor
    // Mirror the half-length symmetric tap array into the full internal array.
    fir_sr_sym_ref(const TT_COEFF (&taps)[(TP_FIR_LEN + 1) / 2]) {
        for (int i = 0; i < (TP_FIR_LEN + 1) / 2; i++) {
            m_internalTaps[i] = taps[i];
            m_internalTaps[TP_FIR_LEN - 1 - i] = taps[i];
        }
    }
    // Register Kernel Class
    static void registerKernelClass() { REGISTER_FUNCTION(fir_sr_sym_ref::filter); }
    // FIR
    void filter(input_window<TT_DATA>* inWindow, output_window<TT_DATA>* outWindow, output_window<TT_DATA>* outWindow2);
};

//-----------------------------------------------------------------------------------------------------
// Specialization using coefficient reload and single output
// Taps arrive at run time through filter()'s inTaps argument (forwarded to
// firReload) instead of at construction.
template <typename TT_DATA, // type of data input and output
          typename TT_COEFF, // type of coefficients (e.g. int16, cint32)
          unsigned int TP_FIR_LEN,
          unsigned int TP_SHIFT,
          unsigned int TP_RND,
          unsigned int TP_INPUT_WINDOW_VSIZE>
class fir_sr_sym_ref<TT_DATA,
                     TT_COEFF,
                     TP_FIR_LEN,
                     TP_SHIFT,
                     TP_RND,
                     TP_INPUT_WINDOW_VSIZE,
                     1 /*USE_COEFF_RELOAD_TRUE*/,
                     1> {
   private:
    // This array holds the coefficient values for the reference model.
    // Strictly speaking the alignment is not required when targetting
    // the x86 or scalar processors.
    TT_COEFF chess_storage(% chess_alignof(v8cint16)) m_internalTaps[TP_FIR_LEN];

   public:
    // Constructor
    fir_sr_sym_ref() {}
    // Replace the current taps: mirror the half-length symmetric array into
    // the full internal array.
    void firReload(const TT_COEFF (&taps)[(TP_FIR_LEN + 1) / 2]) {
        for (int i = 0; i < (TP_FIR_LEN + 1) / 2; i++) {
            m_internalTaps[i] = taps[i];
            m_internalTaps[TP_FIR_LEN - 1 - i] = taps[i];
        }
    }
    // Register Kernel Class
    static void registerKernelClass() { REGISTER_FUNCTION(fir_sr_sym_ref::filter); }
    // FIR
    void filter(input_window<TT_DATA>* inWindow,
                output_window<TT_DATA>* outWindow,
                const TT_COEFF (&inTaps)[(TP_FIR_LEN + 1) / 2]);
};

// Specialization using coefficient reload and dual output
// Combines run-time coefficient reload with two output windows.
template <typename TT_DATA, // type of data input and output
          typename TT_COEFF, // type of coefficients (e.g. int16, cint32)
          unsigned int TP_FIR_LEN,
          unsigned int TP_SHIFT,
          unsigned int TP_RND,
          unsigned int TP_INPUT_WINDOW_VSIZE>
class fir_sr_sym_ref<TT_DATA,
                     TT_COEFF,
                     TP_FIR_LEN,
                     TP_SHIFT,
                     TP_RND,
                     TP_INPUT_WINDOW_VSIZE,
                     1 /*USE_COEFF_RELOAD_TRUE*/,
                     2> {
   private:
    // This array holds the coefficient values for the reference model.
    // Strictly speaking the alignment is not required when targetting
    // the x86 or scalar processors.
    TT_COEFF chess_storage(% chess_alignof(v8cint16)) m_internalTaps[TP_FIR_LEN];

   public:
    // Constructor
    fir_sr_sym_ref() {}
    // Replace the current taps: mirror the half-length symmetric array into
    // the full internal array.
    void firReload(const TT_COEFF (&taps)[(TP_FIR_LEN + 1) / 2]) {
        for (int i = 0; i < (TP_FIR_LEN + 1) / 2; i++) {
            m_internalTaps[i] = taps[i];
            m_internalTaps[TP_FIR_LEN - 1 - i] = taps[i];
        }
    }
    // Register Kernel Class
    static void registerKernelClass() { REGISTER_FUNCTION(fir_sr_sym_ref::filter); }
    // FIR
    void filter(input_window<TT_DATA>* inWindow,
                output_window<TT_DATA>* outWindow,
                output_window<TT_DATA>* outWindow2,
                const TT_COEFF (&inTaps)[(TP_FIR_LEN + 1) / 2]);
};
}
}
}
}
}
#endif // _DSPLIB_fir_sr_sym_REF_HPP_
|
emaphis/Introduction_to_Java_Programming | Introduction_to_Java_Programming/src/chapter08/exercise/exercise08_26/RowSorting.java | /*
* Exercise 8.26
* (Row sorting) Implement the following method to sort the rows in a two-
* dimensional array. A new array is returned and the original array is intact.
*
* public static double[][] sortRows(double[][] m)
*
* Write a test program that prompts the user to enter a 3 * 3 matrix of double
* values and displays a new row-sorted matrix
*/
package chapter08.exercise.exercise08_26;
import java.util.Arrays;
import java.util.Scanner;
/**
* Row sorting
* @author emaph
*/
public class RowSorting {

    /**
     * Reads a 3-by-3 matrix of doubles from standard input and prints a new
     * matrix in which each row has been sorted in ascending order.
     */
    public static void main(String[] args) {
        final int SIZE = 3;
        Scanner input = new Scanner(System.in);
        System.out.println("Enter a 3-by-3 matrix row by row: ");
        double[][] m = init(SIZE, input);
        double[][] m2 = sortRows(m);
        System.out.println("The row-sorted array is");
        printArray(m2);
    }

    /**
     * Returns a new matrix whose rows are ascending-sorted copies of the rows
     * of {@code m}. The original matrix is left intact.
     *
     * @param m the source matrix (rows may have differing lengths)
     * @return a newly allocated matrix with each row sorted
     */
    public static double[][] sortRows(double[][] m) {
        double[][] out = new double[m.length][];
        for (int row = 0; row < m.length; row++) {
            // Sort an independent copy so the caller's row is untouched.
            // Arrays.sort replaces the original hand-rolled selection sort.
            double[] sorted = copy(m[row]);
            Arrays.sort(sorted);
            out[row] = sorted;
        }
        return out;
    }

    /** Returns a new array containing the same elements as the parameter. */
    public static double[] copy(double[] arr) {
        return Arrays.copyOf(arr, arr.length);
    }

    /** Reads a size-by-size matrix of doubles from the given scanner. */
    private static double[][] init(int size, Scanner input) {
        double[][] array = new double[size][size];
        for (int i = 0; i < size; i++) {
            for (int j = 0; j < size; j++) {
                array[i][j] = input.nextDouble();
            }
        }
        return array;
    }

    /** Prints the matrix, one row per line, values separated by spaces. */
    private static void printArray(double[][] m2) {
        for (double[] row : m2) {
            for (double d : row) {
                System.out.print(d + " ");
            }
            System.out.println();
        }
    }
}
|
hgmelectronics/xcpsetup | src/libsetuptools/LinearSlot.cpp | <reponame>hgmelectronics/xcpsetup
#include "LinearSlot.h"
#include "util.h"
namespace SetupTools {
// Construct a linear slot in an unconfigured state: engineering endpoints are
// NaN, raw endpoints are 0, and the raw out-of-range sentinel is 0xFFFFFFFF.
// The validator's range is kept in sync with the engineering endpoints by
// re-running updateValidator() whenever the value parameters change.
LinearSlot::LinearSlot(QObject *parent) :
    Slot(parent),
    mEngrA(NAN),
    mEngrB(NAN),
    mOorEngr(NAN),
    mRawA(quint32(0)),
    mRawB(quint32(0)),
    mOorRaw(quint32(0xFFFFFFFF)),
    mValidator(new QDoubleValidator(this))
{
    connect(this, &Slot::valueParamChanged, this, &LinearSlot::updateValidator);
}
// --- Property accessors ---------------------------------------------------
// Each setter uses updateDelta<>() so the change signals fire only when the
// stored value actually changes; valueParamChanged additionally triggers
// updateValidator() via the connection made in the constructor.

// Engineering-unit value corresponding to raw endpoint A.
double LinearSlot::engrA() const
{
    return mEngrA;
}

void LinearSlot::setEngrA(double newVal)
{
    if(updateDelta<>(mEngrA, newVal))
    {
        emit valueParamChanged();
        emit linearValueParamChanged();
    }
}

// Engineering-unit value corresponding to raw endpoint B.
double LinearSlot::engrB() const
{
    return mEngrB;
}

void LinearSlot::setEngrB(double newVal)
{
    if(updateDelta<>(mEngrB, newVal))
    {
        emit valueParamChanged();
        emit linearValueParamChanged();
    }
}

// Engineering-unit value reported for out-of-range raw input (see asFloat).
double LinearSlot::oorEngr() const
{
    return mOorEngr;
}

void LinearSlot::setOorEngr(double newVal)
{
    if(updateDelta<>(mOorEngr, newVal))
    {
        emit valueParamChanged();
        emit linearValueParamChanged();
    }
}

// Raw-value endpoint A of the linear mapping.
double LinearSlot::rawA() const
{
    return mRawA;
}

void LinearSlot::setRawA(double newVal)
{
    if(updateDelta<>(mRawA, newVal))
    {
        emit valueParamChanged();
        emit linearValueParamChanged();
    }
}

// Raw-value endpoint B of the linear mapping.
double LinearSlot::rawB() const
{
    return mRawB;
}

void LinearSlot::setRawB(double newVal)
{
    if(updateDelta<>(mRawB, newVal))
    {
        emit valueParamChanged();
        emit linearValueParamChanged();
    }
}

// Raw value reported for out-of-range engineering input (see asRaw).
double LinearSlot::oorRaw() const
{
    return mOorRaw;
}

void LinearSlot::setOorRaw(double newVal)
{
    if(updateDelta<>(mOorRaw, newVal))
    {
        emit valueParamChanged();
        emit linearValueParamChanged();
    }
}
// Convert a raw value to engineering units by linear interpolation between
// the configured endpoints (rawA -> engrA, rawB -> engrB). Returns mOorEngr
// when the input cannot be converted to a double or lies outside the raw
// endpoint interval.
// NOTE(review): assumes mRawA != mRawB; if the endpoints are equal the
// division below produces inf/NaN -- confirm configuration guarantees this.
double LinearSlot::asFloat(QVariant raw) const
{
    bool convertedOk = false;
    double rawConv = raw.toDouble(&convertedOk);
    // Unparseable or out-of-range input maps to the designated OOR value.
    if(!convertedOk || !inRange(rawConv, mRawA, mRawB))
        return mOorEngr;
    return (rawConv - mRawA) / (mRawB - mRawA) * (mEngrB - mEngrA) + mEngrA;
}
// Render a raw value in engineering units as text: fixed-point with the
// configured precision in base 10, otherwise an integer string in the
// configured base.
QString LinearSlot::asString(QVariant raw) const
{
    const double engr = asFloat(raw);
    return (base() == 10) ? QString::number(engr, 'f', precision())
                          : QString::number(qint64(engr), base());
}
// Convert an engineering-unit value back to the raw representation.
// Strings are parsed in the configured base (base-10 and non-string inputs go
// through QVariant's numeric conversion). Unparseable or out-of-range input
// yields mOorRaw. For integer storage types the result is rounded, then
// converted to the configured storage type; a failed conversion also yields
// mOorRaw.
QVariant LinearSlot::asRaw(QVariant engr) const
{
    bool convertedOk = false;
    double engrConv;
    if(base() == 10 || engr.type() != QVariant::Type::String)
    {
        engrConv = engr.toDouble(&convertedOk);
    }
    else
    {
        // Non-decimal display base: parse the text as an integer in that base.
        QString engrStr = engr.toString();
        engrConv = engrStr.toLongLong(&convertedOk, base());
    }
    if(!convertedOk || !inRange(engrConv, mEngrA, mEngrB))
        return mOorRaw;
    // Inverse of asFloat(): engr -> raw linear interpolation.
    double raw = (engrConv - mEngrA) / (mEngrB - mEngrA) * (mRawB - mRawA) + mRawA;
    // Integer storage types cannot hold fractions; round to nearest.
    if(storageType() != QMetaType::Float && storageType() != QMetaType::Double)
        raw = round(raw);
    QVariant rawVar = raw;
    if(rawVar.convert(storageType()))
        return rawVar;
    else
        return mOorRaw;
}
// True when the value converts to a double lying within the raw endpoints.
bool LinearSlot::rawInRange(QVariant raw) const
{
    bool ok = false;
    const double value = raw.toDouble(&ok);
    if(!ok)
        return false;
    return inRange(value, mRawA, mRawB);
}
// True when the engineering-unit value lies within the engineering endpoints.
// Uses the same parsing rules as asRaw(): strings are read in the configured
// base when it is not decimal.
bool LinearSlot::engrInRange(QVariant engr) const
{
    bool convertedOk = false;
    double engrConv;
    if(base() == 10 || engr.type() != QVariant::Type::String)
    {
        engrConv = engr.toDouble(&convertedOk);
    }
    else
    {
        QString engrStr = engr.toString();
        engrConv = engrStr.toLongLong(&convertedOk, base());
    }
    return (convertedOk && inRange(engrConv, mEngrA, mEngrB));
}
// Range reporting: the A/B endpoints may be configured in either order, so
// min/max are computed rather than assuming A <= B.

QVariant LinearSlot::rawMin() const
{
    return std::min(mRawA, mRawB);
}

QVariant LinearSlot::rawMax() const
{
    return std::max(mRawA, mRawB);
}

QVariant LinearSlot::engrMin() const
{
    return std::min(mEngrA, mEngrB);
}

QVariant LinearSlot::engrMax() const
{
    return std::max(mEngrA, mEngrB);
}
// Validator for text entry; its range tracks the engineering endpoints
// (see updateValidator, wired up in the constructor).
QValidator *LinearSlot::validator()
{
    return mValidator;
}
void LinearSlot::updateValidator()
{
if(mEngrA > mEngrB)
mValidator->setRange(mEngrB, mEngrA, precision());
else
mValidator->setRange(mEngrA, mEngrB, precision());
}
} // namespace SetupTools
|
krautbox/smarthome18 | src/de/hsb/smarthome/alternative/RemoteController.java | <filename>src/de/hsb/smarthome/alternative/RemoteController.java
package de.hsb.smarthome.alternative;
import java.io.IOException;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.List;
import javax.xml.parsers.ParserConfigurationException;
import org.xml.sax.SAXException;
import com.kilo52.common.io.ConfigurationFile;
import com.kilo52.common.io.ConfigurationFileHandler;
import de.hsb.smarthome.server.FritzBoxConnection;
import de.hsb.smarthome.util.json.Device;
import de.hsb.smarthome.util.log.Logger;
import de.hsb.smarthome.util.log.Logger.LoggerMode;
/**
* Controls the program flow on the remote side of the Safer Smart Home project
* (alternative).
*
* @author <NAME>
*
*/
public class RemoteController {
/**
* @param remote_gateway
* IP address of the remote gateway (FRITZ!Box)
* @param fb_username
* Username for the FRITZ!Box
* @param fb_password
* Password for the FR<PASSWORD>
* @param commPort
* Serial interface ("/dev/ttyS0")
* @param phoneNumbers
* Phone numbers as string array ("+4915000000000", "...")
* @param timeout
* Timeout for the {@link ATCallHandler} (time in ms)
* @param pinCode
* Pin code to get access via DTMF input
*/
public RemoteController(String remote_gateway, String fb_username, String fb_password, String commPort,
String[] phoneNumbers, int timeout, String pinCode, int mode) {
REMOTE_GATEWAY = remote_gateway;
FB_USERNAME = fb_username;
FB_PASSWORD = <PASSWORD>;
AT_CALL_HANDLER = new ATCallHandler(commPort, phoneNumbers, timeout, pinCode, mode);
loop();
}
/**
* @param confFilePath
* Configuration file using the library claymore
* @throws ProcessException
*/
public RemoteController(String confFilePath) throws ProcessException {
try {
LOGGER.write(this, "Read from the conf file: " + confFilePath, LoggerMode.INFO);
ConfigurationFileHandler confFileHandler = new ConfigurationFileHandler(confFilePath);
ConfigurationFile confFile = confFileHandler.read();
REMOTE_GATEWAY = confFile.getSection("general").valueOf("remoteGateway");
FB_USERNAME = confFile.getSection("general").valueOf("fbUserName");
FB_PASSWORD = confFile.getSection("general").valueOf("fbPassword");
String commPort = confFile.getSection("general").valueOf("commPort");
String[] phoneNumbers = confFile.getSection("general").valueOf("phoneNumbers").split("([,])");
int timeout = Integer.valueOf(confFile.getSection("alternative").valueOf("timeout"));
String pinCode = confFile.getSection("alternative").valueOf("pinCode");
int mode = Integer.valueOf(confFile.getSection("alternative").valueOf("mode"));
AT_CALL_HANDLER = new ATCallHandler(commPort, phoneNumbers, timeout, pinCode, mode);
} catch (IOException e) {
LOGGER.write(this, "Failed to read the conf file: " + confFilePath, LoggerMode.ERROR);
throw new ProcessException("Failed to read the conf file: " + confFilePath);
}
loop();
}
private void loop() {
mDevices = getDevices();
boolean isAlright = true;
while (isAlright) {
try {
AT_CALL_HANDLER.handleCall();
} catch (Exception e) {
isAlright = false;
e.printStackTrace();
}
}
}
/**
* Returns all devices that are available in the network (FRITZ!Box).
*/
public static List<Device> getDevices() {
List<Device> listDevices = new ArrayList<Device>();
try {
String sessionId = "0000000000000000";
for (int i = 0; i < TEST_COUNT && sessionId.equals("0000000000000000"); i++) {
sessionId = FritzBoxConnection.getSessionId(REMOTE_GATEWAY, FB_USERNAME, FB_PASSWORD);
}
listDevices = FritzBoxConnection.getDeviceListInfos(REMOTE_GATEWAY, sessionId);
FritzBoxConnection.logout(REMOTE_GATEWAY, sessionId);
} catch (KeyManagementException | NoSuchAlgorithmException | IOException | ParserConfigurationException
| SAXException e) {
e.printStackTrace();
}
return listDevices;
}
private final Logger LOGGER = Logger.getLogger();
private final ATCallHandler AT_CALL_HANDLER;
static String REMOTE_GATEWAY;
static String FB_USERNAME;
static String FB_PASSWORD;
static final String CMD_SWITCH_OFF = "setswitchoff";
static final String CMD_SWITCH_ON = "setswitchon";
static final int TEST_COUNT = 5;
static List<Device> mDevices = new ArrayList<Device>();
/**
* Exception thrown when errors occur in the {@link RemoteController}.
*
* @author <NAME>
*
*/
class ProcessException extends Exception {
public ProcessException(String msg) {
super(msg);
}
private static final long serialVersionUID = 1L;
}
} |
sofwerx/OSUS-R | mil.dod.th.ose.datastream/test/mil/dod/th/ose/datastream/TestStreamProfileRegistryCallback.java | //==============================================================================
// This software is part of the Open Standard for Unattended Sensors (OSUS)
// reference implementation (OSUS-R).
//
// To the extent possible under law, the author(s) have dedicated all copyright
// and related and neighboring rights to this software to the public domain
// worldwide. This software is distributed without any warranty.
//
// You should have received a copy of the CC0 Public Domain Dedication along
// with this software. If not, see
// <http://creativecommons.org/publicdomain/zero/1.0/>.
//==============================================================================
package mil.dod.th.ose.datastream;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import mil.dod.th.core.factory.FactoryException;
import mil.dod.th.ose.core.factory.api.FactoryRegistry;
import mil.dod.th.ose.core.factory.api.FactoryServiceContext;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
/**
* @author jmiller
*
*/
public class TestStreamProfileRegistryCallback
{
    // Subject under test.
    private StreamProfileRegistryCallback m_SUT;

    @Mock private FactoryServiceContext<StreamProfileInternal> m_FactoryServiceContext;
    @Mock private FactoryRegistry<StreamProfileInternal> m_Registry;

    @Before
    public void setUp()
    {
        MockitoAnnotations.initMocks(this);

        // The callback reaches the registry through the service context.
        when(m_FactoryServiceContext.getRegistry()).thenReturn(m_Registry);

        m_SUT = new StreamProfileRegistryCallback();
    }

    /**
     * Verify there are no registry dependencies.
     */
    @Test
    public void testRetrieveRegistryDependencies()
    {
        assertThat(m_SUT.retrieveRegistryDependencies().size(), is(0));
    }

    /**
     * Verify post-initialization completes without error for a stream profile
     * whose name is not yet registered.
     */
    @Test
    public void testPostObjectInitialize() throws FactoryException
    {
        StreamProfileInternal mockStreamProfile = mock(StreamProfileInternal.class);
        String name = "name";
        when(mockStreamProfile.getName()).thenReturn(name);
        when(m_Registry.isObjectCreated(name)).thenReturn(false);

        m_SUT.postObjectInitialize(mockStreamProfile);
    }
}
|
Quantisan/WholeCell | simulation/doc/doxygen/html/index_8php.js | <filename>simulation/doc/doxygen/html/index_8php.js
// Doxygen-generated navigation data for index.php: each entry maps a symbol
// name to its anchor in index_8php.html. Do not edit by hand; regenerate docs.
var index_8php =
[
    [ "$content", "index_8php.html#a57b284fe00866494b33afa80ba729bed", null ],
    [ "$data", "index_8php.html#a6efc15b5a2314dd4b5aaa556a375c6d6", null ],
    [ "$dirId", "index_8php.html#aa6f717bb2e5ba54d7b92c9292453c495", null ]
]; |
dmytro-sylaiev/commons-net | src/test/java/org/apache/commons/net/ftp/parser/NTFTPEntryParserTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.net.ftp.parser;
import java.io.ByteArrayInputStream;
import java.util.Calendar;
import org.apache.commons.net.ftp.FTPFile;
import org.apache.commons.net.ftp.FTPFileEntryParser;
import org.apache.commons.net.ftp.FTPListParseEngine;
/**
 * Tests DOS/NT-style FTP LIST parsing. The parser under test is a composite of
 * the NT parser and a Unix parser, mirroring NT servers that emit both formats.
 */
public class NTFTPEntryParserTest extends CompositeFTPParseTestFramework
{
    // Listings that must parse: [0] holds DOS-style lines, [1] Unix-style lines.
    private static final String [][] goodsamples = {
        { // DOS-style tests
            "05-26-95  10:57AM               143712 $LDR$",
            "05-20-97  03:31PM                  681 .bash_history",
            "12-05-96  05:03PM       <DIR>          absoft2",
            "11-14-97  04:21PM                  953 AUDITOR3.INI",
            "05-22-97  08:08AM                  828 AUTOEXEC.BAK",
            "01-22-98  01:52PM                  795 AUTOEXEC.BAT",
            "05-13-97  01:46PM                  828 AUTOEXEC.DOS",
            "12-03-96  06:38AM                  403 AUTOTOOL.LOG",
            "12-03-96  06:38AM       <DIR>          123xyz",
            "01-20-97  03:48PM       <DIR>          bin",
            "05-26-1995  10:57AM               143712 $LDR$",
            // 24hr clock as used on Windows_CE
            "12-05-96  17:03         <DIR>          absoft2",
            "05-22-97  08:08                    828 AUTOEXEC.BAK",
            "01-01-98   05:00       <DIR>          Network",
            "01-01-98   05:00       <DIR>          StorageCard",
            "09-13-10   20:08       <DIR>          Recycled",
            "09-06-06   19:00                   69 desktop.ini",
            "09-13-10   13:08                   23 Control Panel.lnk",
            "09-13-10   13:08       <DIR>          My Documents",
            "09-13-10   13:08       <DIR>          Program Files",
            "09-13-10   13:08       <DIR>          Temp",
            "09-13-10   13:08       <DIR>          Windows",
        },
        { // Unix-style tests
            "-rw-r--r--   1 root     root       111325 Apr 27  2001 zxJDBC-2.0.1b1.tar.gz",
            "-rw-r--r--   1 root     root       190144 Apr 27  2001 zxJDBC-2.0.1b1.zip",
            "-rwxr-xr-x   2 500      500           166 Nov  2  2001 73131-testtes1.afp",
            "-rw-r--r--   1 500      500           166 Nov  9  2001 73131-testtes1.AFP",
            "drwx------ 4 maxm Domain Users 512 Oct 2 10:59 .metadata",
        }
    };
    // Listings that the respective parser must reject.
    private static final String[][] badsamples =
    {
        { // DOS-style tests
            "20-05-97  03:31PM                  681 .bash_history",
            "     0           DIR   05-19-97   12:56  local",
            "     0           DIR   05-12-97   16:52  Maintenance Desktop",
        },
        { // Unix-style tests
            "drwxr-xr-x   2 root     99           4096Feb 23 30:01 zzplayer",
        }
    };
    private static final String directoryBeginningWithNumber =
        "12-03-96  06:38AM       <DIR>          123xyz";
    // byte -123 when read using ISO-8859-1 encoding becomes 0X85 line terminator
    private static final byte[] listFilesByteTrace = {
        48, 57, 45, 48, 52, 45, 49, 51, 32, 32, 48, 53, 58, 53, 49, 80, 77,
        32, 32, 32, 32, 32, 32, 32, 60, 68, 73, 82, 62, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32,
        97, 115, 112, 110, 101, 116, 95, 99, 108, 105, 101, 110, 116,
        13, 10, // 1
        48, 55, 45, 49, 55, 45, 49, 51, 32, 32, 48, 50, 58, 53, 52, 80, 77,
        32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 50, 32,
        65, 95, 113, 117, 105, 99, 107, 95, 98, 114, 111, 119, 110, 95, 102, 111, 120, 95, 106, 117, 109, 112, 115,
        95, 111, 118, 101, 114, 95, 116, 104, 101, 95, 108, 97, 122, 121, 95, 100, 111, 103,
        13, 10, // 2
        48, 55, 45, 49, 55, 45, 49, 51, 32, 32, 48, 50, 58, 49, 55, 80, 77,
        32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 51, 32,
        120, -127, -123, 121,
        13, 10, // 3
        48, 55, 45, 49, 55, 45, 49, 51, 32, 32, 48, 49, 58, 52, 57, 80, 77,
        32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 52, 32,
        -126, -28, -126, -83, -119, -51, -126, -52, -105, -84, -126, -22, -126, -51,
        -112, -30, -126, -90, -126, -72, -126, -75, -126, -60, -127, 65, -126, -75, -126, -87, -126, -32, -126,
        -32, -126, -58, -126, -52, -112, -123, -126, -55, -126, -96, -126, -25, -126, -72, 46, 116, 120, 116,
        13, 10, // 4
        48, 55, 45, 49, 55, 45, 49, 51, 32, 32, 48, 50, 58, 52, 54, 80, 77,
        32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 53, 32,
        -125, 76, -125, -125, -125, 98, -125, 86, -125, 116, -125, -115, -127, 91, -116, 118, -114, 90, -113, -111,
        13, 10, // 5
        48, 55, 45, 49, 55, 45, 49, 51, 32, 32, 48, 50, 58, 52, 54, 80, 77,
        32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 54, 32,
        -125, 76, -125, -125, -125, 98, -125, 86, -125, -123, -125, 116, -125, -115, -127, 91, -116, 118, -114, 90, -113, -111,
        13, 10, // 6
        48, 55, 45, 49, 55, 45, 49, 51, 32, 32, 48, 49, 58, 52, 57, 80, 77,
        32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 55, 32,
        -114, 79, -116, -38, -126, -52, -105, -25, 46, 116, 120, 116,
        13, 10, // 7
        48, 55, 45, 49, 55, 45, 49, 51, 32, 32, 48, 49, 58, 52, 57, 80, 77,
        32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 56, 32,
        -111, -66, -116, -10, -106, 93, 46, 116, 120, 116,
        13, 10, // 8
        48, 55, 45, 49, 55, 45, 49, 51, 32, 32, 48, 50, 58, 53, 52, 80, 77,
        32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 57, 32,
        -113, -84, -106, -20, -106, -123, -114, 113,
        13, 10, // 9
        48, 55, 45, 49, 55, 45, 49, 51, 32, 32, 48, 49, 58, 52, 57, 80, 77,
        32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 49, 48, 32,
        -119, -28, -109, 99, -118, -108, -114, -82, -119, -17, -114, -48, -120, -8, -112, -123, -108, 95, -117, -58, 46, 80, 68, 70,
        13, 10, // 10
        48, 55, 45, 49, 55, 45, 49, 51, 32, 32, 48, 50, 58, 49, 49, 80, 77,
        32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 49, 49, 32,
        -112, -124, -99, -56, 46, 116, 120, 116,
        13, 10, // 11
        48, 55, 45, 49, 55, 45, 49, 51, 32, 32, 48, 50, 58, 52, 51, 80, 77,
        32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 49, 50, 32,
        -117, -76, -116, -123,
        13, 10, // 12
        48, 55, 45, 49, 55, 45, 49, 51, 32, 32, 48, 50, 58, 49, 50, 80, 77,
        32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 49, 51, 32,
        -114, -107, -111, -123, -108, 94, -104, 82,
        13, 10, //13
        48, 55, 45, 48, 51, 45, 49, 51, 32, 32, 48, 50, 58, 51, 53, 80, 77,
        32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 49, 52, 32,
        -112, -123, -117, -101, -126, -52, -116, -16, -126, -19, -126, -24, 46, 116, 120, 116,
        13, 10, // 14
        48, 55, 45, 49, 55, 45, 49, 51, 32, 32, 48, 50, 58, 49, 50, 80, 77,
        32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 49, 53, 32,
        -114, -123, -117, -101, -112, -20,
        13, 10, //15
        48, 55, 45, 49, 55, 45, 49, 51, 32, 32, 48, 49, 58, 52, 57, 80, 77,
        32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 32, 49, 54, 32,
        -107, -94, -112, -123, -106, 126, -126, -55, -107, -44, -126, -25, -126, -72, 46, 116, 120, 116,
        13, 10 // 16
    };
    // Number of directory entries encoded in listFilesByteTrace.
    private static final int LISTFILE_COUNT = 16;
    /**
     * @see junit.framework.TestCase#TestCase(String)
     */
    public NTFTPEntryParserTest (final String name)
    {
        super(name);
    }
    // Every good DOS-style line containing <DIR> must parse as a directory.
    @Override
    protected void doAdditionalGoodTests(final String test, final FTPFile f)
    {
        if (test.indexOf("<DIR>") >= 0)
        {
            assertEquals("directory.type",
                FTPFile.DIRECTORY_TYPE, f.getType());
        }
    }
    /**
     * @see org.apache.commons.net.ftp.parser.CompositeFTPParseTestFramework#getBadListings()
     */
    @Override
    protected String[][] getBadListings()
    {
        return badsamples;
    }
    /**
     * @see org.apache.commons.net.ftp.parser.CompositeFTPParseTestFramework#getGoodListings()
     */
    @Override
    protected String[][] getGoodListings()
    {
        return goodsamples;
    }
    /**
     * @see org.apache.commons.net.ftp.parser.FTPParseTestFramework#getParser()
     */
    @Override
    protected FTPFileEntryParser getParser()
    {
        return new CompositeFileEntryParser(new FTPFileEntryParser[]
        {
            new NTFTPEntryParser(),
            new UnixFTPEntryParser()
        });
    }
    // Both 4-digit-year and 24-hour NT formats carry minute precision.
    @Override
    public void testDefaultPrecision() {
        testPrecision("05-26-1995  10:57AM               143712 $LDR$", CalendarUnit.MINUTE);
        testPrecision("05-22-97  08:08                    828 AUTOEXEC.BAK", CalendarUnit.MINUTE);
    }
    /*
     * test condition reported as bug 20259 - now NET-106.
     * directory with name beginning with a numeric character
     * was not parsing correctly
     */
    public void testDirectoryBeginningWithNumber()
    {
        final FTPFile f = getParser().parseFTPEntry(directoryBeginningWithNumber);
        assertEquals("name", "123xyz", f.getName());
    }
    // Names that start with digits may also contain embedded spaces.
    public void testDirectoryBeginningWithNumberFollowedBySpaces()
    {
        FTPFile f = getParser().parseFTPEntry("12-03-96  06:38AM       <DIR>          123 xyz");
        assertEquals("name", "123 xyz", f.getName());
        f = getParser().parseFTPEntry("12-03-96  06:38AM       <DIR>          123 abc xyz");
        assertNotNull(f);
        assertEquals("name", "123 abc xyz", f.getName());
    }
    /*
     * Test that group names with embedded spaces can be handled correctly
     *
     */
    public void testGroupNameWithSpaces() {
        final FTPFile f = getParser().parseFTPEntry("drwx------ 4 maxm Domain Users 512 Oct 2 10:59 .metadata");
        assertNotNull(f);
        assertEquals("maxm", f.getUser());
        assertEquals("Domain Users", f.getGroup());
    }
    // NET-339: 24-hour timestamps plus names with leading digits and spaces.
    public void testNET339() {
        final FTPFile file = getParser().parseFTPEntry("05-22-97  12:08       5000000000 10 years and under");
        assertNotNull("Could not parse entry", file);
        assertEquals("10 years and under", file.getName());
        assertEquals(5000000000L, file.getSize());
        Calendar timestamp = file.getTimestamp();
        assertNotNull("Could not parse time",timestamp);
        assertEquals("Thu May 22 12:08:00 1997",df.format(timestamp.getTime()));
        final FTPFile dir = getParser().parseFTPEntry("12-03-96  06:38       <DIR>          10 years and under");
        assertNotNull("Could not parse entry", dir);
        assertEquals("10 years and under", dir.getName());
        timestamp = dir.getTimestamp();
        assertNotNull("Could not parse time",timestamp);
        assertEquals("Tue Dec 03 06:38:00 1996",df.format(timestamp.getTime()));
    }
    public void testNET516() throws Exception { // problem where part of a multi-byte char gets converted to 0x85 = line term
        final int utf = testNET516("UTF-8");
        assertEquals(LISTFILE_COUNT, utf);
        final int ascii = testNET516("ASCII");
        assertEquals(LISTFILE_COUNT, ascii);
        final int iso8859_1 = testNET516("ISO-8859-1");
        assertEquals(LISTFILE_COUNT, iso8859_1);
    }
    // Parses the byte trace with the given control encoding and returns the
    // number of entries recovered; must always equal LISTFILE_COUNT.
    private int testNET516(final String charset) throws Exception {
        final FTPFileEntryParser parser = new NTFTPEntryParser();
        final FTPListParseEngine engine = new FTPListParseEngine(parser );
        engine.readServerList(new ByteArrayInputStream(listFilesByteTrace),charset);
        final FTPFile[] ftpfiles = engine.getFiles();
        return ftpfiles.length;
    }
    /**
     * @see org.apache.commons.net.ftp.parser.FTPParseTestFramework#testParseFieldsOnDirectory()
     */
    @Override
    public void testParseFieldsOnDirectory() throws Exception
    {
        FTPFile dir = getParser().parseFTPEntry("12-05-96  05:03PM       <DIR>          absoft2");
        assertNotNull("Could not parse entry.", dir);
        assertEquals("Thu Dec 05 17:03:00 1996",
            df.format(dir.getTimestamp().getTime()));
        assertTrue("Should have been a directory.",
            dir.isDirectory());
        assertEquals("absoft2", dir.getName());
        assertEquals(0, dir.getSize());
        dir = getParser().parseFTPEntry("12-03-96  06:38AM       <DIR>          123456");
        assertNotNull("Could not parse entry.", dir);
        assertTrue("Should have been a directory.",
            dir.isDirectory());
        assertEquals("123456", dir.getName());
        assertEquals(0, dir.getSize());
    }
    /**
     * @see org.apache.commons.net.ftp.parser.FTPParseTestFramework#testParseFieldsOnFile()
     */
    @Override
    public void testParseFieldsOnFile() throws Exception
    {
        FTPFile f = getParser().parseFTPEntry("05-22-97  12:08AM         5000000000 AUTOEXEC.BAK");
        assertNotNull("Could not parse entry.", f);
        assertEquals("Thu May 22 00:08:00 1997",
            df.format(f.getTimestamp().getTime()));
        assertTrue("Should have been a file.",
            f.isFile());
        assertEquals("AUTOEXEC.BAK", f.getName());
        assertEquals(5000000000L, f.getSize());
        // test an NT-unix style listing that does NOT have a leading zero
        // on the hour.
        f = getParser().parseFTPEntry(
            "-rw-rw-r--   1 mqm       mqm          17707 Mar 12  3:33 killmq.sh.log");
        assertNotNull("Could not parse entry.", f);
        final Calendar cal = Calendar.getInstance();
        cal.setTime(f.getTimestamp().getTime());
        assertEquals("hour", 3, cal.get(Calendar.HOUR));
        assertTrue("Should have been a file.",
            f.isFile());
        assertEquals(17707, f.getSize());
    }
    // Leading digits in names must not be mistaken for size/date fields.
    public void testParseLeadingDigits() {
        final FTPFile file = getParser().parseFTPEntry("05-22-97  12:08AM         5000000000 10 years and under");
        assertNotNull("Could not parse entry", file);
        assertEquals("10 years and under", file.getName());
        assertEquals(5000000000L, file.getSize());
        Calendar timestamp = file.getTimestamp();
        assertNotNull("Could not parse time",timestamp);
        assertEquals("Thu May 22 00:08:00 1997",df.format(timestamp.getTime()));
        final FTPFile dir = getParser().parseFTPEntry("12-03-96  06:38PM       <DIR>          10 years and under");
        assertNotNull("Could not parse entry", dir);
        assertEquals("10 years and under", dir.getName());
        timestamp = dir.getTimestamp();
        assertNotNull("Could not parse time",timestamp);
        assertEquals("Tue Dec 03 18:38:00 1996",df.format(timestamp.getTime()));
    }
    @Override
    public void testRecentPrecision() {
        // Not used
    }
}
|
zenglongGH/spresense | externals/cmsis/CMSIS_5/CMSIS/RTOS2/RTX/Source/rtx_core_c.h | <reponame>zenglongGH/spresense
/*
* Copyright (c) 2013-2018 Arm Limited. All rights reserved.
*
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the License); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an AS IS BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* -----------------------------------------------------------------------------
*
* Project: CMSIS-RTOS RTX
* Title: Cortex Core definitions
*
* -----------------------------------------------------------------------------
*/
#ifndef RTX_CORE_C_H_
#define RTX_CORE_C_H_
//lint -emacro((923,9078),SCB) "cast from unsigned long to pointer" [MISRA Note 9]
#include "RTE_Components.h"
#include CMSIS_device_header
/* Fail the build early if the target is not one of the Arm architectures RTX
 * has a core port for. */
#if ((!defined(__ARM_ARCH_6M__)) && \
     (!defined(__ARM_ARCH_7A__)) && \
     (!defined(__ARM_ARCH_7M__)) && \
     (!defined(__ARM_ARCH_7EM__)) && \
     (!defined(__ARM_ARCH_8M_BASE__)) && \
     (!defined(__ARM_ARCH_8M_MAIN__)))
#error "Unknown Arm Architecture!"
#endif
/* Cortex-A (v7-A) targets use the CA port; all Cortex-M targets share CM. */
#if (defined(__ARM_ARCH_7A__) && (__ARM_ARCH_7A__ != 0))
#include "rtx_core_ca.h"
#else
#include "rtx_core_cm.h"
#endif
#endif  // RTX_CORE_C_H_
|
Alejandro-Fuste/Python_Javascript_ReimbursementApp | page_object_model/home_page.py | <filename>page_object_model/home_page.py<gh_stars>0
from selenium.webdriver.chrome.webdriver import WebDriver
from selenium.webdriver.common.by import By
from selenium.webdriver.remote.webelement import WebElement
from selenium.webdriver.support.select import Select
class HomePage:
    """Page object wrapping the login form on the home page."""

    def __init__(self, driver: WebDriver):
        # Every lookup below goes through this shared WebDriver session.
        self.driver = driver

    def select_username_input(self) -> WebElement:
        """Return the username text input."""
        return self.driver.find_element(By.ID, "userName")

    def select_password_input(self) -> WebElement:
        """Return the password text input."""
        return self.driver.find_element(By.ID, "password")

    def select_role_dropdown(self) -> WebElement:
        """Pick the 'employee' role in the dropdown and return the selected option."""
        dropdown = Select(self.driver.find_element(By.ID, 'role'))
        dropdown.select_by_value('employee')
        return dropdown.first_selected_option

    def select_submit_button(self) -> WebElement:
        """Return the form's submit button."""
        return self.driver.find_element(By.ID, "submitButton")
|
random-logic/AP_CSA | BlueJ/WS11F/Ship.java | <gh_stars>0
/**
 * Immutable pairing of a ship's name with the year it was built.
 * @author Andrew
 * @version 2020
 */
class Ship {
    /** Name of the ship. */
    private final String name;
    /** Year the ship was built. */
    private final int year;
    /**
     * Constructor
     * @param name Name of the ship
     * @param year Year ship is built
     */
    public Ship (String name, int year) {
        this.name = name;
        this.year = year;
    }
    /**
     * @return String representation of this object
     */
    @Override
    public String toString () {
        return new StringBuilder(name).append(": Built in ").append(year).toString();
    }
}
sal562/life-logger-fe | src/components/LogoutButton.js | <gh_stars>1-10
/** @jsx jsx */
import { jsx } from 'theme-ui';
import { Button } from '@theme-ui/components';
const LogoutButton = () => {
const logoutFun = () => {
localStorage.clear();
window.location.href = '/';
};
return <Button onClick={logoutFun}>Logout Here</Button>;
};
export default LogoutButton;
|
LiveUSD/Nova-Exchange | stories/checkbox.stories.js | import React from 'react';
import { storiesOf } from '@storybook/react';
import Checkbox from '../components/Order/OrderMain/OrderState/OrderInitial/Checkbox/Checkbox';
import '../css/index.scss';
// Storybook entries for the order-form checkbox component.
// NOTE(review): the first story passes `name=` while the second passes
// `type=` — this inconsistency looks unintentional; confirm which prop the
// Checkbox component actually reads.
storiesOf('Checkboxes', module)
  .add('Terms and Conditions', () => <Checkbox name="checkboxTC" order='order.iAgreedTC' />)
  .add('Acknowledge ', () => <Checkbox type="checkboxKYC" order='order.iAcknowledgeKYC' />);
|
jeffpuzzo/jp-rosa-react-form-wizard | node_modules/@patternfly/react-tokens/dist/js/c_alert_m_inline_m_success_BackgroundColor.js | "use strict";
exports.__esModule = true;
// Auto-generated PatternFly design token: the background color of inline
// success alerts. Regenerate from the upstream tokens; do not edit by hand.
exports.c_alert_m_inline_m_success_BackgroundColor = {
  "name": "--pf-c-alert--m-inline--m-success--BackgroundColor",
  "value": "#f3faf2",
  "var": "var(--pf-c-alert--m-inline--m-success--BackgroundColor)"
};
exports["default"] = exports.c_alert_m_inline_m_success_BackgroundColor; |
hundanLi/guli-mall | gulimall-common/src/main/java/com/hundanli/common/utils/Utils.java | package com.hundanli.common.utils;
/**
* @author li
* @version 1.0
* @date 2020-05-22 14:36
**/
public class Utils {
    /**
     * Reports whether every character of the given string is a digit, as
     * defined by {@link Character#isDigit(char)} (so non-ASCII Unicode digits
     * are accepted too).
     *
     * @param str candidate string; may be null
     * @return false for null, empty, or whitespace-only input and for any
     *         string containing a non-digit character; true otherwise
     */
    public static boolean isNumeric(String str) {
        if (str == null || "".equals(str.trim())) {
            return false;
        }
        for (char ch : str.toCharArray()) {
            if (!Character.isDigit(ch)) {
                return false;
            }
        }
        return true;
    }
}
|
Mortimyrrh/Mycelia-Forge | build/tmp/expandedArchives/forge-1.16.5-36.1.0_mapped_official_1.16.5-sources.jar_01fb3b8234f72f7172716347a075bc60/net/minecraft/resources/ServerPackFinder.java | package net.minecraft.resources;
import java.util.function.Consumer;
public class ServerPackFinder implements IPackFinder {
    // The built-in "minecraft" vanilla pack, shared across every loadPacks call.
    private final VanillaPack vanillaPack = new VanillaPack("minecraft");
    /**
     * Offers the vanilla pack to the consumer at bottom priority with the
     * built-in name decorator. ResourcePackInfo.create may return null, in
     * which case nothing is offered.
     */
    public void loadPacks(Consumer<ResourcePackInfo> p_230230_1_, ResourcePackInfo.IFactory p_230230_2_) {
        ResourcePackInfo resourcepackinfo = ResourcePackInfo.create("vanilla", false, () -> {
            return this.vanillaPack;
        }, p_230230_2_, ResourcePackInfo.Priority.BOTTOM, IPackNameDecorator.BUILT_IN);
        if (resourcepackinfo != null) {
            p_230230_1_.accept(resourcepackinfo);
        }
    }
}
|
aurbit/strategy-game | packages/truffle/constants/earth-coords.js | <filename>packages/truffle/constants/earth-coords.js
module.exports = [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
31,
128,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
63,
239,
0,
0,
3,
0,
0,
0,
0,
0,
0,
0,
0,
15,
127,
255,
128,
1,
129,
192,
28,
0,
0,
0,
0,
0,
0,
51,
255,
255,
192,
7,
128,
112,
16,
0,
0,
0,
0,
1,
0,
63,
255,
255,
192,
192,
0,
60,
5,
3,
240,
0,
0,
31,
0,
219,
247,
255,
129,
224,
0,
126,
99,
255,
248,
0,
0,
31,
193,
223,
227,
255,
129,
192,
48,
255,
255,
255,
248,
0,
0,
127,
254,
246,
224,
255,
128,
0,
65,
255,
255,
255,
240,
0,
1,
127,
255,
247,
224,
255,
128,
0,
71,
255,
255,
255,
248,
0,
0,
127,
255,
62,
241,
255,
0,
0,
71,
255,
255,
255,
240,
0,
0,
255,
255,
255,
241,
255,
0,
32,
63,
255,
255,
255,
224,
0,
0,
255,
255,
255,
112,
254,
0,
124,
255,
255,
255,
254,
192,
0,
0,
127,
255,
255,
125,
240,
0,
254,
255,
255,
255,
254,
192,
0,
0,
227,
255,
251,
177,
225,
128,
253,
255,
255,
255,
240,
224,
0,
3,
1,
255,
225,
241,
195,
129,
255,
255,
255,
255,
240,
224,
0,
96,
1,
255,
224,
240,
192,
3,
191,
255,
255,
255,
240,
96,
0,
0,
1,
255,
240,
240,
0,
3,
223,
255,
255,
255,
248,
32,
0,
0,
0,
255,
253,
240,
0,
57,
255,
255,
255,
255,
252,
0,
0,
0,
0,
255,
255,
248,
0,
57,
255,
255,
255,
255,
254,
0,
0,
0,
0,
255,
255,
248,
0,
63,
255,
255,
255,
255,
254,
64,
0,
0,
0,
255,
255,
220,
0,
31,
255,
255,
255,
255,
248,
128,
0,
0,
0,
255,
255,
224,
0,
15,
255,
255,
255,
255,
241,
128,
0,
0,
0,
255,
255,
0,
0,
62,
248,
255,
255,
255,
249,
128,
0,
0,
0,
255,
254,
0,
0,
57,
127,
255,
255,
255,
219,
0,
0,
0,
0,
255,
252,
0,
0,
56,
223,
255,
255,
255,
150,
0,
0,
0,
0,
127,
248,
0,
0,
31,
11,
255,
255,
255,
200,
0,
0,
0,
0,
127,
240,
0,
0,
63,
255,
255,
255,
255,
224,
0,
0,
0,
0,
62,
16,
0,
0,
255,
255,
255,
255,
255,
200,
0,
0,
0,
0,
62,
0,
0,
0,
255,
255,
249,
255,
255,
160,
0,
0,
0,
0,
14,
94,
0,
0,
255,
255,
126,
63,
255,
0,
0,
0,
0,
0,
15,
197,
128,
0,
255,
255,
254,
14,
60,
32,
0,
0,
0,
0,
0,
224,
0,
1,
255,
255,
184,
12,
30,
32,
0,
0,
0,
0,
0,
55,
192,
0,
255,
255,
248,
4,
6,
24,
0,
0,
0,
0,
0,
31,
224,
0,
127,
255,
248,
2,
40,
76,
0,
0,
0,
0,
0,
15,
248,
0,
1,
255,
240,
0,
24,
232,
0,
0,
0,
0,
0,
15,
252,
0,
0,
255,
192,
0,
13,
244,
0,
0,
0,
0,
0,
31,
255,
128,
0,
255,
192,
0,
14,
125,
216,
0,
0,
0,
0,
31,
255,
192,
0,
255,
128,
0,
3,
48,
252,
0,
0,
0,
0,
15,
255,
192,
0,
127,
128,
0,
0,
24,
27,
0,
0,
0,
0,
7,
255,
128,
0,
255,
136,
0,
0,
3,
64,
0,
0,
0,
0,
7,
255,
128,
0,
255,
152,
0,
0,
15,
96,
64,
0,
0,
0,
1,
255,
128,
0,
127,
56,
0,
0,
31,
224,
8,
0,
0,
0,
1,
255,
0,
0,
127,
48,
0,
0,
127,
240,
192,
0,
0,
0,
1,
252,
0,
0,
126,
0,
0,
0,
255,
240,
0,
0,
0,
0,
1,
252,
0,
0,
62,
0,
0,
0,
255,
248,
0,
0,
0,
0,
1,
248,
0,
0,
56,
0,
0,
0,
255,
240,
0,
0,
0,
0,
1,
240,
0,
0,
0,
0,
0,
0,
99,
240,
0,
0,
0,
0,
1,
224,
0,
0,
0,
0,
0,
0,
3,
224,
0,
0,
0,
0,
1,
192,
0,
0,
0,
0,
0,
0,
1,
224,
32,
0,
0,
0,
1,
192,
0,
0,
0,
0,
0,
0,
0,
0,
32,
0,
0,
0,
1,
192,
0,
0,
0,
0,
0,
0,
0,
192,
192,
0,
0,
0,
1,
192,
0,
0,
0,
0,
0,
0,
0,
1,
128,
0,
0,
0,
1,
208,
0,
0,
0,
0,
0,
0,
0,
3,
0,
0,
0,
0,
0,
224,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
]
|
Azn9/ToornamentClient | src/main/java/com/brentonpoke/toornamentclient/model/request/WebhookQuery.java | <gh_stars>0
package com.brentonpoke.toornamentclient.model.request;
import lombok.Builder;
import lombok.Getter;
/**
 * Request payload describing a Toornament webhook subscription. Lombok
 * generates the getters and the builder.
 */
@Getter
@Builder
public class WebhookQuery {
    // Whether the webhook should be active.
    Boolean enabled;
    // Delivery URL the webhook posts to, and its display name.
    String url, name;
}
|
mcmaxwell/frontier | frontier/content/urls.py | from django.conf.urls import url
from django.contrib import admin
from . import views
# JSON API routes for the content app.
urlpatterns = [
    # GET /api/pages/ -> pages serialized as JSON (handled by views.pages_json).
    url(r'^api/pages/$', views.pages_json, name='api-pages'),
]
|
isdidum/didum-gitrepo | java-experiment/iexperiment/src/pinterface/ComparableA.java | <filename>java-experiment/iexperiment/src/pinterface/ComparableA.java
package pinterface;
/**
 * @author <NAME>
 * @date February 26, 2013
 * @description Placeholder for a comparable-style experiment; no methods are
 *              defined yet (work in progress).
 */
public interface ComparableA {
}
|
nnn3d/vk-bot | classes/db/UserStatistics.js | <filename>classes/db/UserStatistics.js
let mongoose = require('mongoose');
// All activity counters share the same shape: a Number defaulting to 0 that is
// rounded to an integer on both read and write. Factored out to avoid
// repeating the definition eight times.
const roundedCounter = () => ({
    type: Number,
    default: 0,
    get: v => Math.round(v),
    set: v => Math.round(v),
});
// Per-(user, chat) activity statistics document.
let userStatisticsSchema = mongoose.Schema({
    // VK user and chat this row aggregates; unique together (see index below).
    userId: {
        type: Number,
        required: true,
    },
    chatId: {
        type: Number,
        required: true,
    },
    countSymbols: roundedCounter(),
    countMessages: roundedCounter(),
    countStickers: roundedCounter(),
    countForwards: roundedCounter(),
    countAttachments: roundedCounter(),
    countAudio: roundedCounter(),
    countCommands: roundedCounter(),
    // Backing storage for the `lastActivity` virtual; rounded like a counter.
    _lastActivity: roundedCounter(),
});
// Exactly one statistics document per (user, chat) pair.
userStatisticsSchema.index({
    userId: 1,
    chatId: 1,
}, {unique: true});
// `lastActivity` is monotonic: assigning an older timestamp is ignored
// (NaN counts as "unset" and accepts any value).
userStatisticsSchema.virtual('lastActivity')
    .get(function () {
        return this._lastActivity;
    })
    .set(function (time) {
        if (this._lastActivity < time || isNaN(this._lastActivity)) {
            this._lastActivity = time;
        }
    });
let UserStatistics = mongoose.model('UserStatistics', userStatisticsSchema);
module.exports = UserStatistics;
kreiger/capricious | src/main/java/com/linuxgods/kreiger/javafx/SingleOrDoubleClickMouseEventHandler.java | package com.linuxgods.kreiger.javafx;
import io.netty.util.concurrent.DefaultThreadFactory;
import javafx.application.Platform;
import javafx.event.EventHandler;
import javafx.event.EventType;
import javafx.scene.Node;
import javafx.scene.Scene;
import javafx.scene.input.MouseButton;
import javafx.scene.input.MouseEvent;
import java.awt.*;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.function.BiConsumer;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
/**
 * Distinguishes single clicks from double clicks on a JavaFX node or scene.
 * A single click is delivered only after the OS multi-click interval elapses
 * without a second click; a double click cancels the pending single click.
 * Clicks that conclude a drag gesture are swallowed.
 */
public class SingleOrDoubleClickMouseEventHandler {
    // Maximum delay between the two clicks of a double click.
    private final static int MULTI_CLICK_INTERVAL_MILLIS;
    // Shared daemon timer used to defer single-click delivery.
    private final static ScheduledThreadPoolExecutor timer;
    static {
        // awt.multiClickInterval is not guaranteed to exist on every toolkit;
        // fall back to a common default instead of failing class init with an
        // NPE from unboxing null.
        Object interval = Toolkit.getDefaultToolkit().getDesktopProperty("awt.multiClickInterval");
        MULTI_CLICK_INTERVAL_MILLIS = (interval instanceof Integer) ? (Integer) interval : 500;
        timer = new ScheduledThreadPoolExecutor(1, new DefaultThreadFactory(SingleOrDoubleClickMouseEventHandler.class.getName(), true));
        timer.setRemoveOnCancelPolicy(true);
    }
    // Callbacks for confirmed clicks; the defaults do nothing.
    private EventHandler<? super MouseEvent> onSingleClick = singleClick -> {};
    private EventHandler<? super MouseEvent> onDoubleClick = doubleClick -> {};
    // Only touched on the FX application thread (drag and click events).
    private boolean dragging;
    // Written on the FX thread and cleared on the timer thread, so it must be
    // volatile for each side to observe the other's update.
    private volatile Future<?> scheduledSingleClick;
    /** Creates a handler that is not yet attached to any node or scene. */
    public SingleOrDoubleClickMouseEventHandler() {
    }
    /**
     * Creates a handler and immediately registers it.
     * @param addEventHandler callback registering (event type, handler) pairs,
     *                        e.g. {@code node::addEventHandler}
     */
    public SingleOrDoubleClickMouseEventHandler(BiConsumer<EventType, EventHandler> addEventHandler) {
        addTo(addEventHandler);
    }
    /** Registers the drag and click listeners through the given callback. */
    public void addTo(BiConsumer<EventType, EventHandler> addEventHandler) {
        addOnDragDetected(addEventHandler, event -> {
            // A drag produces a trailing click event; remember the drag so that
            // click is swallowed, and cancel any pending single click.
            dragging = true;
            cancelSingleClick();
        });
        addOnMouseClicked(addEventHandler, event -> {
            if (dragging) {
                dragging = false;
                return;
            }
            if (event.getButton() != MouseButton.PRIMARY) {
                return;
            }
            int clickCount = event.getClickCount();
            if (clickCount == 1) {
                // Could still become a double click: defer delivery.
                scheduleSingleClick(event);
            } else if (clickCount == 2) {
                // Fire only if the deferred single click was still pending.
                if (cancelSingleClick()) onDoubleClick(event);
            }
        });
    }
    /** Sets the single-click callback; the event is consumed after delivery. */
    public void setOnSingleClick(EventHandler<? super MouseEvent> onSingleClick) {
        requireNonNull(onSingleClick);
        this.onSingleClick = singleClick -> {
            onSingleClick.handle(singleClick);
            singleClick.consume();
        };
    }
    /** Sets the double-click callback; the event is consumed after delivery. */
    public void setOnDoubleClick(EventHandler<? super MouseEvent> onDoubleClick) {
        requireNonNull(onDoubleClick);
        this.onDoubleClick = doubleClick -> {
            onDoubleClick.handle(doubleClick);
            doubleClick.consume();
        };
    }
    private void scheduleSingleClick(MouseEvent event) {
        scheduledSingleClick = timer.schedule(() -> {
            scheduledSingleClick = null;
            // Hand the event back to the FX application thread: JavaFX event
            // handlers must not run on the timer's worker thread.
            Platform.runLater(() -> onSingleClick(event));
        }, MULTI_CLICK_INTERVAL_MILLIS, MILLISECONDS);
    }
    /**
     * Cancels a pending single click, if any.
     * @return true if a single click was pending and was cancelled
     */
    private boolean cancelSingleClick() {
        Future<?> scheduledSingleClick = this.scheduledSingleClick;
        this.scheduledSingleClick = null;
        return scheduledSingleClick != null && scheduledSingleClick.cancel(false);
    }
    protected void onSingleClick(MouseEvent singleClick) {
        onSingleClick.handle(singleClick);
    }
    protected void onDoubleClick(MouseEvent doubleClick) {
        onDoubleClick.handle(doubleClick);
    }
    /** Attaches a new handler to the given node's mouse events. */
    public static SingleOrDoubleClickMouseEventHandler on(Node node) {
        return new SingleOrDoubleClickMouseEventHandler(node::addEventHandler);
    }
    /** Attaches a new handler to the given scene's mouse events. */
    public static SingleOrDoubleClickMouseEventHandler on(Scene scene) {
        return new SingleOrDoubleClickMouseEventHandler(scene::addEventHandler);
    }
    private void addOnDragDetected(BiConsumer<EventType, EventHandler> addEventHandler, EventHandler<? super MouseEvent> dragDetectedHandler) {
        addEventHandler.accept(MouseEvent.DRAG_DETECTED, dragDetectedHandler);
    }
    private void addOnMouseClicked(BiConsumer<EventType, EventHandler> addEventHandler, EventHandler<? super MouseEvent> mouseClickedHandler) {
        addEventHandler.accept(MouseEvent.MOUSE_CLICKED, mouseClickedHandler);
    }
}
|
frc5024/InfiniteRecharge | src/main/java/frc/lib5k/kinematics/DriveSignal.java | <reponame>frc5024/InfiniteRecharge
package frc.lib5k.kinematics;
/**
 * A collection of differential drivebase motor values
 */
public class DriveSignal {
    // Left and right outputs; unit-less, never clamped or normalized here.
    private double l, r;
    /**
     * Create a DriveSignal from values. These do not have to be direct outputs
     *
     * @param l Left value
     * @param r Right value
     */
    public DriveSignal(double l, double r) {
        this.l = l;
        this.r = r;
    }
    /**
     * Complete a very simple solve from [speed, rotation] to [L, R] vectors. This
     * does not handle smoothing or normalization.
     *
     * @param speed Speed component
     * @param rotation Rotation component
     * @return DriveSignal with L = speed + rotation and R = speed - rotation
     */
    public static DriveSignal fromArcadeInputs(double speed, double rotation) {
        return new DriveSignal((rotation + speed), (speed - rotation));
    }
    /**
     * Get the right value
     *
     * @return Right value
     */
    public double getR() {
        return r;
    }
    /**
     * Set the right value
     *
     * @param r Right value
     */
    public void setR(double r) {
        this.r = r;
    }
    /**
     * Get the left value
     *
     * @return Left value
     */
    public double getL() {
        return l;
    }
    /**
     * Set the left value
     *
     * @param l Left value
     */
    public void setL(double l) {
        this.l = l;
    }
    @Override
    public String toString() {
        // Use a fixed locale so the output always uses "." as the decimal
        // separator, independent of the platform's default locale.
        return String.format(java.util.Locale.ROOT, "<%.2f, %.2f>", l, r);
    }
}
sitegate/sitegate-admin | public/src/modules/user/controllers/user-edit-controller.js | 'use strict';
module.exports = ['$scope', '$stateParams', '$http', 'growl', 'UserService', 'ClientService',
function UserEditController($scope, $stateParams, $http, growl, User, Client) {
$scope.user = User.get({
id: $stateParams.userId
});
$scope.clients = Client.query({
creatorId: $stateParams.userId
});
$scope.updateUser = function () {
User.update({
id: $stateParams.userId
}, $scope.user, function () {
growl.success('Saved!', {
ttl: 3000
});
});
};
$scope.savePassword = function () {
var data = {
newPassword: <PASSWORD>
};
$http
.put('/api/user/' + $stateParams.userId + '/password', data)
.success(function (data, status, headers, config) {
growl.success('Password updated!', {
ttl: 3000
});
})
.error(function (data, status, headers, config) {});
};
}]; |
kaungsgit/Python_DSP | DSP2/model_interleaved_filter.py | import numpy as np
import matplotlib.pyplot as plt
import scipy.signal as sig
import custom_tools.fftplot as fftplot
import custom_tools.handyfuncs as hf
import scipy.fftpack as fft
import DSP2.model_DAC as model_DAC
import DSP2.model_NCO as model_NCO
# IL spur summary, so far...
# Fs of combined IL filter is 10M, aka the sampling rate of the orig signal
# Fs of each filter is 5M because every other sample of the orig signal is routed through the filter
# IL spurs occur at 5M for gain and offset errors
# offset error adds an additional DC spur
Fs = 10e6  # combined (interleaved) sample rate, Hz
Ts = 1 / Fs  # sample period, s
num_sampls = 2 ** 16
x_t = np.arange(0, num_sampls * Ts, Ts)  # time axis for the test signal
f1 = 1e3  # test-tone frequency, Hz
# inputv = 1001 * np.cos(np.linspace(0, 4 * np.pi, 2 ** 16))
inputv = 1 * np.cos(2 * np.pi * f1 * x_t)
# inputv = [0.1, 0.2, 0.3]
# Even-/odd-index sample streams, one per interleaved path. NOTE: these are
# single-pass generators; they are consumed by the filter generators below.
data_gen_e = (y for idx, y in enumerate(inputv) if (idx % 2) == 0)
data_gen_o = (y for idx, y in enumerate(inputv) if (idx % 2) != 0)
def interleaved_filter(input_list):
    """Early attempt at modelling the two-path interleaved filter.

    NOTE(review): dead code (its call site below is commented out) and broken
    as written: ``data_gen`` is undefined (presumably ``data_gen_e`` /
    ``data_gen_o`` were intended), ``filter_out_e``/``filter_out_o`` are
    unbound until the corresponding branch has run at least once, and nothing
    is returned or yielded. Kept for reference; ``interleaved_filter2`` below
    is the working version.
    """
    print(1)
    input_len = len(input_list)
    out_array_e = np.zeros(input_len)
    out_array_o = np.zeros(input_len)
    for idx, val in enumerate(input_list):
        print(idx)
        print(val)
        if (idx % 2) == 0:
            filter_out_e = model_DAC.three_tap_moving_avg_gen(next(data_gen), 12, coeffs=[1, 1, 1])
        else:
            filter_out_o = model_DAC.three_tap_moving_avg_gen(next(data_gen), 12, coeffs=[1, 1, 1])
        out_array_e[idx] = list(filter_out_e)
        out_array_o[idx] = list(filter_out_o)
        # yield filter_out1
# filter_out_IL = interleaved_filter(inputv)
filter_out_e = model_DAC.three_tap_moving_avg_gen(data_gen_e, 12, coeffs=[1.1, 0, 0], offset_err=0)
filter_out_o = model_DAC.three_tap_moving_avg_gen(data_gen_o, 12, coeffs=[1, 0, 0], offset_err=0)
def interleaved_filter2(input_list):
    """Recombine the two branch filters into one output stream.

    Even-indexed positions draw the next sample from the even-branch
    generator, odd-indexed positions from the odd branch.
    """
    for idx, _ in enumerate(input_list):
        branch = filter_out_e if idx % 2 == 0 else filter_out_o
        yield next(branch)
# Run the interleaved filter over the whole input and collect the output.
il_filter_gen = interleaved_filter2(inputv)
result = np.fromiter(il_filter_gen, float)

# Time-domain comparison of the input vs the interleaved-filter output.
plt.figure()
plt.plot(inputv, label='Original Waveform')
plt.plot(result, label='IL Filtered Waveform')
plt.legend()
plt.title('IL Filter Output Waveform')

# Frequency-domain comparison: input spectrum (left) vs output (right).
plt.figure()
plt.subplot(121)
# using customized fft module imported earlier
x, y = fftplot.winfft(inputv, fs=Fs, beta=12)
fftplot.plot_spectrum(x, y, ceil=40)
plt.title('Orig Output Spectrum (Unfiltered)')

plt.subplot(122)
# using customized fft module imported earlier
x, y = fftplot.winfft(result, fs=Fs, beta=12)
fftplot.plot_spectrum(x, y, ceil=40)
plt.title('IL Filter Output Spectrum (Unfiltered)')

plt.show()

pass
|
einride/iam-go | iamtest/fixture.go | <gh_stars>10-100
package iamtest
import (
"context"
"testing"
"go.einride.tech/iam/iampolicy"
"go.einride.tech/iam/iamspanner"
"google.golang.org/genproto/googleapis/iam/v1"
"gotest.tools/v3/assert"
)
// Fixture is a test fixture with helper methods for IAM testing.
type Fixture struct {
	// server is the IAM server under test; every helper delegates to it.
	server *iamspanner.IAMServer
}
// NewFixture creates a new Fixture for the provided iamspanner.IAMServer.
func NewFixture(server *iamspanner.IAMServer) *Fixture {
	fx := Fixture{server: server}
	return &fx
}
// AddPolicyBinding adds the provided policy binding.
func (fx *Fixture) AddPolicyBinding(t *testing.T, resource, role, member string) {
	ctx := withTestDeadline(context.Background(), t)
	// Get current policy.
	policy, err := fx.server.GetIamPolicy(ctx, &iam.GetIamPolicyRequest{
		Resource: resource,
	})
	assert.NilError(t, err)
	// Append the (role, member) binding to the in-memory policy.
	iampolicy.AddBinding(policy, role, member)
	// Set updated policy. Note: read-modify-write without an etag check, so
	// concurrent updates in a test could race -- fine for test fixtures.
	_, err = fx.server.SetIamPolicy(ctx, &iam.SetIamPolicyRequest{
		Resource: resource,
		Policy:   policy,
	})
	assert.NilError(t, err)
}
func withTestDeadline(ctx context.Context, t *testing.T) context.Context {
deadline, ok := t.Deadline()
if !ok {
return ctx
}
ctx, cancel := context.WithDeadline(ctx, deadline)
t.Cleanup(cancel)
return ctx
}
|
rimuln/arquillian-core | core/impl-base/src/test/java/org/jboss/arquillian/core/impl/InstanceImplTestCase.java | <gh_stars>100-1000
/*
* JBoss, Home of Professional Open Source
* Copyright 2010 Red Hat Inc. and/or its affiliates and other contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.arquillian.core.impl;
import org.jboss.arquillian.core.api.Instance;
import org.jboss.arquillian.core.api.InstanceProducer;
import org.jboss.arquillian.core.api.annotation.Observes;
import org.jboss.arquillian.core.spi.ManagerBuilder;
import org.jboss.arquillian.core.test.context.ManagerTestContext;
import org.jboss.arquillian.core.test.context.ManagerTestContextImpl;
import org.jboss.arquillian.core.test.context.ManagerTestScoped;
import org.junit.Assert;
import org.junit.Test;
/**
* InstanceImplTestCase
*
* @author <a href="mailto:<EMAIL>"><NAME></a>
* @version $Revision: $
*/
public class InstanceImplTestCase {

    /**
     * An Instance bound to a scope should resolve to the object stored in the
     * active context for that scope.
     */
    @Test
    public void shouldBeAbleToLookupInContext() throws Exception {
        ManagerImpl manager = (ManagerImpl) ManagerBuilder.from()
            .context(ManagerTestContextImpl.class).create();

        Object testObject = new Object();
        ManagerTestContext context = manager.getContext(ManagerTestContext.class);
        try {
            // The context must be active before its object store is usable.
            context.activate();
            context.getObjectStore().add(Object.class, testObject);

            Instance<Object> instance = InstanceImpl.of(Object.class, ManagerTestScoped.class, manager);
            Assert.assertEquals(
                "Verify expected object was returned",
                testObject, instance.get());
        } finally {
            // Always deactivate and destroy so no scope state leaks between tests.
            context.deactivate();
            context.destroy();
        }
    }

    /**
     * Setting a value through an InstanceProducer should fire an event that
     * registered extensions can observe.
     */
    @Test
    public void shouldFireEventOnSet() throws Exception {
        ManagerImpl manager = (ManagerImpl) ManagerBuilder.from()
            .context(ManagerTestContextImpl.class)
            .extension(TestObserver.class).create();

        ManagerTestContext context = manager.getContext(ManagerTestContext.class);

        try {
            context.activate();

            InstanceProducer<Object> instance = InstanceImpl.of(Object.class, ManagerTestScoped.class, manager);
            instance.set(new Object());

            Assert.assertTrue(manager.getExtension(TestObserver.class).wasCalled);
        } finally {
            context.deactivate();
            context.destroy();
        }
    }

    /**
     * Setting through a producer with no scope is a programming error and
     * must fail fast.
     */
    @Test(expected = IllegalStateException.class)
    public void shouldThrowExceptionIfTryingToSetAUnScopedInstance() throws Exception {
        ManagerImpl manager = (ManagerImpl) ManagerBuilder.from().create();

        InstanceProducer<Object> instance = InstanceImpl.of(Object.class, null, manager);
        instance.set(new Object());
        Assert.fail("Should have thrown " + IllegalStateException.class);
    }

    /** Extension that records whether the set-event was observed. */
    private static class TestObserver {
        private boolean wasCalled = false;

        @SuppressWarnings("unused")
        public void shouldBeCalled(@Observes Object object) {
            Assert.assertNotNull(object);
            wasCalled = true;
        }
    }
}
|
kubernetes-sigs/clife_cluster-api | internal/controllers/topology/cluster/structuredmerge/drop_diff_test.go | <filename>internal/controllers/topology/cluster/structuredmerge/drop_diff_test.go
/*
Copyright 2022 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package structuredmerge
import (
"testing"
. "github.com/onsi/gomega"
"sigs.k8s.io/cluster-api/internal/contract"
)
// Test_dropDiffForNotAllowedPaths verifies that dropDiff, driven by the
// isNotAllowedPath predicate, reverts modified fields outside the allow-list
// to their original values (or drops them entirely when the original has no
// value), and that emptied maps are cleaned up.
func Test_dropDiffForNotAllowedPaths(t *testing.T) {
	tests := []struct {
		name         string
		ctx          *dropDiffInput
		wantModified map[string]interface{}
	}{
		{
			name: "Sets not allowed paths to original value if defined",
			ctx: &dropDiffInput{
				path: contract.Path{},
				original: map[string]interface{}{
					"metadata": map[string]interface{}{
						"name": "foo",
					},
					"status": map[string]interface{}{
						"foo": "123",
					},
				},
				modified: map[string]interface{}{
					"metadata": map[string]interface{}{
						"name": "foo-changed",
						"labels": map[string]interface{}{
							"foo": "123",
						},
						"annotations": map[string]interface{}{
							"foo": "123",
						},
					},
					"spec": map[string]interface{}{
						"foo": "123",
					},
					"status": map[string]interface{}{
						"foo": "123-changed",
					},
				},
				shouldDropDiffFunc: isNotAllowedPath(
					[]contract.Path{ // NOTE: we are dropping everything not in this list (IsNotAllowed)
						{"metadata", "labels"},
						{"metadata", "annotations"},
						{"spec"},
					},
				),
			},
			wantModified: map[string]interface{}{
				"metadata": map[string]interface{}{
					"name": "foo", // metadata.name aligned to original
					"labels": map[string]interface{}{
						"foo": "123",
					},
					"annotations": map[string]interface{}{
						"foo": "123",
					},
				},
				"spec": map[string]interface{}{
					"foo": "123",
				},
				"status": map[string]interface{}{ // status aligned to original
					"foo": "123",
				},
			},
		},
		{
			name: "Drops not allowed paths if they do not exist in original",
			ctx: &dropDiffInput{
				path: contract.Path{},
				original: map[string]interface{}{
					// Original doesn't have values for not allowed paths.
				},
				modified: map[string]interface{}{
					"metadata": map[string]interface{}{
						"name": "foo",
						"labels": map[string]interface{}{
							"foo": "123",
						},
						"annotations": map[string]interface{}{
							"foo": "123",
						},
					},
					"spec": map[string]interface{}{
						"foo": "123",
					},
					"status": map[string]interface{}{
						"foo": "123",
					},
				},
				shouldDropDiffFunc: isNotAllowedPath(
					[]contract.Path{ // NOTE: we are dropping everything not in this list (IsNotAllowed)
						{"metadata", "labels"},
						{"metadata", "annotations"},
						{"spec"},
					},
				),
			},
			wantModified: map[string]interface{}{
				"metadata": map[string]interface{}{
					// metadata.name dropped
					"labels": map[string]interface{}{
						"foo": "123",
					},
					"annotations": map[string]interface{}{
						"foo": "123",
					},
				},
				"spec": map[string]interface{}{
					"foo": "123",
				},
				// status dropped
			},
		},
		{
			name: "Cleanup empty maps",
			ctx: &dropDiffInput{
				path: contract.Path{},
				original: map[string]interface{}{
					// Original doesn't have values for not allowed paths.
				},
				modified: map[string]interface{}{
					"spec": map[string]interface{}{
						"foo": "123",
					},
				},
				shouldDropDiffFunc: isNotAllowedPath(
					[]contract.Path{}, // NOTE: we are dropping everything not in this list (IsNotAllowed)
				),
			},
			wantModified: map[string]interface{}{
				// we are dropping spec.foo and then spec given that it is an empty map
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			g := NewWithT(t)

			// dropDiff mutates tt.ctx.modified in place.
			dropDiff(tt.ctx)
			g.Expect(tt.ctx.modified).To(Equal(tt.wantModified))
		})
	}
}
// Test_dropDiffForIgnoredPaths verifies that dropDiff, driven by the
// isIgnorePath predicate, reverts changes under explicitly ignored paths to
// the original values (or drops them when the original has no value), and
// cleans up maps that become empty as a result.
func Test_dropDiffForIgnoredPaths(t *testing.T) {
	tests := []struct {
		name         string
		ctx          *dropDiffInput
		wantModified map[string]interface{}
	}{
		{
			name: "Sets ignored paths to original value if defined",
			ctx: &dropDiffInput{
				path: contract.Path{},
				original: map[string]interface{}{
					"spec": map[string]interface{}{
						"foo": "bar",
						"controlPlaneEndpoint": map[string]interface{}{
							"host": "foo",
							"port": "123",
						},
					},
				},
				modified: map[string]interface{}{
					"spec": map[string]interface{}{
						"foo": "bar",
						"controlPlaneEndpoint": map[string]interface{}{
							"host": "foo-changed",
							"port": "123-changed",
						},
					},
				},
				shouldDropDiffFunc: isIgnorePath(
					[]contract.Path{
						{"spec", "controlPlaneEndpoint"},
					},
				),
			},
			wantModified: map[string]interface{}{
				"spec": map[string]interface{}{
					"foo": "bar",
					"controlPlaneEndpoint": map[string]interface{}{ // spec.controlPlaneEndpoint aligned to original
						"host": "foo",
						"port": "123",
					},
				},
			},
		},
		{
			name: "Drops ignore paths if they do not exist in original",
			ctx: &dropDiffInput{
				path: contract.Path{},
				original: map[string]interface{}{
					// Original doesn't have values for ignore paths.
				},
				modified: map[string]interface{}{
					"spec": map[string]interface{}{
						"foo": "bar",
						"controlPlaneEndpoint": map[string]interface{}{
							"host": "foo-changed",
							"port": "123-changed",
						},
					},
				},
				shouldDropDiffFunc: isIgnorePath(
					[]contract.Path{
						{"spec", "controlPlaneEndpoint"},
					},
				),
			},
			wantModified: map[string]interface{}{
				"spec": map[string]interface{}{
					"foo": "bar",
					// spec.controlPlaneEndpoint dropped
				},
			},
		},
		{
			name: "Cleanup empty maps",
			ctx: &dropDiffInput{
				path: contract.Path{},
				original: map[string]interface{}{
					// Original doesn't have values for not allowed paths.
				},
				modified: map[string]interface{}{
					"spec": map[string]interface{}{
						"foo": "123",
					},
				},
				shouldDropDiffFunc: isIgnorePath(
					[]contract.Path{
						{"spec", "foo"},
					},
				),
			},
			wantModified: map[string]interface{}{
				// we are dropping spec.foo and then spec given that it is an empty map
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			g := NewWithT(t)

			// dropDiff mutates tt.ctx.modified in place.
			dropDiff(tt.ctx)
			g.Expect(tt.ctx.modified).To(Equal(tt.wantModified))
		})
	}
}
|
guatek/Dual-Mag-Control | include/SystemTrigger.h | <reponame>guatek/Dual-Mag-Control
#ifndef _SYSTEMTRIGGER
#define _SYSTEMTRIGGER
#include <Adafruit_ZeroTimer.h>
#define FLASH_DELAY_OFFSET 3
#define MIN_FLASH_DURATION 1
// Flash Triggers
// Hardware timer/counter instances: TC3 drives the high-mag trigger, TC5 the
// low-mag trigger.
Adafruit_ZeroTimer highMagTimer = Adafruit_ZeroTimer(3);
Adafruit_ZeroTimer lowMagTimer = Adafruit_ZeroTimer(5);

// Sensor Polling
// TC4 paces the periodic sensor polling callback.
Adafruit_ZeroTimer pollingTimer = Adafruit_ZeroTimer(4);

//define the interrupt handlers
// Each TCx IRQ is forwarded to the Adafruit_ZeroTimer dispatcher for that
// timer number.
void TC3_Handler(){
    Adafruit_ZeroTimer::timerHandler(3);
}
void TC4_Handler(){
    Adafruit_ZeroTimer::timerHandler(4);
}
void TC5_Handler(){
    Adafruit_ZeroTimer::timerHandler(5);
}

// Forward declarations for the per-camera trigger callbacks (defined
// elsewhere in the firmware).
void HighMagCallback();
void LowMagCallback();
void configTimer(float freq, uint16_t * divider, uint16_t * compare, tc_clock_prescaler * prescaler) {
// Set up the flexible divider/compare
//uint8_t divider = 1;
//uint16_t compare = 0;
*prescaler = TC_CLOCK_PRESCALER_DIV1;
if ((freq < 24000000) && (freq > 800)) {
*divider = 1;
*prescaler = TC_CLOCK_PRESCALER_DIV1;
*compare = 48000000/freq;
} else if (freq > 400) {
*divider = 2;
*prescaler = TC_CLOCK_PRESCALER_DIV2;
*compare = (48000000/2)/freq;
} else if (freq > 200) {
*divider = 4;
*prescaler = TC_CLOCK_PRESCALER_DIV4;
*compare = (48000000/4)/freq;
} else if (freq > 100) {
*divider = 8;
*prescaler = TC_CLOCK_PRESCALER_DIV8;
*compare = (48000000/8)/freq;
} else if (freq > 50) {
*divider = 16;
*prescaler = TC_CLOCK_PRESCALER_DIV16;
*compare = (48000000/16)/freq;
} else if (freq > 12) {
*divider = 64;
*prescaler = TC_CLOCK_PRESCALER_DIV64;
*compare = (48000000/64)/freq;
} else if (freq > 3) {
*divider = 256;
*prescaler = TC_CLOCK_PRESCALER_DIV256;
*compare = (48000000/256)/freq;
} else if (freq >= 0.75) {
*divider = 1024;
*prescaler = TC_CLOCK_PRESCALER_DIV1024;
*compare = (48000000/1024)/freq;
} else {
DEBUGPORT.println("Invalid frequency");
}
DEBUGPORT.print("Divider:"); Serial.println(*divider);
DEBUGPORT.print("Compare:"); Serial.println(*compare);
DEBUGPORT.print("Final freq:"); Serial.println((int)(48000000/(*compare)));
}
void configTriggers(float freq) {
pinMode(HIGH_MAG_CAM_TRIG,OUTPUT);
pinMode(LOW_MAG_CAM_TRIG,OUTPUT);
pinMode(HIGH_MAG_STROBE_TRIG,OUTPUT);
pinMode(LOW_MAG_STROBE_TRIG,OUTPUT);
pinMode(FLASH_TYPE_PIN,OUTPUT);
Serial.println("Trigger Configuration");
Serial.print("Desired freq (Hz):");
Serial.println(freq);
uint16_t divider = 1;
uint16_t compare = 0;
tc_clock_prescaler prescaler = TC_CLOCK_PRESCALER_DIV1;
configTimer(freq, ÷r, &compare, &prescaler);
highMagTimer.enable(false);
highMagTimer.configure(prescaler, // prescaler
TC_COUNTER_SIZE_16BIT, // bit width of timer/counter
TC_WAVE_GENERATION_MATCH_PWM // frequency or PWM mode
);
highMagTimer.setCompare(0, compare);
highMagTimer.setCallback(true, TC_CALLBACK_CC_CHANNEL0, HighMagCallback);
highMagTimer.enable(true);
configTimer(freq, ÷r, &compare, &prescaler);
lowMagTimer.enable(false);
lowMagTimer.configure(prescaler, // prescaler
TC_COUNTER_SIZE_16BIT, // bit width of timer/counter
TC_WAVE_GENERATION_MATCH_PWM // frequency or PWM mode
);
lowMagTimer.setCompare(0, compare);
lowMagTimer.setCallback(true, TC_CALLBACK_CC_CHANNEL0, LowMagCallback);
lowMagTimer.enable(true);
}
void configPolling(float freq, void (*callback)()) {
uint16_t divider = 1;
uint16_t compare = 0;
tc_clock_prescaler prescaler = TC_CLOCK_PRESCALER_DIV1;
configTimer(freq, ÷r, &compare, &prescaler);
pollingTimer.enable(false);
pollingTimer.configure(prescaler, // prescaler
TC_COUNTER_SIZE_8BIT, // bit width of timer/counter
TC_WAVE_GENERATION_MATCH_PWM // frequency or PWM mode
);
pollingTimer.setCompare(0, compare);
pollingTimer.setCallback(true, TC_CALLBACK_CC_CHANNEL0, callback);
pollingTimer.enable(true);
}
#endif
|
leadpony/johnzon | johnzon-core/src/main/java/org/apache/johnzon/core/JohnzonJsonParser.java | <filename>johnzon-core/src/main/java/org/apache/johnzon/core/JohnzonJsonParser.java
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.johnzon.core;
import java.math.BigDecimal;
import java.util.Map;
import java.util.stream.Stream;
import javax.json.JsonArray;
import javax.json.JsonObject;
import javax.json.JsonValue;
import javax.json.stream.JsonLocation;
import javax.json.stream.JsonParser;
/**
* JsonParser with extended functionality
*/
public interface JohnzonJsonParser extends JsonParser {

    /**
     * NOTE(review): presumably reports whether the parsed input is still
     * within a configured size limit (the wrapper always answers true) --
     * confirm against the implementing parser.
     */
    boolean isNotTooLong();

    /**
     * @return the _current_ Event. That's the one returned by the previous call to {@link #next()}
     * but without propagating the Event pointer to the next entry.
     */
    default Event current() {
        throw new UnsupportedOperationException("getting the current JsonParser Event is not supported");
    }

    /**
     * Adapter exposing any plain {@link JsonParser} through the
     * {@link JohnzonJsonParser} interface by delegating every call
     * unchanged to the wrapped parser.
     */
    public static class JohnzonJsonParserWrapper implements JohnzonJsonParser {
        private final JsonParser jsonParser;

        public JohnzonJsonParserWrapper(JsonParser jsonParser) {
            this.jsonParser = jsonParser;
        }

        @Override
        public boolean isNotTooLong() {
            // A foreign parser has no Johnzon size limit to exceed.
            return true;
        }

        @Override
        public boolean hasNext() {
            return jsonParser.hasNext();
        }

        @Override
        public Event next() {
            return jsonParser.next();
        }

        @Override
        public String getString() {
            return jsonParser.getString();
        }

        @Override
        public boolean isIntegralNumber() {
            return jsonParser.isIntegralNumber();
        }

        @Override
        public int getInt() {
            return jsonParser.getInt();
        }

        @Override
        public long getLong() {
            return jsonParser.getLong();
        }

        @Override
        public BigDecimal getBigDecimal() {
            return jsonParser.getBigDecimal();
        }

        @Override
        public JsonLocation getLocation() {
            return jsonParser.getLocation();
        }

        @Override
        public void close() {
            jsonParser.close();
        }

        @Override
        public JsonObject getObject() {
            return jsonParser.getObject();
        }

        @Override
        public JsonValue getValue() {
            return jsonParser.getValue();
        }

        @Override
        public JsonArray getArray() {
            return jsonParser.getArray();
        }

        @Override
        public Stream<JsonValue> getArrayStream() {
            return jsonParser.getArrayStream();
        }

        @Override
        public Stream<Map.Entry<String, JsonValue>> getObjectStream() {
            return jsonParser.getObjectStream();
        }

        @Override
        public Stream<JsonValue> getValueStream() {
            return jsonParser.getValueStream();
        }

        @Override
        public void skipArray() {
            jsonParser.skipArray();
        }

        @Override
        public void skipObject() {
            jsonParser.skipObject();
        }
    }
}
|
wujifengcn/binary-proto | binary-proto-java/src/test/java/test/com/jd/binaryproto/contract/AddressOrderDuplicate.java | package test.com.jd.binaryproto.contract;
import com.jd.binaryproto.DataContract;
import com.jd.binaryproto.DataField;
import com.jd.binaryproto.PrimitiveType;
/**
* Created by zhangshuang3 on 2018/7/9.
*/
@DataContract(code=0x03, name="Address" , description="")
public interface AddressOrderDuplicate {

    // NOTE: both fields deliberately share order=1 -- per the type name, this
    // fixture exercises the contract validator's duplicate-order detection.
    @DataField(order=1, primitiveType= PrimitiveType.TEXT)
    String getStreet();

    @DataField(order=1, primitiveType=PrimitiveType.INT32)
    int getNumber();
}
|
adobley/cloud_controller_ng | app/actions/revision_create.rb | <reponame>adobley/cloud_controller_ng<gh_stars>0
module VCAP::CloudController
  # Creates app revisions with a version counter that wraps back to 1 after
  # reaching 9999.
  class RevisionCreate
    class << self
      # Creates the next revision for +app+ inside a transaction. If a
      # revision with the computed version already exists (possible once the
      # counter has wrapped), it is destroyed and replaced.
      def create(app)
        RevisionModel.db.transaction do
          next_version = calculate_next_version(app)
          if (existing_revision_for_version = RevisionModel.find(app: app, version: next_version))
            existing_revision_for_version.destroy
          end

          RevisionModel.create(app: app, version: next_version)
        end
      end

      private

      # Latest revision's version + 1; returns 1 when there is no previous
      # revision or the counter would exceed 9999.
      def calculate_next_version(app)
        previous_revision = RevisionModel.where(app: app).reverse(:created_at).first
        return 1 if previous_revision.nil? || previous_revision.version >= 9999

        previous_revision.version + 1
      end
    end
  end
end
|
vwalsh/jsbattle | packages/jsbattle-server/app/services/leagueScheduler/actions/storeBattleResults.js | <reponame>vwalsh/jsbattle<filename>packages/jsbattle-server/app/services/leagueScheduler/actions/storeBattleResults.js
module.exports = async function(ctx) {
let refData = ctx.params.refData;
let ubd = JSON.stringify(ctx.params.ubd);
let teamList = ctx.params.teamList;
if(!teamList || teamList.length != 2) {
throw new Error('teamList must have exactly 2 elements');
}
let winner = teamList.reduce((best, current) => {
if(current.score > best.score) {
return current;
} else {
return best;
}
}, teamList[0]);
let loser = teamList.reduce((worst, current) => {
if(current.score < worst.score) {
return current;
} else {
return worst;
}
}, teamList[0]);
if(winner == loser) {
winner = null;
}
teamList = teamList.map((team) => {
if(!refData || !refData[team.name]) {
throw new Error('no team mapping in refData for: ' + team.name);
}
return {
id: refData[team.name],
name: team.name,
battleScore: team.score,
winner: team == winner
}
});
this.logger.info('Battle result: ' + teamList.map((t) => `${t.name} (${t.battleScore.toFixed(2)})`).join(' vs '))
let description = teamList.map((t) => t.name).join(' vs ').substring(0, 128);
await ctx.call('battleStore.create', {
ubd: ubd,
expiresIn: this.settings.historyDuration,
description: description,
meta: teamList,
owner: Object.values(refData)
});
let updateCalls = teamList.map((team) => new Promise(async (resolve) => {
try {
await ctx.call('league.updateRank', {
id: team.id,
name: team.name,
battleScore: team.battleScore,
winner: team.winner
});
} catch (err) {
this.logger.warn('Unable to store battle results of ' + team.name + ': ' + err.message);
}
resolve();
}));
await Promise.all(updateCalls)
ctx.broadcast('client.league.battleCompleted', {});
}
|
wytalw/datashops | datashops-model/src/main/java/com/bigdata/datashops/model/vo/VoJobResult.java | package com.bigdata.datashops.model.vo;
import lombok.AllArgsConstructor;
import lombok.Builder;
import lombok.Data;
import lombok.NoArgsConstructor;
@Data
@Builder
@NoArgsConstructor
@AllArgsConstructor
public class VoJobResult {
    // private String time;
    //
    // private String type;

    // Result payload returned to the caller; declared as Object so any value
    // type can be carried.
    private Object value;
}
|
codedsk/hubcheck | bin/contribtool_upload.py | <reponame>codedsk/hubcheck<gh_stars>1-10
#! /usr/bin/env python
#
# usage:
# ./contribtool_upload [options]
#
# options:
# --config <hub-config>
#
# examples:
# ./contribtool_upload \
# --config hub.conf \
# workspace.json
import hubcheck
import json
import os
# define global data files
class ContribtoolUploadTool(hubcheck.Tool):
    """Command-line tool that uploads a tool's source files through the hub
    contribtool pages, driving a recorded browser session as the tool
    submitter account."""

    def __init__(self, logfile='hcutils.log', loglevel='INFO'):
        super(ContribtoolUploadTool, self).__init__(logfile, loglevel)

        # parse command line and config file options
        self.parse_options()

        # start logging
        self.start_logging()

    def command(self):
        """Perform the upload described by the tool config file given as the
        single positional argument.

        Raises RuntimeError when the argument count is wrong; re-raises any
        failure from the browser session after taking a screenshot.
        """
        if (len(self.options.remainder) != 1):
            raise RuntimeError("Wrong # arguments, use --help for help: %s"
                % (self.options.remainder))

        # parse the tool configuration
        with open(self.options.remainder[0], "r") as f:
            toolconfig = json.load(f)

        locators = self.testdata.get_locators()
        hostname = self.testdata.find_url_for('https')
        url = "https://%s" % (hostname)
        hc = hubcheck.Hubcheck(hostname=hostname, locators=locators)

        self.start_recording_xvfb('contribtool_upload.mp4')

        username, userpass = self.testdata.find_account_for('toolsubmitter')
        toolname = toolconfig['toolinfo']['name']
        cm = hubcheck.ContainerManager()

        try:
            # launch the browser
            hc.browser.get(url)

            # login to the website as the tool developer
            hc.utils.account.login_as(username, userpass)

            # map config-relative file names to their upload destinations
            data = {}
            for (k, v) in toolconfig['files'].items():
                data.update({os.path.join(hubcheck.config.data_dir, toolname, k): v})

            # upload the source code to the repository
            # and flip the status to uploaded
            hc.utils.contribtool.upload(toolname, data, username, userpass)

            # logout of the website
            hc.utils.account.logout()
        except Exception:
            hc.browser.take_screenshot(
                self.screenshot_filepath('contribtool_upload'))
            raise
        finally:
            # close the browser and cleanup
            hc.browser.close()
            self.stop_recording_xvfb()
            # BUGFIX: shut down all of the tool session containers here in the
            # finally block -- previously this ran after the try/finally and
            # was skipped whenever the except branch re-raised.
            cm.stop_all()
if __name__=='__main__':
    # Script entry point: construct the tool and run its command() method.
    tool = ContribtoolUploadTool()
    tool.run()
|
ThanhAnNguyen2014/TheUltimateMicroServicesTemplate | Stegeriluminacion/excepciones/src/main/java/steger/excepciones/_config/languaje/Translator.java | <gh_stars>1-10
package steger.excepciones._config.languaje;
import java.util.Locale;
import org.springframework.context.i18n.LocaleContextHolder;
import org.springframework.context.support.ResourceBundleMessageSource;
import org.springframework.stereotype.Component;
@Component
public class Translator {
public static String toLocale(String msgCode) {
ResourceBundleMessageSource messageSource = new ResourceBundleMessageSource();
messageSource.setBasename("languaje/messages");
messageSource.setDefaultEncoding("ISO-8859-1");
messageSource.setUseCodeAsDefaultMessage(true);
Locale locale = LocaleContextHolder.getLocale();
return messageSource.getMessage(msgCode, null, locale);
}
} |
PASSIONLab/MaskedSpGEMM | timsort-cpp/include/tim/minrun.h | <filename>timsort-cpp/include/tim/minrun.h
#ifndef TIMSORT_MINRUN_H
#define TIMSORT_MINRUN_H
namespace tim {
namespace internal {
/*
* Modified variant of the compute_minrun() function used in CPython's
* list_sort().
*
* The CPython version of this function chooses a value in [32, 65) for
* minrun. Unlike in CPython, C++ objects aren't guaranteed to be the
* size of a pointer. A heuristic is used here under the assumption
* that std::move(some_arbitrary_cpp_object) is basically a bit-blit.
* If the type is larger that 4 pointers then minrun maxes out at 32
* instead of 64. Similarly, if the type is larger than 8 pointers,
* it maxes out at 16. This is a major win for large objects
* (think tuple-of-strings).
* Four pointers is used as the cut-off because libstdc++'s std::string
* implementation was slightly, but measurably worse in the benchmarks
* when the max minrun was 32 instead of 64 (and their std::string
* is 4 pointers large).
*/
/*
 * Cap on minrun for element type T. Moving large objects is costlier, so the
 * cap shrinks as sizeof(T) grows (see the rationale in the comment above):
 * more than 8 pointers -> 16, more than 4 pointers -> 32, otherwise 64.
 */
template <class T>
static constexpr std::size_t max_minrun() noexcept
{
	return sizeof(T) > (sizeof(void*) * 8) ? 16
	     : sizeof(T) > (sizeof(void*) * 4) ? 32
	     : 64;
}

/*
 * Timsort minrun for an array of length n: shift n down until it is below
 * the per-type cap, OR-ing any dropped set bit into r so the result rounds
 * up when n is not an exact multiple of a power of two.
 */
template <class T>
static constexpr std::size_t compute_minrun(std::size_t n) noexcept
{
	constexpr std::size_t minrun_max = max_minrun<T>();
	std::size_t r = 0;
	for (; n >= minrun_max; n >>= 1)
		r |= (n & 1);
	return (n + r);
}
} /* namespace internal */
} /* namespace tim */
#endif /* TIMSORT_MINRUN_H */
|
bodastage/bts-database | alembic/versions/5b8d91271e45_create_table_for_4g_external_cells.py | <gh_stars>1-10
"""Create table for 4G external cells
Revision ID: <KEY>
Revises: 0588ebb5527a
Create Date: 2018-05-23 07:29:17.642000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '<KEY>'
down_revision = '0588ebb5527a'
branch_labels = None
depends_on = None
def upgrade():
    # Create the table holding 4G (LTE) external/neighbour cell definitions
    # in the live_network schema.
    op.create_table(
        'lte_external_cells',
        sa.Column('pk', sa.Integer, primary_key=True),
        sa.Column('name', sa.String(100), nullable=False),
        sa.Column('cell_pk', sa.Integer),
        sa.Column('node_pk', sa.Integer,),
        sa.Column('dl_earfcn', sa.Integer,),
        sa.Column('ul_earfcn', sa.Integer, ),
        sa.Column('mnc', sa.Integer,),
        sa.Column('mcc', sa.Integer, ),
        sa.Column('local_cellid', sa.Integer,),
        sa.Column('pci', sa.Integer, ),
        sa.Column('tac', sa.Integer, ),
        sa.Column('ci', sa.Integer, ),
        sa.Column('enodeb_id', sa.Integer, ),
        sa.Column('modified_by', sa.Integer),
        sa.Column('added_by', sa.Integer),
        sa.Column('date_added', sa.TIMESTAMP, default=sa.func.now(), onupdate=sa.func.now()),
        sa.Column('date_modified', sa.TIMESTAMP, default=sa.func.now()),
        schema=u'live_network'
    )

    # Rename the auto-generated primary-key sequence to follow the project's
    # seq_<table>_pk naming convention.
    op.execute('ALTER SEQUENCE live_network.lte_external_cells_pk_seq RENAME TO seq_lte_external_cells_pk')
def downgrade():
    # Reverse of upgrade(): drop the LTE external cells table.
    op.drop_table('lte_external_cells', schema=u'live_network')
|
eZmaxinc/eZmax-SDK-cpp-restsdk | model/Ezsignfoldersignerassociation_createObject_v1_Request.h | /**
* eZmax API Definition
* This API expose all the functionnalities for the eZmax and eZsign applications.
*
* The version of the OpenAPI document: 1.1.3
* Contact: <EMAIL>
*
* NOTE: This class is auto generated by OpenAPI-Generator 5.3.1-SNAPSHOT.
* https://openapi-generator.tech
* Do not edit the class manually.
*/
/*
* Ezsignfoldersignerassociation_createObject_v1_Request.h
*
* Request for the /1/object/ezsignfoldersignerassociation/createObject API Request
*/
#ifndef ORG_OPENAPITOOLS_CLIENT_MODEL_Ezsignfoldersignerassociation_createObject_v1_Request_H_
#define ORG_OPENAPITOOLS_CLIENT_MODEL_Ezsignfoldersignerassociation_createObject_v1_Request_H_
#include "ModelBase.h"
#include "model/Ezsignfoldersignerassociation_Request.h"
#include "model/Ezsignfoldersignerassociation_RequestCompound.h"
namespace org {
namespace openapitools {
namespace client {
namespace model {
/// <summary>
/// Request for the /1/object/ezsignfoldersignerassociation/createObject API Request
/// </summary>
// Auto-generated model class (see file header); carries one of two mutually
// independent payload variants for the createObject request.
class Ezsignfoldersignerassociation_createObject_v1_Request
    : public ModelBase
{
public:
    Ezsignfoldersignerassociation_createObject_v1_Request();
    virtual ~Ezsignfoldersignerassociation_createObject_v1_Request();

    /////////////////////////////////////////////
    /// ModelBase overrides

    void validate() override;

    web::json::value toJson() const override;
    bool fromJson(const web::json::value& json) override;

    void toMultipart(std::shared_ptr<MultipartFormData> multipart, const utility::string_t& namePrefix) const override;
    bool fromMultiPart(std::shared_ptr<MultipartFormData> multipart, const utility::string_t& namePrefix) override;

    /////////////////////////////////////////////
    /// Ezsignfoldersignerassociation_createObject_v1_Request members

    /// <summary>
    /// Simple (non-compound) request payload; optional, see the IsSet flag.
    /// </summary>
    std::shared_ptr<Ezsignfoldersignerassociation_Request> getObjEzsignfoldersignerassociation() const;
    bool objEzsignfoldersignerassociationIsSet() const;
    void unsetObjEzsignfoldersignerassociation();

    void setObjEzsignfoldersignerassociation(const std::shared_ptr<Ezsignfoldersignerassociation_Request>& value);

    /// <summary>
    /// Compound request payload; optional, see the IsSet flag.
    /// </summary>
    std::shared_ptr<Ezsignfoldersignerassociation_RequestCompound> getObjEzsignfoldersignerassociationCompound() const;
    bool objEzsignfoldersignerassociationCompoundIsSet() const;
    void unsetObjEzsignfoldersignerassociationCompound();

    void setObjEzsignfoldersignerassociationCompound(const std::shared_ptr<Ezsignfoldersignerassociation_RequestCompound>& value);

protected:
    // Each member is paired with an IsSet flag so optional fields can be
    // distinguished from default-constructed ones during (de)serialization.
    std::shared_ptr<Ezsignfoldersignerassociation_Request> m_ObjEzsignfoldersignerassociation;
    bool m_ObjEzsignfoldersignerassociationIsSet;
    std::shared_ptr<Ezsignfoldersignerassociation_RequestCompound> m_ObjEzsignfoldersignerassociationCompound;
    bool m_ObjEzsignfoldersignerassociationCompoundIsSet;
};
}
}
}
}
#endif /* ORG_OPENAPITOOLS_CLIENT_MODEL_Ezsignfoldersignerassociation_createObject_v1_Request_H_ */
|
lechium/iOS1351Headers | System/Library/PrivateFrameworks/MapsSuggestions.framework/MapsSuggestionsStrategy.h | /*
* This header is generated by classdump-dyld 1.5
* on Wednesday, October 27, 2021 at 3:17:06 PM Mountain Standard Time
* Operating System: Version 13.5.1 (Build 17F80)
* Image Source: /System/Library/PrivateFrameworks/MapsSuggestions.framework/MapsSuggestions
* classdump-dyld is licensed under GPLv3, Copyright © 2013-2016 by <NAME>. Updated by <NAME>.
*/
@class MapsSuggestionsManager;
// Strategy protocol for producing "top suggestions" from source entries.
// Implementations hold a weak back-reference to their owning manager and a
// configurable pipeline of pre-filters, post-filters, improvers and dedupers.
@protocol MapsSuggestionsStrategy <MapsSuggestionsObject>
@property (assign,nonatomic,__weak) MapsSuggestionsManager * manager;
@required
-(MapsSuggestionsManager *)manager;
-(void)setManager:(id)arg1;
// Drops any state accumulated by the strategy.
-(void)clearData;
// Produces the ranked suggestions for the given source entries; on failure
// returns nil and populates *arg2.
-(id)topSuggestionsWithSourceEntries:(id)arg1 error:(id*)arg2;
-(BOOL)preFiltersKept:(id)arg1;
-(BOOL)postFiltersKept:(id)arg1;
// Pipeline configuration.
-(void)addPreFilter:(id)arg1;
-(void)addPostFilter:(id)arg1;
-(void)removeFilter:(id)arg1;
-(void)removeAllFilters;
-(void)addImprover:(id)arg1;
-(void)removeAllImprovers;
-(void)addDeduper:(id)arg1;
-(void)removeAllDedupers;
@end
|
Nhemesy/PlayFootbalManager | app/src/main/java/com/pedrodavidlp/footballmanager/FootballApplication.java | <reponame>Nhemesy/PlayFootbalManager<filename>app/src/main/java/com/pedrodavidlp/footballmanager/FootballApplication.java
package com.pedrodavidlp.footballmanager;
import android.app.Application;
import android.content.Context;
import com.pedrodavidlp.footballmanager.di.AppComponent;
import com.pedrodavidlp.footballmanager.di.AppModule;
import com.pedrodavidlp.footballmanager.di.DaggerAppComponent;
import com.pedrodavidlp.footballmanager.di.group.GroupComponent;
import com.pedrodavidlp.footballmanager.di.group.GroupModule;
import com.pedrodavidlp.footballmanager.di.launcher.LauncherComponent;
import com.pedrodavidlp.footballmanager.di.launcher.LauncherModule;
import com.pedrodavidlp.footballmanager.di.player.PlayerComponent;
import com.pedrodavidlp.footballmanager.di.player.PlayerModule;
/**
 * Application subclass that owns the Dagger component graph: a root
 * application-scoped component plus lazily-created, releasable subcomponents
 * for the player, group, and launcher features.
 */
public class FootballApplication extends Application {

    private AppComponent appComponent;
    private PlayerComponent playerComponent;
    private GroupComponent groupComponent;
    private LauncherComponent launcherComponent;

    @Override
    public void onCreate() {
        super.onCreate();
        // Build the root graph once for the lifetime of the process.
        this.appComponent = DaggerAppComponent.builder()
                .appModule(new AppModule(getApplicationContext()))
                .build();
    }

    /** Returns the root application-scoped component built in {@link #onCreate()}. */
    public AppComponent getAppComponent() {
        return appComponent;
    }

    /** Lazily creates and caches the player-scoped subcomponent. */
    public PlayerComponent getPlayerComponent() {
        if (playerComponent == null) {
            playerComponent = appComponent.plus(new PlayerModule());
        }
        return playerComponent;
    }

    /** Lazily creates and caches the group-scoped subcomponent. */
    public GroupComponent getGroupComponent() {
        if (groupComponent == null) {
            groupComponent = appComponent.plus(new GroupModule());
        }
        return groupComponent;
    }

    /** Lazily creates and caches the launcher-scoped subcomponent. */
    public LauncherComponent getLauncherComponent() {
        if (launcherComponent == null) {
            launcherComponent = appComponent.plus(new LauncherModule());
        }
        return launcherComponent;
    }

    /** Drops the player subcomponent so its scoped objects can be garbage collected. */
    public void releasePlayerComponent() {
        playerComponent = null;
    }

    /** Drops the group subcomponent so its scoped objects can be garbage collected. */
    public void releaseGroupComponent() {
        groupComponent = null;
    }

    /** Drops the launcher subcomponent so its scoped objects can be garbage collected. */
    public void releaseLauncherComponent() {
        launcherComponent = null;
    }

    /**
     * Retrieves the application instance from any {@link Context}.
     *
     * @param context any context (Activity, Service, or the Application itself)
     * @return the {@link FootballApplication} singleton
     */
    public static FootballApplication get(Context context) {
        // Bug fix: the previous direct cast "(FootballApplication) context" throws
        // ClassCastException whenever an Activity/Service context is passed in.
        // getApplicationContext() always yields the Application instance.
        return (FootballApplication) context.getApplicationContext();
    }
}
|
kuikuitage/NewCool-UC-3.1.0-priv | montage-tech/src/mdl/filters/music_play_filter/mp3_trans_filter/mp3_transfer_filter_intra.h | /********************************************************************************************/
/********************************************************************************************/
#ifndef __MP3_TRANS_FILTER_INTRA_H_
#define __MP3_TRANS_FILTER_INTRA_H_
/*!
private data
*/
/*!
  Private state of the mp3 transfer filter: its pins, playback mode,
  stream metadata, and seek/jump bookkeeping.
  */
typedef struct tag_mp3_trans_filter_private
{
  /*!
    back-pointer to the owning filter object (self reference)
    */
  void *p_this;
  /*!
    input pin (compressed mp3 data source)
    */
  mp3_trans_in_pin_t m_in_pin;
  /*!
    output pin (decoded/forwarded audio)
    */
  mp3_trans_out_pin_t m_out_pin;
  /*!
    output pin connecting to the lyrics (lrc) filter
    */
  mp3_trans_lrc_out_pin_t m_lrc_out_pin;
  /*!
    output pin connecting to the picture filter (e.g. embedded album art)
    */
  mp3_trans_pic_out_pin_t m_pic_out_pin;
  /*!
    frame-parsing state, currently disabled
    */
  //mp3_getframe_state_t *p_state;
  /*!
    playback mode
    */
  mp3_play_mode_t play_mode;
  /*!
    parsed MPEG audio frame info for the current stream
    */
  mpegaudioframeinfo_t *p_mp3_info;
  /*!
    Xing/VBR table of contents, currently disabled
    */
  //u8 TOC[100];
  /*!
    speed counter -- units unclear, TODO confirm against users of this field
    */
  u32 cnt;
  /*!
    number of buffers already played
    */
  u32 buff_played;
  /*!
    seek offset for time jumps -- presumably in seconds, TODO confirm
    */
  u32 timejump;
  /*!
    file name (UTF-16 string)
    */
  u16 *p_file_name;
  /*!
    whether the picture buffer has been pushed downstream
    */
  BOOL is_push;
  /*!
    current play time -- presumably in seconds, TODO confirm
    */
  u16 cur_play_time;
  /*!
    whether to show the music logo
    */
  BOOL b_music_logo_show;
  /*!
    whether to show lyrics
    */
  BOOL b_music_lrc_show;
  /*!
    index of the frame currently being processed
    */
  u32 cur_frame_cnt;
  /*!
    saved frame index used when jumping
    */
  u32 save_frame_cnt;
  /*!
    TRUE: jump (seek) mode, FALSE: normal playback
    */
  u8 jump;
  /*!
    current frame index while in jump mode
    */
  u32 jump_cur_frame;
  /*!
    source URL of the stream/file
    */
  u8 *p_src_url;
  /*!
    whether the next frame is the first frame of the stream
    */
  BOOL first_frame;
}mp3_trans_filter_private_t;
/*!
  The mp3 transfer filter object: public base filter followed by the
  private state block.
  */
typedef struct tag_mp3_trans_filter
{
  /*!
    public base class, must be the first member -- this layout is what makes
    casting between transf_filter_t* and mp3_trans_filter_t* valid
    */
  FATHER transf_filter_t m_filter;
  /*!
    private data buffer (see mp3_trans_filter_private_t)
    */
  mp3_trans_filter_private_t private_data;
}mp3_trans_filter_t;
#endif // End for __MP3_TRANS_FILTER_INTRA_H_
|
sjackso/ambry | ambry-frontend/src/main/java/com.github.ambry.frontend/GetSignedUrlHandler.java | <filename>ambry-frontend/src/main/java/com.github.ambry.frontend/GetSignedUrlHandler.java
/*
* Copyright 2017 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package com.github.ambry.frontend;
import com.github.ambry.clustermap.ClusterMap;
import com.github.ambry.commons.BlobId;
import com.github.ambry.rest.RestMethod;
import com.github.ambry.rest.RestRequest;
import com.github.ambry.rest.RestRequestMetrics;
import com.github.ambry.rest.RestResponseChannel;
import com.github.ambry.rest.RestServiceErrorCode;
import com.github.ambry.rest.RestServiceException;
import com.github.ambry.rest.RestUtils;
import com.github.ambry.router.Callback;
import com.github.ambry.router.ReadableStreamChannel;
import com.github.ambry.utils.SystemTime;
import java.util.GregorianCalendar;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Handler for requests for signed URLs.
*/
class GetSignedUrlHandler {
  private static final Logger LOGGER = LoggerFactory.getLogger(GetSignedUrlHandler.class);
  private final UrlSigningService urlSigningService;
  private final SecurityService securityService;
  private final IdConverter idConverter;
  private final AccountAndContainerInjector accountAndContainerInjector;
  private final FrontendMetrics metrics;
  private final ClusterMap clusterMap;

  /**
   * Constructs a handler for handling requests for signed URLs.
   * @param urlSigningService the {@link UrlSigningService} to use.
   * @param securityService the {@link SecurityService} to use.
   * @param idConverter the {@link IdConverter} to use.
   * @param accountAndContainerInjector helper to resolve account and container for a given request.
   * @param metrics {@link FrontendMetrics} instance where metrics should be recorded.
   * @param clusterMap the {@link ClusterMap} in use.
   */
  GetSignedUrlHandler(UrlSigningService urlSigningService, SecurityService securityService, IdConverter idConverter,
      AccountAndContainerInjector accountAndContainerInjector, FrontendMetrics metrics, ClusterMap clusterMap) {
    this.urlSigningService = urlSigningService;
    this.securityService = securityService;
    this.idConverter = idConverter;
    this.accountAndContainerInjector = accountAndContainerInjector;
    this.metrics = metrics;
    this.clusterMap = clusterMap;
  }

  /**
   * Handles a request for getting signed URLs.
   * The flow is asynchronous: processRequest -> (GET only: id conversion) ->
   * postProcessRequest -> URL generation -> processResponse, each step chained
   * via the callbacks below.
   * @param restRequest the {@link RestRequest} that contains the request parameters.
   * @param restResponseChannel the {@link RestResponseChannel} where headers should be set.
   * @param callback the {@link Callback} to invoke when the response is ready (or if there is an exception).
   * @throws RestServiceException if required parameters are not found or are invalid
   */
  void handle(RestRequest restRequest, RestResponseChannel restResponseChannel,
      Callback<ReadableStreamChannel> callback) throws RestServiceException {
    RestRequestMetrics requestMetrics =
        restRequest.getSSLSession() != null ? metrics.getSignedUrlSSLMetrics : metrics.getSignedUrlMetrics;
    restRequest.getMetricsTracker().injectMetrics(requestMetrics);
    // The mandatory URL_TYPE header names the RestMethod the signed URL will be used for.
    String restMethodInSignedUrlStr = RestUtils.getHeader(restRequest.getArgs(), RestUtils.Headers.URL_TYPE, true);
    RestMethod restMethodInUrl;
    try {
      restMethodInUrl = RestMethod.valueOf(restMethodInSignedUrlStr);
    } catch (IllegalArgumentException e) {
      throw new RestServiceException("Unrecognized RestMethod: " + restMethodInSignedUrlStr,
          RestServiceErrorCode.InvalidArgs);
    }
    securityService.processRequest(restRequest,
        new SecurityProcessRequestCallback(restRequest, restMethodInUrl, restResponseChannel, callback));
  }

  /**
   * Callback for {@link SecurityService#processRequest(RestRequest, Callback)} that subsequently calls
   * {@link SecurityService#postProcessRequest(RestRequest, Callback)}. If post processing succeeds, a signed URL will
   * be generated.
   */
  private class SecurityProcessRequestCallback implements Callback<Void> {
    private final RestRequest restRequest;
    private final RestMethod restMethodInUrl;
    private final RestResponseChannel restResponseChannel;
    private final Callback<ReadableStreamChannel> callback;
    private final long operationStartTimeMs;

    SecurityProcessRequestCallback(RestRequest restRequest, RestMethod restMethodInUrl,
        RestResponseChannel restResponseChannel, Callback<ReadableStreamChannel> callback) {
      this.restRequest = restRequest;
      this.restMethodInUrl = restMethodInUrl;
      this.restResponseChannel = restResponseChannel;
      this.callback = callback;
      operationStartTimeMs = SystemTime.getInstance().milliseconds();
    }

    @Override
    public void onCompletion(Void result, Exception exception) {
      long processingStartTimeMs = SystemTime.getInstance().milliseconds();
      metrics.getSignedUrlSecurityRequestTimeInMs.update(processingStartTimeMs - operationStartTimeMs);
      try {
        if (exception == null) {
          switch (restMethodInUrl) {
            case GET:
              // GET URLs sign an existing blob: convert the caller-supplied id first,
              // then resolve account/container from it (see IdConverterCallback).
              String blobIdStr = RestUtils.getHeader(restRequest.getArgs(), RestUtils.Headers.BLOB_ID, true);
              IdConverterCallback idConverterCallback =
                  new IdConverterCallback(restRequest, restResponseChannel, callback);
              idConverter.convert(restRequest, blobIdStr, idConverterCallback);
              break;
            case POST:
              // POST URLs sign a future upload: account/container come from request headers.
              accountAndContainerInjector.injectAccountAndContainerForPostRequest(restRequest);
              securityService.postProcessRequest(restRequest,
                  new SecurityPostProcessRequestCallback(restRequest, restResponseChannel, callback));
              break;
            default:
              exception = new RestServiceException("Getting signed URLs for " + restMethodInUrl + " is not supported",
                  RestServiceErrorCode.BadRequest);
          }
        }
      } catch (Exception e) {
        exception = e;
      } finally {
        metrics.getSignedUrlSecurityRequestCallbackProcessingTimeInMs.update(
            SystemTime.getInstance().milliseconds() - processingStartTimeMs);
        // Surface any failure to the caller exactly once; success paths continue
        // via the nested callbacks instead.
        if (exception != null) {
          callback.onCompletion(null, exception);
        }
      }
    }
  }

  /**
   * Callback for calls to {@link IdConverter} if the signed URL required is a GET url.
   */
  private class IdConverterCallback implements Callback<String> {
    private final RestRequest restRequest;
    private final RestResponseChannel restResponseChannel;
    private final Callback<ReadableStreamChannel> callback;
    private final long operationStartTimeMs;

    IdConverterCallback(RestRequest restRequest, RestResponseChannel restResponseChannel,
        Callback<ReadableStreamChannel> callback) {
      this.restRequest = restRequest;
      this.restResponseChannel = restResponseChannel;
      this.callback = callback;
      operationStartTimeMs = SystemTime.getInstance().milliseconds();
    }

    @Override
    public void onCompletion(String result, Exception exception) {
      long processingStartTimeMs = SystemTime.getInstance().milliseconds();
      metrics.getSignedUrlSecurityRequestTimeInMs.update(processingStartTimeMs - operationStartTimeMs);
      try {
        if (exception == null) {
          // Parse the converted id and attach its account/container to the request
          // before the security post-processing step.
          BlobId blobId = FrontendUtils.getBlobIdFromString(result, clusterMap);
          accountAndContainerInjector.injectTargetAccountAndContainerFromBlobId(blobId, restRequest);
          securityService.postProcessRequest(restRequest,
              new SecurityPostProcessRequestCallback(restRequest, restResponseChannel, callback));
        }
      } catch (Exception e) {
        exception = e;
      } finally {
        metrics.getSignedUrlSecurityRequestCallbackProcessingTimeInMs.update(
            SystemTime.getInstance().milliseconds() - processingStartTimeMs);
        if (exception != null) {
          callback.onCompletion(null, exception);
        }
      }
    }
  }

  /**
   * Callback for {@link SecurityService#postProcessRequest(RestRequest, Callback)} that handles generating a signed URL
   * if the security checks succeeded.
   */
  private class SecurityPostProcessRequestCallback implements Callback<Void> {
    private final RestRequest restRequest;
    private final RestResponseChannel restResponseChannel;
    private final Callback<ReadableStreamChannel> callback;
    private final long operationStartTimeMs;

    SecurityPostProcessRequestCallback(RestRequest restRequest, RestResponseChannel restResponseChannel,
        Callback<ReadableStreamChannel> callback) {
      this.restRequest = restRequest;
      this.restResponseChannel = restResponseChannel;
      this.callback = callback;
      operationStartTimeMs = SystemTime.getInstance().milliseconds();
    }

    @Override
    public void onCompletion(Void result, Exception exception) {
      long processingStartTimeMs = SystemTime.getInstance().milliseconds();
      metrics.getSignedUrlSecurityPostProcessRequestTimeInMs.update(processingStartTimeMs - operationStartTimeMs);
      try {
        if (exception == null) {
          String signedUrl = urlSigningService.getSignedUrl(restRequest);
          LOGGER.debug("Generated {} from {}", signedUrl, restRequest);
          // The signed URL is returned via a response header; the body is empty.
          restResponseChannel.setHeader(RestUtils.Headers.DATE, new GregorianCalendar().getTime());
          restResponseChannel.setHeader(RestUtils.Headers.SIGNED_URL, signedUrl);
          restResponseChannel.setHeader(RestUtils.Headers.CONTENT_LENGTH, 0);
          final long processResponseStartTimeMs = SystemTime.getInstance().milliseconds();
          securityService.processResponse(restRequest, restResponseChannel, null,
              (processResponseResult, processResponseException) -> {
                metrics.getSignedUrlSecurityResponseTimeInMs.update(
                    SystemTime.getInstance().milliseconds() - processResponseStartTimeMs);
                callback.onCompletion(null, processResponseException);
              });
        }
      } catch (Exception e) {
        exception = e;
      } finally {
        metrics.getSignedUrlProcessingTimeInMs.update(SystemTime.getInstance().milliseconds() - processingStartTimeMs);
        if (exception != null) {
          callback.onCompletion(null, exception);
        }
      }
    }
  }
}
|
kimixuchen/Codeforces | Codeforces #353 DIV2/B.cpp | <reponame>kimixuchen/Codeforces
/**
*Codeforces Round #353 DIV2 B
*18/05/16 07:44:53
*xuchen
* */
#include <stdio.h>
#include <iostream>
#include <cmath>
#include <cstring>
#include <map>
#include <stdlib.h>
#include <algorithm>
#include <queue>
using namespace std;
const int N = 100005;
// Reads n, a, b, c, d and counts, over all starting values x in [1, n],
// n ways for each x whose three derived positions stay inside [1, n].
int main(int argc, char* args[])
{
    int n, a, b, c, d;
    scanf("%d %d %d %d %d", &n, &a, &b, &c, &d);

    long long total = 0;
    for (int first = 1; first <= n; ++first) {
        // Derived values that must all land inside [1, n].
        const int third = first + b - c;
        const int fourth = first + a - d;
        const int fifth = first + a + b - c - d;
        const bool allInRange = third >= 1 && third <= n
            && fourth >= 1 && fourth <= n
            && fifth >= 1 && fifth <= n;
        if (allInRange) {
            // The remaining free variable ranges over [1, n] independently.
            total += n;
        }
    }

    printf("%lld\n", total);
    return 0;
}
|
gregorydgraham/DBvolution | src/main/java/nz/co/gregs/dbvolution/databases/PostgresDBOverSSL.java | <filename>src/main/java/nz/co/gregs/dbvolution/databases/PostgresDBOverSSL.java
/*
* Copyright 2013 <NAME>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nz.co.gregs.dbvolution.databases;
import java.sql.SQLException;
import javax.sql.DataSource;
import nz.co.gregs.dbvolution.DBDatabase;
import nz.co.gregs.dbvolution.databases.definitions.DBDefinition;
/**
* Extends the PostgreSQL database connection by adding SSL.
*
* <p style="color: #F90;">Support DBvolution at
* <a href="http://patreon.com/dbvolution" target=new>Patreon</a></p>
*
* @author <NAME>
*/
public class PostgresDBOverSSL extends PostgresDB {

    /**
     *
     * Provides a convenient constructor for DBDatabases that have configuration
     * details hardwired or are able to automatically retrieve the details.
     *
     * <p>
     * This constructor creates an empty DBDatabase with only the default
     * settings, in particular with no driver, URL, username, password, or
     * {@link DBDefinition}
     *
     * <p>
     * Most programmers should not call this constructor directly. Instead you
     * should define a no-parameter constructor that supplies the details for
     * creating an instance using a more complete constructor.
     *
     * <p>
     * DBDatabase encapsulates the knowledge of the database, in particular the
     * syntax of the database in the DBDefinition and the connection details from
     * a DataSource.
     *
     * @see DBDefinition
     */
    protected PostgresDBOverSSL() {
    }

    /**
     * Creates a {@link DBDatabase } instance for the data source.
     *
     * @param ds the data source providing connections to the database
     */
    public PostgresDBOverSSL(DataSource ds) {
        super(ds);
    }

    /**
     * Creates a DBDatabase for a PostgreSQL database over SSL.
     *
     * <p>
     * Forces {@code ssl=true} with PostgreSQL's NonValidatingFactory, i.e. the
     * server certificate is NOT verified. Any extra URL parameters are appended
     * after the SSL settings.
     *
     * @param hostname host name
     * @param port port
     * @param databaseName databaseName
     * @param username username
     * @param password password
     * @param urlExtras extra JDBC URL parameters; may be null or empty
     */
    public PostgresDBOverSSL(String hostname, int port, String databaseName, String username, String password, String urlExtras) {
        super(hostname, port, databaseName, username, password, "ssl=true&sslfactory=org.postgresql.ssl.NonValidatingFactory" + (urlExtras == null || urlExtras.isEmpty() ? "" : "&" + urlExtras));
    }

    /**
     * Creates a DBDatabase for a PostgreSQL database over SSL, with no extra URL
     * parameters.
     *
     * @param hostname host name
     * @param port port
     * @param databaseName databaseName
     * @param username username
     * @param password password
     */
    public PostgresDBOverSSL(String hostname, int port, String databaseName, String username, String password) {
        this(hostname, port, databaseName, username, password, "");
    }

    @Override
    public DBDatabase clone() throws CloneNotSupportedException {
        // Cloning is handled entirely by the superclass; this override only
        // exists to keep the method visible on this type.
        return super.clone();
    }
}
|
zaqwes8811/coordinator-tasks | buffer/scripts-emb-ext/projects/expose-free-function/setup.py | #-*- coding: utf-8 -*-
'''
Builds the Boost.Python "hello" extension module and copies the resulting
.so next to this script.

TODO: find out how to select the compiler and its flags (directives).
TODO: hook up a pure C compiler.
python setup.py install => the last argument is somehow used by the build system.
Issues:
1. *.c files are not accepted.
'''
import sys
import shutil
from distutils.core import setup
from distutils.extension import Extension

# Boost location and linker settings.
boost_root = '/opt/big-3rdparty/boost_1_57_0'
kLibraryDirs = [boost_root + '/stage_gcc_x64']
kLibsNames = ['boost_python']

pkg_name = 'PackageName'
srcs = ['hello_module.cc', 'hello_inc.cc']
include_dirs = [boost_root]

# deploy: where the built module comes from and where it is copied to
target_pkg_dir = './'
module_name = 'hello'
# NOTE(review): hardcoded for linux-x86_64 / Python 2.7 build layout -- confirm
# it matches the local distutils output directory.
path_to_module = 'build/lib.linux-x86_64-2.7/'


def main(module_name, srcs, include_dirs, fp_init):
    # Build the extension, then copy the resulting .so into target_pkg_dir.
    # NOTE(review): fp_init is accepted but never used -- confirm before removing.
    one_extention = Extension(
        module_name,
        srcs,
        include_dirs=include_dirs,
        libraries=kLibsNames,
        library_dirs=kLibraryDirs)
    setup(name=pkg_name, ext_modules=[one_extention])

    # Move result modules
    shutil.copy(path_to_module+module_name+'.so', target_pkg_dir + module_name+'.so')


if __name__ == '__main__':
    # Force the distutils 'build' command regardless of actual CLI arguments.
    sys.argv = ['setup.py', 'build']
    main(module_name, srcs, include_dirs, None)
|
CaddyDz/Ruby | p3/ch14/threads/s1/state.rb | t = Thread.new do
puts "[Starting thread]"
Thread.stop
puts "[Resuming thread]"
end
# Inspect the thread's state. NOTE(review): this is racy -- the main thread may
# query status before the child has reached Thread.stop, so the printed status
# can be "run" or "sleep" depending on scheduling; confirm intended output.
puts "Status of thread: #{t.status}"
puts "Has the thread stopped? #{t.stop?}"
puts "Is the thread alive? #{t.alive?}"
puts
# Resume the stopped thread and wait for it to finish.
puts "Waking up thread and joining it..."
t.wakeup
t.join
puts
# After join the thread has terminated: alive? is false and status is false/nil.
puts "Is the thread alive? #{t.alive?}"
puts "Inspect string for thread: #{t.inspect}"
|
harishpalk/Jadira | usertype.core/src/main/java/org/jadira/usertype/dateandtime/joda/PersistentDateTimeAsUtcString.java | <filename>usertype.core/src/main/java/org/jadira/usertype/dateandtime/joda/PersistentDateTimeAsUtcString.java
/*
* Copyright 2013 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jadira.usertype.dateandtime.joda;
import org.jadira.usertype.dateandtime.joda.columnmapper.StringColumnDateTimeMapper;
import org.jadira.usertype.spi.shared.AbstractParameterizedUserType;
import org.joda.time.DateTime;
/**
* Persist {@link DateTime} as a string of three parts:
* <ul>
* <li>the {@code DateTime} transformed into UTC time, formatted as such: {@code yyyy-MM-dd'T'HH:mm:ss.SSS}</li>
* <li>the underscore symbol (_)</li>
* <li>the id of the {@code DateTime}'s original time zone (for example Europe/London or UTC)</li>
* </ul>
* This user-type was created to workaround Hibernate's <a href="https://hibernate.atlassian.net/browse/HHH-5574">HHH-5574</a>
* bug by storing the complete {@code DateTime} data, including the specific time zone, not just the offset (ala ISO 8601), in
* one single, sortable field.
* @author dwijnand
*/
public class PersistentDateTimeAsUtcString extends AbstractParameterizedUserType<DateTime, String, StringColumnDateTimeMapper> {

    // All mapping behavior lives in the base class / column mapper; this class
    // only binds the type parameters. The fixed serialVersionUID keeps serialized
    // forms stable across releases (presumably Serializable via the user-type
    // base class -- confirm).
    private static final long serialVersionUID = 6477950463426162426L;
}
|
KenWoo/Algorithm | Algorithms/Medium/1503. Last Moment Before All Ants Fall Out of a Plank/answer.py | from tying import List
# Local import shields this block from the file's broken top-level import
# ("from tying import List" is a typo for "typing").
from typing import List


class Solution:
    """LeetCode 1503: Last Moment Before All Ants Fall Out of a Plank."""

    def getLastMoment(self, n: int, left: List[int], right: List[int]) -> int:
        """Return the moment the last ant falls off a plank of length n.

        When two ants collide they swap directions, which is equivalent to
        passing through each other. So the answer is simply the longest walk:
        the farthest left-walker travels its position, the farthest
        right-walker travels n minus its position.

        :param n: plank length.
        :param left: positions of ants walking toward 0 (may be empty).
        :param right: positions of ants walking toward n (may be empty).
        :return: time (= distance) until the plank is empty; 0 if no ants.
        """
        res = 0
        for pos in left:
            res = max(res, pos)
        for pos in right:
            res = max(res, n - pos)
        return res
# No standalone driver: the Solution class is exercised by the judge harness.
if __name__ == "__main__":
    pass
thekeenant/api.madgrades.com | db/migrate/20180625194343_add_gpa_to_grade_distribution.rb | class AddGpaToGradeDistribution < ActiveRecord::Migration[5.1]
# Adds a nullable gpa column (decimal 7,5 — e.g. 3.51234) so each grade
# distribution can carry a precomputed grade-point average.
def change
  add_column :grade_distributions, :gpa, :decimal, precision: 7, scale: 5
end
end
|
horcu/balln | sample/src/main/java/com/horcu/apps/balln/models/game/AwayTeam.java | <reponame>horcu/balln<filename>sample/src/main/java/com/horcu/apps/balln/models/game/AwayTeam.java
package com.horcu.apps.balln.models.game;
import android.os.Parcel;
import android.os.Parcelable;
import com.fasterxml.jackson.annotation.JsonProperty;
import ollie.Model;
import ollie.annotation.Column;
import ollie.annotation.PrimaryKey;
import ollie.annotation.Table;
/**
 * Model for the away team of a game: identity, branding, and foreign keys to
 * related unit records. Parcelable so it can be passed between Android
 * components.
 *
 * NOTE(review): the Ollie @Table/@Column/@PrimaryKey annotations are imported
 * but not applied here, and the fields are public despite having accessors --
 * confirm the intended persistence mapping.
 */
public class AwayTeam extends Model implements Parcelable
{

    public AwayTeam(){}

    public Long id;
    public String abbr;
    public String name;
    public String market;
    public String logo;
    public Long teamColorsId;
    public Long offenseId;
    public Long defenseId;
    public Long specialTeamsId;

    /** Recreates instances from a Parcel; read order must match {@link #writeToParcel}. */
    public final static Creator<AwayTeam> CREATOR = new Creator<AwayTeam>() {

        public AwayTeam createFromParcel(Parcel in) {
            AwayTeam instance = new AwayTeam();
            instance.id = ((Long) in.readValue((Long.class.getClassLoader())));
            instance.abbr = ((String) in.readValue((String.class.getClassLoader())));
            instance.name = ((String) in.readValue((String.class.getClassLoader())));
            instance.market = ((String) in.readValue((String.class.getClassLoader())));
            instance.logo = ((String) in.readValue((String.class.getClassLoader())));
            instance.teamColorsId = ((Long) in.readValue((Long.class.getClassLoader())));
            instance.offenseId = ((Long) in.readValue((Long.class.getClassLoader())));
            instance.defenseId = ((Long) in.readValue((Long.class.getClassLoader())));
            instance.specialTeamsId = ((Long) in.readValue((Long.class.getClassLoader())));
            return instance;
        }

        public AwayTeam[] newArray(int size) {
            return (new AwayTeam[size]);
        }
    }
    ;

    /** @return the record id */
    public Long getId() {
        return id;
    }

    /** @param id the record id */
    public void setId(Long id) {
        this.id = id;
    }

    /** @return the team abbreviation (e.g. "NE") */
    public String getAbbr() {
        return abbr;
    }

    /** @param abbr the team abbreviation */
    public void setAbbr(String abbr) {
        this.abbr = abbr;
    }

    /** @return the team name */
    public String getName() {
        return name;
    }

    /** @param name the team name */
    public void setName(String name) {
        this.name = name;
    }

    /** @return the team's market (home city/region) */
    public String getMarket() {
        return market;
    }

    /** @param market the team's market (home city/region) */
    public void setMarket(String market) {
        this.market = market;
    }

    /** @return the team logo (presumably a URL or resource path -- confirm) */
    public String getLogo() {
        return logo;
    }

    /** @param logo the team logo */
    public void setLogo(String logo) {
        this.logo = logo;
    }

    /** @return foreign key to the team-colors record */
    public Long getTeamColorsId() {
        return teamColorsId;
    }

    /** @param teamColorsId foreign key to the team-colors record */
    public void setTeamColorsId(Long teamColorsId) {
        this.teamColorsId = teamColorsId;
    }

    /** @return foreign key to the offense record */
    public Long getOffenseId() {
        return offenseId;
    }

    /** @param offenseId foreign key to the offense record */
    public void setOffenseId(Long offenseId) {
        this.offenseId = offenseId;
    }

    /** @return foreign key to the defense record */
    public Long getDefenseId() {
        return defenseId;
    }

    /** @param defenseId foreign key to the defense record */
    public void setDefenseId(Long defenseId) {
        this.defenseId = defenseId;
    }

    /** @return foreign key to the special-teams record */
    public Long getSpecialTeamsId() {
        return specialTeamsId;
    }

    /** @param specialTeamsId foreign key to the special-teams record */
    public void setSpecialTeamsId(Long specialTeamsId) {
        this.specialTeamsId = specialTeamsId;
    }

    /** Write order here defines the Parcel layout consumed by {@link #CREATOR}. */
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeValue(id);
        dest.writeValue(abbr);
        dest.writeValue(name);
        dest.writeValue(market);
        dest.writeValue(logo);
        dest.writeValue(teamColorsId);
        dest.writeValue(offenseId);
        dest.writeValue(defenseId);
        dest.writeValue(specialTeamsId);
    }

    /** No special contents (returns 0: no file descriptors in the parcel). */
    public int describeContents() {
        return 0;
    }
}
|
zreed/webpieces | webserver/http-templating/src/main/java/org/webpieces/templating/impl/tags/StyleSheetTag.java | <reponame>zreed/webpieces
package org.webpieces.templating.impl.tags;
import java.io.PrintWriter;
import java.util.Map;
import org.webpieces.templating.api.HtmlTag;
import org.webpieces.templating.api.RouterLookup;
import org.webpieces.templating.api.TemplateUtil;
import org.webpieces.templating.impl.GroovyTemplateSuperclass;
import groovy.lang.Closure;
/**
 * Template tag that renders a &lt;link rel="stylesheet"&gt; element, appending a
 * cache-busting hash query parameter when the router can resolve one for the
 * referenced path.
 */
public class StyleSheetTag implements HtmlTag {

    private final RouterLookup lookup;

    public StyleSheetTag(RouterLookup lookup) {
        this.lookup = lookup;
    }

    // Renders e.g. <link rel="stylesheet" href="/assets/crud/css/theme.css?hash=..." type="text/css" />
    @Override
    public void runTag(Map<Object, Object> args, Closure<?> body, PrintWriter out, GroovyTemplateSuperclass template, String srcLocation) {
        Object href = args.get("href");
        if (href == null) {
            throw new IllegalArgumentException("#{stylesheet/}# tag must contain an href argument name like #{stylesheet href:'/assets/crud/css/theme.css'}#. " + srcLocation);
        }
        if (body != null) {
            throw new IllegalArgumentException("Only #{stylesheet/}# can be used. You cannot do #{stylesheet}# #{/stylesheet} as the body is not used with this tag" + srcLocation);
        }

        // Optional overrides for the default rel/type attributes.
        Object relArg = args.get("rel");
        String rel = (relArg != null) ? relArg + "" : "stylesheet";
        Object typeArg = args.get("type");
        String type = (typeArg != null) ? typeArg + "" : "text/css";

        // Append a content hash for cache busting when one is known for this path.
        String hash = lookup.pathToUrlEncodedHash(href + "");
        if (hash != null) {
            href = href + "?hash=" + hash;
        }

        out.println("<link rel=\"" + rel + "\" type=\"" + type + "\" href=\"" + href + "\" " + TemplateUtil.serialize(args, "href") + "/>");
    }

    @Override
    public String getName() {
        return "stylesheet";
    }
}
|
jeremiedecock/snippets | python/pyqt/pyqt5/widget_QTreeView_multiple_columns_mail_insert_example.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# See https://pythonspot.com/en/pyqt5-treeview/
import sys
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QStandardItemModel
from PyQt5.QtWidgets import QApplication, QTreeView
class MyModel(QStandardItemModel):
    """Three-column mail-list model: sender, subject, and date."""

    FROM, SUBJECT, DATE = range(3)

    def __init__(self):
        # Start with zero rows and three columns, then label the headers.
        super().__init__(0, 3, None)
        for column, label in ((self.FROM, "From"), (self.SUBJECT, "Subject"), (self.DATE, "Date")):
            self.setHeaderData(column, Qt.Horizontal, label)

    def addMail(self, mail_from, subject, date):
        """Prepend one mail entry as a new top row."""
        self.insertRow(0)
        for column, value in ((self.FROM, mail_from), (self.SUBJECT, subject), (self.DATE, date)):
            self.setData(self.index(0, column), value)
if __name__ == '__main__':
    # The QApplication must exist before any widget is constructed.
    app = QApplication(sys.argv)
    dataView = QTreeView()
    dataView.setRootIsDecorated(False)  # flat list: no expand/collapse decoration
    dataView.setAlternatingRowColors(True)
    model = MyModel()
    # Add data (each addMail prepends a row, so the first call ends up last).
    model.addMail('<EMAIL>', 'Your Github Donation', '03/25/2017 02:05 PM')
    model.addMail('<EMAIL>', 'Github Projects', '02/02/2017 03:05 PM')
    model.addMail('<EMAIL>', 'Your Phone Bill', '01/01/2017 04:05 PM')
    dataView.setModel(model)
    dataView.show()
    # The mainloop of the application. The event handling starts from this point.
    # The exec_() method has an underscore. It is because the exec is a Python keyword. And thus, exec_() was used instead.
    exit_code = app.exec_()
    # The sys.exit() method ensures a clean exit.
    # The environment will be informed, how the application ended.
    sys.exit(exit_code)
|
FunctionLab/sleipnir | src/seekmap.cpp | /*****************************************************************************
* This file is provided under the Creative Commons Attribution 3.0 license.
*
* You are free to share, copy, distribute, transmit, or adapt this work
* PROVIDED THAT you attribute the work to the authors listed below.
* For more information, please see the following web page:
* http://creativecommons.org/licenses/by/3.0/
*
* This file is a part of SEEK (Search-based exploration of expression compendium)
* which is authored and maintained by: <NAME> (<EMAIL>)
*
* If you use this file, please cite the following publication:
* <NAME>, <NAME>, <NAME>, <NAME>, <NAME>,
* <NAME>, <NAME>, <NAME>, <NAME>,
* <NAME>, <NAME>, <NAME> & <NAME>
* "Targeted exploration and analysis of large cross-platform human
* transcriptomic compendia" Nat Methods (2015)
*
* This file is a component of the Sleipnir library for functional genomics,
* authored by:
* <NAME> (<EMAIL>)
* <NAME>
* <NAME>
* <NAME> (<EMAIL>, primary contact)
*
* If you use this library for development, or use any other Sleipnir executable
* tools, please also cite the following publication:
* <NAME>, <NAME>, <NAME>, and
* <NAME>.
* "The Sleipnir library for computational functional genomics"
*****************************************************************************/
#include "seekmap.h"
namespace Sleipnir {
/*
* IntIntMap Data Structure
*/
void CSeekIntIntMap::Initialize(const utype &iSize) {
m_iF.resize(iSize);
m_iR.resize(iSize);
m_iSize = iSize;
Clear();
}
CSeekIntIntMap::CSeekIntIntMap(const utype &iSize) {
Initialize(iSize);
}
const vector <utype> &CSeekIntIntMap::GetAllForward() const {
return m_iF;
}
const vector <utype> &CSeekIntIntMap::GetAllReverse() const {
return m_iR;
}
CSeekIntIntMap::CSeekIntIntMap(const vector<char> &cP, const bool bReverse) {
Initialize(cP.size());
Reset(cP, bReverse);
}
CSeekIntIntMap::CSeekIntIntMap(const char *cP, const utype &iSize,
const bool bReverse) {
Initialize(iSize);
Reset(cP, bReverse);
}
CSeekIntIntMap::CSeekIntIntMap(CSeekIntIntMap *a) {
m_iNumSet = a->m_iNumSet;
m_iSize = a->m_iSize;
m_iF.resize(a->m_iF.size());
m_iR.resize(a->m_iR.size());
copy(a->m_iF.begin(), a->m_iF.end(), m_iF.begin());
copy(a->m_iR.begin(), a->m_iR.end(), m_iR.begin());
m_iterR = m_iR.begin() + m_iNumSet;
}
CSeekIntIntMap::~CSeekIntIntMap() {
m_iF.clear();
m_iR.clear();
m_iNumSet = 0;
m_iSize = 0;
}
utype CSeekIntIntMap::GetForward(const utype &i) const {
return m_iF[i];
}
utype CSeekIntIntMap::GetReverse(const utype &i) const {
return m_iR[i];
}
void CSeekIntIntMap::Add(const utype &i) {
m_iF[i] = m_iNumSet;
*m_iterR = i;
m_iterR++;
m_iNumSet++;
}
void CSeekIntIntMap::Clear() {
vector<utype>::iterator iterF = m_iF.begin();
vector<utype>::iterator iterR = m_iR.begin();
for (; iterF != m_iF.end(); iterF++, iterR++) {
*iterF = -1;
*iterR = -1;
}
m_iNumSet = 0;
m_iterR = m_iR.begin();
}
utype CSeekIntIntMap::GetNumSet() const {
return m_iNumSet;
}
utype CSeekIntIntMap::GetSize() const {
return m_iSize;
}
void CSeekIntIntMap::Reset(const char *cP, const bool bReverse) {
utype i;
if (bReverse == false) {
for (i = 0; i < m_iSize; i++) {
if (cP[i] == 1) {
Add(i);
}
}
} else {
for (i = 0; i < m_iSize; i++) {
if (cP[i] == 0) {
Add(i);
}
}
}
}
void CSeekIntIntMap::Reset(const vector<char> &cP, const bool bReverse) {
utype i;
if (bReverse == false) {
for (i = 0; i < m_iSize; i++) {
if (cP[i] == 1) {
Add(i);
}
}
} else {
for (i = 0; i < m_iSize; i++) {
if (cP[i] == 0) {
Add(i);
}
}
}
}
/*
* StrIntMap Data Structure
*/
CSeekStrIntMap::CSeekStrIntMap() {
m_mapstrint.clear();
m_mapintstr.clear();
}
CSeekStrIntMap::~CSeekStrIntMap() {
m_mapstrint.clear();
m_mapintstr.clear();
}
void CSeekStrIntMap::Clear() {
m_mapstrint.clear();
m_mapintstr.clear();
}
void CSeekStrIntMap::SetAll(const vector <string> &s) {
Clear();
utype i = 0;
for (i = 0; i < s.size(); i++) {
m_mapstrint[s[i]] = i;
m_mapintstr[i] = s[i];
}
}
/* Insert (or overwrite) a single bidirectional pairing s <-> i. */
void CSeekStrIntMap::Set(const string &s, const utype &i) {
m_mapstrint[s] = i;
m_mapintstr[i] = s;
}
/* Expose the forward (string -> integer) map by mutable reference.
 * Callers mutating it directly can desynchronize the two directions. */
map <string, utype> &CSeekStrIntMap::GetMapForward() {
return m_mapstrint;
}
/* Expose the reverse (integer -> string) map by mutable reference.
 * Callers mutating it directly can desynchronize the two directions. */
map <utype, string> &CSeekStrIntMap::GetMapReverse() {
return m_mapintstr;
}
/* Look up the integer mapped to string s.
 * NOTE(review): if s is absent, find() returns end() and dereferencing
 * it is undefined behavior -- callers must only pass known keys. */
utype CSeekStrIntMap::Get(const string &s) const {
map<string, utype>::const_iterator iter = m_mapstrint.find(s);
return iter->second;
}
/* Look up the string mapped to integer i.
 * NOTE(review): if i is absent, find() returns end() and dereferencing
 * it is undefined behavior -- callers must only pass known keys. */
string CSeekStrIntMap::Get(const utype &i) const {
map<utype, string>::const_iterator iter = m_mapintstr.find(i);
return iter->second;
}
/* Return the number of entries (measured on the reverse map). */
utype CSeekStrIntMap::GetSize() const {
return m_mapintstr.size();
}
/* Return all string keys, in the forward map's ascending key order.
 * Fixed: dropped the redundant clear() on a fresh local, and replaced
 * resize()+element assignment (which default-constructs GetSize()
 * strings only to overwrite them) with reserve()+push_back. Output is
 * identical to the original. */
vector <string> CSeekStrIntMap::GetAllString() const {
    vector <string> vecStr;
    vecStr.reserve(m_mapstrint.size());
    map<string, utype>::const_iterator iter = m_mapstrint.begin();
    for (; iter != m_mapstrint.end(); iter++)
        vecStr.push_back(iter->first);
    return vecStr;
}
/* Return all integer keys, in the reverse map's ascending key order.
 * Fixed: dropped the redundant clear() on a fresh local and replaced
 * resize()+assignment with reserve()+push_back, mirroring GetAllString.
 * Output is identical to the original. */
vector <utype> CSeekStrIntMap::GetAllInteger() const {
    vector <utype> vecInt;
    vecInt.reserve(m_mapintstr.size());
    map<utype, string>::const_iterator iter = m_mapintstr.begin();
    for (; iter != m_mapintstr.end(); iter++)
        vecInt.push_back(iter->first);
    return vecInt;
}
}
|
lixiupei/rssant | rssant_api/models/union_story.py | <filename>rssant_api/models/union_story.py
from django.utils import timezone
from django.db import transaction
from cached_property import cached_property
from rssant_feedlib.processor import story_html_to_text
from rssant_common.validator import StoryUnionId, FeedUnionId
from rssant_common.detail import Detail
from .feed import UserFeed
from .story import Story, UserStory, StoryDetailSchema, USER_STORY_DETAIL_FEILDS
from .errors import FeedNotFoundError, StoryNotFoundError
def convert_summary(summary):
    # Convert a stored (HTML) summary to plain text via the feedlib processor.
    return story_html_to_text(summary)
class UnionStory:
    """Per-user view of a story.

    Joins the shared ``Story`` row with the user's ``UserStory``
    (watch/favorite marks, may be absent) and the user's ``UserFeed``
    subscription. Identified by ``StoryUnionId(user_id, feed_id, offset)``.
    """

    # Class-level cache for _story_field_names().
    _STORY_FIELD_NAMES = None

    @classmethod
    def _story_field_names(cls):
        """Return the sorted DB column names of the Story model (cached)."""
        if cls._STORY_FIELD_NAMES is None:
            names = set()
            for field in Story._meta.get_fields():
                # Relation fields without a concrete column are skipped.
                column = getattr(field, 'column', None)
                if column:
                    names.add(column)
            cls._STORY_FIELD_NAMES = list(sorted(names))
        return cls._STORY_FIELD_NAMES

    def __init__(self, story, *, user_id, user_feed_id, user_story=None, detail=False):
        # user_story is None when the user has no marks stored for this story.
        self._story = story
        self._user_id = user_id
        self._user_feed_id = user_feed_id
        self._user_story = user_story
        # detail controls which fields to_dict() includes.
        self._detail = detail

    @cached_property
    def id(self):
        return StoryUnionId(self._user_id, self._story.feed_id, self._story.offset)

    @property
    def user_id(self):
        return self._user_id

    @cached_property
    def feed_id(self):
        return FeedUnionId(self._user_id, self._story.feed_id)

    @property
    def offset(self):
        return self._story.offset

    # --- plain pass-throughs to the underlying Story row ---

    @property
    def unique_id(self):
        return self._story.unique_id

    @property
    def title(self):
        return self._story.title

    @property
    def link(self):
        return self._story.link

    @property
    def author(self):
        return self._story.author

    @property
    def image_url(self):
        return self._story.image_url

    @property
    def iframe_url(self):
        return self._story.iframe_url

    @property
    def audio_url(self):
        return self._story.audio_url

    @property
    def has_mathjax(self):
        return self._story.has_mathjax

    @property
    def dt_published(self):
        return self._story.dt_published

    @property
    def dt_updated(self):
        return self._story.dt_updated

    @property
    def dt_created(self):
        return self._story.dt_created

    @property
    def dt_synced(self):
        return self._story.dt_synced

    # --- per-user marks; default to unset when no UserStory row exists ---

    @property
    def is_watched(self):
        if not self._user_story:
            return False
        return self._user_story.is_watched

    @property
    def dt_watched(self):
        if not self._user_story:
            return None
        return self._user_story.dt_watched

    @property
    def is_favorited(self):
        if not self._user_story:
            return False
        return self._user_story.is_favorited

    @property
    def dt_favorited(self):
        if not self._user_story:
            return None
        return self._user_story.dt_favorited

    @property
    def content_hash_base64(self):
        return self._story.content_hash_base64

    @cached_property
    def summary(self):
        # Stored summary is converted to plain text (cached per instance).
        return convert_summary(self._story.summary)

    @property
    def content(self):
        return self._story.content

    def to_dict(self):
        """Serialize to a dict; extra fields are added per the detail schema."""
        ret = dict(
            id=self.id,
            user=dict(id=self.user_id),
            feed=dict(id=self.feed_id),
            offset=self.offset,
            title=self.title,
            link=self.link,
            has_mathjax=self.has_mathjax,
            is_watched=self.is_watched,
            is_favorited=self.is_favorited,
        )
        detail = Detail.from_schema(self._detail, StoryDetailSchema)
        for k in detail.include_fields:
            ret[k] = getattr(self, k)
        return ret

    @staticmethod
    def _check_user_feed_by_story_unionid(story_unionid):
        """Verify the user subscribes the story's feed.

        Returns the UserFeed id, or raises StoryNotFoundError when the
        user has no subscription for that feed.
        """
        user_id, feed_id, offset = story_unionid
        q = UserFeed.objects.only('id').filter(user_id=user_id, feed_id=feed_id)
        try:
            user_feed = q.get()
        except UserFeed.DoesNotExist:
            raise StoryNotFoundError()
        return user_feed.id

    @staticmethod
    def get_by_id(story_unionid, detail=False):
        """Load a single UnionStory by its union id.

        Raises StoryNotFoundError when the feed is not subscribed or the
        story row does not exist.
        """
        user_feed_id = UnionStory._check_user_feed_by_story_unionid(story_unionid)
        user_id, feed_id, offset = story_unionid
        q = UserStory.objects.select_related('story')
        q = q.filter(user_id=user_id, feed_id=feed_id, offset=offset)
        if not detail:
            q = q.defer(*USER_STORY_DETAIL_FEILDS)
        try:
            user_story = q.get()
        except UserStory.DoesNotExist:
            # No per-user marks row: fall back to loading the shared Story.
            user_story = None
            try:
                story = Story.get_by_offset(feed_id, offset, detail=detail)
            except Story.DoesNotExist:
                raise StoryNotFoundError()
        else:
            story = user_story.story
        return UnionStory(
            story,
            user_id=user_id,
            user_feed_id=user_feed_id,
            user_story=user_story,
            detail=detail
        )

    @staticmethod
    def get_by_feed_offset(feed_unionid, offset, detail=False):
        """Convenience wrapper: look up by (feed union id, offset)."""
        story_unionid = StoryUnionId(*feed_unionid, offset)
        return UnionStory.get_by_id(story_unionid, detail=detail)

    @staticmethod
    def _merge_storys(storys, user_storys, *, user_id, user_feeds=None, detail=False):
        """Zip Story rows with their matching UserStory/UserFeed rows.

        When user_feeds is not given, user_feed ids are recovered from the
        UserStory rows (so storys without a UserStory get user_feed_id None).
        """
        user_storys_map = {x.story_id: x for x in user_storys}
        if user_feeds:
            user_feeds_map = {x.feed_id: x.id for x in user_feeds}
        else:
            user_feeds_map = {x.feed_id: x.user_feed_id for x in user_storys}
        ret = []
        for story in storys:
            user_story = user_storys_map.get(story.id)
            user_feed_id = user_feeds_map.get(story.feed_id)
            ret.append(UnionStory(
                story,
                user_id=user_id,
                user_feed_id=user_feed_id,
                user_story=user_story,
                detail=detail
            ))
        return ret

    @staticmethod
    def query_by_feed(feed_unionid, offset=None, size=10, detail=False):
        """Page stories of one feed starting at offset.

        Returns (total, offset, storys). offset defaults to the user's
        reading position. Raises FeedNotFoundError when not subscribed.
        """
        user_id, feed_id = feed_unionid
        q = UserFeed.objects.select_related('feed')\
            .filter(user_id=user_id, feed_id=feed_id)\
            .only('id', 'story_offset', 'feed_id', 'feed__id', 'feed__total_storys')
        try:
            user_feed = q.get()
        except UserFeed.DoesNotExist as ex:
            raise FeedNotFoundError() from ex
        total = user_feed.feed.total_storys
        if offset is None:
            offset = user_feed.story_offset
        q = Story.objects.filter(feed_id=feed_id, offset__gte=offset)
        detail = Detail.from_schema(detail, StoryDetailSchema)
        q = q.defer(*detail.exclude_fields)
        q = q.order_by('offset')[:size]
        storys = list(q.all())
        story_ids = [x.id for x in storys]
        # Only UserStory rows carrying at least one mark are of interest.
        q = UserStory.objects.filter(user_id=user_id, feed_id=feed_id, story_id__in=story_ids)
        q = q.exclude(is_favorited=False, is_watched=False)
        user_storys = list(q.all())
        ret = UnionStory._merge_storys(
            storys, user_storys, user_feeds=[user_feed], user_id=user_id, detail=detail)
        return total, offset, ret

    @classmethod
    def query_recent_by_user(cls, user_id, feed_unionids=None, days=14, limit=300, detail=False):
        """
        Deprecated since 1.4.2, use batch_get_by_feed_offset instead
        """
        if (not feed_unionids) and feed_unionids is not None:
            return []  # when feed_unionids is empty list, return empty list
        if feed_unionids:
            feed_ids = [x.feed_id for x in feed_unionids]
            # Keep only feeds the user actually subscribes.
            feed_ids = cls._query_user_feed_ids(user_id, feed_ids)
        else:
            feed_ids = cls._query_user_feed_ids(user_id)
        dt_begin = timezone.now() - timezone.timedelta(days=days)
        q = Story.objects.filter(feed_id__in=feed_ids)\
            .filter(dt_published__gte=dt_begin)
        detail = Detail.from_schema(detail, StoryDetailSchema)
        q = q.defer(*detail.exclude_fields)
        q = q.order_by('-dt_published')[:limit]
        storys = list(q.all())
        union_storys = cls._query_union_storys(
            user_id=user_id, storys=storys, detail=detail)
        return union_storys

    @classmethod
    def _query_user_feed_ids(cls, user_id, feed_ids=None):
        """Return the user's subscribed feed ids, optionally intersected
        with the given feed_ids."""
        q = UserFeed.objects.only('id', 'feed_id')
        if feed_ids is None:
            q = q.filter(user_id=user_id)
        else:
            q = q.filter(user_id=user_id, feed_id__in=feed_ids)
        user_feeds = list(q.all())
        feed_ids = [x.feed_id for x in user_feeds]
        return feed_ids

    @classmethod
    def _query_union_storys(cls, user_id, storys, detail):
        """Attach the user's marked UserStory rows to the given storys."""
        story_ids = [x.id for x in storys]
        feed_ids = list(set([x.feed_id for x in storys]))
        q = UserStory.objects.filter(
            user_id=user_id, feed_id__in=feed_ids, story_id__in=story_ids)
        # Rows with neither mark set carry no extra information.
        q = q.exclude(is_favorited=False, is_watched=False)
        user_storys = list(q.all())
        union_storys = UnionStory._merge_storys(
            storys, user_storys, user_id=user_id, detail=detail)
        return union_storys

    @classmethod
    def _validate_story_keys(cls, user_id, story_keys):
        """Drop story keys whose feed the user does not subscribe."""
        if not story_keys:
            return []
        # verify feed_id is subscribed by user
        feed_ids = list(set(x[0] for x in story_keys))
        feed_ids = set(cls._query_user_feed_ids(user_id, feed_ids))
        verified_story_keys = []
        for feed_id, offset in story_keys:
            if feed_id in feed_ids:
                verified_story_keys.append((feed_id, offset))
        return verified_story_keys

    @classmethod
    def batch_get_by_feed_offset(cls, user_id, story_keys, detail=False):
        """
        story_keys: List[Tuple[feed_id, offset]]
        """
        story_keys = cls._validate_story_keys(user_id, story_keys)
        if not story_keys:
            return []
        detail = Detail.from_schema(detail, StoryDetailSchema)
        select_fields = set(cls._story_field_names()) - set(detail.exclude_fields)
        select_fields_quoted = ','.join(['"{}"'.format(x) for x in select_fields])
        # Note: below query can not use index, it's very slow
        # WHERE ("feed_id","offset")=Any(%s)
        # WHERE ("feed_id","offset")=Any(ARRAY[(XX, YY), ...])
        where_items = []
        for feed_id, offset in story_keys:
            # ensure integer, avoid sql inject attack
            feed_id, offset = int(feed_id), int(offset)
            where_items.append(f'("feed_id"={feed_id} AND "offset"={offset})')
        where_clause = ' OR '.join(where_items)
        sql = f"""
        SELECT {select_fields_quoted}
        FROM rssant_api_story
        WHERE {where_clause}
        """
        storys = list(Story.objects.raw(sql))
        union_storys = cls._query_union_storys(
            user_id=user_id, storys=storys, detail=detail)
        return union_storys

    @staticmethod
    def _query_by_tag(user_id, is_favorited=None, is_watched=None, detail=False):
        """Query the user's marked stories, filtered by either flag."""
        q = UserStory.objects.select_related('story').filter(user_id=user_id)
        detail = Detail.from_schema(detail, StoryDetailSchema)
        exclude_fields = [f'story__{x}' for x in detail.exclude_fields]
        q = q.defer(*exclude_fields)
        if is_favorited is not None:
            q = q.filter(is_favorited=is_favorited)
        if is_watched is not None:
            q = q.filter(is_watched=is_watched)
        user_storys = list(q.all())
        storys = [x.story for x in user_storys]
        union_storys = UnionStory._merge_storys(storys, user_storys, user_id=user_id, detail=detail)
        return union_storys

    @staticmethod
    def query_favorited(user_id, detail=False):
        """Return the user's favorited stories."""
        return UnionStory._query_by_tag(user_id, is_favorited=True, detail=detail)

    @staticmethod
    def query_watched(user_id, detail=False):
        """Return the user's watched stories."""
        return UnionStory._query_by_tag(user_id, is_watched=True, detail=detail)

    @staticmethod
    def _set_tag_by_id(story_unionid, is_favorited=None, is_watched=None):
        """Set/unset a mark, creating the UserStory row on first use.

        Also flags the shared Story as user-marked when either mark is
        set. Returns the updated UnionStory.
        """
        union_story = UnionStory.get_by_id(story_unionid)
        user_feed_id = union_story._user_feed_id
        user_story = union_story._user_story
        with transaction.atomic():
            if user_story is None:
                user_id, feed_id, offset = story_unionid
                user_story = UserStory(
                    user_id=user_id,
                    feed_id=feed_id,
                    user_feed_id=user_feed_id,
                    story_id=union_story._story.id,
                    offset=union_story._story.offset
                )
            if is_favorited is not None:
                user_story.is_favorited = is_favorited
                user_story.dt_favorited = timezone.now()
            if is_watched is not None:
                user_story.is_watched = is_watched
                user_story.dt_watched = timezone.now()
            user_story.save()
            if is_favorited or is_watched:
                union_story._story.is_user_marked = True
                union_story._story.save()
        union_story._user_story = user_story
        return union_story

    @staticmethod
    def set_favorited_by_id(story_unionid, is_favorited):
        return UnionStory._set_tag_by_id(story_unionid, is_favorited=is_favorited)

    @staticmethod
    def set_watched_by_id(story_unionid, is_watched):
        return UnionStory._set_tag_by_id(story_unionid, is_watched=is_watched)

    @staticmethod
    def set_favorited_by_feed_offset(feed_unionid, offset, is_favorited):
        story_unionid = StoryUnionId(*feed_unionid, offset)
        return UnionStory.set_favorited_by_id(story_unionid, is_favorited=is_favorited)

    @staticmethod
    def set_watched_by_feed_offset(feed_unionid, offset, is_watched):
        story_unionid = StoryUnionId(*feed_unionid, offset)
        return UnionStory.set_watched_by_id(story_unionid, is_watched=is_watched)
|
AndreiADASKA/andreiadaska.github.io | ita/triangle/src/main.js | import { getApplicationDiv } from "./lib.js";
import { getHelpText, validateTriangle } from "./triangles/validation.js";
import { isTriangle } from "./triangles/geometry.js";
import { getAnswerPhrase } from "./triangles/output.js";
let sides;
let lowerBound;
let upperBound;
// Build an off-DOM fragment containing the validation error list followed
// by a help paragraph. innerHTML is kept (messages/help may carry markup).
function renderErrorScreen(messages, helpText) {
  const fragment = new DocumentFragment();
  const errorListElement = document.createElement("ul");
  for (const message of messages) {
    const messageItem = document.createElement("li");
    messageItem.className = "message";
    messageItem.innerHTML = message;
    errorListElement.append(messageItem);
  }
  const helpTextElement = document.createElement("p");
  helpTextElement.className = "helptext";
  helpTextElement.innerHTML = helpText;
  fragment.append(errorListElement, helpTextElement);
  return fragment;
}
// Render the verdict paragraph; CSS can hook onto .good / .nogood.
// Fixed: the negative class was " nogood" with a stray leading space.
function renderAnswerScreen(answerPhrase, answer) {
  const answerElement = document.createElement("p");
  answerElement.innerHTML = answerPhrase;
  answerElement.className = answer ? "good" : "nogood";
  return answerElement;
}
// Validate the module-level inputs and append either the error screen or
// the triangle verdict to appRoot.
function render(appRoot) {
  const validationResult = validateTriangle(sides, lowerBound, upperBound);
  if (!validationResult.isValid) {
    const help = getHelpText(lowerBound, upperBound);
    appRoot.append(renderErrorScreen(validationResult.errorMessages, help));
    return;
  }
  const answer = isTriangle(sides);
  const phrase = getAnswerPhrase(sides, answer);
  appRoot.append(renderAnswerScreen(phrase, answer));
}
// Entry point: locate the app container and render into it.
function main() {
  const appDiv = getApplicationDiv("#app");
  if (appDiv === null) {
    console.log("App div not found");
    return;
  }
  render(appDiv);
}
document.querySelector(".btn").onclick = () => {
sides = [
{ sideName: "a", sideLength: parseInt(document.querySelector(".side_a").value) },
{ sideName: "b", sideLength: parseInt(document.querySelector(".side_b").value) },
{ sideName: "c", sideLength: parseInt(document.querySelector(".side_c").value) },
];
lowerBound = +document.querySelector(".lower_num").value;
upperBound = +document.querySelector(".upper_num").value;
main();
};
export { main };
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.