repo_name stringlengths 6 101 | path stringlengths 4 300 | text stringlengths 7 1.31M |
|---|---|---|
joeharrison91/service-manual-publisher | app/forms/guide_form.rb | class GuideForm < BaseGuideForm
# Id of the topic section this guide is filed under (form field).
attr_accessor :topic_section_id

# URL prefix under which all service-manual guides are published.
def slug_prefix
  "/service-manual"
end

private

# Copies the persisted topic section id (if any) onto the form field.
def load_custom_attributes
  self.topic_section_id = topic_section.try(:id)
end

# Pushes the submitted topic_section_id back onto the join record.
def set_custom_attributes
  if topic_section_id.present?
    topic_section_guide.topic_section_id = topic_section_id
  end
end

# First existing topic_section_guides join row, or a freshly built one.
# Memoized so repeated calls mutate the same record.
def topic_section_guide
  @_topic_section_guide ||=
    guide.topic_section_guides[0] || guide.topic_section_guides.build
end

# Looks up the TopicSection currently linked to this guide, if any.
def topic_section
  TopicSection
    .joins(:topic_section_guides)
    .find_by('topic_section_guides.guide_id = ?', guide.id)
end
end
|
amolofos/kata | Problems/RemoveOneElementToMakeTheArrayStrictlyIncreasing/java/src/test/java/com/dkafetzi/kata/SolutionTest.java | package com.dkafetzi.kata;
import static org.junit.Assert.assertEquals;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
class SolutionTest {
private final static Logger LOGGER = LoggerFactory.getLogger(SolutionTest.class);
/** Example 1 from the problem statement: removing the 10 from [1,2,10,5,7] works. */
@Test
@DisplayName("Simple test 1")
void testSolution1(TestInfo testInfo) {
    LOGGER.debug("Simple test 1");
    /*
     * Example 1:
     * Input: nums = [1,2,10,5,7]
     * Output: true
     */
    // Fix: the fixture was {1,2,3,4,5,6,7}, which is already strictly
    // increasing and never exercises the "remove one element" path; it now
    // matches the documented example.
    int[] input = {1, 2, 10, 5, 7};
    boolean expectedOutput = true;
    boolean output;
    Solution solution = new Solution();
    output = solution.canBeIncreasing(input);
    // NOTE(review): logging an int[] prints its identity hash, not the
    // contents — consider Arrays.toString(input).
    LOGGER.info("O(n): Input: [{}], Expected output: {}, Output: {}.", input, expectedOutput, output);
    assertEquals(expectedOutput, output);
}
/** Example 2: [2,3,1,2] cannot be fixed by a single removal. */
@Test
@DisplayName("Simple test 2")
void testSolution2(TestInfo testInfo) {
    LOGGER.debug("Simple test 2");
    /*
     * Example 2:
     * Input: nums = [2,3,1,2]
     * Output: false
     */
    int[] input = {2, 3, 1, 2};
    boolean expectedOutput = false;
    boolean output;
    Solution solution = new Solution();
    output = solution.canBeIncreasing(input);
    // NOTE(review): logging an int[] prints its identity hash, not the
    // contents — consider Arrays.toString(input).
    LOGGER.info("O(n): Input: [{}], Expected output: {}, Output: {}.", input, expectedOutput, output);
    assertEquals(expectedOutput, output);
}

/** Example 3: all-equal elements [1,1,1] stay non-increasing after any removal. */
@Test
@DisplayName("Simple test 3")
void testSolution3(TestInfo testInfo) {
    LOGGER.debug("Simple test 3");
    /*
     * Example 3:
     * Input: nums = [1, 1, 1]
     * Output: false
     */
    int[] input = {1, 1, 1};
    boolean expectedOutput = false;
    boolean output;
    Solution solution = new Solution();
    output = solution.canBeIncreasing(input);
    LOGGER.info("O(n): Input: [{}], Expected output: {}, Output: {}.", input, expectedOutput, output);
    assertEquals(expectedOutput, output);
}

/** Example 4: an already strictly increasing array is trivially true. */
@Test
@DisplayName("Simple test 4")
void testSolution4(TestInfo testInfo) {
    LOGGER.debug("Simple test 4");
    /*
     * Example 4:
     * Input: nums = [1, 2, 3]
     * Output: true
     */
    int[] input = {1, 2, 3};
    boolean expectedOutput = true;
    boolean output;
    Solution solution = new Solution();
    output = solution.canBeIncreasing(input);
    LOGGER.info("O(n): Input: [{}], Expected output: {}, Output: {}.", input, expectedOutput, output);
    assertEquals(expectedOutput, output);
}
} |
suewonjp/civilizer | src/main/java/com/civilizer/web/view/TagBean.java | package com.civilizer.web.view;
import java.io.Serializable;
import com.civilizer.domain.Tag;
/**
 * Serializable view bean wrapping a domain {@link Tag} together with its
 * fragment counts for presentation.
 */
@SuppressWarnings("serial")
public final class TagBean implements Serializable {

    private Tag tag;

    // number of fragments associated with this tag only
    private long fragmentCount = 0;

    // number of fragments associated with this tag and its descendants
    private long fragmentCountWtHrc = 0;

    /** Resets the wrapped tag to an unsaved (null id), unnamed state. */
    public void clear() {
        if (tag != null) {
            tag.setId(null);
            tag.setTagName("");
        }
    }

    public Tag getTag() {
        return tag;
    }

    public void setTag(Tag tag) {
        this.tag = tag;
    }

    public long getFragmentCount() {
        return fragmentCount;
    }

    public void setFragmentCount(long fragmentCount) {
        this.fragmentCount = fragmentCount;
    }

    public long getFragmentCountWtHrc() {
        return fragmentCountWtHrc;
    }

    public void setFragmentCountWtHrc(long count) {
        this.fragmentCountWtHrc = count;
    }

    /**
     * Style class name derived from the wrapped tag's id.
     * NOTE(review): dereferences {@code tag} without a null check — confirm
     * callers always invoke setTag() before this.
     */
    public String typeName() {
        return Tag.isTrivialTag(tag.getId()) ? "trivial-tag" : "special-tag";
    }
}
|
fosonmeng/virtual-touch | src/touchaction/constructor.js | <gh_stars>0
import {
TOUCH_ACTION_COMPUTE,
TOUCH_ACTION_NONE,
TOUCH_ACTION_PAN_X,
TOUCH_ACTION_PAN_Y,
} from './consts';
import {
DIRECTION_VERTICAL,
DIRECTION_HORIZONTAL,
} from '../input/consts';
import each from '../utils/each';
import valOrFunc from '../utils/val-or-func';
import inStr from '../utils/in-str';
import cleanTouchActions from './clean-touch-actions.js';
/**
 * Manages the touch-action policy for a gesture Manager: which native
 * browser pan/zoom gestures are suppressed while recognizers run.
 */
export default class TouchAction {
  /**
   * @param {Object} manager owning gesture manager
   * @param {String} value touch-action value, or TOUCH_ACTION_COMPUTE
   */
  constructor(manager, value) {
    this.manager = manager;
    this.set(value);
  }

  /**
   * Sets the active touch-action string, resolving "compute" by asking
   * every enabled recognizer what it needs.
   * @param {String} value
   */
  set(value) {
    if (value === TOUCH_ACTION_COMPUTE) {
      value = this.compute();
    }
    this.actions = value.toLowerCase().trim();
  }

  /** Re-reads the touchAction option from the manager and applies it. */
  update() {
    this.set(this.manager.options.touchAction);
  }

  /**
   * Collects the touch actions required by all enabled recognizers and
   * collapses them into a single cleaned string.
   * @return {String}
   */
  compute() {
    let actions = [];
    each(this.manager.recognizers, (recognizer) => {
      if (valOrFunc(recognizer.options.enable, [recognizer])) {
        Array.prototype.push.apply(actions, recognizer.getTouchAction());
      }
    });
    return cleanTouchActions(actions.join(' '));
  }

  /**
   * Decides whether the source event of `input` must be preventDefault-ed,
   * based on the configured actions and the movement direction.
   * @param {Object} input computed input data for the current pointer event
   */
  preventDefaults(input) {
    let { srcEvent } = input;
    let direction = input.offsetDirection;

    // Once a session is marked prevented, keep suppressing until it ends.
    if (this.manager.session.prevented) {
      srcEvent.preventDefault();
      return;
    }

    let { actions } = this;
    let hasNone = inStr(actions, TOUCH_ACTION_NONE);
    let hasPanY = inStr(actions, TOUCH_ACTION_PAN_Y);
    let hasPanX = inStr(actions, TOUCH_ACTION_PAN_X);

    if (hasNone) {
      // Let quick single-finger taps through even under "none" so click
      // events still fire (1 pointer, <2px movement, <250ms).
      let isTapPointer = input.pointers.length === 1;
      let isTapMovement = input.distance < 2;
      let isTapTouchTime = input.deltaTime < 250;
      if (isTapPointer && isTapMovement && isTapTouchTime) {
        return;
      }
    }

    if (hasPanX && hasPanY) {
      // `pan-x pan-y` means the browser handles all panning natively.
      return;
    }

    // Prevent the native event when no action is allowed, or when the move
    // runs along an axis the browser is NOT allowed to pan: `pan-y` set but
    // the move is horizontal, and vice versa.
    // Fix: the original condition paired pan-x with HORIZONTAL and pan-y
    // with VERTICAL, which is inverted relative to upstream hammer.js and
    // suppressed exactly the gestures that were declared allowed.
    if (hasNone || (
      (hasPanY && direction & DIRECTION_HORIZONTAL) ||
      (hasPanX && direction & DIRECTION_VERTICAL)
    )) {
      return this.preventSrc(srcEvent);
    }
  }

  /** Marks the session prevented and cancels the native event. */
  preventSrc(srcEvent) {
    this.manager.session.prevented = true;
    srcEvent.preventDefault();
  }
}
|
upperlevel/quakecraft | src/main/java/xyz/upperlevel/quakecraft/events/KillStreakReachEvent.java | <filename>src/main/java/xyz/upperlevel/quakecraft/events/KillStreakReachEvent.java<gh_stars>1-10
package xyz.upperlevel.quakecraft.events;
import lombok.Getter;
import lombok.Setter;
import org.bukkit.event.Cancellable;
import org.bukkit.event.Event;
import org.bukkit.event.HandlerList;
import xyz.upperlevel.quakecraft.phases.game.GamePhase;
import xyz.upperlevel.quakecraft.phases.game.Gamer;
import xyz.upperlevel.quakecraft.phases.game.KillStreak;
import xyz.upperlevel.uppercore.placeholder.message.Message;
/**
 * Cancellable Bukkit event carrying the game phase, the gamer and the
 * {@link KillStreak} they reached; the broadcast {@link Message} is mutable
 * so listeners may replace it (getters/setters generated by Lombok).
 */
@Getter
@Setter
public class KillStreakReachEvent extends Event implements Cancellable {
    private static final HandlerList handlers = new HandlerList();

    // Immutable event context.
    private final GamePhase phase;
    private final Gamer player;
    private final KillStreak streak;

    // Mutable: listeners may swap the announcement message or cancel.
    private Message message;
    private boolean cancelled;

    public KillStreakReachEvent(GamePhase phase, Gamer player, KillStreak streak, Message message) {
        this.phase = phase;
        this.player = player;
        this.streak = streak;
        this.message = message;
    }

    @Override
    public HandlerList getHandlers() {
        return handlers;
    }

    // Static accessor required by Bukkit's event registration contract.
    public static HandlerList getHandlerList() {
        return handlers;
    }
}
|
crazymaza/job4j | chapter_002/src/main/java/ru/job4j/tracker/BaseAction.java | <filename>chapter_002/src/main/java/ru/job4j/tracker/BaseAction.java
package ru.job4j.tracker;
/**
 * Skeletal implementation of {@link UserAction} that stores an action's
 * menu position and human-readable name.
 */
abstract class BaseAction implements UserAction {

    /** Position of this action inside the tracker menu. */
    private final int numberOfMenu;

    /** Name shown next to the menu number. */
    private final String name;

    public BaseAction(int numberOfMenu, String name) {
        this.numberOfMenu = numberOfMenu;
        this.name = name;
    }

    /** @return the menu key callers use to select this action. */
    public int key() {
        return numberOfMenu;
    }

    /** @return the menu line in the form {@code "<key>. <name>"}. */
    public String info() {
        return numberOfMenu + ". " + name;
    }
}
|
mjung85/iotsys | iotsys-enocean-library/test/org/opencean/core/utils/BitsTest.java | <gh_stars>10-100
package org.opencean.core.utils;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
import org.opencean.core.utils.Bits;
/**
 * Unit tests for the {@link Bits} helper: reading single bits with
 * isBitSet and writing them with setBit (bit positions are 0-based, LSB first).
 */
public class BitsTest {

    @Test
    public void getBitFirstBit() {
        byte b = 1;
        assertTrue(Bits.isBitSet(b, 0));
    }

    @Test
    public void getBitSecondBit() {
        byte b = 2;
        assertTrue(Bits.isBitSet(b, 1));
    }

    @Test
    public void getBitFirstBitFalse() {
        byte b = 2;
        assertFalse(Bits.isBitSet(b, 0));
    }

    @Test
    public void getBitSecondBitFalse() {
        byte b = 1;
        assertFalse(Bits.isBitSet(b, 1));
    }

    @Test
    public void setBitFirstBit() {
        byte b = 0;
        assertEquals(1, Bits.setBit(b, 0, true));
    }

    @Test
    public void setBitFirstBitAlreadySet() {
        // Setting an already-set bit must be idempotent.
        byte b = 1;
        assertEquals(1, Bits.setBit(b, 0, true));
    }

    @Test
    public void setBitSecondBit() {
        byte b = 0;
        assertEquals(2, Bits.setBit(b, 1, true));
    }

    @Test
    public void setBitSecondBitFalse() {
        // Clearing bit 1 of 0b11 leaves 0b01.
        byte b = 3;
        assertEquals(1, Bits.setBit(b, 1, false));
    }
}
|
jnthn/intellij-community | java/java-tests/testData/codeInsight/daemonCodeAnalyzer/quickFix/extractStreamMap/afterGeneric.java | // "Extract variable 'set' to 'map' operation" "true"
import java.util.*;
import java.util.stream.*;
public class Test {
void testMap(List<Map<String, String>> list) {
list.stream().map(Map::keySet).flatMap(set -> set.stream()).forEach(System.out::println);
}
} |
Shelvak/monitor | app/controllers/taggings_controller.rb | <reponame>Shelvak/monitor<filename>app/controllers/taggings_controller.rb
# Manages tag assignments (taggings) on issues, scoped to what the
# current user is allowed to see. Responds to JS and JSON only.
class TaggingsController < ApplicationController
  respond_to :js, :json

  before_action :authorize, :set_issue
  before_action :set_tagging, only: [:show, :destroy]
  before_action :set_title, except: [:destroy]

  # Renders the form for adding a tag to the current issue.
  def new
    @tagging = @issue.taggings.new
    respond_with @tagging
  end

  # Creates the tagging. @new_tagging keeps the (possibly saved) record;
  # on success @tagging is swapped for a fresh, unsaved tagging.
  # NOTE(review): the replacement is built on current_user.taggings rather
  # than @issue.taggings — presumably to reset the form; confirm intended.
  def create
    @tagging = @new_tagging = @issue.taggings.new tagging_params
    @tagging = current_user.taggings.new if @tagging.save
    respond_with @tagging
  end

  def destroy
    @tagging.destroy
    respond_with @tagging
  end

  private

  # Looks up the issue within the user-visible scope (404s otherwise).
  def set_issue
    @issue = issues.find params[:issue_id]
  end

  def set_tagging
    @tagging = @issue.taggings.find params[:id]
  end

  # Strong parameters: only tag_id may be mass-assigned.
  def tagging_params
    params.require(:tagging).permit :tag_id
  end

  # Supervisors see every issue; everyone else only their own.
  def issues
    current_user.supervisor? ? Issue.all : current_user.issues
  end
end
|
aplocon/sis | core/sis-metadata/src/main/java/org/apache/sis/internal/xml/LegacyNamespaces.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.sis.internal.xml;
import org.apache.sis.util.Version;
/**
* Legacy XML namespaces, and {@link Version} constants for identifying when those namespaces were used.
*
* @author <NAME> (Geomatys)
* @author <NAME> (Image Matters)
* @version 1.0
*
* @see org.apache.sis.xml.Namespaces
*
* @since 0.4
* @module
*/
public final class LegacyNamespaces {
/**
* Miscellaneous version constants used for ISO standards.
*/
public static final Version VERSION_2007 = new Version("2007"),
VERSION_2014 = new Version("2014"),
VERSION_2016 = new Version("2016");
/**
* Miscellaneous version constants used for GML versions.
*/
public static final Version VERSION_3_0 = new Version("3.0"),
VERSION_3_2 = new Version("3.2");
/**
* First GML version of the new {@code xmlns}.
* GML 3.2.0 schemas are defined in the namespace {@code http://www.opengis.net/gml} whereas
* GML 3.2.1 schemas are defined in the namespace {@code http://www.opengis.net/gml/3.2}.
*/
public static final Version VERSION_3_2_1 = new Version("3.2.1");
/**
* The {@value} URL, which was used for all GML versions before 3.2.
* This URL should not be used in JAXB annotations, even if the annotated element is really for that
* legacy GML version. Instead, namespace replacements are applied on-the-fly at marshalling time.
*/
public static final String GML = "http://www.opengis.net/gml";
/**
* The <code>{@value}</code> URL, used in ISO 19139:2007.
* The usual prefix for this namespace is {@code "gmd"}.
*/
public static final String GMD = "http://www.isotc211.org/2005/gmd";
/**
* The <code>{@value}</code> URL as an alias for {@link #GMI}.
* Was used in some XML files before a more official URL was set.
*/
public static final String GMI_ALIAS = "http://www.isotc211.org/2005/gmi";
/**
* The <code>{@value}</code> URL.
* The usual prefix for this namespace is {@code "gmi"}.
*/
public static final String GMI = "http://standards.iso.org/iso/19115/-2/gmi/1.0";
/**
* The <code>{@value}</code> URL, used in ISO 19139:2007.
* The usual prefix for this namespace is {@code "gmx"}.
*/
public static final String GMX = "http://www.isotc211.org/2005/gmx";
/**
* The <code>{@value}</code> URL, used in ISO 19139:2007.
* The usual prefix for this namespace is {@code "gco"}.
* Replaced by {@link org.apache.sis.xml.Namespaces#GCO}.
*/
public static final String GCO = "http://www.isotc211.org/2005/gco";
/**
* The <code>{@value}</code> URL, used in ISO 19139:2007.
* The usual prefix for this namespace is {@code "srv"}.
* Replaced by {@link org.apache.sis.xml.Namespaces#SRV}.
*/
public static final String SRV = "http://www.isotc211.org/2005/srv";
/**
* The <code>{@value}</code> URL, used in ISO 19110.
* The usual prefix for this namespace is {@code "gfc"}.
* Replaced by {@link org.apache.sis.xml.Namespaces#GFC}.
*/
public static final String GFC = "http://www.isotc211.org/2005/gfc";
/**
* The <code>{@value}</code> URL, used in ISO 19139:2007.
* The usual prefix for this namespace is {@code "gts"}.
*/
public static final String GTS = "http://www.isotc211.org/2005/gts";
/**
* Do not allow instantiation of this class.
*/
private LegacyNamespaces() {
}
}
|
agrc/wfrc | cypress/integration/map_widget_spec.js | describe('map-widget', () => {
it('button toggles pane visibility', () => {
cy.loadApp();
cy.findByText(/filter/i).should('be.visible');
cy.get('[title="Filter"] > .svg-inline--fa').click();
cy.findByText(/filter/i).should('not.be.visible');
cy.get('[title="Filter"] > .svg-inline--fa').click();
cy.findByText(/filter/i).should('be.visible');
});
it('close button closes pane', () => {
cy.loadApp();
cy.findByText(/filter/i).should('be.visible');
cy.get('.map-widget-card > .card-header > .buttons-container > .close > span').click({ multiple: true });
cy.findByText(/filter/i).should('not.be.visible');
cy.findByText(/project information/i).should('not.be.visible');
});
});
|
Xamaneone/Python-OOP | defining_classes _exercise/todo_list/project/test.py | from task import Task
from section import Section
import unittest
class Test(unittest.TestCase):
    """Unit tests for the Task and Section classes of the todo-list kata.

    Covers construction, renaming, due-date changes, comment editing,
    adding/completing tasks and clearing/viewing a section.
    """

    def test_task_init(self):
        task = Task("Tst", "27.04.2020")
        message = f"{task.name} - {task.due_date}"
        expected = "Tst - 27.04.2020"
        self.assertEqual(message, expected)

    def test_change_name_working(self):
        task = Task("Tst", "27.04.2020")
        task.change_name("New name")
        message = task.name
        expected = "New name"
        self.assertEqual(message, expected)

    def test_change_name_same_name(self):
        # Renaming to the current name is rejected with an error message.
        task = Task("Tst", "27.04.2020")
        message = task.change_name("Tst")
        expected = "Name cannot be the same."
        self.assertEqual(message, expected)

    def test_change_due_date_working(self):
        task = Task("Tst", "27.04.2020")
        task.change_due_date("21.05.2020")
        message = task.due_date
        expected = "21.05.2020"
        self.assertEqual(message, expected)

    def test_edit_comment_working(self):
        task = Task("Tst", "27.04.2020")
        task.add_comment("pay the bills")
        message = task.edit_comment(0, "finish my homework")
        expected = "finish my homework"
        self.assertEqual(message, expected)

    def test_edit_comment_not_found(self):
        # Index 1 is out of range when only one comment exists.
        task = Task("Tst", "27.04.2020")
        task.add_comment("pay the bills")
        message = task.edit_comment(1, "finish my homework")
        expected = "Cannot find comment."
        self.assertEqual(message, expected)

    def test_section_init(self):
        section = Section("New section")
        message = f"{section.name} - {len(section.tasks)}"
        expected = "New section - 0"
        self.assertEqual(message, expected)

    def test_add_task(self):
        section = Section("New section")
        task = Task("Tst", "27.04.2020")
        message = section.add_task(task)
        expected = "Task Name: Tst - Due Date: 27.04.2020 is added to the section"
        self.assertEqual(message, expected)

    def test_add_task_already_added(self):
        section = Section("New section")
        task = Task("Tst", "27.04.2020")
        section.add_task(task)
        message = section.add_task(task)
        expected = "Task is already in the section New section"
        self.assertEqual(message, expected)

    def test_complete_task(self):
        section = Section("New section")
        task = Task("Tst", "27.04.2020")
        section.add_task(task)
        section.complete_task("Tst")
        message = task.completed
        self.assertTrue(message)

    def test_complete_task_message(self):
        section = Section("New section")
        task = Task("Tst", "27.04.2020")
        section.add_task(task)
        message = section.complete_task("Tst")
        expected = "Completed task Tst"
        self.assertEqual(message, expected)

    def test_complete_not_found(self):
        section = Section("New section")
        message = section.complete_task("Tst")
        expected = "Could not find task with the name Tst"
        self.assertEqual(message, expected)

    def test_clean_section(self):
        # Completed tasks are removed and the count is reported.
        section = Section("New section")
        task = Task("Tst", "27.04.2020")
        section.add_task(task)
        section.complete_task("Tst")
        message = section.clean_section()
        expected = "Cleared 1 tasks."
        self.assertEqual(message, expected)

    def test_view_section(self):
        section = Section("New section")
        message = section.view_section()
        expected = "Section New section:\n"
        self.assertEqual(message, expected)
if __name__ == '__main__':
unittest.main() |
Alekssasho/sge_source | libs/sge_engine/src/sge_engine/traits/TraitPath.h | #pragma once
#include "sge_engine/Actor.h"
#include "sge_utils/utils/optional.h"
namespace sge {
struct TraitPath3D;
enum BounceType {
bounceType_bounce,
bouceType_reset,
bounceType_stop,
bounceType_onForwardOffBackwards,
bounceType_idle,
};
float computePathLength(const std::vector<vec3f>& path);
vec3f samplePathLazy(const std::vector<vec3f>& path, float distance);
//-----------------------------------------------------------------------
//
//-----------------------------------------------------------------------
namespace PathLengthFollow {
struct SGE_ENGINE_API Settings {
bool isReversed =
false; // true if the movement starts form the end of the path. (the includes the distance progress along the path).
float speed = 1.f;
float restingTime = 0.f;
BounceType bounceType = bounceType_bounce; // what should happen when the end of the path is reached.
};
struct SGE_ENGINE_API State {
float timeLeftToRest = 0.f; // The time that the follower need to spend resting.
float distanceAlongPath =
0.f; // The distance travelled in the paths object space. the value is maintained the same if reversed is specified to the
// settings, the only thing that is changed is that the starting point is now the end point.
float evalDistanceAlongPath = 0.f; // The actual distance used for evaluation along the path.
bool speedIsPositive = true;
bool isResting() const { return timeLeftToRest > 0.f; }
};
/// @param recursionDepth Also leave default value. Used to prevent endless recursion if there is a mistake inside the function.
SGE_ENGINE_API State compute(const float pathLength,
const float dt,
GameWorld* const world,
const Settings& settings,
const State& prevState,
const int recursionDepth = 0);
} // namespace PathLengthFollow
//-----------------------------------------------------------------------
//
//-----------------------------------------------------------------------
namespace PathFollow {
struct SGE_ENGINE_API Settings {
ObjectId pathId;
bool isReversed =
false; // true if the movement starts form the end of the path. (the includes the distance progress along the path).
float speed = 4.f;
float restingTime = 0.5f;
BounceType bounceType = bounceType_bounce; // what should happen when the end of the path is reached.
};
struct SGE_ENGINE_API State {
float timeLeftToRest = 0.f; // The time that the follower need to spend resting.
float distanceAlongPath =
0.f; // The distance travelled in the paths object space. the value is maintained the same if reversed is specified to the
// settings, the only thing that is changed is that the starting point is now the end point.
float evalDistanceAlongPath = 0.f; // The actual distance used for evaluation along the path.
bool speedIsPositive = true;
vec3f ptWs = vec3f(0.f);
bool isResting() const { return timeLeftToRest > 0.f; }
};
/// @param recursionDepth Also leave default value. Used to prevent endless recursion if there is a mistake inside the function.
SGE_ENGINE_API Optional<State> compute(const float dt, GameWorld* const world, const Settings& settings, const State& prevState);
}; // namespace PathFollow
//-----------------------------------------------------------------------
//
//-----------------------------------------------------------------------
DefineTypeIdInline(TraitPath3D, 20'03'06'0000);
struct SGE_ENGINE_API TraitPath3D : public Trait {
SGE_TraitDecl_Base(TraitPath3D);
// Checks if the curve is empty.
virtual bool isEmpty() const = 0;
// Evaluates the the curve at the specified distance form the begining.
virtual bool evaluateAtDistance(vec3f* outPosition, vec3f* outTanget, float const distance) = 0;
// Retrieves the length, or an approximation of it.
virtual float getTotalLength() = 0;
};
} // namespace sge
|
pichsy/xbaseutils | utils/src/main/java/com/pichs/common/utils/utils/SPHelper.java | <reponame>pichsy/xbaseutils
package com.pichs.common.utils.utils;
import android.content.Context;
import com.pichs.common.utils.BaseSPHelper;
/**
 * SharedPreferences helper singleton backed by a fixed preferences file.
 */
public class SPHelper extends BaseSPHelper {

    private final static String spName = "xp_base_sp_helper_info";

    // Fix: `volatile` is required for safe double-checked locking — without
    // it the Java Memory Model allows another thread to observe a non-null
    // reference to a not-yet-fully-constructed instance.
    private static volatile SPHelper INSTANCE;

    protected SPHelper(Context context) {
        super(context);
    }

    /**
     * Returns the process-wide singleton, creating it lazily on first use
     * (thread-safe via double-checked locking).
     *
     * @param context context forwarded to BaseSPHelper; only used on first call
     */
    public static SPHelper getInstance(Context context) {
        if (INSTANCE == null) {
            synchronized (SPHelper.class) {
                if (INSTANCE == null) {
                    INSTANCE = new SPHelper(context);
                }
            }
        }
        return INSTANCE;
    }

    /** @return the fixed preferences file name used by this helper. */
    @Override
    protected String getSpName() {
        return spName;
    }
}
|
Marcoakira/Desafios_Python_do_Curso_Guanabara | Mundo3/Desafio101.py | # desafio101 o programa recebe a data de nascimento e retorna se a pessoa tem : voto obrigatorio, opcional, ou nao é votante.
def voto(nasc):
from datetime import date
votante = date.today().year - nasc
if votante < 16:
return print(f' voce possui {votante} anos. Ainda não pode votar')
elif votante >16 and votante < 18 or votante >= 65:
print(f'Você tem {votante} anos, e seu voto é opcional')
elif votante >= 18 or votante <65:
print(f'você tem {votante} anos, e seu voto é \033[1:32MOBRIGATORIO\033[M')
votacao = int(input('Qual sua data de nascimento?'))
voto(votacao)
|
EzioL/leetcode | src/main/java/_01_06_CompressString.java | <filename>src/main/java/_01_06_CompressString.java
/**
* Here be dragons !
*
* @author: Ezio
* created on 2020/3/16
*/
/**
 * Cracking-the-Coding-Interview 01.06: basic string compression.
 *
 * @author: Ezio
 * created on 2020/3/16
 */
public class _01_06_CompressString {

    static class Solution {
        /**
         * Run-length encodes S ("aabccc" -> "a2b1c3") and returns the encoded
         * form only when it is strictly shorter than the input; otherwise the
         * original string is returned unchanged. A null input yields null and
         * a blank (whitespace-only) input yields "".
         */
        public String compressString(String S) {
            if (S == null) {
                return null;
            }
            if (S.trim().length() == 0) {
                return "";
            }
            StringBuilder encoded = new StringBuilder();
            int n = S.length();
            int start = 0;
            while (start < n) {
                char run = S.charAt(start);
                // Advance to the end of the current run of equal characters.
                int end = start;
                while (end < n && S.charAt(end) == run) {
                    end++;
                }
                encoded.append(run).append(end - start);
                start = end;
            }
            // Keep the original when compression does not actually shrink it.
            return encoded.length() >= n ? S : encoded.toString();
        }
    }

    public static void main(String[] args) {
        Solution solution = new Solution();
        System.out.println(solution.compressString("aabcccccaaa"));
        System.out.println(solution.compressString("abbccd"));
    }
}
|
etrex/kamiflex | example/dialog.rb | require_relative '../lib/kamiflex'
require 'clipboard'
# Builds the shared flex-message border attributes for a given colour.
#
# @param color [String] hex colour string, e.g. "#00FF00"
# @return [Hash] borderColor set to +color+ and a :light borderWidth
def border(color)
  { borderColor: color, borderWidth: :light }
end
# Horizontal box with a light green border; extra options override defaults.
def green_box(options = {})
  horizontal_box **border("#00FF00").merge(options) do
    yield if block_given?
  end
end

# Horizontal box with a light blue border; extra options override defaults.
def blue_box(options = {})
  horizontal_box **border("#0000FF").merge(options) do
    yield if block_given?
  end
end

# Vertical box with a light blue border; extra options override defaults.
def blue_vertical_box(options = {})
  vertical_box **border("#0000FF").merge(options) do
    yield if block_given?
  end
end

# RPG-style dialog box pinned near the bottom of the bubble: a dark outer
# frame wrapping a parchment-coloured inner box showing up to 3 text lines.
def dialog_box(message)
  horizontal_box position: :absolute,
                 offsetStart: "1%", offsetEnd: "1%",
                 offsetBottom: "#{100/62.0}%", height: "32%",
                 borderColor: "#000000", borderWidth: "semi-bold",
                 backgroundColor: "#876444",
                 cornerRadius: :xl,
                 paddingAll: "2px" do
    horizontal_box width: "100%", height: "100%",
                   borderColor: "#d1baa5", borderWidth: "semi-bold",
                   backgroundColor: "#f7edc8",
                   cornerRadius: :lg,
                   paddingAll: "15px" do
      text message, wrap: true, size: :xs, maxLines: 3
    end
  end
end

# Absolutely-positioned box holding the character image above the dialog.
def avatar_image_box(url)
  horizontal_box position: :absolute,
                 offsetStart: "60%", width: "50%",
                 offsetBottom: "35%", height: "40%" do
    image url, size: :full, aspectRatio: "2:1"
  end
end

# Name plate under the avatar: a translucent drop shadow box slightly
# offset below an opaque label box.
def avatar_name_box(name)
  # Drop shadow
  horizontal_box position: :absolute,
                 offsetStart: "60%", width: "50%",
                 offsetBottom: "28.5%", height: "8%",
                 backgroundColor: "#00000066",
                 cornerRadius: :xl do
  end
  # Label text
  horizontal_box position: :absolute,
                 offsetStart: "60%", width: "50%",
                 offsetBottom: "30%", height: "8%",
                 borderColor: "#f7edc8", borderWidth: "medium",
                 backgroundColor: "#876444",
                 cornerRadius: :xl do
    text name, size: :xs, color: "#FFFFFF", weight: :bold,
         align: :center, gravity: :center
  end
end
text_message = "在 卡米哥 的「 深入 Flex Message - 以對話遊戲為例 」主題中,將會分享:使用 Flex Message 時的實作細節,包含遊戲風格的對話框介面、特殊樣式的按鈕、隱藏式的感應區、跨裝置的排版等。也會說明在設計一個新的 Flex Message 版型時,所採用的開發流程。"
url = "https://i.imgur.com/WWbqH9A.png"
kamigo_url = "https://www.kamigo.tw/assets/kamigo-c3b10dff4cdb60fa447496b22edad6c32fffde96de20262efba690892e4461e8.png"
bg_url = "https://1.bp.blogspot.com/-IRrkMk_r1_c/XJB6NdPZATI/AAAAAAABSAg/zJxTXh-zSIAHpcq8vIrsnzlhTnUVc9h1ACLcBGAs/s800/bg_kamogawa.jpg"
body_url = "https://i.imgur.com/zUttWD1.png"
json = Kamiflex.json(self) do
alt_text "hello world!"
bubble size: :giga do
body paddingAll: "0px", backgroundColor: "#333333" do
image body_url, size: :full, aspectRatio: "100:62"
# 背景
image bg_url, size: :full, position: :absolute, aspectRatio: "800:450"
# 對話框
dialog_box text_message
# 角色圖
avatar_image_box kamigo_url
# 角色名稱
avatar_name_box "卡米狗"
end
end
end
puts json
Clipboard.copy json |
nodejayes/ts-tooling | src/types/datetime/daterange/daterange.js | <gh_stars>0
const {TimeSpan} = require('../timespan/timespan');
/**
* some Calculations for DateTime Ranges
*
* @memberof module:types/daterange
*/
class DateRange {
/**
* the Start DateTime
*
* @readonly
* @return {DateTime}
* @example
* const a = DateTime.FromISOString('2019-01-01T00:00:00');
* const b = DateTime.FromISOString('2019-01-10T00:00:00');
* const range = new DateRange(a, b);
* // log 2019-01-01T00:00:00 into console
* console.info(range.Begin)
*/
get Begin() {
return this.begin;
}
/**
* the End DateTime
*
* @readonly
* @return {DateTime}
* @example
* const a = DateTime.FromISOString('2019-01-01T00:00:00');
* const b = DateTime.FromISOString('2019-01-10T00:00:00');
* const range = new DateRange(a, b);
* // log 2019-01-10T00:00:00 into console
* console.info(range.End)
*/
get End() {
return this.end;
}
/**
* create a new DateRange from two DateTime Objects
*
* @param begin {DateTime} the start DateTime of the DateRange
* @param to {DateTime} the end DateTime of the DateRange
* @example
* const a = DateTime.FromISOString('2019-01-01T00:00:00');
* const b = DateTime.FromISOString('2019-01-10T00:00:00');
* // create a date range object from 2019-01-01 to 2019-01-10
* const range = new DateRange(a, b);
*/
constructor(begin, to) {
if (!begin) {
throw new Error('the begin date must be defined');
}
if (!to) {
throw new Error('the to date must be defined');
}
if (begin.IsAfter(to)) {
throw new Error('the start date must be before the end date');
}
this.begin = begin;
this.end = to;
}
/**
* overlaps the given DateRange the current DateRange
*
* @param range {DateRange} the DateRange to check
* @return {boolean}
* @example
* const a = DateTime.FromISOString('2019-01-01T00:00:00');
* const b = DateTime.FromISOString('2019-01-10T00:00:00');
* const range = new DateRange(a, b);
* const bothIn = new DateRange(DateTime.FromISOString('2019-01-02T00:00:00'), DateTime.FromISOString('2019-01-03T00:00:00'));
* const beginIn = new DateRange(DateTime.FromISOString('2019-01-02T00:00:00'), DateTime.FromISOString('2019-01-11T00:00:00'));
* const endIn = new DateRange(DateTime.FromISOString('2018-12-02T00:00:00'), DateTime.FromISOString('2019-01-02T00:00:00'));
* const bothOut = new DateRange(DateTime.FromISOString('2018-12-02T00:00:00'), DateTime.FromISOString('2019-01-12T00:00:00'));
* const bothBefore = new DateRange(DateTime.FromISOString('2018-12-02T00:00:00'), DateTime.FromISOString('2018-12-03T00:00:00'));
* const bothAfter = new DateRange(DateTime.FromISOString('2019-01-12T00:00:00'), DateTime.FromISOString('2019-01-13T00:00:00'));
* // is true
* range.Overlaps(bothIn);
* range.Overlaps(beginIn);
* range.Overlaps(endIn);
* range.Overlaps(bothOut);
* range.Overlaps(range);
* // is false
* range.Overlaps(bothBefore);
* range.Overlaps(bothAfter);
*/
Overlaps(range) {
return this.IsIn(range.Begin) || this.IsIn(range.End) ||
(this.IsBefore(range.Begin) && this.IsAfter(range.End));
}
/**
* is the given DateTime in the DateRange
*
* @param value {DateTime} the DateTime to check
* @return {boolean}
* @example
* const a = DateTime.FromISOString('2019-01-01T00:00:00');
* const b = DateTime.FromISOString('2019-01-10T00:00:00');
* const range = new DateRange(a, b);
* const inRange = DateTime.FromISOString('2019-01-02T00:00:00');
* const notInRange = DateTime.FromISOString('2019-01-12T00:00:00');
* // is true
* range.IsIn(inRange);
* range.IsIn(a);
* range.IsIn(b);
* // is false
* range.IsIn(notInRange);
*/
IsIn(value) {
return !this.IsBefore(value) && !this.IsAfter(value);
}
/**
* is the given DateTime before the DateRange
*
* @param value {DateTime} the DateTime to check
* @return {boolean}
* @example
* const a = DateTime.FromISOString('2019-01-01T00:00:00');
* const b = DateTime.FromISOString('2019-01-10T00:00:00');
* const range = new DateRange(a, b);
* const before = DateTime.FromISOString('2018-12-02T00:00:00');
* const after = DateTime.FromISOString('2019-01-12T00:00:00');
* // is true
* range.IsBefore(before);
* // is false
* range.IsBefore(a);
* range.IsBefore(b);
* range.IsBefore(after);
*/
IsBefore(value) {
return value.IsBefore(this.begin);
}
/**
* is the given DateTime after the DateRange
*
* @param value {DateTime} the DateTime to check
* @return {boolean}
* @example
* const a = DateTime.FromISOString('2019-01-01T00:00:00');
* const b = DateTime.FromISOString('2019-01-10T00:00:00');
* const range = new DateRange(a, b);
* const before = DateTime.FromISOString('2018-12-02T00:00:00');
* const after = DateTime.FromISOString('2019-01-12T00:00:00');
* // is true
* range.IsAfter(after);
* // is false
* range.IsAfter(b);
* range.IsAfter(a);
* range.IsAfter(before);
*/
IsAfter(value) {
return value.IsAfter(this.end);
}
/**
* print the DateRange into a String
*
* @param fmt {string?} the format of the DateTime strings look {@link DateTime} for more informations
* @returns {string}
* @example
* const dateRange = new DateRange(new DateTime('UTC', 2020, 1, 1, 1), new DateTime('UTC', 2020, 1, 2, 1));
* // returns [2020-01-01 01:00:00 => 2020-01-02 01:00:00]
* dateRange.ToString();
* // returns [2020-01-01 01:00:00 000 => 2020-01-02 01:00:00 000]
* dateRange.ToString('yyyy-MM-dd hh:mm:ss SSS');
*/
ToString(fmt) {
return `[${this.Begin.ToString(fmt)} => ${this.End.ToString(fmt)}]`;
}
/**
* give the Time Between start and end in the DateRange as a TimeSpan
* only positive TimeSpans are returned!
*
* @returns {TimeSpan}
* @example
* // logs into console 86400000
* const v = new DateRange(new DateTime('UTC', 2020, 1, 1, 1), new DateTime('UTC', 2020, 1, 2, 1));
* console.info(v.TimeBetween().TotalMilliseconds);
*/
TimeBetween() {
return TimeSpan.FromMilliseconds(this.end.ToUnixTimestamp() - this.begin.ToUnixTimestamp());
}
}
module.exports = {DateRange};
|
OutoftheBoxFTC/Summer-Motion-Profiling | TeamCode/src/main/java/opmode/FunctionalityTest.java | package opmode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import java.util.HashMap;
import hardware.ReadData;
import hardware.Hardware;
import math.Vector4;
import state.DriveState;
import state.LogicState;
/**
* This class is a raw debug class of all sensors/functionality to test against expected behaviour.
* Y: Front left wheel fwd (a)
* X: Front right wheel fwd (b)
* A: Back left wheel fwd (c)
* B: Back right wheel fwd (d)
*
* When moving robot fwd, left and right increases
* When rotating cc, aux and right increases, left decreases
*
* Gyro starts at 0, increases to 2pi, and returns to 0 after 1 full cc rotation
*/
@TeleOp(name = "Functionality Test")
public class FunctionalityTest extends BasicOpmode {
    public FunctionalityTest() {
        // super(1, false): BasicOpmode constructor arguments — their meaning is
        // defined in BasicOpmode (not visible here); confirm before changing.
        super(1, false);
    }
    @Override
    protected void setup() {
        // Register every hardware device this diagnostic opmode exercises.
        robot.registerDevice(Hardware.HardwareDevice.DRIVE_MOTORS);
        robot.registerDevice(Hardware.HardwareDevice.HUB_1_BULK);
        robot.registerDevice(Hardware.HardwareDevice.GYRO);
        HashMap<String, LogicState> logicStates = new HashMap<>();
        // "init": waits for start, then activates the sensor readout and the
        // manual drive state, and trims the telemetry view to a fixed header set.
        logicStates.put("init", new LogicState(stateMachine) {
            @Override
            public void update(ReadData data) {
                if(isStarted()){
                    deactivateThis();
                    stateMachine.activateLogic("Sensor Readout");
                    stateMachine.setActiveDriveState("Controller Drive");
                    telemetry.clearAllHeadersExcept("Main Loop FPS", "Hardware FPS", "Activated Logic States", "vel");
                }
            }
        });
        // "Sensor Readout": streams raw odometry and gyro values to telemetry.
        logicStates.put("Sensor Readout", new LogicState(stateMachine) {
            boolean rightFlipped = false;
            double previousRight = 0;
            @Override
            public void update(ReadData data) {
                // NOTE(review): previousRight is never reassigned, so this
                // condition only trips when getRight() < -1000. It looks like a
                // rollover detector that should update previousRight at the end
                // of each update — confirm intended behaviour.
                if(previousRight - data.getRight() > 1000){
                    rightFlipped = true;
                }
                telemetry.setHeader("left", String.valueOf(data.getLeft()));
                telemetry.setHeader("Right", String.valueOf(data.getRight()));
                telemetry.setHeader("Aux", String.valueOf(data.getAux()));
                telemetry.setHeader("Gyro", String.valueOf(data.getGyro()));
                telemetry.setHeader("Flipped", rightFlipped);
                if(isStopRequested()){
                    stateMachine.deactivateLogic("Sensor Readout");
                }
            }
        });
        HashMap<String, DriveState> driveStates = new HashMap<>();
        // "Controller Drive": maps Y/X/A/B buttons to the four wheels (see the
        // class comment) at half power so each motor can be verified in isolation.
        driveStates.put("Controller Drive", new DriveState(stateMachine) {
            @Override
            public Vector4 getWheelVelocities() {
                //a, b, c, d
                return new Vector4(gamepad1.y.isActive()?0.5:0, gamepad1.x.isActive()?0.5:0, gamepad1.a.isActive()?0.5:0, gamepad1.b.isActive()?0.5:0);
            }
        });
        stateMachine.appendLogicStates(logicStates);
        stateMachine.appendDriveStates(driveStates);
        stateMachine.activateLogic("init");
    }
}
InsightEdge/xap | xap-core/xap-datagrid/src/main/java/com/j_spaces/jdbc/builder/range/FunctionCallDescription.java | /*
* Copyright (c) 2008-2016, GigaSpaces Technologies, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.j_spaces.jdbc.builder.range;
import com.gigaspaces.internal.io.IOUtils;
import com.gigaspaces.query.sql.functions.SqlFunctionExecutionContext;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
/**
* Created by <NAME> on 2/9/16.
*
* @since 11.0
*/
@com.gigaspaces.api.InternalApi
public class FunctionCallDescription implements Externalizable, SqlFunctionExecutionContext {
    private static final long serialVersionUID = 1L;
    // SQL function name (e.g. "ABS").
    private String name;
    // Function arguments; the slot at columnIndex carries the column's runtime value.
    private List<Object> args;
    // Index within args that setColumnValue writes into.
    private int columnIndex;
    // No-arg constructor required by Externalizable. Starts with an immutable
    // empty argument list (see the setColumnValue note below).
    public FunctionCallDescription() {
        args = Collections.emptyList();
    }
    public FunctionCallDescription(String name, int columnIndex, List<Object> args) {
        this.name = name;
        this.columnIndex = columnIndex;
        this.args = args;
    }
    public String getName() {
        return name;
    }
    public void setArgs(List<Object> args) {
        this.args = args;
    }
    public int getColumnIndex() {
        return columnIndex;
    }
    @Override
    public int getNumberOfArguments() {
        return args.size();
    }
    @Override
    public Object getArgument(int index) {
        return args.get(index);
    }
    // Injects the runtime column value into the argument list; returns this for chaining.
    // NOTE(review): mutates args in place — this throws UnsupportedOperationException
    // when args is still the Collections.emptyList() installed by the no-arg
    // constructor. Confirm setArgs()/readExternal() always runs first on that path.
    public FunctionCallDescription setColumnValue(Object value) {
        this.args.set(columnIndex, value);
        return this;
    }
    // Serialization order: name, columnIndex, args — must stay in sync with readExternal.
    @Override
    public void writeExternal(ObjectOutput out) throws IOException {
        IOUtils.writeString(out, name);
        out.writeInt(columnIndex);
        out.writeObject(args);
    }
    @Override
    public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
        name = IOUtils.readString(in);
        columnIndex = in.readInt();
        //noinspection unchecked
        args = (List<Object>) in.readObject();
    }
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        FunctionCallDescription that = (FunctionCallDescription) o;
        return columnIndex == that.columnIndex &&
                Objects.equals(name, that.name) &&
                Objects.equals(args, that.args);
    }
    @Override
    public int hashCode() {
        return Objects.hash(name, args, columnIndex);
    }
}
|
knightliao/vpaas | vpaas-lc/vpaas-lc-server/vpaas-lc-server-service/vpaas-lc-server-connect/src/main/java/com/github/knightliao/vpaas/lc/server/connect/netty/statistics/service/impl/LcCounterServiceImpl.java | package com.github.knightliao.vpaas.lc.server.connect.netty.statistics.service.impl;
import com.github.knightliao.vpaas.lc.server.connect.netty.server.ILcServer;
import com.github.knightliao.vpaas.lc.server.connect.netty.server.LcServerContext;
import com.github.knightliao.vpaas.lc.server.connect.netty.service.ILcService;
import com.github.knightliao.vpaas.lc.server.connect.netty.statistics.dto.LcCountInfoDto;
import com.github.knightliao.vpaas.lc.server.connect.netty.statistics.service.ILcCounterService;
import com.github.knightliao.vpaas.lc.server.connect.support.dto.param.LcServiceParam;
/**
 * Counter service tracking sent/received/heartbeat statistics for the
 * currently registered {@link ILcServer}. The incr* methods are no-ops unless
 * the server's service parameters enable counting (isOpenCount()).
 *
 * @author knightliao
 * @email <EMAIL>
 * @date 2021/8/4 20:12
 */
public class LcCounterServiceImpl implements ILcCounterService {
    /** Increments the sent-message counter and records the last-send timestamp. */
    @Override
    public void incrSend() {
        if (isCountStatistic()) {
            LcCountInfoDto lcCountInfoDto = getLcCountInfoDto();
            lcCountInfoDto.getSentNum().incrementAndGet();
            lcCountInfoDto.setLastSentTimeStamp(System.currentTimeMillis());
        }
    }
    /** Increments the received-message counter and records the last-receive timestamp. */
    @Override
    public void incrReceive() {
        if (isCountStatistic()) {
            LcCountInfoDto lcCountInfoDto = getLcCountInfoDto();
            lcCountInfoDto.getReceiveNum().incrementAndGet();
            lcCountInfoDto.setLastReceiveTimeStamp(System.currentTimeMillis());
        }
    }
    /**
     * Records the high-water mark of concurrent channels.
     * NOTE(review): unlike the incr* methods this is not guarded by
     * isCountStatistic() and will NPE when no server is registered — confirm
     * callers only invoke it on a live server.
     */
    @Override
    public void setMaxChannelNum(long channelNum) {
        getLcCountInfoDto().setMaxChannelNum(channelNum);
    }
    /** Increments the heartbeat counter. */
    @Override
    public void incrHeartbeatNum() {
        if (isCountStatistic()) {
            getLcCountInfoDto().getHeartbeatNum().incrementAndGet();
        }
    }
    /** @return true when a server is registered and counting is enabled in its service params. */
    @Override
    public boolean isCountStatistic() {
        ILcServer lcServer = LcServerContext.getContext().getServer();
        if (lcServer == null) {
            return false;
        }
        // Simplified from an if (...) { return true; } return false; chain.
        return ((ILcService) lcServer).getLcServiceParam().isOpenCount();
    }
    /** Fetches the mutable counter DTO from the running server's service params. */
    private LcCountInfoDto getLcCountInfoDto() {
        ILcServer lcServer = LcServerContext.getContext().getServer();
        return ((ILcService) lcServer).getLcServiceParam().getCountInfoDto();
    }
}
|
emartech/rdb-connector-collection | redshift/src/it/scala/com/emarsys/rdb/connector/redshift/RedshiftInsertSpec.scala | <reponame>emartech/rdb-connector-collection
package com.emarsys.rdb.connector.redshift
import akka.actor.ActorSystem
import akka.testkit.TestKit
import com.emarsys.rdb.connector.redshift.utils.{SelectDbInitHelper, SelectDbWithSchemaInitHelper}
import com.emarsys.rdb.connector.test.InsertItSpec
import scala.concurrent.duration._
/** Runs the shared InsertItSpec suite against Redshift using the
  * schema-less fixture helper (SelectDbInitHelper). */
class RedshiftInsertSpec extends TestKit(ActorSystem("RedshiftInsertSpec")) with InsertItSpec with SelectDbInitHelper {
  // Table names consumed by the shared suite; `b` is a per-run scratch table.
  val aTableName: String = tableName
  val bTableName: String = s"temp_$uuid"
  // Redshift needs a longer timeout than the shared spec's default.
  override val awaitTimeout = 15.seconds
  override def afterAll(): Unit = {
    // Stop the actor system before running the shared teardown.
    shutdown()
    super.afterAll()
  }
}
/** Same InsertItSpec suite as above, but against a schema-qualified
  * Redshift fixture (SelectDbWithSchemaInitHelper). */
class RedshiftInsertWithSchemaSpec
    extends TestKit(ActorSystem("RedshiftInsertWithSchemaSpec"))
    with InsertItSpec
    with SelectDbWithSchemaInitHelper {
  // Table names consumed by the shared suite; `b` is a per-run scratch table.
  val aTableName: String = tableName
  val bTableName: String = s"temp_$uuid"
  // Redshift needs a longer timeout than the shared spec's default.
  override val awaitTimeout = 15.seconds
  override def afterAll(): Unit = {
    // Stop the actor system before running the shared teardown.
    shutdown()
    super.afterAll()
  }
}
|
soustab10/cv-frontend | src/utils.js | import simulationArea from './simulationArea';
import {
scheduleUpdate, play, updateCanvasSet, errorDetectedSet, errorDetectedGet,
} from './engine';
window.globalScope = undefined;
window.lightMode = false; // To be deprecated
window.projectId = undefined;
window.id = undefined;
window.loading = false; // Flag - all assets are loaded
let prevErrorMessage; // Global variable for error messages
let prevShowMessage; // Global variable for error messages
/**
 * Generates a 20-character random alphanumeric id.
 * Note: uses Math.random, so not suitable for security tokens.
 * @returns {string}
 */
export function generateId() {
    const alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
    const randomChar = () => alphabet.charAt(Math.floor(Math.random() * alphabet.length));
    return Array.from({ length: 20 }, randomChar).join('');
}
/**
 * Strips HTML-like tags from the input and trims surrounding whitespace.
 * Regex-based stripping is NOT a sanitizer — use only for plain-text display.
 * @param {*} string - anything with a toString(); defaults to ''
 * @returns {string}
 */
export function stripTags(string = '') {
    // Bug fix: removed leftover debug console.log of every input.
    return string.toString().replace(/(<([^>]+)>)/ig, '').trim();
}
/**
 * Advances the simulation clock by one tick and schedules a re-render.
 * No-op when the clock is disabled or an error has been detected.
 * @category utils
 */
export function clockTick() {
    if (!simulationArea.clockEnabled) return;
    if (errorDetectedGet()) return;
    updateCanvasSet(true);
    // globalScope is assigned elsewhere at runtime (see window.globalScope above).
    globalScope.clockTick();
    play();
    scheduleUpdate(0, 20);
}
/**
 * Helper function to show error
 * Appends a bootstrap danger alert to #MessageDiv and fades it out after
 * 1.5s. Duplicate consecutive messages are suppressed until the fade-out.
 * Side effect: sets the global error-detected flag.
 * @param {string} error -The error to be shown
 * @category utils
 */
export function showError(error) {
    errorDetectedSet(true);
    // if error has been shown already, return
    if (error === prevErrorMessage) return;
    prevErrorMessage = error;
    // NOTE(review): random 0-9999 element id could collide with a live alert — confirm acceptable.
    const id = Math.floor(Math.random() * 10000);
    $('#MessageDiv').append(`<div class='alert alert-danger' role='alert' id='${id}'> ${error}</div>`);
    setTimeout(() => {
        prevErrorMessage = undefined;
        $(`#${id}`).fadeOut();
    }, 1500);
}
/**
 * Helper function to show a success message.
 * Appends a bootstrap success alert to #MessageDiv and fades it out after
 * 2.5s. Duplicate consecutive messages are suppressed until the fade-out.
 * @param {string} mes - the message to be shown
 * @category utils
 */
export function showMessage(mes) {
    if (mes === prevShowMessage) return;
    prevShowMessage = mes;
    const id = Math.floor(Math.random() * 10000);
    $('#MessageDiv').append(`<div class='alert alert-success' role='alert' id='${id}'> ${mes}</div>`);
    setTimeout(() => {
        prevShowMessage = undefined;
        $(`#${id}`).fadeOut();
    }, 2500);
}
/**
 * Euclidean distance between points (x1, y1) and (x2, y2).
 * Bug fix: the square root previously covered only the x-term
 * (sqrt(dx^2) + dy^2), yielding |dx| + dy^2 instead of the distance.
 * @returns {number}
 * @category utils
 */
export function distance(x1, y1, x2, y2) {
    return Math.sqrt((x2 - x1) ** 2 + (y2 - y1) ** 2);
}
/**
 * Helper function to return unique list.
 * Preserves first-occurrence order and uses SameValueZero equality via Set.
 * Bug fix: the previous object-key implementation coerced every item to a
 * string, conflating e.g. 1 and '1' (and any two objects).
 * @param {Array} a - any array
 * @returns {Array} new array without duplicates
 * @category utils
 */
export function uniq(a) {
    return [...new Set(a)];
}
|
yzj97/vue-static | src/finance/api/accountCycle.js | export default {
accountCycleListPage: {
url: 'back-finance-web/accountCycleConfig/listPage.do',
method: 'post'
},
generateList: {
url: 'back-finance-web/accountCycleConfig/generate.do',
method: 'post'
},
saveAccountCycle: {
url: 'back-finance-web/accountCycleConfig/add.do',
method: 'post'
},
updateAccountCycle: {
url: 'back-finance-web/accountCycleConfig/update.do',
method: 'post'
},
accountCycleDetailList: {
url: 'back-finance-web/accountCycleConfig/list.do',
method: 'post'
}
}
|
JKot-Coder/slang | tools/gfx/vulkan/vk-util.cpp | // vk-util.cpp
#include "vk-util.h"
#include "core/slang-math.h"
#include <stdlib.h>
#include <stdio.h>
namespace gfx {
// Translates a gfx::Format into the corresponding VkFormat.
// Notes (grounded in the table below):
//  - *_TYPELESS formats have no Vulkan equivalent and map to a concrete
//    "best fit" format of the same bit layout (float or unorm).
//  - Packed formats swap the component-name order (e.g. B5G6R5 ->
//    VK_FORMAT_R5G6B5_UNORM_PACK16) because Vulkan names packed formats
//    starting from the most-significant bits.
//  - Anything unhandled yields VK_FORMAT_UNDEFINED.
/* static */VkFormat VulkanUtil::getVkFormat(Format format)
{
    switch (format)
    {
        case Format::R32G32B32A32_TYPELESS: return VK_FORMAT_R32G32B32A32_SFLOAT;
        case Format::R32G32B32_TYPELESS: return VK_FORMAT_R32G32B32_SFLOAT;
        case Format::R32G32_TYPELESS: return VK_FORMAT_R32G32_SFLOAT;
        case Format::R32_TYPELESS: return VK_FORMAT_R32_SFLOAT;
        case Format::R16G16B16A16_TYPELESS: return VK_FORMAT_R16G16B16A16_SFLOAT;
        case Format::R16G16_TYPELESS: return VK_FORMAT_R16G16_SFLOAT;
        case Format::R16_TYPELESS: return VK_FORMAT_R16_SFLOAT;
        case Format::R8G8B8A8_TYPELESS: return VK_FORMAT_R8G8B8A8_UNORM;
        case Format::R8G8_TYPELESS: return VK_FORMAT_R8G8_UNORM;
        case Format::R8_TYPELESS: return VK_FORMAT_R8_UNORM;
        case Format::B8G8R8A8_TYPELESS: return VK_FORMAT_B8G8R8A8_UNORM;
        case Format::R32G32B32A32_FLOAT: return VK_FORMAT_R32G32B32A32_SFLOAT;
        case Format::R32G32B32_FLOAT: return VK_FORMAT_R32G32B32_SFLOAT;
        case Format::R32G32_FLOAT: return VK_FORMAT_R32G32_SFLOAT;
        case Format::R32_FLOAT: return VK_FORMAT_R32_SFLOAT;
        case Format::R16G16B16A16_FLOAT: return VK_FORMAT_R16G16B16A16_SFLOAT;
        case Format::R16G16_FLOAT: return VK_FORMAT_R16G16_SFLOAT;
        case Format::R16_FLOAT: return VK_FORMAT_R16_SFLOAT;
        case Format::R32G32B32A32_UINT: return VK_FORMAT_R32G32B32A32_UINT;
        case Format::R32G32B32_UINT: return VK_FORMAT_R32G32B32_UINT;
        case Format::R32G32_UINT: return VK_FORMAT_R32G32_UINT;
        case Format::R32_UINT: return VK_FORMAT_R32_UINT;
        case Format::R16G16B16A16_UINT: return VK_FORMAT_R16G16B16A16_UINT;
        case Format::R16G16_UINT: return VK_FORMAT_R16G16_UINT;
        case Format::R16_UINT: return VK_FORMAT_R16_UINT;
        case Format::R8G8B8A8_UINT: return VK_FORMAT_R8G8B8A8_UINT;
        case Format::R8G8_UINT: return VK_FORMAT_R8G8_UINT;
        case Format::R8_UINT: return VK_FORMAT_R8_UINT;
        case Format::R32G32B32A32_SINT: return VK_FORMAT_R32G32B32A32_SINT;
        case Format::R32G32B32_SINT: return VK_FORMAT_R32G32B32_SINT;
        case Format::R32G32_SINT: return VK_FORMAT_R32G32_SINT;
        case Format::R32_SINT: return VK_FORMAT_R32_SINT;
        case Format::R16G16B16A16_SINT: return VK_FORMAT_R16G16B16A16_SINT;
        case Format::R16G16_SINT: return VK_FORMAT_R16G16_SINT;
        case Format::R16_SINT: return VK_FORMAT_R16_SINT;
        case Format::R8G8B8A8_SINT: return VK_FORMAT_R8G8B8A8_SINT;
        case Format::R8G8_SINT: return VK_FORMAT_R8G8_SINT;
        case Format::R8_SINT: return VK_FORMAT_R8_SINT;
        case Format::R16G16B16A16_UNORM: return VK_FORMAT_R16G16B16A16_UNORM;
        case Format::R16G16_UNORM: return VK_FORMAT_R16G16_UNORM;
        case Format::R16_UNORM: return VK_FORMAT_R16_UNORM;
        case Format::R8G8B8A8_UNORM: return VK_FORMAT_R8G8B8A8_UNORM;
        case Format::R8G8B8A8_UNORM_SRGB: return VK_FORMAT_R8G8B8A8_SRGB;
        case Format::R8G8_UNORM: return VK_FORMAT_R8G8_UNORM;
        case Format::R8_UNORM: return VK_FORMAT_R8_UNORM;
        case Format::B8G8R8A8_UNORM: return VK_FORMAT_B8G8R8A8_UNORM;
        case Format::B8G8R8A8_UNORM_SRGB: return VK_FORMAT_B8G8R8A8_SRGB;
        case Format::R16G16B16A16_SNORM: return VK_FORMAT_R16G16B16A16_SNORM;
        case Format::R16G16_SNORM: return VK_FORMAT_R16G16_SNORM;
        case Format::R16_SNORM: return VK_FORMAT_R16_SNORM;
        case Format::R8G8B8A8_SNORM: return VK_FORMAT_R8G8B8A8_SNORM;
        case Format::R8G8_SNORM: return VK_FORMAT_R8G8_SNORM;
        case Format::R8_SNORM: return VK_FORMAT_R8_SNORM;
        case Format::D32_FLOAT: return VK_FORMAT_D32_SFLOAT;
        case Format::D16_UNORM: return VK_FORMAT_D16_UNORM;
        // A4R4G4B4 requires the VK_EXT_4444_formats extension.
        case Format::B4G4R4A4_UNORM: return VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT;
        case Format::B5G6R5_UNORM: return VK_FORMAT_R5G6B5_UNORM_PACK16;
        case Format::B5G5R5A1_UNORM: return VK_FORMAT_A1R5G5B5_UNORM_PACK16;
        case Format::R9G9B9E5_SHAREDEXP: return VK_FORMAT_E5B9G9R9_UFLOAT_PACK32;
        case Format::R10G10B10A2_TYPELESS: return VK_FORMAT_A2B10G10R10_UINT_PACK32;
        case Format::R10G10B10A2_UINT: return VK_FORMAT_A2B10G10R10_UINT_PACK32;
        case Format::R10G10B10A2_UNORM: return VK_FORMAT_A2B10G10R10_UNORM_PACK32;
        case Format::R11G11B10_FLOAT: return VK_FORMAT_B10G11R11_UFLOAT_PACK32;
        case Format::BC1_UNORM: return VK_FORMAT_BC1_RGBA_UNORM_BLOCK;
        case Format::BC1_UNORM_SRGB: return VK_FORMAT_BC1_RGBA_SRGB_BLOCK;
        case Format::BC2_UNORM: return VK_FORMAT_BC2_UNORM_BLOCK;
        case Format::BC2_UNORM_SRGB: return VK_FORMAT_BC2_SRGB_BLOCK;
        case Format::BC3_UNORM: return VK_FORMAT_BC3_UNORM_BLOCK;
        case Format::BC3_UNORM_SRGB: return VK_FORMAT_BC3_SRGB_BLOCK;
        case Format::BC4_UNORM: return VK_FORMAT_BC4_UNORM_BLOCK;
        case Format::BC4_SNORM: return VK_FORMAT_BC4_SNORM_BLOCK;
        case Format::BC5_UNORM: return VK_FORMAT_BC5_UNORM_BLOCK;
        case Format::BC5_SNORM: return VK_FORMAT_BC5_SNORM_BLOCK;
        case Format::BC6H_UF16: return VK_FORMAT_BC6H_UFLOAT_BLOCK;
        case Format::BC6H_SF16: return VK_FORMAT_BC6H_SFLOAT_BLOCK;
        case Format::BC7_UNORM: return VK_FORMAT_BC7_UNORM_BLOCK;
        case Format::BC7_UNORM_SRGB: return VK_FORMAT_BC7_SRGB_BLOCK;
        default: return VK_FORMAT_UNDEFINED;
    }
}
// Collapses a VkResult into the binary SLANG_OK / SLANG_FAIL result space.
/* static */SlangResult VulkanUtil::toSlangResult(VkResult res)
{
    if (res == VK_SUCCESS)
    {
        return SLANG_OK;
    }
    return SLANG_FAIL;
}
// Maps a Slang stage enum onto the equivalent Vulkan shader-stage flag.
// Asserts (debug builds) and returns VkShaderStageFlags(-1) for unsupported stages.
VkShaderStageFlags VulkanUtil::getShaderStage(SlangStage stage)
{
    switch (stage)
    {
    case SLANG_STAGE_ANY_HIT:
        return VK_SHADER_STAGE_ANY_HIT_BIT_KHR;
    case SLANG_STAGE_CALLABLE:
        return VK_SHADER_STAGE_CALLABLE_BIT_KHR;
    case SLANG_STAGE_CLOSEST_HIT:
        return VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR;
    case SLANG_STAGE_COMPUTE:
        return VK_SHADER_STAGE_COMPUTE_BIT;
    case SLANG_STAGE_DOMAIN:
        // Bug fix: a domain shader runs after the tessellator, i.e. Vulkan's
        // tessellation *evaluation* stage (hull <-> control, domain <->
        // evaluation). The two mappings were previously swapped.
        return VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
    case SLANG_STAGE_FRAGMENT:
        return VK_SHADER_STAGE_FRAGMENT_BIT;
    case SLANG_STAGE_GEOMETRY:
        return VK_SHADER_STAGE_GEOMETRY_BIT;
    case SLANG_STAGE_HULL:
        // Bug fix (see above): hull shader == tessellation control stage.
        return VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT;
    case SLANG_STAGE_INTERSECTION:
        return VK_SHADER_STAGE_INTERSECTION_BIT_KHR;
    case SLANG_STAGE_RAY_GENERATION:
        return VK_SHADER_STAGE_RAYGEN_BIT_KHR;
    case SLANG_STAGE_VERTEX:
        return VK_SHADER_STAGE_VERTEX_BIT;
    default:
        assert(!"unsupported stage.");
        return VkShaderStageFlags(-1);
    }
}
// Returns the VkImageLayout a resource should be in for the given state.
// Delegates to mapResourceStateToLayout: the two functions previously
// duplicated the same state->layout switch entry-for-entry (this one also
// ended in an unreachable `return VkImageLayout();`). A single table
// prevents the two copies from drifting apart.
VkImageLayout VulkanUtil::getImageLayoutFromState(ResourceState state)
{
    return mapResourceStateToLayout(state);
}
// Converts a VkResult to a Slang result, asserting in debug builds on any
// non-success code so the failure is caught at the call site.
/* static */Slang::Result VulkanUtil::handleFail(VkResult res)
{
    if (res != VK_SUCCESS)
    {
        assert(!"Vulkan returned a failure");
    }
    return toSlangResult(res);
}
// Diagnostic helper for code paths already known to have failed.
// The first assert fires if the caller passed VK_SUCCESS by mistake; the
// second always fires (debug builds) to halt at the failure site.
// No-op in release builds.
/* static */void VulkanUtil::checkFail(VkResult res)
{
    assert(res != VK_SUCCESS);
    assert(!"Vulkan check failed");
}
// Maps a gfx PrimitiveTopology to the Vulkan equivalent.
// Only TriangleList is handled so far; anything else asserts (debug builds)
// and returns VK_PRIMITIVE_TOPOLOGY_MAX_ENUM as an "invalid" marker.
/* static */VkPrimitiveTopology VulkanUtil::getVkPrimitiveTopology(PrimitiveTopology topology)
{
    switch (topology)
    {
    case PrimitiveTopology::TriangleList: return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
    default: break;
    }
    assert(!"Unknown topology");
    return VK_PRIMITIVE_TOPOLOGY_MAX_ENUM;
}
// Maps a gfx ResourceState to the optimal VkImageLayout for that usage.
// Unknown states fall through to VK_IMAGE_LAYOUT_UNDEFINED.
VkImageLayout VulkanUtil::mapResourceStateToLayout(ResourceState state)
{
    switch (state)
    {
    case ResourceState::Undefined:
        return VK_IMAGE_LAYOUT_UNDEFINED;
    case ResourceState::ShaderResource:
        return VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
    case ResourceState::UnorderedAccess:
        return VK_IMAGE_LAYOUT_GENERAL;
    case ResourceState::RenderTarget:
        return VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
    case ResourceState::DepthRead:
        return VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
    case ResourceState::DepthWrite:
        return VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
    case ResourceState::Present:
        return VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
    case ResourceState::CopySource:
        return VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
    case ResourceState::CopyDestination:
        return VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    // Resolves reuse the transfer layouts.
    case ResourceState::ResolveSource:
        return VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
    case ResourceState::ResolveDestination:
        return VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    default:
        return VK_IMAGE_LAYOUT_UNDEFINED;
    }
}
// Translates gfx BuildInputs into this->buildInfo
// (VkAccelerationStructureBuildGeometryInfoKHR) and this->primitiveCounts.
// Invalid enum values are reported through debugCallback and yield
// SLANG_E_INVALID_ARG; otherwise returns SLANG_OK.
Result AccelerationStructureBuildGeometryInfoBuilder::build(
    const IAccelerationStructure::BuildInputs& buildInputs,
    IDebugCallback* debugCallback)
{
    buildInfo.dstAccelerationStructure = VK_NULL_HANDLE;
    switch (buildInputs.kind)
    {
    case IAccelerationStructure::Kind::BottomLevel:
        buildInfo.type = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR;
        break;
    case IAccelerationStructure::Kind::TopLevel:
        buildInfo.type = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR;
        break;
    default:
        debugCallback->handleMessage(
            DebugMessageType::Error,
            DebugMessageSource::Layer,
            "invalid value of IAccelerationStructure::Kind encountered in buildInputs.kind");
        return SLANG_E_INVALID_ARG;
    }
    // Translate the gfx build flags bit by bit onto the Vulkan flag word.
    if (buildInputs.flags & IAccelerationStructure::BuildFlags::Enum::PerformUpdate)
    {
        buildInfo.mode = VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR;
    }
    else
    {
        buildInfo.mode = VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR;
    }
    if (buildInputs.flags & IAccelerationStructure::BuildFlags::Enum::AllowCompaction)
    {
        buildInfo.flags |= VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR;
    }
    if (buildInputs.flags & IAccelerationStructure::BuildFlags::Enum::AllowUpdate)
    {
        buildInfo.flags |= VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR;
    }
    if (buildInputs.flags & IAccelerationStructure::BuildFlags::Enum::MinimizeMemory)
    {
        buildInfo.flags |= VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR;
    }
    if (buildInputs.flags & IAccelerationStructure::BuildFlags::Enum::PreferFastBuild)
    {
        buildInfo.flags |= VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR;
    }
    if (buildInputs.flags & IAccelerationStructure::BuildFlags::Enum::PreferFastTrace)
    {
        buildInfo.flags |= VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR;
    }
    if (buildInputs.kind == IAccelerationStructure::Kind::BottomLevel)
    {
        // Bottom level: one VkAccelerationStructureGeometryKHR per geometry desc.
        m_geometryInfos.setCount(buildInputs.descCount);
        primitiveCounts.setCount(buildInputs.descCount);
        memset(
            m_geometryInfos.getBuffer(),
            0,
            sizeof(VkAccelerationStructureGeometryKHR) * buildInputs.descCount);
        for (int i = 0; i < buildInputs.descCount; i++)
        {
            auto& geomDesc = buildInputs.geometryDescs[i];
            m_geometryInfos[i].sType = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR;
            if (geomDesc.flags & IAccelerationStructure::GeometryFlags::NoDuplicateAnyHitInvocation)
            {
                m_geometryInfos[i].flags |= VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR;
            }
            else if (geomDesc.flags & IAccelerationStructure::GeometryFlags::Opaque)
            {
                m_geometryInfos[i].flags |= VK_GEOMETRY_OPAQUE_BIT_KHR;
            }
            auto& vkGeomData = m_geometryInfos[i].geometry;
            switch (geomDesc.type)
            {
            case IAccelerationStructure::GeometryType::Triangles:
                m_geometryInfos[i].geometryType = VK_GEOMETRY_TYPE_TRIANGLES_KHR;
                vkGeomData.triangles.sType =
                    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR;
                vkGeomData.triangles.vertexFormat =
                    VulkanUtil::getVkFormat(geomDesc.content.triangles.vertexFormat);
                vkGeomData.triangles.vertexData.deviceAddress =
                    geomDesc.content.triangles.vertexData;
                vkGeomData.triangles.vertexStride = geomDesc.content.triangles.vertexStride;
                vkGeomData.triangles.maxVertex = geomDesc.content.triangles.vertexCount - 1;
                switch (geomDesc.content.triangles.indexFormat)
                {
                case Format::R32_UINT:
                    vkGeomData.triangles.indexType = VK_INDEX_TYPE_UINT32;
                    break;
                case Format::R16_UINT:
                    vkGeomData.triangles.indexType = VK_INDEX_TYPE_UINT16;
                    break;
                case Format::Unknown:
                    // Format::Unknown marks a non-indexed mesh.
                    vkGeomData.triangles.indexType = VK_INDEX_TYPE_NONE_KHR;
                    break;
                default:
                    debugCallback->handleMessage(
                        DebugMessageType::Error,
                        DebugMessageSource::Layer,
                        "unsupported value of Format encountered in "
                        "GeometryDesc::content.triangles.indexFormat");
                    return SLANG_E_INVALID_ARG;
                }
                vkGeomData.triangles.indexData.deviceAddress = geomDesc.content.triangles.indexData;
                vkGeomData.triangles.transformData.deviceAddress =
                    geomDesc.content.triangles.transform3x4;
                // Primitive count = indexCount/3 for indexed meshes, else
                // vertexCount/3; Max() selects whichever is in use — assumes
                // the unused count is 0 (TODO confirm in BuildInputs contract).
                primitiveCounts[i] = Slang::Math::Max(
                    geomDesc.content.triangles.vertexCount,
                    geomDesc.content.triangles.indexCount) /
                    3;
                break;
            case IAccelerationStructure::GeometryType::ProcedurePrimitives:
                m_geometryInfos[i].geometryType = VK_GEOMETRY_TYPE_AABBS_KHR;
                vkGeomData.aabbs.sType =
                    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR;
                vkGeomData.aabbs.data.deviceAddress = geomDesc.content.proceduralAABBs.data;
                vkGeomData.aabbs.stride = geomDesc.content.proceduralAABBs.stride;
                primitiveCounts[i] =
                    (uint32_t)buildInputs.geometryDescs[i].content.proceduralAABBs.count;
                break;
            default:
                debugCallback->handleMessage(
                    DebugMessageType::Error,
                    DebugMessageSource::Layer,
                    "invalid value of IAccelerationStructure::GeometryType encountered in "
                    "buildInputs.geometryDescs");
                return SLANG_E_INVALID_ARG;
            }
        }
        buildInfo.geometryCount = buildInputs.descCount;
        buildInfo.pGeometries = m_geometryInfos.getBuffer();
    }
    else
    {
        // Top level: a single instance-array geometry; descCount is the
        // number of instances here, not geometries.
        m_vkInstanceInfo.geometryType = VK_GEOMETRY_TYPE_INSTANCES_KHR;
        m_vkInstanceInfo.geometry.instances.sType =
            VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR;
        m_vkInstanceInfo.geometry.instances.arrayOfPointers = 0;
        m_vkInstanceInfo.geometry.instances.data.deviceAddress = buildInputs.instanceDescs;
        buildInfo.pGeometries = &m_vkInstanceInfo;
        buildInfo.geometryCount = 1;
        primitiveCounts.setCount(1);
        primitiveCounts[0] = buildInputs.descCount;
    }
    return SLANG_OK;
}
|
abrams27/mimuw | sem2/po/kolokwia/kolos2017/src/Wyspa.java | import java.util.Arrays;
import java.util.Random;
public class Wyspa {
    // Backing array of apple trees; only the first liczbaJabloni entries are live.
    private Jablko[] jablonie;
    private int liczbaJabloni;
    /** Creates an island with a random number (0-6) of apples. */
    public Wyspa() {
        Random gen = new Random();
        this.liczbaJabloni = gen.nextInt(7);
        jablonie = new Jablko[liczbaJabloni];
        for (int i = 0; i < liczbaJabloni; i++) {
            jablonie[i] = new Jablko();
        }
    }
    public Jablko[] jablonie() {
        return jablonie;
    }
    /**
     * Removes the given apple (identity comparison) by replacing it with the
     * last element and shrinking the array.
     * Bug fix: previously, when the apple was not present, element 0 was
     * removed anyway (the search index defaulted to 0); a missing apple now
     * leaves the island unchanged.
     */
    public void usunJablko(Jablko jablko) {
        int indeks = -1;
        for (int i = 0; i < liczbaJabloni; i++) {
            if (jablonie[i] == jablko) {
                indeks = i;
                break;
            }
        }
        if (indeks < 0) {
            return; // not found — nothing to remove
        }
        jablonie[indeks] = jablonie[liczbaJabloni - 1];
        liczbaJabloni--;
        jablonie = Arrays.copyOf(jablonie, liczbaJabloni);
    }
}
|
mateuszchudyk/intel-graphics-compiler | visa/iga/IGALibrary/Backend/Messages/MessageDecoder.cpp | <filename>visa/iga/IGALibrary/Backend/Messages/MessageDecoder.cpp
/*========================== begin_copyright_notice ============================
Copyright (C) 2020-2021 Intel Corporation
SPDX-License-Identifier: MIT
============================= end_copyright_notice ===========================*/
#include "MessageDecoder.hpp"
#include <tuple>
#include <utility>
using namespace iga;
// Decodes the register-payload size fields shared by most send descriptors:
// Mlen (desc[28:25]) and Rlen (desc[24:20]); Xlen (exDesc[10:6]) on
// pre-XE_HPG platforms; and the SFID (exDesc[3:0]) on GEN11 and earlier.
void MessageDecoder::decodePayloadSizes() {
    bool hasMLenRLenInDesc = true;
    bool hasXLenInExDesc = platform() < Platform::XE_HPG;
    // Pluralizes "register(s)" in the human-readable descriptions.
    auto plural = [](int x) {return x == 1 ? "" : "s";};
    if (hasMLenRLenInDesc) {
        decodeDescField("Mlen", 25, 4,
            [&] (std::stringstream &ss, uint32_t val) {
                ss << val << " address register" << plural(val) << " written";
            });
        decodeDescField("Rlen", 20, 5,
            [&] (std::stringstream &ss, uint32_t val) {
                ss << val << " register" << plural(val) << " read back";
            });
    }
    if (hasXLenInExDesc) {
        // Offset 32+6 addresses bit 6 of the extended descriptor.
        decodeDescField("Xlen", 32 + 6, 5,
            [&] (std::stringstream &ss, uint32_t val) {
                ss << val << " data register" << plural(val) << " written";
            });
    }
    if (platform() <= Platform::GEN11) {
        decodeDescField("SFID", 32 + 0, 4,
            [] (std::stringstream &ss, uint32_t val) {
                ss << val << " shared function ID";
            });
    }
}
///////////////////////////////////////////////////////////////////////////////
// shared LSC fields (gateway EOT uses this)
// Decodes the LSC fence flush-op field (desc[14:12]) into the mnemonic (sym)
// and description (descs) streams, then delegates the scope field to
// addLscFenceScopeField. Invalid encodings are reported via error().
void MessageDecoder::addLscFenceFields(
    std::stringstream &sym, std::stringstream &descs)
{
    auto ft = getDescBits(12, 3);
    std::stringstream ftss;
    switch (ft) {
    case 0:
        sym << ".none";
        ftss << "no op";
        break;
    case 1:
        sym << ".evict";
        ftss << "evict (dirty lines invalidated and evicted)";
        break;
    case 2:
        sym << ".invalidate";
        ftss << "invalidate (all clean lines, but do not evict)";
        break;
    case 3:
        sym << ".discard";
        ftss << "discard (dirty and clean lines invalidated "
            "without eviction)";
        break;
    case 4:
        sym << ".clean";
        ftss <<
            "clean (dirty lines written out, "
            "but kept in clean state)";
        break;
    case 5:
        sym << ".flushl3";
        ftss << "flush L3 only"; // XeHPG only
        break;
    default:
        sym << ".0x" << std::hex << ft << "?";
        ftss << "invalid flush type";
        error(12, 3, "invalid flush type");
    }
    descs << " " << ftss.str();
    addField("FlushOp", 12, 3, ft, ftss.str());
    //
    addLscFenceScopeField(sym, descs);
}
// Decodes the LSC fence scope field (desc[11:9]) into sym/descs; invalid
// encodings are reported via error().
void MessageDecoder::addLscFenceScopeField(
    std::stringstream &sym, std::stringstream &descs)
{
    descs << " scoped to";
    std::stringstream scss;
    auto sc = getDescBits(9, 3);
    switch (sc) {
    case 0: sym << ".group"; scss << "thread group"; break;
    case 1: sym << ".local"; scss << "slice"; break;
    case 2: sym << ".tile"; scss << "tile"; break;
    case 3: sym << ".gpu"; scss << "gpu"; break;
    case 4: sym << ".gpus"; scss << "all gpus"; break;
    case 5: sym << ".sysrel"; scss << "system release"; break;
    case 6: sym << ".sysacq"; scss << "system acquire"; break;
    default:
        sym << ".0x" << std::hex << sc << "?";
        scss << "invalid flush scope";
        error(9, 3, "invalid flush scope");
    }
    descs << " " << scss.str();
    //
    addField("FlushScope", 9, 3, sc, scss.str());
}
// Decodes the LSC fence data-port mask (desc[15:12]): each set bit selects
// one of SLM/UGM/UGML/TGM; bit 0 is SLM. An empty mask means "unfenced".
void MessageDecoder::addLscFencePortFields(
    std::stringstream &sym, std::stringstream &descs)
{
    auto ports = getDescBits(12, 4);
    sym << ".0x" << std::hex << std::uppercase << ports;
    std::stringstream ss;
    //
    if (ports == 0x0) {
        ss << "None";
        descs << " unfenced";
    } else {
        descs << " fencing ";
        for (int i = 0; i < 4; i++) {
            if (ports & (1 << i)) {
                // Emit a separator only when a lower bit was already printed.
                if (ports & (((1 << i) - 1))) {
                    ss << "|";
                    descs << ", ";
                }
                static const char *SYMS[] {"SLM", "UGM", "UGML", "TGM"};
                ss << SYMS[i];
                descs << SYMS[i];
            }
        }
    }
    addField("FenceDataPorts", 12, 4, ports, ss.str());
}
|
SENA-CEET/1262154-G1G2-Trimestre-2 | java/poo/ClasesRelaciones/src/main/java/co/edu/sena/clasesrelaciones/asociacion/ejemplo1/APP.java | <filename>java/poo/ClasesRelaciones/src/main/java/co/edu/sena/clasesrelaciones/asociacion/ejemplo1/APP.java
package co.edu.sena.clasesrelaciones.asociacion.ejemplo1;
/**
* Created by Enrique on 13/03/2017.
*/
public class APP {
    public static void main(String[] args) {
        // Build each organ first, then assemble the horse (association example).
        Ojo ojoRojo = new Ojo("rojo");
        Ojo ojoAzul = new Ojo("azul");
        Rinion rinionGrande = new Rinion("grande");
        Rinion rinionMediano = new Rinion("mediano");
        Corazon corazon = new Corazon(120);
        Caballo caballo = new Caballo(ojoRojo, ojoAzul, rinionGrande, rinionMediano, corazon);
        // Print the horse's heart rate (120).
        System.out.println(caballo.getCorazon().getLatidos());
    }
}
|
paralin/go-rift-api | models/lol_lobby_lobby_bot_champion.go | <reponame>paralin/go-rift-api
// Code generated by go-swagger; DO NOT EDIT.
package models
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"strconv"
strfmt "github.com/go-openapi/strfmt"
"github.com/go-openapi/errors"
"github.com/go-openapi/swag"
)
// LolLobbyLobbyBotChampion lol lobby lobby bot champion
// swagger:model LolLobbyLobbyBotChampion
// NOTE(review): generated by go-swagger — comments added here will be lost
// on regeneration; prefer editing the swagger spec.
type LolLobbyLobbyBotChampion struct {
	// active
	Active bool `json:"active,omitempty"`
	// bot difficulties
	BotDifficulties []LolLobbyLobbyBotDifficulty `json:"botDifficulties"`
	// id
	ID int32 `json:"id,omitempty"`
	// name
	Name string `json:"name,omitempty"`
}
// Validate validates this lol lobby lobby bot champion
func (m *LolLobbyLobbyBotChampion) Validate(formats strfmt.Registry) error {
	var res []error
	if err := m.validateBotDifficulties(formats); err != nil {
		res = append(res, err)
	}
	if len(res) > 0 {
		return errors.CompositeValidationError(res...)
	}
	return nil
}
// validateBotDifficulties validates each element of the optional
// BotDifficulties slice, qualifying errors with the element index.
func (m *LolLobbyLobbyBotChampion) validateBotDifficulties(formats strfmt.Registry) error {
	if swag.IsZero(m.BotDifficulties) { // not required
		return nil
	}
	for i := 0; i < len(m.BotDifficulties); i++ {
		if err := m.BotDifficulties[i].Validate(formats); err != nil {
			if ve, ok := err.(*errors.Validation); ok {
				return ve.ValidateName("botDifficulties" + "." + strconv.Itoa(i))
			}
			return err
		}
	}
	return nil
}
// MarshalBinary interface implementation
func (m *LolLobbyLobbyBotChampion) MarshalBinary() ([]byte, error) {
	if m == nil {
		return nil, nil
	}
	return swag.WriteJSON(m)
}
// UnmarshalBinary interface implementation
func (m *LolLobbyLobbyBotChampion) UnmarshalBinary(b []byte) error {
	var res LolLobbyLobbyBotChampion
	if err := swag.ReadJSON(b, &res); err != nil {
		return err
	}
	*m = res
	return nil
}
|
suyanlong/chain33-sdk-java | src/main/java/cn/chain33/javasdk/model/rpcresult/TxResult.java | <filename>src/main/java/cn/chain33/javasdk/model/rpcresult/TxResult.java
package cn.chain33.javasdk.model.rpcresult;
/**
 * RPC result describing where a transaction lives on the chain: its hash,
 * the block height, and the transaction's index within that block.
 */
public class TxResult {

    /** Transaction hash. */
    private String hash;

    /** Block height. */
    private Long height;

    /** Index of the transaction within the block. */
    private Integer index;

    public String getHash() {
        return hash;
    }

    public void setHash(String hash) {
        this.hash = hash;
    }

    public Long getHeight() {
        return height;
    }

    public void setHeight(Long height) {
        this.height = height;
    }

    public Integer getIndex() {
        return index;
    }

    public void setIndex(Integer index) {
        this.index = index;
    }

    @Override
    public String toString() {
        // Same text as the original concatenation-based implementation;
        // %s renders null fields as "null", matching string concatenation.
        return String.format("TxResult [hash=%s, height=%s, index=%s]", hash, height, index);
    }
}
|
Rexogamer/DiscordBot | src/main/java/core/commands/MbizThisYearCommand.java | <filename>src/main/java/core/commands/MbizThisYearCommand.java
package core.commands;
import core.parsers.ChartSmartYearParser;
import core.parsers.ChartableParser;
import core.parsers.params.ChartYearParameters;
import dao.ChuuService;
import java.util.Arrays;
import java.util.List;
/**
 * Chart command ("album of the year") that charts a user's top albums for a
 * queried year, using MusicBrainz data via {@link MusicBrainzCommand}.
 */
public class MbizThisYearCommand extends MusicBrainzCommand {

    // NOTE(review): presumably widens the inherited default search window so
    // year-filtered charts have enough candidates — confirm against
    // MusicBrainzCommand's default searchSpace.
    private static final int CUSTOM_SEARCH_SPACE = 2000;

    public MbizThisYearCommand(ChuuService dao) {
        super(dao);
        this.searchSpace = CUSTOM_SEARCH_SPACE;
    }

    /** Parser that understands "smart" year expressions for chart requests. */
    @Override
    public ChartableParser<ChartYearParameters> initParser() {
        return new ChartSmartYearParser(getService());
    }

    /** Discogs lookups are disabled for this command. */
    @Override
    public boolean doDiscogs() {
        return false;
    }

    @Override
    public String getDescription() {
        return "Gets your top albums of the year queried.\t" +
                "NOTE: The further the year is from the current year, the less precise the command will be";
    }

    /** Invocation aliases for this command. */
    @Override
    public List<String> getAliases() {
        return Arrays.asList("aoty", "albumoftheyear");
    }

    @Override
    public String getName() {
        return "Albums Of The Year!";
    }
}
|
MrAwesomeRocks/caelus-cml | src/libraries/edgeMesh/edgeFormats/nas/NASedgeFormat.cpp | /*---------------------------------------------------------------------------*\
Copyright (C) 2011-2015 OpenFOAM Foundation
-------------------------------------------------------------------------------
License
This file is part of CAELUS.
CAELUS is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
CAELUS is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with CAELUS. If not, see <http://www.gnu.org/licenses/>.
\*---------------------------------------------------------------------------*/
#include "NASedgeFormat.hpp"
#include "IFstream.hpp"
#include "IStringStream.hpp"
#include "PackedBoolList.hpp"
// * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * //
// Construct from a NASTRAN file name; all parsing is delegated to read(),
// which raises a FatalError if the file cannot be opened.
CML::fileFormats::NASedgeFormat::NASedgeFormat
(
    const fileName& filename
)
{
    read(filename);
}
// * * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * //
// Parse a NASTRAN bulk-data file into this edge mesh.
//
// Recognised cards:
//   CBEAM/CROD : an edge (two fixed 8-column vertex fields)
//   GRID       : a point in short (8-column) format
//   GRID*      : a point in long (16-column) format, continued with '*'
// Lines with a '+' in column 73 are joined with their continuation lines
// before parsing.  After reading, NASTRAN point ids are remapped to
// zero-based indices and any points not referenced by an edge are culled.
// Returns true on success; unreadable files and malformed GRID*
// continuations are fatal errors.
bool CML::fileFormats::NASedgeFormat::read
(
    const fileName& filename
)
{
    clear();

    IFstream is(filename);
    if (!is.good())
    {
        FatalErrorInFunction
            << "Cannot read file " << filename
            << exit(FatalError);
    }

    DynamicList<point> dynPoints;
    DynamicList<edge> dynEdges;
    DynamicList<label> pointId;     // Nastran index of points

    while (is.good())
    {
        string line;
        is.getLine(line);

        // Skip empty or comment
        if (line.empty() || line[0] == '$')
        {
            continue;
        }

        // Check if character 72 is continuation
        if (line.size() > 72 && line[72] == '+')
        {
            line = line.substr(0, 72);

            // Keep appending columns 9-72 of continuation lines until a
            // line without the trailing '+' marker is reached.
            while (true)
            {
                string buf;
                is.getLine(buf);

                if (buf.size() > 72 && buf[72] == '+')
                {
                    line += buf.substr(8, 64);
                }
                else
                {
                    line += buf.substr(8, buf.size()-8);
                    break;
                }
            }
        }

        // Read first word (the card name)
        IStringStream lineStream(line);
        word cmd;
        lineStream >> cmd;

        if (cmd == "CBEAM" || cmd == "CROD")
        {
            edge e;

            // Vertex labels live in fixed columns 25-32 and 33-40.
            // label groupId = readLabel(IStringStream(line.substr(16,8))());
            e[0] = readLabel(IStringStream(line.substr(24,8))());
            e[1] = readLabel(IStringStream(line.substr(32,8))());

            // discard groupID
            dynEdges.append(e);
        }
        else if (cmd == "GRID")
        {
            label index = readLabel(IStringStream(line.substr(8,8))());
            scalar x = parseNASCoord(line.substr(24, 8));
            scalar y = parseNASCoord(line.substr(32, 8));
            scalar z = parseNASCoord(line.substr(40, 8));

            pointId.append(index);
            dynPoints.append(point(x, y, z));
        }
        else if (cmd == "GRID*")
        {
            // Long format is on two lines with '*' continuation symbol
            // on start of second line.
            // Typical line (spaces compacted)
            // GRID* 126 0 -5.55999875E+02 -5.68730474E+02
            // * 2.14897901E+02

            label index = readLabel(IStringStream(line.substr(8,16))());
            scalar x = parseNASCoord(line.substr(40, 16));
            scalar y = parseNASCoord(line.substr(56, 16));

            // z coordinate is on the following '*' continuation line.
            is.getLine(line);
            if (line[0] != '*')
            {
                FatalErrorInFunction
                    << "Expected continuation symbol '*' when reading GRID*"
                    << " (double precision coordinate) format" << nl
                    << "Read:" << line << nl
                    << "File:" << is.name() << " line:" << is.lineNumber()
                    << exit(FatalError);
            }
            scalar z = parseNASCoord(line.substr(8, 16));

            pointId.append(index);
            dynPoints.append(point(x, y, z));
        }
    }

    // transfer to normal lists
    storedPoints().transfer(dynPoints);

    pointId.shrink();
    dynEdges.shrink();

    // Build inverse mapping (NASTRAN pointId -> index)
    Map<label> mapPointId(2*pointId.size());
    forAll(pointId, i)
    {
        mapPointId.insert(pointId[i], i);
    }

    // note which points were really used and which can be culled
    PackedBoolList usedPoints(points().size());

    // Pass1: relabel edges
    // ~~~~~~~~~~~~~~~~~~~~
    forAll(dynEdges, i)
    {
        edge& e = dynEdges[i];

        e[0] = mapPointId[e[0]];
        e[1] = mapPointId[e[1]];

        usedPoints.set(e[0]);
        usedPoints.set(e[1]);
    }
    pointId.clearStorage();
    // mapPointId is cleared and reused below for a different mapping
    // (old zero-based index -> compacted index).
    mapPointId.clear();

    // not all the points were used, cull them accordingly
    if (unsigned(points().size()) != usedPoints.count())
    {
        label nUsed = 0;

        pointField& pts = storedPoints();

        forAll(pts, pointI)
        {
            if (usedPoints.get(pointI))
            {
                // Compact used points towards the front of the list.
                if (nUsed != pointI)
                {
                    pts[nUsed] = pts[pointI];
                }

                // map prev -> new id
                mapPointId[pointI] = nUsed;

                ++nUsed;
            }
        }

        pts.setSize(nUsed);

        // renumber edge vertices
        forAll(dynEdges, edgeI)
        {
            edge& e = dynEdges[edgeI];

            e[0] = mapPointId[e[0]];
            e[1] = mapPointId[e[1]];
        }
    }

    // transfer to normal lists
    storedEdges().transfer(dynEdges);

    return true;
}
// ************************************************************************* //
|
bertux/driftctl | pkg/resource/aws/aws_sqs_queue_policy_test.go | package aws_test
import (
"testing"
"time"
"github.com/aws/aws-sdk-go/service/sqs"
"github.com/cloudskiff/driftctl/test/acceptance/awsutils"
"github.com/sirupsen/logrus"
"github.com/cloudskiff/driftctl/test/acceptance"
)
// TestAcc_AwsSqsQueuePolicy is an acceptance test: it applies the Terraform
// fixtures under testdata/acc/aws_sqs_queue_policy, waits for the queues to
// become visible, then runs a driftctl scan filtered to
// aws_sqs_queue_policy resources and asserts no drift.
func TestAcc_AwsSqsQueuePolicy(t *testing.T) {
	acceptance.Run(t, acceptance.AccTestCase{
		Path: "./testdata/acc/aws_sqs_queue_policy",
		Args: []string{"scan", "--filter", "Type=='aws_sqs_queue_policy'"},
		Checks: []acceptance.AccCheck{
			{
				Env: map[string]string{
					"AWS_REGION": "us-east-1",
				},
				// Queue creation is eventually consistent: poll ListQueues
				// (up to 60s) until all 3 fixture queues are listed before
				// scanning.
				PreExec: func() {
					err := acceptance.RetryFor(60*time.Second, func(doneCh chan struct{}) error {
						return sqs.New(awsutils.Session()).ListQueuesPages(&sqs.ListQueuesInput{},
							func(resp *sqs.ListQueuesOutput, lastPage bool) bool {
								logrus.Debugf("Retrieved %d SQS queues", len(resp.QueueUrls))
								if len(resp.QueueUrls) == 3 {
									doneCh <- struct{}{}
								}
								return !lastPage
							},
						)
					})
					if err != nil {
						t.Fatal("Timeout while fetching SQS queues")
					}
				},
				Check: func(result *acceptance.ScanResult, stdout string, err error) {
					if err != nil {
						t.Fatal(err)
					}
					// Expect a drift-free scan with exactly 2 managed
					// resources — NOTE(review): presumably the fixture
					// attaches policies to 2 of the 3 queues; confirm
					// against the testdata.
					result.AssertInfrastructureIsInSync()
					result.Equal(2, result.Summary().TotalManaged)
				},
			},
		},
	})
}
|
sanksons/reflorest | src/common/logger/writers/stdoutwriter/impl.go | <filename>src/common/logger/writers/stdoutwriter/impl.go
package stdoutwriter
import (
"fmt"
"github.com/sanksons/reflorest/src/common/logger/formatter"
"github.com/sanksons/reflorest/src/common/logger/message"
)
// StdoutWriter is a log writer that prints formatted log messages to
// standard output.  (The previous comment, "FileWriter is a file logger
// structure", was copied from the file-writer implementation.)
type StdoutWriter struct {
	// myFormat converts a *message.LogMsg into its textual form; it is
	// assigned via SetFormatter and dereferenced by Write.
	myFormat formatter.FormatInterface
}
// GetNewObj returns a new StdoutWriter with no formatter configured;
// SetFormatter is expected to be called before Write, which dereferences
// the formatter.  (The previous comment describing fname/conf parameters
// belonged to the file writer — this constructor takes none.)
func GetNewObj() *StdoutWriter {
	obj := new(StdoutWriter)
	return obj
}
// Write formats msg with the configured formatter and prints the result
// to stdout, one log entry per line.
func (fw *StdoutWriter) Write(msg *message.LogMsg) {
	// Guard: if no formatter was ever set (see GetNewObj/SetFormatter),
	// calling a method on the nil interface would panic.
	if fw.myFormat == nil {
		return
	}
	formatted := fw.myFormat.GetFormattedLog(msg)
	str, ok := formatted.(string)
	if !ok {
		// The original code discarded the failed type assertion and
		// printed an empty line; fall back to %v so the entry survives.
		fmt.Printf("%v\n", formatted)
		return
	}
	fmt.Printf("%s\n", str)
}
// SetFormatter sets the formatter used by Write to render log messages.
// (The previous comment, "get formatted object", described the wrong
// direction.)
func (fw *StdoutWriter) SetFormatter(format formatter.FormatInterface) {
	fw.myFormat = format
}
|
jmasterx/StemwaterSpades | Spades Game/Game/Particle/ParticleSystem.hpp | #ifndef PARTICLE_SYSTEM_HPP
#define PARTICLE_SYSTEM_HPP
#include "Game/Particle/Particle.hpp"
#include "Game/Utility/Vec2.hpp"
#include "Game/Resource/Sprite.hpp"
#include "Game/Engine/GraphicsContext.hpp"
#include <Agui/Agui.hpp>
#include <stdlib.h>
#include <list>
#include <vector>
namespace cge
{
	// A sprite-based particle system: it owns a list of particles, a
	// gravity force plus any number of extra environmental forces, and a
	// shared Sprite used to render every particle.  Subclasses customise
	// particle creation by overriding generateParticle().
	class ParticleSystem
	{
		std::list<Particle> m_particles;          // live particles
		Vec2 m_gravity;                           // force of gravity
		Vec2 m_worldPos;                          // system origin in world space
		std::vector<Vec2> m_environmentalForces;  // summed by calcEnvironmentVector()
		int m_maxLife;                            // particle lifetime, in frames
		bool m_regenerate;                        // see setRegenerate / isRegenerating
		Vec2 m_imgSize;                           // render size of the particle image
		Sprite* m_image;                          // shared sprite — presumably not owned; confirm in dtor
	protected:
		// Mutable access to the particle list for subclasses.
		std::list<Particle>& getParticles();
	public:
		ParticleSystem(void);
		// Create a new particle; currentGen may be NULL — presumably the
		// particle being regenerated, so subclasses can reuse its state
		// (confirm in the implementation).
		virtual Particle generateParticle(Particle* currentGen = NULL);
		// Sum of all environmental forces (gravity not included here).
		Vec2 calcEnvironmentVector() const;
		void addEnvironmentalForce(const Vec2& force);
		void clearEnvironmentalForces();
		void setForceOfGravity(const Vec2& gravity);
		const Vec2& getForceOfGravity() const;
		void setWorldPosition(const Vec2& worldPos);
		const Vec2& getWorldPosition() const;
		virtual void render(GraphicsContext& g);
		// Maximum particle age, in frames.
		void setParticleMaxLife(int frames);
		int getParticleMaxLife() const;
		int getParticleCount() const;
		void setRegenerate(bool regenerate);
		bool isRegenerating() const;
		void setImage(Sprite* sprite);
		void setImageRenderSize(const Vec2& size);
		const Vec2& getImageRenderSize() const;
		Sprite* getImage() const;
		virtual ~ParticleSystem(void);
	};
#endif |
jasonlong/classroom | spec/models/group_assignment_invitation_spec.rb | <filename>spec/models/group_assignment_invitation_spec.rb
require 'rails_helper'
# Model spec for GroupAssignmentInvitation: associations, validations,
# key auto-generation, redemption against GitHub (VCR-recorded), and
# URL-parameter behaviour.
RSpec.describe GroupAssignmentInvitation, type: :model do
  # Associations reached through the owning group assignment.
  it { is_expected.to have_one(:grouping).through(:group_assignment) }
  it { is_expected.to have_one(:organization).through(:group_assignment) }

  it { is_expected.to have_many(:groups).through(:grouping) }

  it { is_expected.to belong_to(:group_assignment) }

  it_behaves_like 'a default scope where deleted_at is not present'

  describe 'validations and uniqueness' do
    subject { GroupAssignmentInvitation.new }

    it { is_expected.to validate_presence_of(:group_assignment) }
    it { is_expected.to validate_presence_of(:key) }
    it { is_expected.to validate_uniqueness_of(:key) }
  end

  # The key is generated in an initialization hook, before any save.
  it 'should have a key after initialization' do
    group_assignment_invitation = GroupAssignmentInvitation.new
    expect(group_assignment_invitation.key).to_not be_nil
  end

  describe '#redeem_for', :vcr do
    let(:invitee) { GitHubFactory.create_classroom_student }
    let(:organization) { GitHubFactory.create_owner_classroom_org }
    let(:grouping) { Grouping.create(title: 'Grouping', organization: organization) }

    let(:group_assignment) do
      GroupAssignment.create(creator: organization.users.first,
                             title: 'JavaScript',
                             organization: organization,
                             public_repo: false,
                             grouping: grouping)
    end

    let(:group_assignment_invitation) { GroupAssignmentInvitation.create(group_assignment: group_assignment) }

    # Clean up records created on GitHub/DB so VCR cassettes stay consistent.
    after(:each) do
      RepoAccess.destroy_all
      Group.destroy_all
      GroupAssignmentRepo.destroy_all
    end

    it 'returns the full repo name of the users GitHub repository' do
      full_repo_name = group_assignment_invitation.redeem_for(invitee, nil, 'Code Squad')
      expect(full_repo_name).to eql("#{organization.title}/#{group_assignment.title}-Code-Squad")
    end
  end

  describe '#to_param' do
    let(:group_assignment_invitation) { create(:group_assignment_invitation) }

    # Invitations are addressed by key in URLs, not by numeric id.
    it 'should return the key' do
      expect(group_assignment_invitation.to_param).to eql(group_assignment_invitation.key)
    end
  end
end
|
naeramarth7/joynr | java/messaging/bounceproxy/bounceproxy-controller-persistence/ehcache/src/main/java/io/joynr/messaging/bounceproxy/controller/directory/ehcache/BounceProxyEhcacheAdapter.java | /*
* #%L
* %%
* Copyright (C) 2011 - 2017 BMW Car IT GmbH
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package io.joynr.messaging.bounceproxy.controller.directory.ehcache;
import io.joynr.messaging.bounceproxy.controller.directory.BounceProxyDirectory;
import io.joynr.messaging.bounceproxy.controller.directory.BounceProxyRecord;
import io.joynr.messaging.info.BounceProxyInformation;
import io.joynr.messaging.info.BounceProxyStatusInformation;
import io.joynr.messaging.info.ControlledBounceProxyInformation;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import net.sf.ehcache.Cache;
import net.sf.ehcache.CacheManager;
import net.sf.ehcache.Element;
import net.sf.ehcache.distribution.CacheManagerPeerProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.inject.Inject;
import com.google.inject.name.Named;
/**
* {@link BounceProxyDirectory} implementation which uses Ehcache to store
* bounce proxy information.
*
* @author christina.strobel
*
*/
public class BounceProxyEhcacheAdapter implements BounceProxyDirectory {

    private static final Logger log = LoggerFactory.getLogger(BounceProxyEhcacheAdapter.class);

    // Configuration property keys: name of the cache holding bounce proxy
    // records, and the Ehcache configuration file to use.
    public final static String PROPERTY_BP_CACHE_NAME = "joynr.bounceproxy.controller.bp_cache_name";
    public final static String PROPERTY_BP_CACHE_CONFIGURATION = "joynr.bounceproxy.controller.bp_cache_config_file";

    private final CacheManager manager;
    private final String cacheName;

    /**
     * @param cacheName name of the Ehcache cache storing bounce proxy records
     * @param cacheManager manager providing access to that cache
     */
    @Inject
    public BounceProxyEhcacheAdapter(@Named(PROPERTY_BP_CACHE_NAME) String cacheName, CacheManager cacheManager) {
        this.cacheName = cacheName;
        this.manager = cacheManager;
    }

    /**
     * Returns all cached records whose status reports itself assignable
     * (see {@link BounceProxyRecord#getStatus()}).
     */
    @Override
    public List<BounceProxyRecord> getAssignableBounceProxies() {

        if (log.isTraceEnabled()) {
            log.trace("Retrieving assignable bounce proxies from cache {}", cacheName);
            tracePeers();
        }

        List<BounceProxyRecord> result = new LinkedList<BounceProxyRecord>();

        Cache cache = manager.getCache(cacheName);
        @SuppressWarnings("rawtypes")
        List keys = cache.getKeys();

        Map<Object, Element> elements = cache.getAll(keys);
        for (Element element : elements.values()) {
            BounceProxyRecord bounceProxyRecord = getBounceProxyRecordFromElement(element);
            if (bounceProxyRecord.getStatus().isAssignable()) {
                result.add(bounceProxyRecord);
            }
        }
        return result;
    }

    /**
     * Records that channel {@code ccid} is assigned to the given bounce proxy
     * and writes the updated record back to the cache.
     *
     * @throws IllegalArgumentException if no record exists for the proxy id
     */
    @Override
    public void updateChannelAssignment(String ccid, BounceProxyInformation bpInfo) throws IllegalArgumentException {

        if (log.isTraceEnabled()) {
            log.trace("Update channel assignment for bounce proxy {} in cache {}", bpInfo.getId(), cacheName);
            tracePeers();
        }

        Cache cache = manager.getCache(cacheName);

        Element element = cache.get(bpInfo.getId());
        if (element == null) {
            throw new IllegalArgumentException("No bounce proxy with ID '" + bpInfo.getId() + "' exists");
        }

        BounceProxyRecord bpRecord = getBounceProxyRecordFromElement(element);
        bpRecord.addAssignedChannel(ccid);
        // Re-put so the cache (and any replicated peers) sees the change.
        Element updatedElement = new Element(bpInfo.getId(), bpRecord);
        cache.put(updatedElement);
    }

    /**
     * Looks up a single bounce proxy record by id.
     *
     * @throws IllegalArgumentException if no record exists for the id
     */
    @Override
    public BounceProxyRecord getBounceProxy(String bpId) throws IllegalArgumentException {

        if (log.isTraceEnabled()) {
            log.trace("Retrieving bounce proxy {} from cache {}", bpId, cacheName);
            tracePeers();
        }

        Cache cache = manager.getCache(cacheName);
        Element element = cache.get(bpId);

        if (element == null) {
            throw new IllegalArgumentException("No bounce proxy with ID '" + bpId + "' exists");
        }

        return getBounceProxyRecordFromElement(element);
    }

    /**
     * Unwraps the record from a cache element and stamps its freshness from
     * the element's latest creation/update time.
     */
    protected BounceProxyRecord getBounceProxyRecordFromElement(Element element) {
        BounceProxyRecord bpRecord = (BounceProxyRecord) element.getObjectValue();
        bpRecord.setFreshness(element.getLatestOfCreationAndUpdateTime());
        return bpRecord;
    }

    /** Returns whether a record for the given bounce proxy id is cached. */
    @Override
    public boolean containsBounceProxy(String bpId) {

        if (log.isTraceEnabled()) {
            log.trace("containsBounceProxy {} in cache {}", bpId, cacheName);
            tracePeers();
        }

        Cache cache = manager.getCache(cacheName);
        return cache.get(bpId) != null;
    }

    /**
     * Stores a fresh record for a newly registered bounce proxy.  NOTE: an
     * existing record with the same id is overwritten by {@code put}.
     */
    @Override
    public void addBounceProxy(ControlledBounceProxyInformation bpInfo) throws IllegalArgumentException {

        if (log.isTraceEnabled()) {
            log.trace("addBounceProxy {} to cache {}", bpInfo.getId(), cacheName);
            tracePeers();
        }

        Cache cache = manager.getCache(cacheName);
        Element element = new Element(bpInfo.getId(), new BounceProxyRecord(bpInfo));
        cache.put(element);
    }

    /** Replaces the cached record for the record's bounce proxy id. */
    @Override
    public void updateBounceProxy(BounceProxyRecord bpRecord) throws IllegalArgumentException {

        if (log.isTraceEnabled()) {
            log.trace("updateBounceProxy {} in cache {}", bpRecord.getBounceProxyId(), cacheName);
            tracePeers();
        }

        Cache cache = manager.getCache(cacheName);
        Element element = new Element(bpRecord.getBounceProxyId(), bpRecord);
        cache.put(element);
    }

    /** Returns status information for every cached bounce proxy. */
    @Override
    public List<BounceProxyStatusInformation> getBounceProxyStatusInformation() {

        if (log.isTraceEnabled()) {
            log.trace("getBounceProxyStatusInformation from cache {}", cacheName);
            tracePeers();
        }

        List<BounceProxyStatusInformation> result = new LinkedList<BounceProxyStatusInformation>();

        Cache cache = manager.getCache(cacheName);
        @SuppressWarnings("rawtypes")
        List keys = cache.getKeys();

        Map<Object, Element> elements = cache.getAll(keys);
        for (Element element : elements.values()) {
            result.add(getBounceProxyRecordFromElement(element));
        }

        return result;
    }

    /** Trace-level diagnostics: logs the number of remote RMI cache peers. */
    private void tracePeers() {

        CacheManagerPeerProvider peerProvider = manager.getCacheManagerPeerProvider("RMI");
        int peers = peerProvider.listRemoteCachePeers(manager.getEhcache(cacheName)).size();
        log.trace("Found {} remote cache peer(s)", peers);
    }
}
|
tharindusathis/sourcecodes-of-CodeReadingTheOpenSourcePerspective | XFree86-3.3/xc/programs/Xserver/hw/xfree86/accel/agx/Bt481.h | /* $XFree86: xc/programs/Xserver/hw/xfree86/accel/agx/Bt481.h,v 3.4 1996/12/23 06:32:19 dawes Exp $ */
/*
* Copyright 1993 by <NAME> <<EMAIL>>
* Copyright 1994 by <NAME> <<EMAIL>>
*
* Permission to use, copy, modify, distribute, and sell this software and its
* documentation for any purpose is hereby granted without fee, provided that
* the above copyright notice appear in all copies and that both that
* copyright notice and this permission notice appear in supporting
* documentation, and that the name of David Wexelblat not be used in
* advertising or publicity pertaining to distribution of the software without
* specific, written prior permission. David Wexelblat makes no representations
* about the suitability of this software for any purpose. It is provided
* "as is" without express or implied warranty.
*
* <NAME> AND <NAME> DISCLAIM ALL WARRANTIES WITH REGARD
* TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS, IN NO EVENT SHALL THE AUTORS BE LIABLE
* FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
* IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION,
* ARISING OUT OF OR IN CONNECTION WITH THE USE OR
* PERFORMANCE OF THIS SOFTWARE.
*
* Modified for the AGX and Bt481/2 by <NAME>
*
*/
/* $XConsortium: Bt481.h /main/4 1996/02/21 17:15:32 kaleb $ */
#ifndef XF86_BT481_H
#define XF86_BT481_H
#include "compiler.h"
#include <X11/Xfuncproto.h>
/* directly addressed registers */
#define BT481_WRITE_ADDR 0x00
#define BT481_RAMDAC_DATA 0x01
#define BT481_PIXEL_MASK 0x02
#define BT481_READ_ADDR 0x03
#define BT482_CURS_WR_ADDR 0x04
#define BT482_CURS_RAM_DATA 0x05
#define BT481_COMMAND_REG_A 0x06
#define BT482_CURS_RD_ADDR 0x07
/* indirectly addressed register addresses */
#define BT481_IND_PIXEL_MASK 0x00
#define BT481_OVERLAY_MASK 0x01
#define BT481_COMMAND_REG_B 0x02
#define BT482_CURSOR_REG 0x03
#define BT482_CURS_X_LOW 0x04
#define BT482_CURS_X_HIGH 0x05
#define BT482_CURS_Y_LOW 0x06
#define BT482_CURS_Y_HIGH 0x07
#define BT481_8BPP_PSUEDO_COLOR 0x00
#define BT481_15BPP_EDGE_TRIGGR 0x80
#define BT481_16BPP_EDGE_TRIGGR 0xC0
#define BT481_15BPP_LEVL_TRIGGR 0xA0
#define BT481_16BPP_LEVL_TRIGGR 0xE0
#define BT481_24BPP_EDGE_TRIGGR 0x90 /* RGBP unpacked 32BPP/8BPP-Psuedo */
#define BT481_24BPP_LEVL_TRIGGR 0xF0 /* RGB packed 24BPP */
_XFUNCPROTOBEGIN
extern void xf86OutBt481IndReg(
#if NeedFunctionPrototypes
unsigned char,
unsigned char,
unsigned char
#endif
);
extern unsigned char xf86InBt481IndReg(
#if NeedFunctionPrototypes
unsigned char
#endif
);
extern void xf86Bt481HWSave(
#if NeedFunctionPrototypes
union xf86RamDacSave *
#endif
);
extern void xf86Bt481HWRestore(
#if NeedFunctionPrototypes
union xf86RamDacSave *
#endif
);
extern void xf86Bt481Init(
#if NeedFunctionPrototypes
void
#endif
);
_XFUNCPROTOEND
#endif /* XF86_BT481_H */
|
agramonte/corona | librtt/Display/Rtt_PlatformBitmapTexture.h | <reponame>agramonte/corona<gh_stars>1000+
//////////////////////////////////////////////////////////////////////////////
//
// This file is part of the Corona game engine.
// For overview and more information on licensing please refer to README.md
// Home page: https://github.com/coronalabs/corona
// Contact: <EMAIL>
//
//////////////////////////////////////////////////////////////////////////////
#ifndef _Rtt_PlatformBitmapTexture_H__
#define _Rtt_PlatformBitmapTexture_H__
#include "Renderer/Rtt_Texture.h"
#include "Display/Rtt_PlatformBitmap.h"
// ----------------------------------------------------------------------------
namespace Rtt
{
class PlatformBitmap;
// ----------------------------------------------------------------------------
// Adapter for PlatformBitmap
class PlatformBitmapTexture : public Texture
{
public:
typedef Texture Super;
public:
static Texture::Format ConvertFormat( PlatformBitmap::Format format );
PlatformBitmapTexture( Rtt_Allocator *allocator, PlatformBitmap& bitmap );
public:
virtual void Allocate();
virtual void Deallocate();
virtual U32 GetWidth() const;
virtual U32 GetHeight() const;
virtual Format GetFormat() const;
virtual Filter GetFilter() const;
virtual Wrap GetWrapX() const;
virtual Wrap GetWrapY() const;
virtual size_t GetSizeInBytes() const;
virtual U8 GetByteAlignment() const;
virtual const U8* GetData() const;
virtual void ReleaseData();
public:
PlatformBitmap& GetBitmap() const { return fBitmap; }
protected:
PlatformBitmap& fBitmap;
};
// ----------------------------------------------------------------------------
} // namespace Rtt
// ----------------------------------------------------------------------------
#endif // _Rtt_PlatformBitmapTexture_H__
|
steva44/OpenSees | SRC/element/PFEMElement/TetMeshGenerator.h | <reponame>steva44/OpenSees<filename>SRC/element/PFEMElement/TetMeshGenerator.h
/* ****************************************************************** **
** OpenSees - Open System for Earthquake Engineering Simulation **
** Pacific Earthquake Engineering Research Center **
** **
** **
** (C) Copyright 1999, The Regents of the University of California **
** All Rights Reserved. **
** **
** Commercial use of this program without express permission of the **
** University of California, Berkeley, is strictly prohibited. See **
** file 'COPYRIGHT' in main directory for information on usage and **
** redistribution, and for a DISCLAIMER OF ALL WARRANTIES. **
** **
** Developed by: **
** <NAME> (<EMAIL>) **
** <NAME> (<EMAIL>) **
** <NAME> (<EMAIL>) **
** **
** ****************************************************************** */
// $Revision $
// $Date $
// Written: <NAME>
//
// Description: This file defines the class 'TetMeshGenerator', which
// is a c++ wrapper of 'TetGen' program.
#ifndef TetMeshGenerator_h
#define TetMeshGenerator_h
#include <tetgen.h>
#include <vector>
class TetMeshGenerator
{
public:
typedef std::vector<int> Polygon;
typedef std::vector<Polygon> Facet;
public:
TetMeshGenerator();
~TetMeshGenerator();
// mesh
int mesh(double vol, bool pointOnBoundary=true);
int remesh(double alpha);
// inputs
int addPoint(double x, double y, double z, int mark);
int addHole(double x, double y, double z);
int addFacet(const Facet& facet, int mark);
// outputs
int getNumPoints() const;
void getPoint(int i, double& x, double& y, double& z, int& mark);
int getNumTets() const;
void getTet(int i, int& p1, int& p2, int& p3, int&p4);
void getNeighbor(int i, int& t1, int& t2, int& t3, int& t4);
int getNumFaces() const;
void getTriFace(int i, int& p1, int& p2, int& p3, int& mark);
int getNumEdges() const;
void getEdge(int i, int& p1, int& p2, int& mark);
// clear
void clear();
private:
void reset();
tetgenio in, out;
std::vector<double> pointlist;
std::vector<int> pointmarkerlist;
std::vector<Facet> facetlist;
std::vector<int> facetmarkerlist;
std::vector<int> tetrahedronlist;
std::vector<int> neighborlist;
std::vector<double> holelist;
std::vector<int> trifacelist;
std::vector<int> trifacemarkerlist;
std::vector<int> edgelist;
std::vector<int> edgemarkerlist;
int numberofcorners;
};
#endif
|
shineTeam7/tank | develop/server/project/base/src/main/java/com/home/base/constlist/generate/GTriggerFunctionType.java | package com.home.base.constlist.generate;
/** (generated by shine) */
public class GTriggerFunctionType
{
/** 起始 */
public static final int off=1000;
/** 计数 */
public static final int count=1001;
public static final int GTestFunc=1000;
}
|
rokkish/growi | packages/app/src/migrations/20191126173016-adjust-pages-path.js | <reponame>rokkish/growi
import mongoose from 'mongoose';
import { pathUtils, getMongoUri, mongoOptions } from '@growi/core';
import loggerFactory from '~/utils/logger';
const logger = loggerFactory('growi:migrate:adjust-pages-path');
module.exports = {
async up(db) {
logger.info('Apply migration');
mongoose.connect(getMongoUri(), mongoOptions);
const Page = require('~/server/models/page')();
// retrieve target data
const pages = await Page.find({ path: /^(?!\/)/ });
// create requests for bulkWrite
const requests = pages.map((page) => {
const adjustedPath = pathUtils.addHeadingSlash(page.path);
return {
updateOne: {
filter: { _id: page._id },
update: { $set: { path: adjustedPath } },
},
};
});
if (requests.length > 0) {
await db.collection('pages').bulkWrite(requests);
}
logger.info('Migration has successfully applied');
},
down(db) {
// do not rollback
},
};
|
wiltonlazary/snappydata | cluster/src/test/scala/org/apache/spark/sql/kafka010/SnappyStructuredKafkaSuite.scala | <gh_stars>1000+
/*
* Copyright (c) 2017-2019 TIBCO Software Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package org.apache.spark.sql.kafka010
import java.util.concurrent.atomic.AtomicInteger
import io.snappydata.{Property, SnappyFunSuite}
import org.apache.kafka.common.TopicPartition
import org.scalatest.concurrent.Eventually
import org.scalatest.{BeforeAndAfter, BeforeAndAfterAll}
import org.apache.spark.SparkConf
import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.encoders.RowEncoder
import org.apache.spark.sql.functions.{count, window}
import org.apache.spark.sql.streaming.ProcessingTime
// Simple value class used by the streaming tests in this file.
case class Account(accountName: String)
class SnappyStructuredKafkaSuite extends SnappyFunSuite with Eventually
with BeforeAndAfter with BeforeAndAfterAll {
  // Spark session shared by every test in this suite.
  private lazy val session = snc.sparkSession

  // Embedded Kafka broker helper; created in beforeAll, torn down in afterAll.
  private var kafkaTestUtils: KafkaTestUtils = _

  protected override def newSparkConf(addOn: (SparkConf) => SparkConf): SparkConf = {
    super.newSparkConf((conf: SparkConf) => {
      // conf.set(Property.TestDisableCodeGenFlag.name , "false")
      conf
    })
  }
  // Start an embedded Kafka (and ZooKeeper) instance before any test runs.
  override def beforeAll() {
    super.beforeAll()
    kafkaTestUtils = new KafkaTestUtils
    kafkaTestUtils.setup()
  }
  // Tear down the embedded Kafka instance and release the reference so it
  // cannot be reused after shutdown.
  override def afterAll() {
    super.afterAll()
    if (kafkaTestUtils != null) {
      kafkaTestUtils.teardown()
      kafkaTestUtils = null
    }
  }
  // Short name of this suite, derived from the class name.
  def framework: String = this.getClass.getSimpleName
  // Monotonic counter so each test gets a fresh, unique Kafka topic name.
  private val topicId = new AtomicInteger(0)

  private def newTopic(): String = s"topic-${topicId.getAndIncrement()}"
  // End-to-end test: Kafka source -> parse "id,name" strings into Rows ->
  // snappysink into a keyed column table.  Expected count is
  // 101 (ids 100..200) + 11 (ids 10..20) + 1 = 113 rows.
  test("SnappyData Structured Streaming with Kafka") {
    import session.implicits._
    snc.sql("drop table if exists users")
    snc.sql("create table users (id int, name string) using column options(key_columns 'id')")
    val topic = newTopic()
    kafkaTestUtils.createTopic(topic, partitions = 3)
    kafkaTestUtils.sendMessages(topic,
      (100 to 200).map(i => i.toString + ",name_" + i).toArray, Some(0))
    kafkaTestUtils.sendMessages(topic,
      (10 to 20).map(i => i.toString + ",name_" + i).toArray, Some(1))
    kafkaTestUtils.sendMessages(topic, Array("1,name_1"), Some(2))

    val streamingDF = session
      .readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", kafkaTestUtils.brokerAddress)
      .option("subscribe", topic)
      .option("startingOffsets", "earliest")
      .load

    // Encoder for Rows matching the target table's schema.
    implicit val encoder = RowEncoder(snc.table("users").schema)
    val streamingQuery = streamingDF
      .selectExpr("CAST(value AS STRING)")
      .as[String]
      .map(_.split(","))
      .map(r => {
        Row(r(0).toInt, r(1))
      })
      .writeStream
      .format("snappysink")
      .queryName("simple")
      .outputMode("append")
      .trigger(ProcessingTime("1 seconds"))
      .option("tablename", "APP.USERS").option("streamqueryid", "abc")
      .option("checkpointLocation", "/tmp/snappyTable")
      .start

    streamingQuery.processAllAvailable()
    assert(113 == session.sql("select * from APP.USERS").count)
  }
  // Reads from explicit per-partition starting offsets (via JsonUtils) into
  // an in-memory sink table; messages are sent AFTER the query starts.
  // Expected count is 101 + 11 + 1 = 113 rows.
  test("ETL Job") {
    import session.implicits._
    val topic = newTopic()
    kafkaTestUtils.createTopic(topic, partitions = 3)

    val partitions = Map(
      new TopicPartition(topic, 0) -> 0L,
      new TopicPartition(topic, 1) -> 0L,
      new TopicPartition(topic, 2) -> 0L
    )
    val startingOffsets = JsonUtils.partitionOffsets(partitions)

    val streamingDF = session
      .readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", kafkaTestUtils.brokerAddress)
      .option("kafka.metadata.max.age.ms", "1")
      .option("maxOffsetsPerTrigger", 10)
      .option("subscribe", topic)
      .option("startingOffsets", startingOffsets)
      .load

    val streamingQuery = streamingDF
      .selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)")
      .as[(String, String)]
      .writeStream
      .format("memory")
      // .option("checkpointLocation", "/tmp/etl")
      .queryName("snappyTable")
      .outputMode("append")
      .trigger(ProcessingTime("1 seconds"))
      .start

    kafkaTestUtils.sendMessages(topic, (100 to 200).map(_.toString).toArray, Some(0))
    kafkaTestUtils.sendMessages(topic, (10 to 20).map(_.toString).toArray, Some(1))
    kafkaTestUtils.sendMessages(topic, Array("1"), Some(2))

    streamingQuery.processAllAvailable()
    assert(113 == session.sql("select * from snappyTable").count)
  }
  // Unwindowed (infinite) aggregation in complete output mode.  Each value
  // in 100..150 is sent exactly twice across the three partitions, so the
  // result has 51 groups with an average count of 2.0.
  test("infinite streaming aggregation") {
    import session.implicits._
    val topic = newTopic()
    kafkaTestUtils.createTopic(topic, partitions = 3)

    val partitions = Map(
      new TopicPartition(topic, 0) -> 0L,
      new TopicPartition(topic, 1) -> 0L,
      new TopicPartition(topic, 2) -> 0L
    )
    val startingOffsets = JsonUtils.partitionOffsets(partitions)

    val streamingDF = session
      .readStream
      .format("kafka")
      .option("kafka.bootstrap.servers", kafkaTestUtils.brokerAddress)
      .option("kafka.metadata.max.age.ms", "1")
      .option("maxOffsetsPerTrigger", 10)
      .option("subscribe", topic)
      .option("startingOffsets", startingOffsets)
      .option("failOnDataLoss", "false")
      .load

    val streamingQuery = streamingDF
      .selectExpr("CAST(key AS STRING)", "CAST(value AS STRING)").groupBy("value").count()
      .as[(String, String)]
      .writeStream
      .format("memory")
      // Unique checkpoint dir per run so reruns do not resume old state.
      .option("checkpointLocation", "/tmp/infinite-" + System.currentTimeMillis())
      .queryName("snappyAggrTable")
      .outputMode("complete")
      .trigger(ProcessingTime("1 seconds"))
      .start

    kafkaTestUtils.sendMessages(topic, (100 to 150).map(_.toString).toArray, Some(0))
    kafkaTestUtils.sendMessages(topic, (125 to 150).map(_.toString).toArray, Some(1))
    kafkaTestUtils.sendMessages(topic, (100 to 124).map(_.toString).toArray, Some(2))

    streamingQuery.processAllAvailable()
    assert(51 == session.sql("select * from snappyAggrTable").count)
    assert(2.0 == session.sql("select avg(count) from snappyAggrTable").collect()(0).getDouble(0))
  }
test("sliding window aggregation") {
import session.implicits._
val topic = newTopic()
kafkaTestUtils.createTopic(topic, partitions = 3)
val partitions = Map(
new TopicPartition(topic, 0) -> 0L,
new TopicPartition(topic, 1) -> 0L,
new TopicPartition(topic, 2) -> 0L
)
val startingOffsets = JsonUtils.partitionOffsets(partitions)
kafkaTestUtils.sendMessages(topic, (100 to 150).map(_.toString).toArray, Some(0))
kafkaTestUtils.sendMessages(topic, (125 to 150).map(_.toString).toArray, Some(1))
kafkaTestUtils.sendMessages(topic, (100 to 124).map(_.toString).toArray, Some(2))
val streamingDF = session
.readStream
.format("kafka")
.option("kafka.bootstrap.servers", kafkaTestUtils.brokerAddress)
.option("kafka.metadata.max.age.ms", "1")
.option("maxOffsetsPerTrigger", 10)
.option("subscribe", topic)
.option("startingOffsets", startingOffsets)
.option("failOnDataLoss", "false")
.load
val windowedAggregation = streamingDF
.groupBy(window($"timestamp", "1 seconds") as 'window)
.agg(count("*") as 'count)
.select($"window".getField("start") as 'window, $"count")
val streamingQuery = windowedAggregation
.writeStream
.format("memory")
.option("checkpointLocation", "/tmp/snappyWindowAggrTable")
.outputMode("complete")
.queryName("snappyWindowAggrTable")
.start()
streamingQuery.processAllAvailable()
logInfo(session.sql("select * from snappyWindowAggrTable").limit(200).collect().mkString("\n"))
streamingQuery.stop()
}
test("streaming join to snappy table") {
import session.implicits._
val rdd = snc.sparkContext.parallelize((15 to 25).map(i => Account(i.toString)))
val dfBlackList = snc.createDataFrame(rdd)
// create a SnappyData table
snc.createTable("blacklist", "row", dfBlackList.schema, Map.empty[String, String])
import org.apache.spark.sql.snappy._
dfBlackList.write.putInto("blacklist") // populate the table 'blacklist'.
val topic = newTopic()
kafkaTestUtils.createTopic(topic, partitions = 3)
// Read the accounts from Kafka source
val acctStreamingDF = session
.readStream
.format("kafka")
.option("kafka.bootstrap.servers", kafkaTestUtils.brokerAddress)
.option("subscribe", topic)
.option("startingOffsets", "earliest").load
.selectExpr("CAST(value AS STRING) accountName").as[(String)]
val streamingQuery = acctStreamingDF.join(session.table("blacklist"), "accountName")
.writeStream
.outputMode("append")
.format("memory")
.queryName("snappyResultTable")
.trigger(ProcessingTime("1 seconds"))
.start
kafkaTestUtils.sendMessages(topic, (10 to 18).map(_.toString).toArray, Some(1))
kafkaTestUtils.sendMessages(topic, (20 to 30).map(_.toString).toArray, Some(2))
streamingQuery.processAllAvailable()
assert(10 == session.sql("select * from snappyResultTable").count)
}
// Unsupported operations with streaming DataFrames/Datasets -
// Multiple streaming aggregations (i.e. a chain of aggregations on a
// streaming DF) are not yet supported on streaming Datasets.
// Limit and take first N rows are not supported on streaming Datasets.
// Distinct operations on streaming Datasets are not supported.
// Sorting operations are supported on streaming Datasets only after
// an aggregation and in Complete Output Mode.
// Outer joins between a streaming and a static Datasets are conditionally supported.
// Full outer join with a streaming Dataset is not supported
// Left outer join with a streaming Dataset on the right is not supported
// Right outer join with a streaming Dataset on the left is not supported
// Any kind of joins between two streaming Datasets are not yet supported.
// They are actions that will immediately run queries and return results,
// which does not make sense on a streaming Dataset.
// count() - Cannot return a single count from a streaming Dataset.
// Instead, use ds.groupBy.count() which returns a streaming Dataset containing a running count.
// foreach() - Instead use ds.writeStream.foreach(...).
// show() - Instead use the console sink.
// sorting on the input stream is not supported, as it requires keeping
// track of all the data received in the stream.
// This is therefore fundamentally hard to execute efficiently.
}
|
zwx14700/pravega | client/src/test/java/io/pravega/client/stream/notifications/CustomNotifier.java | <gh_stars>1-10
/**
* Copyright (c) Dell Inc., or its subsidiaries. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*/
package io.pravega.client.stream.notifications;
import java.util.concurrent.ScheduledExecutorService;
import io.pravega.client.stream.notifications.notifier.AbstractNotifier;
/**
 * Notifier used in tests for {@link CustomNotification} events. All
 * registration and listener plumbing is inherited from {@link AbstractNotifier};
 * this class only supplies the notification type identifier.
 */
public class CustomNotifier extends AbstractNotifier<CustomNotification> {

    public CustomNotifier(final NotificationSystem system, final ScheduledExecutorService executor) {
        super(system, executor);
    }

    /**
     * @return the simple class name of {@link CustomNotification}, used as the
     *         type key under which listeners are registered.
     */
    @Override
    public String getType() {
        final Class<CustomNotification> notificationClass = CustomNotification.class;
        return notificationClass.getSimpleName();
    }
}
|
ppartarr/azure-sdk-for-java | sdk/resourcemanager/azure-resourcemanager-appplatform/src/main/java/com/azure/resourcemanager/appplatform/models/TestKeys.java | // Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.appplatform.models;
import com.azure.core.annotation.Fluent;
import com.azure.core.util.logging.ClientLogger;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
 * Model holding the primary/secondary test keys and test endpoints of an
 * Azure Spring Apps service, plus the enabled flag of the test endpoint
 * feature. All setters are fluent and return {@code this}.
 */
@Fluent
public final class TestKeys {
    @JsonIgnore private final ClientLogger logger = new ClientLogger(TestKeys.class);

    /*
     * Primary key
     */
    @JsonProperty("primaryKey")
    private String primaryKey;

    /*
     * Secondary key
     */
    @JsonProperty("secondaryKey")
    private String secondaryKey;

    /*
     * Primary test endpoint
     */
    @JsonProperty("primaryTestEndpoint")
    private String primaryTestEndpoint;

    /*
     * Secondary test endpoint
     */
    @JsonProperty("secondaryTestEndpoint")
    private String secondaryTestEndpoint;

    /*
     * Indicates whether the test endpoint feature enabled or not
     */
    @JsonProperty("enabled")
    private Boolean enabled;

    /**
     * Returns the primary key.
     *
     * @return the primaryKey value.
     */
    public String primaryKey() {
        return primaryKey;
    }

    /**
     * Sets the primary key.
     *
     * @param primaryKey the primaryKey value to set.
     * @return the TestKeys object itself.
     */
    public TestKeys withPrimaryKey(String primaryKey) {
        this.primaryKey = primaryKey;
        return this;
    }

    /**
     * Returns the secondary key.
     *
     * @return the secondaryKey value.
     */
    public String secondaryKey() {
        return secondaryKey;
    }

    /**
     * Sets the secondary key.
     *
     * @param secondaryKey the secondaryKey value to set.
     * @return the TestKeys object itself.
     */
    public TestKeys withSecondaryKey(String secondaryKey) {
        this.secondaryKey = secondaryKey;
        return this;
    }

    /**
     * Returns the primary test endpoint.
     *
     * @return the primaryTestEndpoint value.
     */
    public String primaryTestEndpoint() {
        return primaryTestEndpoint;
    }

    /**
     * Sets the primary test endpoint.
     *
     * @param primaryTestEndpoint the primaryTestEndpoint value to set.
     * @return the TestKeys object itself.
     */
    public TestKeys withPrimaryTestEndpoint(String primaryTestEndpoint) {
        this.primaryTestEndpoint = primaryTestEndpoint;
        return this;
    }

    /**
     * Returns the secondary test endpoint.
     *
     * @return the secondaryTestEndpoint value.
     */
    public String secondaryTestEndpoint() {
        return secondaryTestEndpoint;
    }

    /**
     * Sets the secondary test endpoint.
     *
     * @param secondaryTestEndpoint the secondaryTestEndpoint value to set.
     * @return the TestKeys object itself.
     */
    public TestKeys withSecondaryTestEndpoint(String secondaryTestEndpoint) {
        this.secondaryTestEndpoint = secondaryTestEndpoint;
        return this;
    }

    /**
     * Returns whether the test endpoint feature is enabled.
     *
     * @return the enabled value.
     */
    public Boolean enabled() {
        return enabled;
    }

    /**
     * Sets whether the test endpoint feature is enabled.
     *
     * @param enabled the enabled value to set.
     * @return the TestKeys object itself.
     */
    public TestKeys withEnabled(Boolean enabled) {
        this.enabled = enabled;
        return this;
    }

    /**
     * Validates the instance.
     *
     * @throws IllegalArgumentException thrown if the instance is not valid.
     */
    public void validate() {
        // No required properties; nothing to validate.
    }
}
|
InCadence/coalesce | src/Coalesce.Services/Search/service-data/src/test/java/com/incadencecorp/coalesce/services/search/service/rest/TemplateDataControllerTest.java | <filename>src/Coalesce.Services/Search/service-data/src/test/java/com/incadencecorp/coalesce/services/search/service/rest/TemplateDataControllerTest.java<gh_stars>1-10
/*-----------------------------------------------------------------------------'
Copyright 2017 - InCadence Strategic Solutions Inc., All Rights Reserved
Notwithstanding any contractor copyright notice, the Government has Unlimited
Rights in this work as defined by DFARS 252.227-7013 and 252.227-7014. Use
of this work other than as specifically authorized by these DFARS Clauses may
violate Government rights in this work.
DFARS Clause reference: 252.227-7013 (a)(16) and 252.227-7014 (a)(16)
Unlimited Rights. The Government has the right to use, modify, reproduce,
perform, display, release or disclose this computer software and to have or
authorize others to do so.
Distribution Statement D. Distribution authorized to the Department of
Defense and U.S. DoD contractors only in support of U.S. DoD efforts.
-----------------------------------------------------------------------------*/
package com.incadencecorp.coalesce.services.search.service.rest;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.incadencecorp.coalesce.api.Views;
import com.incadencecorp.coalesce.framework.CoalesceFramework;
import com.incadencecorp.coalesce.framework.datamodel.*;
import com.incadencecorp.coalesce.framework.persistance.derby.DerbyPersistor;
import com.incadencecorp.coalesce.services.search.service.data.controllers.TemplateDataController;
import com.incadencecorp.coalesce.services.search.service.data.model.CoalesceObjectImpl;
import com.incadencecorp.coalesce.services.search.service.data.model.FieldData;
import org.junit.Assert;
import org.junit.Test;
import java.rmi.RemoteException;
import java.util.List;
import java.util.UUID;
/**
 * Unit tests for {@code TemplateDataController}, backed by a
 * {@code CoalesceFramework} using the Derby persistor. Each test builds its
 * own controller via {@link #createController()}, which pre-registers one
 * {@code TestEntity} template.
 */
public class TemplateDataControllerTest {
// Saving a template derived from an entity should register it under the
// template's own key (regardless of the key argument passed to setTemplate)
// and make it retrievable alongside the pre-existing template.
@Test
public void testSavingTemplate() throws Exception
{
TemplateDataController controller = createController();
Assert.assertEquals(1, controller.getEntityTemplateMetadata().size());
String key = controller.getEntityTemplateMetadata().get(0).getKey();
CoalesceEntity template1 = controller.getTemplate(key);
TestEntity entity = new TestEntity();
entity.initialize(template1);
entity.setName("HelloWorld");
CoalesceEntityTemplate template2 = CoalesceEntityTemplate.create(entity);
Assert.assertNotEquals(template2.getKey(), entity.getKey());
// setTemplate returns the template's canonical key even when the caller
// supplies a different key ("new" below).
Assert.assertEquals(template2.getKey(), controller.setTemplate(template2.getKey(), entity));
Assert.assertEquals(template2.getKey(), controller.setTemplate("new", entity));
Assert.assertEquals(2, controller.getEntityTemplateMetadata().size());
Assert.assertEquals(2, controller.getEntityTemplateMetadata().size());
Assert.assertEquals(template1.getKey(), controller.getTemplate(template1.getKey()).getKey());
Assert.assertEquals(template2.getKey(), controller.getTemplate(template2.getKey()).getKey());
}
// @Test
// public void testInvalidCases() throws Exception
// {
// TemplateDataController controller = createController();
//
// Assert.assertEquals(1, controller.getEntityTemplateMetadata().size());
// String key = controller.getEntityTemplateMetadata().get(0).getKey();
//
// String randomKey = UUID.randomUUID().toString();
//
// // Test Invalid Keys
// Assert.assertNull(controller.getTemplate(randomKey));
// Assert.assertEquals(0, controller.getRecordSets(randomKey).size());
// Assert.assertEquals(0, controller.getRecordSetFields(randomKey,
// randomKey).size());
// Assert.assertEquals(0, controller.getRecordSetFields(key,
// randomKey).size());
// Assert.assertEquals(false, controller.setTemplate(null));
// }
// Looking up a template by a random (unknown) key should fail.
@Test(expected = RemoteException.class)
public void testInValidTemplate() throws Exception
{
TemplateDataController controller = createController();
controller.getTemplate(UUID.randomUUID().toString());
}
// Looking up a template with a null key should fail.
@Test(expected = RemoteException.class)
public void testInValidNullTemplate() throws Exception
{
TemplateDataController controller = createController();
controller.getTemplate(null);
}
// Requesting recordsets for an unknown template key should fail.
@Test(expected = RemoteException.class)
public void testInValidTemplateRecordset() throws Exception
{
TemplateDataController controller = createController();
controller.getRecordSets(UUID.randomUUID().toString());
}
// Requesting fields when both template and recordset keys are unknown should fail.
@Test(expected = RemoteException.class)
public void testInValidTemplateRecordsetAndFields() throws Exception
{
TemplateDataController controller = createController();
String randomKey = UUID.randomUUID().toString();
Assert.assertEquals(0, controller.getRecordSetFields(randomKey, randomKey).size());
}
// Requesting fields for a valid template but unknown recordset key should fail.
@Test(expected = RemoteException.class)
public void testInValidTemplateFields() throws Exception
{
TemplateDataController controller = createController();
Assert.assertEquals(1, controller.getEntityTemplateMetadata().size());
String key = controller.getEntityTemplateMetadata().get(0).getKey();
controller.getRecordSetFields(key, UUID.randomUUID().toString());
}
// A saved template should expose the built-in CoalesceEntity and
// CoalesceLinkage recordsets plus the two user-defined recordsets, and the
// entity recordset's field names/types should match the Coalesce attributes.
@Test
public void testRecordsets() throws Exception
{
TemplateDataController controller = createController();
CoalesceEntity entity = CoalesceEntity.create("template controller test", "unit test", "1");
entity.initialize();
CoalesceSection section = CoalesceSection.create(entity, "section");
TestRecord.createCoalesceRecordset(section, "rs-1").setMaxRecords(1);
TestRecord.createCoalesceRecordset(section, "rs-2");
CoalesceEntityTemplate template = CoalesceEntityTemplate.create(entity);
Assert.assertEquals(template.getKey(), controller.setTemplate(template.getKey(), template.createNewEntity()));
List<CoalesceObjectImpl> results = controller.getRecordSets(template.getKey());
// Verify Recordsets
Assert.assertEquals(4, results.size());
Assert.assertEquals(CoalesceEntity.class.getSimpleName(), results.get(0).getName());
Assert.assertEquals(CoalesceEntity.class.getSimpleName(), results.get(0).getKey());
Assert.assertEquals(CoalesceLinkage.class.getSimpleName(), results.get(1).getName());
Assert.assertEquals(CoalesceLinkage.class.getSimpleName(), results.get(1).getKey());
Assert.assertEquals("rs-1", results.get(2).getName());
Assert.assertEquals("rs-2", results.get(3).getName());
// Verify Fields
List<FieldData> fieldResults = controller.getRecordSetFields(template.getKey(), results.get(0).getKey());
Assert.assertEquals("objectkey", fieldResults.get(0).getName());
Assert.assertEquals(CoalesceEntity.ATTRIBUTE_TITLE, fieldResults.get(1).getName());
Assert.assertEquals(CoalesceEntity.ATTRIBUTE_NAME, fieldResults.get(2).getName());
Assert.assertEquals(CoalesceEntity.ATTRIBUTE_SOURCE, fieldResults.get(3).getName());
Assert.assertEquals(CoalesceEntity.ATTRIBUTE_DATECREATED, fieldResults.get(4).getName());
Assert.assertEquals("creator", fieldResults.get(5).getName());
Assert.assertEquals(CoalesceEntity.ATTRIBUTE_LASTMODIFIED, fieldResults.get(6).getName());
Assert.assertEquals(CoalesceEntity.ATTRIBUTE_MODIFIEDBY, fieldResults.get(7).getName());
Assert.assertEquals(CoalesceEntity.ATTRIBUTE_STATUS, fieldResults.get(8).getName());
Assert.assertEquals(CoalesceEntity.ATTRIBUTE_ENTITYID, fieldResults.get(9).getName());
Assert.assertEquals(ECoalesceFieldDataTypes.GUID_TYPE, fieldResults.get(0).getDataType());
Assert.assertEquals(ECoalesceFieldDataTypes.STRING_TYPE, fieldResults.get(1).getDataType());
Assert.assertEquals(ECoalesceFieldDataTypes.STRING_TYPE, fieldResults.get(2).getDataType());
Assert.assertEquals(ECoalesceFieldDataTypes.STRING_TYPE, fieldResults.get(3).getDataType());
Assert.assertEquals(ECoalesceFieldDataTypes.DATE_TIME_TYPE, fieldResults.get(4).getDataType());
Assert.assertEquals(ECoalesceFieldDataTypes.STRING_TYPE, fieldResults.get(5).getDataType());
Assert.assertEquals(ECoalesceFieldDataTypes.DATE_TIME_TYPE, fieldResults.get(6).getDataType());
Assert.assertEquals(ECoalesceFieldDataTypes.STRING_TYPE, fieldResults.get(7).getDataType());
Assert.assertEquals(ECoalesceFieldDataTypes.ENUMERATION_TYPE, fieldResults.get(8).getDataType());
Assert.assertEquals(ECoalesceFieldDataTypes.STRING_TYPE, fieldResults.get(9).getDataType());
fieldResults = controller.getRecordSetFields(template.getKey(), results.get(1).getKey());
Assert.assertTrue(fieldResults.size() > 0);
controller.deleteTemplate(template.getKey());
}
/**
 * This test ensures that the label and default value attributes are preserved when saving and loading a template through the controller.
 */
@Test
public void testLabelsAndDefaultValues() throws Exception
{
TemplateDataController controller = createController();
CoalesceEntity entity = CoalesceEntity.create("template controller test", "unit test", "1");
entity.initialize();
CoalesceSection section = CoalesceSection.create(entity, "section");
CoalesceRecordset recordset = CoalesceRecordset.create(section, "rs");
CoalesceFieldDefinition fd = recordset.createFieldDefinition("myField", ECoalesceFieldDataTypes.STRING_TYPE);
fd.setLabel("Hello");
fd.setDefaultValue("Hello");
// Round-trip through the JSON representation used by the REST layer
// (Template view) before reading the template back.
ObjectMapper mapper = new ObjectMapper();
String json = mapper.writerWithView(Views.Template.class).writeValueAsString(entity);
controller.createTemplateJson(json);
CoalesceEntity template = controller.getTemplate(entity.getName(), entity.getSource(), entity.getVersion());
CoalesceRecordset templateRS = template.getCoalesceRecordsetForNamePath(recordset.getNamePath());
Assert.assertNotNull(templateRS);
CoalesceFieldDefinition templateFD = templateRS.getFieldDefinition(fd.getName());
Assert.assertEquals(fd.getLabel(), templateFD.getLabel());
Assert.assertEquals(fd.getDefaultValue(), templateFD.getDefaultValue());
controller.deleteTemplate(template.getKey());
}
/**
 * Builds a controller over a fresh Derby-backed CoalesceFramework with a
 * single TestEntity template already saved.
 */
private TemplateDataController createController() throws Exception
{
TestEntity entity = new TestEntity();
entity.initialize();
CoalesceEntityTemplate template = CoalesceEntityTemplate.create(entity);
CoalesceFramework framework = new CoalesceFramework();
framework.setAuthoritativePersistor(new DerbyPersistor());
framework.saveCoalesceEntityTemplate(template);
TemplateDataController controller = new TemplateDataController(framework);
Assert.assertNotNull(controller.getTemplate(template.getKey()));
return controller;
}
}
|
AY2122-CS2103-W17-1/tp | src/test/java/seedu/contax/testutil/TypicalTags.java | package seedu.contax.testutil;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import seedu.contax.model.AddressBook;
import seedu.contax.model.tag.Tag;
/**
 * Utility class supplying a fixed set of {@code Tag} objects to be used in tests.
 */
public class TypicalTags {
    public static final Tag CLIENTS = new TagBuilder().build();
    public static final Tag FAMILY = new TagBuilder().withName("family").build();
    public static final Tag COLLEAGUES = new TagBuilder().withName("colleagues").build();
    public static final Tag NEIGHBOURS = new TagBuilder().withName("neighbours").build();

    /**
     * Returns an {@code AddressBook} containing every typical tag and nothing else.
     */
    public static AddressBook getTagOnlyAddressBook() {
        AddressBook addressBook = new AddressBook();
        getTypicalTags().forEach(addressBook::addTag);
        return addressBook;
    }

    /**
     * Returns a fresh, mutable list of all the typical tags.
     */
    public static List<Tag> getTypicalTags() {
        return new ArrayList<>(Arrays.asList(CLIENTS, FAMILY, COLLEAGUES, NEIGHBOURS));
    }
}
|
e-neko/react-redux-grid | src/reducers/components/plugins/pager.js | import { OrderedMap } from 'immutable';
import {
PAGE_LOCAL,
PAGE_REMOTE
} from '../../../constants/ActionTypes';
import handleActions from './../../../util/handleActions';
import {
pageLocal,
pageRemote
} from './../../actionHelpers/plugins/pager';
// Pager reducer: dispatches local and remote paging actions to their
// respective helpers. State starts as an empty immutable OrderedMap.
const actionMap = {
    [PAGE_LOCAL]: pageLocal,
    [PAGE_REMOTE]: pageRemote
};

export default handleActions(actionMap, new OrderedMap());
|
IronTooch-Forks/guacamole-website | doc/1.4.0/libguac/search/variables_2.js | var searchData=
[
['channels_667',['channels',['../structguac__audio__stream.html#af9d1ad90194e24c2967e2f9f18de0ad6',1,'guac_audio_stream']]],
['client_668',['client',['../structguac__audio__stream.html#a1771fa5ff88b8f5d4ca4cd5e77a1ffba',1,'guac_audio_stream::client()'],['../structguac__user.html#a5e296149a26932dfeb7d8b0bac933ecf',1,'guac_user::client()']]],
['clipboard_5fhandler_669',['clipboard_handler',['../structguac__user.html#a77f042802092c8bbf4407836f3185343',1,'guac_user']]],
['connected_5fusers_670',['connected_users',['../structguac__client.html#a73aeff08af6cf6cadd7abce74c7ff4b2',1,'guac_client']]],
['connection_5fid_671',['connection_id',['../structguac__client.html#ad38b9ded1f04bfe8eb235e551fc08094',1,'guac_client']]],
['context_672',['context',['../structguac__socket__ssl__data.html#a22b295ba9b80cc15a5dfa4c06aea541f',1,'guac_socket_ssl_data']]]
];
|
benrayfield/occamsworkspace | selfContained/data/code/immutable/util/BlobUtil.java | <filename>selfContained/data/code/immutable/util/BlobUtil.java
package immutable.util;
/**
 * Static helpers that copy a range of values out of a {@link Blob} into a
 * primitive destination array. Argument order mirrors
 * {@link System#arraycopy}: source, source offset, destination, destination
 * offset, length. No bounds checking beyond what array access itself performs.
 */
public class BlobUtil {

    /** Copies {@code len} floats from {@code from} into {@code to}. */
    public static void arraycopy(Blob from, int fromIndex, float[] to, int toIndex, int len) {
        for (int offset = 0; offset < len; ++offset) {
            to[toIndex + offset] = from.f(fromIndex + offset);
        }
    }

    /** Copies {@code len} doubles from {@code from} into {@code to}. */
    public static void arraycopy(Blob from, int fromIndex, double[] to, int toIndex, int len) {
        for (int offset = 0; offset < len; ++offset) {
            to[toIndex + offset] = from.d(fromIndex + offset);
        }
    }

    /** Copies {@code len} ints from {@code from} into {@code to}. */
    public static void arraycopy(Blob from, int fromIndex, int[] to, int toIndex, int len) {
        for (int offset = 0; offset < len; ++offset) {
            to[toIndex + offset] = from.i(fromIndex + offset);
        }
    }

    /** Copies {@code len} longs from {@code from} into {@code to}. */
    public static void arraycopy(Blob from, int fromIndex, long[] to, int toIndex, int len) {
        for (int offset = 0; offset < len; ++offset) {
            to[toIndex + offset] = from.j(fromIndex + offset);
        }
    }

    /** Copies {@code len} bytes from {@code from} into {@code to}. */
    public static void arraycopy(Blob from, int fromIndex, byte[] to, int toIndex, int len) {
        for (int offset = 0; offset < len; ++offset) {
            to[toIndex + offset] = from.b(fromIndex + offset);
        }
    }
}
|
zhongwood/open-capacity-platform | business-center/user-center/src/main/java/com/open/capacity/user/service/SysMenuService.java | <gh_stars>10-100
package com.open.capacity.user.service;
import java.util.List;
import java.util.Set;
import com.open.capacity.model.system.SysMenu;
/**
 * Service interface for managing system menus and their assignment to roles.
 */
public interface SysMenuService {
/**
 * Creates a new menu.
 * @param menu the menu to save
 */
void save(SysMenu menu);
/**
 * Updates an existing menu.
 * @param menu the menu to update
 */
void update(SysMenu menu);
/**
 * Deletes the menu with the given id.
 * @param id the menu id
 */
void delete(Long id);
/**
 * Assigns the given set of menus to a role.
 * @param roleId the role id
 * @param menuIds the ids of the menus to assign
 */
void setMenuToRole(Long roleId, Set<Long> menuIds);
/**
 * Lists the menus assigned to any of the given roles.
 * @param roleIds the role ids
 * @return the menus visible to those roles
 */
List<SysMenu> findByRoles(Set<Long> roleIds);
/**
 * Lists all menus.
 * @return every menu in the system
 */
List<SysMenu> findAll();
/**
 * Retrieves a menu by its id.
 * @param id the menu id
 * @return the matching menu
 */
SysMenu findById(Long id);
/**
 * Retrieves the ids of the menus assigned to a role.
 * @param roleId the role id
 * @return the assigned menu ids
 */
Set<Long> findMenuIdsByRoleId(Long roleId);
// NOTE(review): name suggests this returns top-level (first-level) menus —
// confirm against the backing mapper before relying on it.
List<SysMenu> findOnes();
}
|
vinnyfs89/vota-cultura | webapp/src/modules/conta/store/actions.js | <filename>webapp/src/modules/conta/store/actions.js
import { remove, includes } from 'lodash';
import * as usuarioService from '../service/usuario';
import * as types from './types';
import { obterInformacoesJWT } from '../../shared/service/helpers/jwt';
/* eslint-disable import/prefer-default-export */
// Authenticates the user via the usuario service. On success, stores the JWT
// access token in localStorage, shows a success message, and triggers
// tratarUsuarioLogado to populate the store. Rethrows on failure after
// surfacing the API error message.
export const autenticarUsuario = async ({ commit, dispatch }, usuario) => {
commit(types.AUTENTICAR_USUARIO);
return usuarioService.login(usuario)
.then((response) => {
const { data } = response;
if (!!data && data.access_token) {
localStorage.setItem('token_usuario', data.access_token);
}
dispatch(
'app/setMensagemSucesso',
'Login efetuado com sucesso!',
{ root: true },
);
dispatch('conta/tratarUsuarioLogado', null, { root: true });
return response;
}).catch((error) => {
dispatch(
'app/setMensagemErro',
error.response.data.error,
{ root: true },
);
throw new TypeError(error, 'autenticarUsuario', 10);
});
};
// Decodes the JWT stored in localStorage and, when the store has no user yet,
// commits the user's profile (if present) and the remaining user payload.
export const tratarUsuarioLogado = ({ commit, state }) => {
commit(types.TRATAR_USUARIO, localStorage.getItem('token_usuario'));
const informacoesToken = obterInformacoesJWT(localStorage.getItem('token_usuario'));
if (informacoesToken.user && Object.keys(state.usuario).length < 1) {
const { perfil } = informacoesToken.user;
if (Object.keys(perfil).length > 0) {
commit(types.DEFINIR_PERFIL, perfil);
// Remove the profile from the user payload; it is stored separately.
delete informacoesToken.user.perfil;
}
commit(types.DEFINIR_USUARIO, informacoesToken.user);
}
};
// Activates a user account. Commits the mutation and returns the service
// promise so callers can await completion and handle failures (previously the
// call was fire-and-forget, inconsistent with recuperarSenha and
// solicitarPrimeiroAcesso which return their service promises).
export const ativarUsuario = async ({ commit }, ativacao) => {
  commit(types.ATIVAR_USUARIO, ativacao);
  return usuarioService.ativarUsuario(ativacao);
};
// Creates a user via the service, appends the created record to the store's
// user list, and shows a success message. Rethrows on failure after surfacing
// the API error message.
export const cadastrarUsuario = async ({ commit, dispatch }, usuario) => usuarioService.cadastrarUsuario(usuario).then((response) => {
const { data } = response.data;
commit(types.ATRIBUIR_USUARIO_CADASTRADO_LISTA, data);
dispatch(
'app/setMensagemSucesso',
'Usuário cadastrado com sucesso!',
{ root: true },
);
return response;
}).catch((error) => {
dispatch(
'app/setMensagemErro',
error.response.data.message,
{ root: true },
);
throw new TypeError(error);
});
// Updates an existing user via the service, refreshes the record in the
// store's user list, and shows a success message. Rethrows on failure after
// surfacing the API error message.
export const atualizarUsuario = async ({ commit, dispatch }, usuario) => usuarioService.atualizarUsuario(usuario).then((response) => {
const { data } = response.data;
commit(types.ATUALIZAR_USUARIO_LISTA, data);
dispatch(
'app/setMensagemSucesso',
'Usuário atualizado com sucesso.',
{ root: true },
);
return response;
}).catch((error) => {
dispatch(
'app/setMensagemErro',
error.response.data.message,
{ root: true },
);
throw new TypeError(error);
});
// Upserts a user: routes to atualizarUsuario when a primary key (co_usuario)
// is present, otherwise to cadastrarUsuario. Returns the delegated action's
// promise. (Dropped the unused `commit` from the destructured context.)
export const salvarUsuario = async ({ dispatch }, usuario) => {
  if (usuario.co_usuario) {
    return dispatch('atualizarUsuario', usuario);
  }
  return dispatch('cadastrarUsuario', usuario);
};
// Starts the password-recovery flow: commits the mutation and returns the
// service promise so callers can await the API call.
export const recuperarSenha = async ({ commit }, payload) => {
commit(types.RECUPERAR_SENHA, payload);
return usuarioService.recuperarSenha(payload);
};
// Changes another user's password (admin flow). Commits the mutation and
// returns the service promise so callers can await completion and handle
// failures (previously fire-and-forget, inconsistent with recuperarSenha).
export const usuarioAlterarSenha = async ({ commit }, { coUsuario, usuario }) => {
  commit(types.ALTERAR_SENHA, { coUsuario, usuario });
  return usuarioService.usuarioAlterarSenha(
    coUsuario,
    usuario,
  );
};
// Logs the current user out. Commits the mutation and returns the service
// promise so callers can await completion (previously fire-and-forget).
export const logout = async ({ commit }) => {
  commit(types.LOGOUT, {});
  return usuarioService.logout({});
};
// Completes a password reset using the recovery code, then commits LOGOUT so
// the user must authenticate again with the new password.
export const alterarSenha = async ({ commit }, { codigoAlteracao, usuario }) => usuarioService.alterarSenha(codigoAlteracao, usuario).then((response) => {
commit(types.LOGOUT);
return response;
});
// Requests first-access credentials: commits the mutation and returns the
// service promise so callers can await the API call.
export const solicitarPrimeiroAcesso = async ({ commit }, payload) => {
commit(types.SOLICITAR_PRIMEIRO_ACESSO, payload);
return usuarioService.solicitarPrimeiroAcesso(payload);
};
// Loads the user list into the store. Returns the promise chain so callers
// can await completion (previously the chain was not returned, so awaiting
// this action resolved before the data was committed).
export const buscarUsuariosPerfis = async ({ commit }) => usuarioService.obterUsuarios().then((response) => {
  const { data } = response.data;
  commit(types.LISTAR_USUARIOS, data);
  return response;
});
// Loads the list of profiles into the store and resolves with the raw
// response so chained actions (e.g. buscarPerfisAlteracao) can reuse it.
export const buscarPerfis = async ({ commit }) => {
commit(types.BUSCAR_PERFIS);
return usuarioService.obterPerfis().then((response) => {
const { data } = response.data;
commit(types.DEFINIR_PERFIS, data);
return response;
});
};
// Loads the profile list and marks every profile other than 'administrador'
// and 'avaliador' as disabled before committing the list for the
// profile-change screen. Resolves with the original response.
export const buscarPerfisAlteracao = async ({ commit, dispatch }) => dispatch('buscarPerfis').then((response) => {
  const { data } = response.data;
  // forEach (not map): profiles are mutated in place and no mapped result is
  // used — the original map callback returned nothing for every element.
  data.forEach((perfil) => {
    if (perfil.no_perfil !== 'administrador' && perfil.no_perfil !== 'avaliador') {
      perfil.disabled = true;
    }
  });
  commit(types.DEFINIR_PERFIS_ALTERACAO, data);
  return response;
});
NillerMedDild/MiningGadgets | src/main/java/com/direwolf20/mininggadgets/client/MiningGadgetsJEI.java | //package com.direwolf20.mininggadgets.client;
//
//import com.direwolf20.mininggadgets.client.screens.ModificationTableScreen;
//import com.direwolf20.mininggadgets.common.Config;
//import com.direwolf20.mininggadgets.common.MiningGadgets;
//import com.direwolf20.mininggadgets.common.items.MiningGadget;
//import com.direwolf20.mininggadgets.common.items.ModItems;
//import mezz.jei.api.IModPlugin;
//import mezz.jei.api.JeiPlugin;
//import mezz.jei.api.gui.handlers.IGuiContainerHandler;
//import mezz.jei.api.ingredients.subtypes.IIngredientSubtypeInterpreter;
//import mezz.jei.api.registration.IGuiHandlerRegistration;
//import mezz.jei.api.registration.ISubtypeRegistration;
//import net.minecraft.client.renderer.Rect2i;
//import net.minecraft.world.item.ItemStack;
//import net.minecraft.resources.ResourceLocation;
//
//import java.util.ArrayList;
//import java.util.Collections;
//import java.util.List;
//
//@JeiPlugin
//public class MiningGadgetsJEI implements IModPlugin {
// @Override
// public ResourceLocation getPluginUid() {
// return new ResourceLocation(MiningGadgets.MOD_ID, "jei_plugin");
// }
//
// @Override
// public void registerItemSubtypes(ISubtypeRegistration registration) {
// IIngredientSubtypeInterpreter<ItemStack> chargedProvider = (stack, uid) -> {
// if (!(stack.getItem() instanceof MiningGadget)) {
// return IIngredientSubtypeInterpreter.NONE;
// }
//
// double energy = stack.getOrCreateTag().getDouble("energy");
// if (energy == 0) {
// return "empty";
// } else if (energy == Config.MININGGADGET_MAXPOWER.get()) {
// return "charged";
// }
//
// return IIngredientSubtypeInterpreter.NONE;
// };
//
// registration.registerSubtypeInterpreter(ModItems.MININGGADGET.get(), chargedProvider);
// registration.registerSubtypeInterpreter(ModItems.MININGGADGET_SIMPLE.get(), chargedProvider);
// registration.registerSubtypeInterpreter(ModItems.MININGGADGET_FANCY.get(), chargedProvider);
// }
//
// @Override
// public void registerGuiHandlers(IGuiHandlerRegistration registration) {
// registration.addGuiContainerHandler(ModificationTableScreen.class, new ModificationTableContainerHandler());
// }
//
// private static class ModificationTableContainerHandler implements IGuiContainerHandler<ModificationTableScreen> {
// @Override
// public List<Rect2i> getGuiExtraAreas(ModificationTableScreen containerScreen) {
// return new ArrayList<>(Collections.singleton(new Rect2i((containerScreen.width / 2) - 120, (containerScreen.height / 2) - 5, 25, 35)));
// }
// }
//}
|
andersongns/vutter-api | src/utils/helpers/hash-bcrypt-generator.js | <reponame>andersongns/vutter-api<filename>src/utils/helpers/hash-bcrypt-generator.js
const bcrypt = require('bcrypt')
const { MissingDependenceError, MissingParamError } = require('../errors')
module.exports = class HashBcryptGenerator {
constructor (salt) {
if (!salt) throw new MissingDependenceError('salt')
this.salt = salt
}
async generate (value) {
if (!value) throw new MissingParamError('value')
return bcrypt.hash(value, this.salt)
}
async verify (value, hashedValue) {
if (!value) throw new MissingParamError('value')
if (!hashedValue) throw new MissingParamError('hashedValue')
return bcrypt.compare(value, hashedValue)
}
}
|
JakeB1998/Aveona-Utility-Library | src/main/org/botka/utility/api/time/TimeConstants.java | /*
* File name: TimeConstants.java
*
* Programmer : <NAME>
* ULID: JMBOTKA
*
* Date: May 28, 2020
*
* Out Of Class Personal Program
*/
package main.org.botka.utility.api.time;
import java.time.Month;
/**
* <insert class description here>
*
* @author <NAME>
*
*/
/**
 * Common time-unit conversion constants plus a helper for looking up the
 * number of days in a month.
 */
public class TimeConstants {

	public static final int YEARS_IN_CENTURY = 100;
	public static final int DECADES_IN_CENTURY = 10;

	// Days
	public static final int DAYS_IN_YEAR = 365;
	public static final int DAYS_IN_YEAR_LEAP = 366;
	// FIXME: placeholder value. A decade spans 3652 or 3653 days depending on
	// how many leap years it contains; resolve the intended meaning before
	// relying on this (DAYS_IN_CENTURY inherits the placeholder as well).
	public static final int DAYS_IN_DECADE = -1;
	public static final int DAYS_IN_CENTURY = DAYS_IN_DECADE * DECADES_IN_CENTURY;

	// Hours
	public static final int HOURS_IN_DAY = 24;

	// Minutes
	public static final int MINUTES_IN_HOUR = 60;
	public static final int MINUTES_IN_DAY = MINUTES_IN_HOUR * HOURS_IN_DAY;

	// Seconds
	public static final int SECONDS_IN_MINUTE = 60;
	public static final int SECONDS_IN_HOUR = SECONDS_IN_MINUTE * 60;
	public static final int SECONDS_IN_DAY = SECONDS_IN_HOUR * 24;

	// Milliseconds
	public static final int MILLISECONDS_IN_SECOND = 1000;
	public static final int MILLISECONDS_IN_MINUTE = MILLISECONDS_IN_SECOND * 60;
	public static final long MILLISECONDS_IN_HOUR = MILLISECONDS_IN_MINUTE * 60;

	// Microseconds
	public static final int NANO_SECONDS_PER_MILLISECOND = 1000000;

	/**
	 * Returns the number of days in the given month of a non-leap year
	 * (February reports 28). Previously an unimplemented stub returning -1.
	 *
	 * @param month the month to query; may be null
	 * @return the day count, or -1 when {@code month} is null
	 */
	public static int daysInMonth(Month month) {
		if (month == null) {
			// Preserve the old stub's -1 sentinel for the null case.
			return -1;
		}
		// Month.length(false) gives the day count for a non-leap year.
		return month.length(false);
	}
}
|
aidan-mundy-forks/docker-cli | cli/command/trust/helpers.go | package trust
import (
"strings"
"github.com/docker/cli/cli/trust"
"github.com/theupdateframework/notary/client"
"github.com/theupdateframework/notary/tuf/data"
)
// releasedRoleName is the human-readable signer name shown for roles that
// count as "released" (see isReleasedTarget).
const releasedRoleName = "Repo Admin"

// releasesRoleTUFName is the TUF path of the delegated releases role.
const releasesRoleTUFName = "targets/releases"
// isReleasedTarget reports whether a role name is "released":
// either the canonical targets role or the targets/releases role.
func isReleasedTarget(role data.RoleName) bool {
	switch role {
	case data.CanonicalTargetsRole, trust.ReleasesRole:
		return true
	default:
		return false
	}
}
// notaryRoleToSigner converts a TUF role name to a human-understandable
// signer name. "targets" and "targets/releases" map to the generic repo-admin
// name; delegated roles are shown without their "targets/" prefix.
func notaryRoleToSigner(tufRole data.RoleName) string {
	// don't show a signer for "targets" or "targets/releases".
	// (tufRole is already a data.RoleName; the previous round-trip through
	// String() and back was a redundant conversion.)
	if isReleasedTarget(tufRole) {
		return releasedRoleName
	}
	return strings.TrimPrefix(tufRole.String(), "targets/")
}
// clearChangeList clears the notary staging changelist.
func clearChangeList(notaryRepo client.Repository) error {
	changeList, err := notaryRepo.GetChangelist()
	if err != nil {
		return err
	}
	return changeList.Clear("")
}
// getOrGenerateRootKeyAndInitRepo initializes the notary repository with a
// remotely managed snapshot key. An existing root key is reused when one is
// found; otherwise a new one is generated.
func getOrGenerateRootKeyAndInitRepo(notaryRepo client.Repository) error {
	rootKey, err := getOrGenerateNotaryKey(notaryRepo, data.CanonicalRootRole)
	if err != nil {
		return err
	}
	rootKeyIDs := []string{rootKey.ID()}
	return notaryRepo.Initialize(rootKeyIDs, data.CanonicalSnapshotRole)
}
|
atul-vyshnav/2021_IBM_Code_Challenge_StockIT | src/StockIT-v2-release_source_from_JADX/sources/expo/modules/updates/loader/EmbeddedLoader.java | <filename>src/StockIT-v2-release_source_from_JADX/sources/expo/modules/updates/loader/EmbeddedLoader.java
package expo.modules.updates.loader;
import android.content.Context;
import expo.modules.updates.UpdatesConfiguration;
import expo.modules.updates.UpdatesUtils;
import expo.modules.updates.manifest.Manifest;
import expo.modules.updates.p020db.UpdatesDatabase;
import expo.modules.updates.p020db.entity.AssetEntity;
import expo.modules.updates.p020db.entity.UpdateEntity;
import expo.modules.updates.p020db.enums.UpdateStatus;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
/**
 * Loads the update embedded in the APK: reads the embedded manifest
 * ("app.manifest") from the APK assets, registers the update in the updates
 * database, and copies its assets into the updates directory on disk.
 *
 * NOTE(review): this class was decompiled by JADX. readEmbeddedManifest,
 * copyContextAssetAndGetHash and copyResourceAndGetHash could not be
 * re-decompiled into Java; their instruction dumps are preserved in comments
 * and the Java bodies throw UnsupportedOperationException, exactly as the
 * decompiler emitted them.
 */
public class EmbeddedLoader {
    public static final String BARE_BUNDLE_FILENAME = "index.android.bundle";
    public static final String BUNDLE_FILENAME = "app.bundle";
    public static final String MANIFEST_FILENAME = "app.manifest";
    private static final String TAG = EmbeddedLoader.class.getSimpleName();
    // Process-wide cache; the embedded manifest is parsed at most once.
    private static Manifest sEmbeddedManifest = null;
    private UpdatesConfiguration mConfiguration;
    private Context mContext;
    private UpdatesDatabase mDatabase;
    // Per-load bookkeeping of asset copy outcomes; cleared by reset().
    private ArrayList<AssetEntity> mErroredAssetList = new ArrayList<>();
    private ArrayList<AssetEntity> mExistingAssetList = new ArrayList<>();
    private ArrayList<AssetEntity> mFinishedAssetList = new ArrayList<>();
    private float mPixelDensity;
    private ArrayList<AssetEntity> mSkippedAssetList = new ArrayList<>();
    private UpdateEntity mUpdateEntity;
    private File mUpdatesDirectory;

    public EmbeddedLoader(Context context, UpdatesConfiguration updatesConfiguration, UpdatesDatabase updatesDatabase, File file) {
        this.mContext = context;
        this.mConfiguration = updatesConfiguration;
        this.mDatabase = updatesDatabase;
        this.mUpdatesDirectory = file;
        // Used by pickClosestScale() to choose among scaled asset variants.
        this.mPixelDensity = context.getResources().getDisplayMetrics().density;
    }

    /**
     * Reads the embedded manifest and loads its update into the database and
     * updates directory.
     *
     * @return false when no embedded manifest is available, true otherwise
     */
    public boolean loadEmbeddedUpdate() {
        Manifest readEmbeddedManifest = readEmbeddedManifest(this.mContext, this.mConfiguration);
        if (readEmbeddedManifest == null) {
            return false;
        }
        boolean processManifest = processManifest(readEmbeddedManifest);
        reset();
        return processManifest;
    }

    /** Clears all per-load state so the loader can be reused. */
    public void reset() {
        this.mUpdateEntity = null;
        this.mErroredAssetList = new ArrayList<>();
        this.mSkippedAssetList = new ArrayList<>();
        this.mExistingAssetList = new ArrayList<>();
        this.mFinishedAssetList = new ArrayList<>();
    }

    /* JADX WARNING: Code restructure failed: missing block: B:16:0x0035, code lost:
        r0 = move-exception;
     */
    /* JADX WARNING: Code restructure failed: missing block: B:17:0x0036, code lost:
        if (r3 != null) goto L_0x0038;
     */
    /* JADX WARNING: Code restructure failed: missing block: B:19:?, code lost:
        r3.close();
     */
    /* JADX WARNING: Code restructure failed: missing block: B:23:0x0040, code lost:
        throw r0;
     */
    /* Code decompiled incorrectly, please refer to instructions dump. */
    public static expo.modules.updates.manifest.Manifest readEmbeddedManifest(android.content.Context r3, expo.modules.updates.UpdatesConfiguration r4) {
        /*
            boolean r0 = r4.hasEmbeddedUpdate()
            if (r0 != 0) goto L_0x0008
            r3 = 0
            return r3
        L_0x0008:
            expo.modules.updates.manifest.Manifest r0 = sEmbeddedManifest
            if (r0 != 0) goto L_0x0064
            android.content.res.AssetManager r3 = r3.getAssets()     // Catch:{ Exception -> 0x0041 }
            java.lang.String r0 = "app.manifest"
            java.io.InputStream r3 = r3.open(r0)     // Catch:{ Exception -> 0x0041 }
            java.lang.String r0 = "UTF-8"
            java.lang.String r0 = org.apache.commons.p029io.IOUtils.toString((java.io.InputStream) r3, (java.lang.String) r0)     // Catch:{ all -> 0x0033 }
            org.json.JSONObject r1 = new org.json.JSONObject     // Catch:{ all -> 0x0033 }
            r1.<init>(r0)     // Catch:{ all -> 0x0033 }
            java.lang.String r0 = "isVerified"
            r2 = 1
            r1.put(r0, r2)     // Catch:{ all -> 0x0033 }
            expo.modules.updates.manifest.Manifest r4 = expo.modules.updates.manifest.ManifestFactory.getEmbeddedManifest(r1, r4)     // Catch:{ all -> 0x0033 }
            sEmbeddedManifest = r4     // Catch:{ all -> 0x0033 }
            if (r3 == 0) goto L_0x0064
            r3.close()     // Catch:{ Exception -> 0x0041 }
            goto L_0x0064
        L_0x0033:
            r4 = move-exception
            throw r4     // Catch:{ all -> 0x0035 }
        L_0x0035:
            r0 = move-exception
            if (r3 == 0) goto L_0x0040
            r3.close()     // Catch:{ all -> 0x003c }
            goto L_0x0040
        L_0x003c:
            r3 = move-exception
            r4.addSuppressed(r3)     // Catch:{ Exception -> 0x0041 }
        L_0x0040:
            throw r0     // Catch:{ Exception -> 0x0041 }
        L_0x0041:
            r3 = move-exception
            java.lang.String r4 = TAG
            java.lang.String r0 = "Could not read embedded manifest"
            android.util.Log.e(r4, r0, r3)
            java.lang.AssertionError r4 = new java.lang.AssertionError
            java.lang.StringBuilder r0 = new java.lang.StringBuilder
            r0.<init>()
            java.lang.String r1 = "The embedded manifest is invalid or could not be read. Make sure you have configured expo-updates correctly in android/app/build.gradle. "
            r0.append(r1)
            java.lang.String r3 = r3.getMessage()
            r0.append(r3)
            java.lang.String r3 = r0.toString()
            r4.<init>(r3)
            throw r4
        L_0x0064:
            expo.modules.updates.manifest.Manifest r3 = sEmbeddedManifest
            return r3
        */
        throw new UnsupportedOperationException("Method not decompiled: expo.modules.updates.loader.EmbeddedLoader.readEmbeddedManifest(android.content.Context, expo.modules.updates.UpdatesConfiguration):expo.modules.updates.manifest.Manifest");
    }

    /**
     * Copies one embedded asset into the given destination file and returns
     * its SHA-256 hash. The asset is read from the APK assets when
     * embeddedAssetFilename is set, otherwise from app resources.
     *
     * @throws AssertionError when the entity carries neither an embedded
     *         asset filename nor a resources filename/folder pair
     */
    public static byte[] copyAssetAndGetHash(AssetEntity assetEntity, File file, Context context) throws NoSuchAlgorithmException, IOException {
        if (assetEntity.embeddedAssetFilename != null) {
            return copyContextAssetAndGetHash(assetEntity, file, context);
        }
        if (assetEntity.resourcesFilename != null && assetEntity.resourcesFolder != null) {
            return copyResourceAndGetHash(assetEntity, file, context);
        }
        throw new AssertionError("Failed to copy embedded asset " + assetEntity.key + " from APK assets or resources because not enough information was provided.");
    }

    /* JADX WARNING: Code restructure failed: missing block: B:11:0x0016, code lost:
        r0 = move-exception;
     */
    /* JADX WARNING: Code restructure failed: missing block: B:12:0x0017, code lost:
        if (r4 != null) goto L_0x0019;
     */
    /* JADX WARNING: Code restructure failed: missing block: B:14:?, code lost:
        r4.close();
     */
    /* JADX WARNING: Code restructure failed: missing block: B:18:0x0021, code lost:
        throw r0;
     */
    /* Code decompiled incorrectly, please refer to instructions dump. */
    public static byte[] copyContextAssetAndGetHash(expo.modules.updates.p020db.entity.AssetEntity r2, java.io.File r3, android.content.Context r4) throws java.security.NoSuchAlgorithmException, java.io.IOException {
        /*
            android.content.res.AssetManager r4 = r4.getAssets()     // Catch:{ Exception -> 0x0022 }
            java.lang.String r0 = r2.embeddedAssetFilename     // Catch:{ Exception -> 0x0022 }
            java.io.InputStream r4 = r4.open(r0)     // Catch:{ Exception -> 0x0022 }
            byte[] r3 = expo.modules.updates.UpdatesUtils.sha256AndWriteToFile(r4, r3)     // Catch:{ all -> 0x0014 }
            if (r4 == 0) goto L_0x0013
            r4.close()     // Catch:{ Exception -> 0x0022 }
        L_0x0013:
            return r3
        L_0x0014:
            r3 = move-exception
            throw r3     // Catch:{ all -> 0x0016 }
        L_0x0016:
            r0 = move-exception
            if (r4 == 0) goto L_0x0021
            r4.close()     // Catch:{ all -> 0x001d }
            goto L_0x0021
        L_0x001d:
            r4 = move-exception
            r3.addSuppressed(r4)     // Catch:{ Exception -> 0x0022 }
        L_0x0021:
            throw r0     // Catch:{ Exception -> 0x0022 }
        L_0x0022:
            r3 = move-exception
            java.lang.String r4 = TAG
            java.lang.StringBuilder r0 = new java.lang.StringBuilder
            r0.<init>()
            java.lang.String r1 = "Failed to copy asset "
            r0.append(r1)
            java.lang.String r2 = r2.embeddedAssetFilename
            r0.append(r2)
            java.lang.String r2 = r0.toString()
            android.util.Log.e(r4, r2, r3)
            throw r3
        */
        throw new UnsupportedOperationException("Method not decompiled: expo.modules.updates.loader.EmbeddedLoader.copyContextAssetAndGetHash(expo.modules.updates.db.entity.AssetEntity, java.io.File, android.content.Context):byte[]");
    }

    /* JADX WARNING: Code restructure failed: missing block: B:12:0x0024, code lost:
        r0 = move-exception;
     */
    /* JADX WARNING: Code restructure failed: missing block: B:13:0x0025, code lost:
        if (r6 != null) goto L_0x0027;
     */
    /* JADX WARNING: Code restructure failed: missing block: B:15:?, code lost:
        r6.close();
     */
    /* JADX WARNING: Code restructure failed: missing block: B:19:0x002f, code lost:
        throw r0;
     */
    /* Code decompiled incorrectly, please refer to instructions dump. */
    public static byte[] copyResourceAndGetHash(expo.modules.updates.p020db.entity.AssetEntity r4, java.io.File r5, android.content.Context r6) throws java.security.NoSuchAlgorithmException, java.io.IOException {
        /*
            android.content.res.Resources r0 = r6.getResources()
            java.lang.String r1 = r4.resourcesFilename
            java.lang.String r2 = r4.resourcesFolder
            java.lang.String r3 = r6.getPackageName()
            int r0 = r0.getIdentifier(r1, r2, r3)
            android.content.res.Resources r6 = r6.getResources()     // Catch:{ Exception -> 0x0030 }
            java.io.InputStream r6 = r6.openRawResource(r0)     // Catch:{ Exception -> 0x0030 }
            byte[] r5 = expo.modules.updates.UpdatesUtils.sha256AndWriteToFile(r6, r5)     // Catch:{ all -> 0x0022 }
            if (r6 == 0) goto L_0x0021
            r6.close()     // Catch:{ Exception -> 0x0030 }
        L_0x0021:
            return r5
        L_0x0022:
            r5 = move-exception
            throw r5     // Catch:{ all -> 0x0024 }
        L_0x0024:
            r0 = move-exception
            if (r6 == 0) goto L_0x002f
            r6.close()     // Catch:{ all -> 0x002b }
            goto L_0x002f
        L_0x002b:
            r6 = move-exception
            r5.addSuppressed(r6)     // Catch:{ Exception -> 0x0030 }
        L_0x002f:
            throw r0     // Catch:{ Exception -> 0x0030 }
        L_0x0030:
            r5 = move-exception
            java.lang.String r6 = TAG
            java.lang.StringBuilder r0 = new java.lang.StringBuilder
            r0.<init>()
            java.lang.String r1 = "Failed to copy asset "
            r0.append(r1)
            java.lang.String r4 = r4.embeddedAssetFilename
            r0.append(r4)
            java.lang.String r4 = r0.toString()
            android.util.Log.e(r6, r4, r5)
            throw r5
        */
        throw new UnsupportedOperationException("Method not decompiled: expo.modules.updates.loader.EmbeddedLoader.copyResourceAndGetHash(expo.modules.updates.db.entity.AssetEntity, java.io.File, android.content.Context):byte[]");
    }

    /**
     * Inserts the manifest's update into the database (unless an update with
     * the same id is already READY) and copies its assets.
     */
    private boolean processManifest(Manifest manifest) {
        UpdateEntity updateEntity = manifest.getUpdateEntity();
        UpdateEntity loadUpdateWithId = this.mDatabase.updateDao().loadUpdateWithId(updateEntity.f1253id);
        if (loadUpdateWithId == null || loadUpdateWithId.status != UpdateStatus.READY) {
            if (loadUpdateWithId == null) {
                this.mUpdateEntity = updateEntity;
                this.mDatabase.updateDao().insertUpdate(this.mUpdateEntity);
            } else {
                // Update is known but not READY yet; finish copying assets.
                this.mUpdateEntity = loadUpdateWithId;
            }
            copyAllAssets(manifest.getAssetEntityList());
            return true;
        }
        // Update already fully loaded; nothing to copy.
        this.mUpdateEntity = loadUpdateWithId;
        return true;
    }

    /**
     * Copies every asset into the updates directory, skipping scale variants
     * that don't match the device density and reusing files already on disk,
     * then records the results in the database. The update is marked finished
     * only when no asset failed to copy.
     */
    private void copyAllAssets(ArrayList<AssetEntity> arrayList) {
        Iterator<AssetEntity> it = arrayList.iterator();
        while (it.hasNext()) {
            AssetEntity next = it.next();
            if (shouldSkipAsset(next)) {
                this.mSkippedAssetList.add(next);
            } else {
                AssetEntity loadAssetWithKey = this.mDatabase.assetDao().loadAssetWithKey(next.key);
                if (loadAssetWithKey != null) {
                    this.mDatabase.assetDao().mergeAndUpdateAsset(loadAssetWithKey, next);
                    next = loadAssetWithKey;
                }
                if (next.relativePath == null || !new File(this.mUpdatesDirectory, next.relativePath).exists()) {
                    String createFilenameForAsset = UpdatesUtils.createFilenameForAsset(next);
                    File file = new File(this.mUpdatesDirectory, createFilenameForAsset);
                    if (file.exists()) {
                        next.relativePath = createFilenameForAsset;
                        this.mExistingAssetList.add(next);
                    } else {
                        try {
                            next.hash = copyAssetAndGetHash(next, file, this.mContext);
                            next.downloadTime = new Date();
                            next.relativePath = createFilenameForAsset;
                            this.mFinishedAssetList.add(next);
                        } catch (FileNotFoundException unused) {
                            StringBuilder sb = new StringBuilder();
                            sb.append("APK bundle must contain the expected embedded asset ");
                            sb.append(next.embeddedAssetFilename != null ? next.embeddedAssetFilename : next.resourcesFilename);
                            throw new AssertionError(sb.toString());
                        } catch (Exception unused2) {
                            this.mErroredAssetList.add(next);
                        }
                    }
                } else {
                    this.mExistingAssetList.add(next);
                }
            }
        }
        Iterator<AssetEntity> it2 = this.mExistingAssetList.iterator();
        while (it2.hasNext()) {
            AssetEntity next2 = it2.next();
            if (!this.mDatabase.assetDao().addExistingAssetToUpdate(this.mUpdateEntity, next2, next2.isLaunchAsset)) {
                byte[] bArr = null;
                try {
                    bArr = UpdatesUtils.sha256(new File(this.mUpdatesDirectory, next2.relativePath));
                } catch (Exception unused3) {
                }
                next2.downloadTime = new Date();
                next2.hash = bArr;
                this.mFinishedAssetList.add(next2);
            }
        }
        this.mDatabase.assetDao().insertAssets(this.mFinishedAssetList, this.mUpdateEntity);
        if (this.mErroredAssetList.size() == 0) {
            this.mDatabase.updateDao().markUpdateFinished(this.mUpdateEntity, this.mSkippedAssetList.size() != 0);
        }
    }

    /**
     * Returns true when this asset is a scaled variant whose scale is not the
     * closest match for the device's pixel density. (Simplified from the
     * original redundant if/return-true/return-false form.)
     */
    private boolean shouldSkipAsset(AssetEntity assetEntity) {
        return assetEntity.scales != null && assetEntity.scale != null
                && pickClosestScale(assetEntity.scales) != assetEntity.scale.floatValue();
    }

    /**
     * Returns the smallest available scale that is at least the device pixel
     * density, or the largest available scale when none qualifies.
     */
    private float pickClosestScale(Float[] fArr) {
        float f = 0.0f;
        float f2 = Float.MAX_VALUE;
        for (Float floatValue : fArr) {
            float floatValue2 = floatValue.floatValue();
            if (floatValue2 >= this.mPixelDensity && floatValue2 < f2) {
                f2 = floatValue2;
            }
            if (floatValue2 > f) {
                f = floatValue2;
            }
        }
        return f2 < Float.MAX_VALUE ? f2 : f;
    }
}
|
a4x4kiwi/Exo-CC | extensions/cce/src/main/jni/lib_ccx/asf_functions.c | #include "lib_ccx.h"
#include "ccx_common_option.h"
#include "asf_constants.h"
#include "activity.h"
#include "file_buffer.h"
// Indicate first / subsequent calls to asf_get_more_data().
// Nonzero means header parsing has not happened yet for the current file;
// NOTE(review): where the flag is set/cleared is outside this chunk — confirm.
int firstcall;
// Parser state shared across successive calls to asf_get_more_data().
asf_data asf_data_container;

// For ASF parsing
// 0, 1, 2, 3 means none, BYTE, WORD, DWORD
// (maps an ASF length-type field to a byte count: type 3 => 4 bytes)
#define ASF_TypeLength(A) (A == 3 ? 4 : A)
/* Read an unsigned value of 0, 1, 2 or 4 bytes (host byte order) from val.
 * ltype is a byte count as produced by ASF_TypeLength(); ltype == 0 yields 0.
 * Any other ltype is a programming error and aborts via fatal().
 *
 * Uses memcpy instead of dereferencing a cast pointer: val typically points
 * into the middle of a packet buffer and may be misaligned, which makes
 * *(uint32_t *)val undefined behavior on strict-alignment platforms.
 */
uint32_t asf_readval(void *val, int ltype)
{
	uint32_t rval;
	uint8_t v8;
	uint16_t v16;

	switch (ltype)
	{
		case 0:
			rval = 0;
			break;
		case 1:
			memcpy(&v8, val, sizeof(v8));
			rval = v8;
			break;
		case 2:
			memcpy(&v16, val, sizeof(v16));
			rval = v16;
			break;
		case 4:
			memcpy(&rval, val, sizeof(rval));
			break;
		default:
			fatal(CCX_COMMON_EXIT_BUG_BUG, "In asf_readval(): Invalid ltype, cannot continue processing this stream.\n");
			break;
	}
	return rval;
}
/* Format a 16-byte GUID into the canonical 8-4-4-4-12 uppercase hex string.
 * Returns a pointer to a static buffer that is overwritten on every call
 * (not reentrant / not thread-safe).
 */
char *gui_data_string(void *val)
{
	static char sbuf[40];
	unsigned char *raw = (unsigned char *)val;
	int pos;

	sprintf(sbuf, "%08lX-%04X-%04X-",
		(long)*((uint32_t *)(raw + 0)),
		(int)*((uint16_t *)(raw + 4)),
		(int)*((uint16_t *)(raw + 6)));
	/* Bytes 8-9: each write emits "XX-"; the second overwrites the first's
	 * trailing '-', leaving a single separator after the 4-hex-digit group. */
	for (pos = 0; pos < 2; pos++)
		sprintf(sbuf + 19 + pos * 2, "%02X-", raw[8 + pos]);
	/* Bytes 10-15: the final 12-hex-digit group. */
	for (pos = 0; pos < 6; pos++)
		sprintf(sbuf + 24 + pos * 2, "%02X", raw[10 + pos]);
	return sbuf;
}
/* ASF container specific data parser
* The following function reads an ASF file and returns the included
* video stream. The function returns after a complete Media Object
* is read (the Media Object Number increases by one). A Media Object
* seems to consist of one frame.
* When the function is called the next time it continues to read
* where it stopped before, static variables make sure that parameters
* are remembered between calls. */
int asf_get_more_data(struct lib_ccx_ctx *ctx, struct demuxer_data **ppdata)
{
int enough = 0;
int payload_read = 0;
// The fist call to this function (per file) is indicated with
// firstcall == 1
// Except for the first call of this function we will reenter
// the Data Packet loop below.
int reentry = 1;
// Variables for Header Object
int64_t data_packets_count = 0;
int broadcast_flag = 0;
int seekable_flag = 0;
uint32_t min_packet_size = 0;
uint32_t max_packet_size = 0;
// Data Object Loop
int data_packet_length = 0; // Collect the read header bytes
// Payload parsing information
int sequence_type = 0; // ASF
int padding_l_type = 0; // ASF
uint32_t sequence = 0;
uint32_t send_time = 0;
int payload_parser_size = 0; // Inferred (PacketLType + sequence_type + padding_l_type + 6);
uint32_t offset_media_length = 0; // ASF
uint32_t replicated_length = 0; // ASF
// Last media number. Used to determine a new PES, mark uninitialized.
uint32_t current_media_number = 0xFFFFFFFF;
unsigned char *current_position;
int64_t get_bytes;
size_t result = 0;
struct demuxer_data *data;
if (!*ppdata)
*ppdata = alloc_demuxer_data();
if (!*ppdata)
return -1;
data = *ppdata;
// Read Header Object and the Top-level Data Object header only once
if (firstcall)
{
asf_data_container = (asf_data){
.parsebuf = (unsigned char *)malloc(1024),
.parsebufsize = 1024,
.FileSize = 0,
.PacketSize = 0,
.StreamProperties = {
// Make sure the stream numbers are invalid when a new file begins
// so that they are only set when detected.
.VideoStreamNumber = 0,
.AudioStreamNumber = 0,
.CaptionStreamNumber = 0,
.CaptionStreamStyle = 0,
.DecodeStreamNumber = 0,
.DecodeStreamPTS = 0,
.currDecodeStreamPTS = 0,
.prevDecodeStreamPTS = 0,
.VideoStreamMS = 0,
.currVideoStreamMS = 0,
.prevVideoStreamMS = 0,
.VideoJump = 0},
.VideoClosedCaptioningFlag = 0,
.PayloadLType = 0,
.PayloadLength = 0,
.NumberOfPayloads = 0,
.payloadcur = 0,
.PayloadStreamNumber = 0,
.KeyFrame = 0,
.PayloadMediaNumber = 0,
.datapacketcur = 0,
.dobjectread = 50,
.MultiplePayloads = 0,
.PacketLType = 0,
.ReplicatedLType = 0,
.OffsetMediaLType = 0,
.MediaNumberLType = 0,
.StreamNumberLType = 0,
.PacketLength = 0,
.PaddingLength = 0};
// Initialize the Payload Extension System
for (int stream = 0; stream < STREAMNUM; stream++)
{
for (int payext = 0; payext < PAYEXTNUM; payext++)
{
asf_data_container.PayloadExtSize[stream][payext] = 0;
}
asf_data_container.PayloadExtPTSEntry[stream] = -1;
}
result = buffered_read(ctx->demux_ctx, asf_data_container.parsebuf, 30);
ctx->demux_ctx->past += result;
if (result != 30)
{
mprint("Premature end of file!\n");
end_of_file = 1;
return payload_read;
}
// Expecting ASF header
if (!memcmp(asf_data_container.parsebuf, ASF_HEADER, 16))
{
dbg_print(CCX_DMT_PARSE, "\nASF header\n");
}
else
{
fatal(EXIT_MISSING_ASF_HEADER, "Missing ASF header. Could not read ASF file. Abort.\n");
}
asf_data_container.HeaderObjectSize = *((int64_t *)(asf_data_container.parsebuf + 16));
dbg_print(CCX_DMT_PARSE, "Length: %lld\n", asf_data_container.HeaderObjectSize);
dbg_print(CCX_DMT_PARSE, "\nNumber of header objects: %ld\n",
(long)*((uint32_t *)(asf_data_container.parsebuf + 24)));
if (asf_data_container.HeaderObjectSize > asf_data_container.parsebufsize)
{
asf_data_container.parsebuf = (unsigned char *)realloc(asf_data_container.parsebuf, (size_t)asf_data_container.HeaderObjectSize);
if (!asf_data_container.parsebuf)
fatal(EXIT_NOT_ENOUGH_MEMORY, "In asf_getmoredata: Out of memory requesting buffer for data container.");
asf_data_container.parsebufsize = (long)asf_data_container.HeaderObjectSize;
}
current_position = asf_data_container.parsebuf + 30;
get_bytes = asf_data_container.HeaderObjectSize - 30;
result = buffered_read(ctx->demux_ctx, current_position, (int)get_bytes);
ctx->demux_ctx->past += result;
if (result != get_bytes)
{
mprint("Premature end of file!\n");
end_of_file = 1;
return payload_read;
}
dbg_print(CCX_DMT_PARSE, "Reading header objects\n");
while (current_position < asf_data_container.parsebuf + asf_data_container.HeaderObjectSize)
{
int64_t hpobjectsize = *((int64_t *)(current_position + 16)); // Local
if (!memcmp(current_position, ASF_FILE_PROPERTIES, 16))
{
// Mandatory Object, only one.
dbg_print(CCX_DMT_PARSE, "\nFile Properties Object (size: %lld)\n", hpobjectsize);
asf_data_container.FileSize = *((int64_t *)(current_position + 40));
data_packets_count = *((int64_t *)(current_position + 56));
broadcast_flag = 0x1 & current_position[88];
seekable_flag = 0x2 & current_position[88];
min_packet_size = *((uint32_t *)(current_position + 92));
max_packet_size = *((uint32_t *)(current_position + 96));
dbg_print(CCX_DMT_PARSE, "FileSize: %lld Packet count: %lld\n", asf_data_container.FileSize, data_packets_count);
dbg_print(CCX_DMT_PARSE, "Broadcast: %d - Seekable: %d\n", broadcast_flag, seekable_flag);
dbg_print(CCX_DMT_PARSE, "MiDPS: %d MaDPS: %d\n", min_packet_size, max_packet_size);
}
else if (!memcmp(current_position, ASF_STREAM_PROPERTIES, 16))
{
dbg_print(CCX_DMT_PARSE, "\nStream Properties Object (size: %lld)\n", hpobjectsize);
if (!memcmp(current_position + 24, ASF_VIDEO_MEDIA, 16))
{
asf_data_container.StreamProperties.VideoStreamNumber = *(current_position + 72) & 0x7F;
dbg_print(CCX_DMT_PARSE, "Stream Type: ASF_Video_Media\n");
dbg_print(CCX_DMT_PARSE, "Video Stream Number: %d\n", asf_data_container.StreamProperties.VideoStreamNumber);
}
else if (!memcmp(current_position + 24, ASF_AUDIO_MEDIA, 16))
{
asf_data_container.StreamProperties.AudioStreamNumber = *(current_position + 72) & 0x7F;
dbg_print(CCX_DMT_PARSE, "Stream Type: ASF_Audio_Media\n");
dbg_print(CCX_DMT_PARSE, "Audio Stream Number: %d\n", asf_data_container.StreamProperties.AudioStreamNumber);
}
else
{
dbg_print(CCX_DMT_PARSE, "Stream Type: %s\n",
gui_data_string(current_position + 24));
dbg_print(CCX_DMT_PARSE, "Stream Number: %d\n", *(current_position + 72) & 0x7F);
}
}
else if (!memcmp(current_position, ASF_HEADER_EXTENSION, 16))
{
dbg_print(CCX_DMT_PARSE, "\nHeader Extension Object (size: %lld)\n", hpobjectsize);
int32_t header_extension_data_size = *((uint32_t *)(current_position + 42));
// Process Header Extension Data
if (header_extension_data_size)
{
unsigned char *header_current_position = current_position + 46;
if (header_extension_data_size != hpobjectsize - 46)
fatal(EXIT_NOT_CLASSIFIED, "In asf_getmoredata: Incorrect HeaderExtensionDataSize value, cannot continue.");
dbg_print(CCX_DMT_PARSE, "\nReading Header Extension Sub-Objects\n");
while (header_current_position < current_position + 46 + header_extension_data_size)
{
int64_t header_object_size = *((int64_t *)(header_current_position + 16)); // Local
if (!memcmp(header_current_position, ASF_EXTENDED_STREAM_PROPERTIES, 16))
{
dbg_print(CCX_DMT_PARSE, "\nExtended Stream Properties Object (size: %lld)\n", header_object_size);
int stream_number = *((uint16_t *)(header_current_position + 72));
int stream_name_count = *((uint16_t *)(header_current_position + 84));
int payload_ext_system_count = *((uint16_t *)(header_current_position + 86));
unsigned char *stream_prop_position = header_current_position + 88;
int stream_name_length;
dbg_print(CCX_DMT_PARSE, "Stream Number: %d NameCount: %d ESCount: %d\n",
stream_number, stream_name_count, payload_ext_system_count);
if (stream_number >= STREAMNUM)
fatal(CCX_COMMON_EXIT_BUG_BUG, "In asf_getmoredata: STREAMNUM too small. Please file a bug report on GitHub.\n");
for (int i = 0; i < stream_name_count; i++)
{
dbg_print(CCX_DMT_PARSE, "%2d. Stream Name Field\n", i);
stream_name_length = *((uint16_t *)(stream_prop_position + 2));
stream_prop_position += 4 + stream_name_length;
}
int ext_system_data_size;
int ext_system_info_length;
if (payload_ext_system_count > PAYEXTNUM)
fatal(CCX_COMMON_EXIT_BUG_BUG, "In asf_getmoredata: PAYEXTNUM too small. Please file a bug report on GitHub.\n");
for (int i = 0; i < payload_ext_system_count; i++)
{
ext_system_data_size = *((uint16_t *)(stream_prop_position + 16));
ext_system_info_length = *((uint32_t *)(stream_prop_position + 18));
asf_data_container.PayloadExtSize[stream_number][i] = ext_system_data_size;
dbg_print(CCX_DMT_PARSE, "%2d. Payload Extension GUID: %s Size %d Info Length %d\n",
i, gui_data_string(stream_prop_position + 0),
ext_system_data_size,
ext_system_info_length);
// For DVR-MS presentation timestamp
if (!memcmp(stream_prop_position, DVRMS_PTS, 16))
{
dbg_print(CCX_DMT_PARSE, "Found DVRMS_PTS\n");
asf_data_container.PayloadExtPTSEntry[stream_number] = i;
}
stream_prop_position += 22 + ext_system_info_length;
}
// Now, there can be a Stream Properties Object. The only way to
// find out is to check if there are bytes left in the current
// object.
if ((stream_prop_position - header_current_position) < header_object_size)
{
int64_t stream_prop_object_size = *((int64_t *)(stream_prop_position + 16)); // Local
if (memcmp(stream_prop_position, ASF_STREAM_PROPERTIES, 16))
fatal(EXIT_NOT_CLASSIFIED, "Stream Properties Object expected\n");
if (!memcmp(stream_prop_position + 24, ASF_VIDEO_MEDIA, 16))
{
dbg_print(CCX_DMT_PARSE, "Stream Type: ASF_Video_Media (size: %lld)\n",
stream_prop_object_size);
asf_data_container.StreamProperties.VideoStreamNumber = stream_number;
}
else if (!memcmp(stream_prop_position + 24, ASF_AUDIO_MEDIA, 16))
{
dbg_print(CCX_DMT_PARSE, "Stream Type: ASF_Audio_Media (size: %lld)\n",
stream_prop_object_size);
asf_data_container.StreamProperties.AudioStreamNumber = stream_number;
}
else if (!memcmp(stream_prop_position + 24, ASF_BINARY_MEDIA, 16))
{
// dvr-ms files identify audio streams as binary streams
// but use the "major media type" accordingly to identify
// the steam. (There might be other audio identifiers.)
if (!memcmp(stream_prop_position + 78, DVRMS_AUDIO, 16))
{
dbg_print(CCX_DMT_PARSE, "Binary media: DVR-MS Audio Stream (size: %lld)\n",
stream_prop_object_size);
}
else if (!memcmp(stream_prop_position + 78, DVRMS_NTSC, 16))
{
dbg_print(CCX_DMT_PARSE, "Binary media: NTSC captions (size: %lld)\n",
stream_prop_object_size);
asf_data_container.StreamProperties.CaptionStreamNumber = stream_number;
asf_data_container.StreamProperties.CaptionStreamStyle = 1;
}
else if (!memcmp(stream_prop_position + 78, DVRMS_ATSC, 16))
{
dbg_print(CCX_DMT_PARSE, "Binary media: ATSC captions (size: %lld)\n",
stream_prop_object_size);
asf_data_container.StreamProperties.CaptionStreamNumber = stream_number;
asf_data_container.StreamProperties.CaptionStreamStyle = 2;
}
else
{
dbg_print(CCX_DMT_PARSE, "Binary media: Major Media Type GUID: %s (size: %lld)\n",
gui_data_string(stream_prop_position + 78), stream_prop_object_size);
}
}
else
{
dbg_print(CCX_DMT_PARSE, "Unknown Type GUID: %s (size: %lld)\n",
gui_data_string(stream_prop_position + 24), stream_prop_object_size);
}
}
else
{
dbg_print(CCX_DMT_PARSE, "No Stream Properties Object\n");
}
}
else if (!memcmp(header_current_position, ASF_METADATA, 16))
{
dbg_print(CCX_DMT_PARSE, "\nMetadata Object (size: %lld)\n", header_object_size);
}
else if (!memcmp(header_current_position, ASF_METADATA_LIBRARY, 16))
{
dbg_print(CCX_DMT_PARSE, "\nMetadata Library Object (size: %lld)\n", header_object_size);
}
else if (!memcmp(header_current_position, ASF_COMPATIBILITY2, 16))
{
dbg_print(CCX_DMT_PARSE, "\nCompatibility Object 2 (size: %lld)\n", header_object_size);
}
else if (!memcmp(header_current_position, ASF_PADDING, 16))
{
dbg_print(CCX_DMT_PARSE, "\nPadding Object (size: %lld)\n", header_object_size);
}
else
{
dbg_print(CCX_DMT_PARSE, "\nGUID: %s size: %lld\n",
gui_data_string(header_current_position), header_object_size);
dump(CCX_DMT_PARSE, header_current_position, 16, 0, 0);
}
header_current_position += header_object_size;
}
if (header_current_position - (current_position + 46) != header_extension_data_size)
fatal(EXIT_NOT_CLASSIFIED, "Header Extension Parsing problem: read bytes %ld != header length %lld\nAbort!\n",
(long)(header_current_position - (current_position + 46)), header_extension_data_size);
}
dbg_print(CCX_DMT_PARSE, "\nHeader Extension Object - End\n");
}
else if (!memcmp(current_position, ASF_CONTENT_DESCRIPTION, 16))
{
dbg_print(CCX_DMT_PARSE, "\nContend Description Object (size: %lld)\n", hpobjectsize);
}
else if (!memcmp(current_position, ASF_EXTENDED_CONTENT_DESCRIPTION, 16))
{
dbg_print(CCX_DMT_PARSE, "\nExtended Content Description Object (size: %lld)\n", hpobjectsize);
int content_descriptor_count = *((uint16_t *)(current_position + 24));
unsigned char *ext_content_position = current_position + 26;
int descriptor_name_length;
int descriptor_value_data_type;
int descriptor_value_length;
unsigned char *extended_description_value;
for (int i = 0; i < content_descriptor_count; i++)
{
descriptor_name_length = *((uint16_t *)(ext_content_position));
descriptor_value_data_type = *((uint16_t *)(ext_content_position + 2 + descriptor_name_length));
descriptor_value_length = *((uint16_t *)(ext_content_position + 4 + descriptor_name_length));
extended_description_value = ext_content_position + 6 + descriptor_name_length;
dbg_print(CCX_DMT_PARSE, "%3d. %ls = ", i, (wchar_t *)(ext_content_position + 2));
switch (descriptor_value_data_type)
{
case 0: // Unicode string
dbg_print(CCX_DMT_PARSE, "%ls (Unicode)\n", (wchar_t *)extended_description_value);
break;
case 1: // byte string
dbg_print(CCX_DMT_PARSE, ":");
for (int ii = 0; ii < descriptor_value_length && ii < 9; ii++)
{
dbg_print(CCX_DMT_PARSE, "%02X:", *((unsigned char *)(extended_description_value + ii)));
}
if (descriptor_value_length > 8)
dbg_print(CCX_DMT_PARSE, "skipped %d more", descriptor_value_length - 8);
dbg_print(CCX_DMT_PARSE, " (BYTES)\n");
break;
case 2: // BOOL
dbg_print(CCX_DMT_PARSE, "%d (BOOL)\n", *((int32_t *)extended_description_value));
break;
case 3: // DWORD
dbg_print(CCX_DMT_PARSE, "%u (DWORD)\n", *((uint32_t *)extended_description_value));
break;
case 4: // QWORD
dbg_print(CCX_DMT_PARSE, "%llu (QWORD)\n", *((uint64_t *)extended_description_value));
break;
case 5: // WORD
dbg_print(CCX_DMT_PARSE, "%u (WORD)\n", (int)*((uint16_t *)extended_description_value));
break;
default:
fatal(CCX_COMMON_EXIT_BUG_BUG, "In asf_getmoredata: Impossible value for DescriptorValueDataType. Please file a bug report in GitHub.\n");
break;
}
if (!memcmp(ext_content_position + 2, L"WM/VideoClosedCaptioning", descriptor_name_length))
{
// This flag would be really useful if it would be
// reliable - it isn't.
asf_data_container.VideoClosedCaptioningFlag = *((int32_t *)extended_description_value);
dbg_print(CCX_DMT_PARSE, "Found WM/VideoClosedCaptioning flag: %d\n",
asf_data_container.VideoClosedCaptioningFlag);
}
ext_content_position += 6 + descriptor_name_length + descriptor_value_length;
}
}
else if (!memcmp(current_position, ASF_STREAM_BITRATE_PROPERTIES, 16))
{
dbg_print(CCX_DMT_PARSE, "\nStream Bitrate Properties Object (size: %lld)\n", hpobjectsize);
}
else
{
dbg_print(CCX_DMT_PARSE, "\nGUID: %s size: %lld\n",
gui_data_string(current_position), hpobjectsize);
dump(CCX_DMT_PARSE, current_position, 16, 0, 0);
}
current_position += hpobjectsize;
}
if (current_position - asf_data_container.parsebuf != asf_data_container.HeaderObjectSize)
fatal(EXIT_NOT_CLASSIFIED, "Header Object Parsing problem: read bytes %ld != header length %lld\nAbort!\n",
(long)(current_position - asf_data_container.parsebuf), asf_data_container.HeaderObjectSize);
if (asf_data_container.StreamProperties.VideoStreamNumber == 0)
fatal(EXIT_NOT_CLASSIFIED, "No Video Stream Properties Object found. Unable to continue ...\n");
// Wouldn't it be nice if VideoClosedCaptioningFlag would be usable, unfortunately
// it is not reliable.
// Now decide where we are going to expect the captions
data->bufferdatatype = CCX_PES; // Except for NTSC captions
if (asf_data_container.StreamProperties.CaptionStreamNumber > 0 && (asf_data_container.StreamProperties.CaptionStreamStyle == 1 ||
(asf_data_container.StreamProperties.CaptionStreamStyle == 2 && !ccx_options.wtvconvertfix)))
{
mprint("\nNote: If you converted a WTV into a DVR-MS and CCExtractor finds no captions, try passing -wtvconvertfix to work around bug in the conversion process.");
}
if (asf_data_container.StreamProperties.CaptionStreamNumber > 0 && (asf_data_container.StreamProperties.CaptionStreamStyle == 1 ||
(asf_data_container.StreamProperties.CaptionStreamStyle == 2 && ccx_options.wtvconvertfix)))
{
//if (debug_parse)
mprint("\nNTSC captions in stream #%d\n\n", asf_data_container.StreamProperties.CaptionStreamNumber);
data->bufferdatatype = CCX_RAW;
asf_data_container.StreamProperties.DecodeStreamNumber = asf_data_container.StreamProperties.CaptionStreamNumber;
}
else if (asf_data_container.StreamProperties.CaptionStreamNumber > 0 && asf_data_container.StreamProperties.CaptionStreamStyle == 2)
{
//if (debug_parse)
mprint("\nATSC captions (probably) in stream #%d - Decode the video stream #%d instead\n\n",
asf_data_container.StreamProperties.CaptionStreamNumber, asf_data_container.StreamProperties.VideoStreamNumber);
asf_data_container.StreamProperties.DecodeStreamNumber = asf_data_container.StreamProperties.VideoStreamNumber;
}
else
{
asf_data_container.StreamProperties.DecodeStreamNumber = asf_data_container.StreamProperties.VideoStreamNumber;
//if (debug_parse)
mprint("\nAssume CC info in video stream #%d (No caption stream found)\n\n",
asf_data_container.StreamProperties.DecodeStreamNumber);
}
	// When reading "Payload parsing information" it occurred that "Packet Length"
	// was not set (Packet Length Type 00) and for "Single Payloads" this means
	// the Payload Data Length cannot be inferred. Use min_packet_size, max_packet_size instead.
if ((min_packet_size > 0) && (min_packet_size == max_packet_size))
asf_data_container.PacketSize = min_packet_size;
// Now the Data Object, except for the packages
result = buffered_read(ctx->demux_ctx, asf_data_container.parsebuf, 50); // No realloc needed.
ctx->demux_ctx->past += result;
if (result != 50)
{
mprint("Premature end of file!\n");
end_of_file = 1;
return payload_read;
}
// Expecting ASF Data object
if (!memcmp(asf_data_container.parsebuf, ASF_DATA, 16))
{
dbg_print(CCX_DMT_PARSE, "\nASF Data Object\n");
}
else
{
fatal(EXIT_NOT_CLASSIFIED, "In asf_getmoredata: Missing ASF Data Object. Abort.\n");
}
asf_data_container.DataObjectSize = *((int64_t *)(asf_data_container.parsebuf + 16));
asf_data_container.TotalDataPackets = *((uint32_t *)(asf_data_container.parsebuf + 40));
dbg_print(CCX_DMT_PARSE, "Size: %lld\n", asf_data_container.DataObjectSize);
dbg_print(CCX_DMT_PARSE, "Number of data packets: %ld\n", (long)asf_data_container.TotalDataPackets);
reentry = 0; // Make sure we read the Data Packet Headers
} // End of if (firstcall)
firstcall = 0;
// Start loop over Data Packets
while (asf_data_container.datapacketcur < asf_data_container.TotalDataPackets && !enough)
{
// Skip reading the headers the first time when reentering the loop
if (!reentry)
{
int ecinfo = 0;
data_packet_length = 0;
dbg_print(CCX_DMT_PARSE, "\nReading packet %d/%d\n", asf_data_container.datapacketcur + 1, asf_data_container.TotalDataPackets);
// First packet
result = buffered_read(ctx->demux_ctx, asf_data_container.parsebuf, 1); // No realloc needed.
ctx->demux_ctx->past += result;
asf_data_container.dobjectread += result;
if (result != 1)
{
mprint("Premature end of file!\n");
end_of_file = 1;
return payload_read;
}
data_packet_length += 1;
if (*asf_data_container.parsebuf & 0x80)
{
int ecdatalength = *asf_data_container.parsebuf & 0x0F; // Small, no realloc needed
if (*asf_data_container.parsebuf & 0x60)
{
fatal(EXIT_NOT_CLASSIFIED, "Error Correction Length Type not 00 - reserved - aborting ...\n");
}
result = buffered_read(ctx->demux_ctx, asf_data_container.parsebuf + 1, ecdatalength);
ctx->demux_ctx->past += result;
asf_data_container.dobjectread += result;
if (result != ecdatalength)
{
mprint("Premature end of file!\n");
end_of_file = 1;
return payload_read;
}
data_packet_length += ecdatalength;
if (asf_data_container.parsebuf[1] & 0x0F)
fatal(EXIT_NOT_CLASSIFIED, "Error correction present. Unable to continue ...\n");
}
else
{
// When no ecinfo is present the byte we just read belongs
// to the payload parsing information.
ecinfo = 1;
}
// Now payload parsing information
result = buffered_read(ctx->demux_ctx, asf_data_container.parsebuf + ecinfo, 2 - ecinfo); // No realloc needed
ctx->demux_ctx->past += result;
asf_data_container.dobjectread += result;
if (result != 2)
{
mprint("Premature end of file!\n");
end_of_file = 1;
return payload_read;
}
data_packet_length += 2;
asf_data_container.MultiplePayloads = *asf_data_container.parsebuf & 0x01;
sequence_type = (*asf_data_container.parsebuf >> 1) & 0x03;
sequence_type = ASF_TypeLength(sequence_type);
padding_l_type = (*asf_data_container.parsebuf >> 3) & 0x03;
padding_l_type = ASF_TypeLength(padding_l_type);
asf_data_container.PacketLType = (*asf_data_container.parsebuf >> 5) & 0x03;
asf_data_container.PacketLType = ASF_TypeLength(asf_data_container.PacketLType);
asf_data_container.ReplicatedLType = (asf_data_container.parsebuf[1]) & 0x03;
asf_data_container.ReplicatedLType = ASF_TypeLength(asf_data_container.ReplicatedLType);
asf_data_container.OffsetMediaLType = (asf_data_container.parsebuf[1] >> 2) & 0x03;
asf_data_container.OffsetMediaLType = ASF_TypeLength(asf_data_container.OffsetMediaLType);
asf_data_container.MediaNumberLType = (asf_data_container.parsebuf[1] >> 4) & 0x03;
asf_data_container.MediaNumberLType = ASF_TypeLength(asf_data_container.MediaNumberLType);
asf_data_container.StreamNumberLType = (asf_data_container.parsebuf[1] >> 6) & 0x03;
asf_data_container.StreamNumberLType = ASF_TypeLength(asf_data_container.StreamNumberLType);
payload_parser_size = asf_data_container.PacketLType + sequence_type + padding_l_type + 6;
result = buffered_read(ctx->demux_ctx, asf_data_container.parsebuf + 2, payload_parser_size); // No realloc needed
ctx->demux_ctx->past += result;
asf_data_container.dobjectread += result;
if (result != payload_parser_size)
{
mprint("Premature end of file!\n");
end_of_file = 1;
return payload_read;
}
data_packet_length += payload_parser_size;
asf_data_container.PacketLength = asf_readval(asf_data_container.parsebuf + 2, asf_data_container.PacketLType);
sequence = asf_readval(asf_data_container.parsebuf + 2 + asf_data_container.PacketLType, sequence_type);
asf_data_container.PaddingLength = asf_readval(asf_data_container.parsebuf + 2 + asf_data_container.PacketLType + sequence_type, padding_l_type);
// Data Packet ms time stamp
send_time = *((uint32_t *)(asf_data_container.parsebuf + 2 + asf_data_container.PacketLType + sequence_type + padding_l_type));
// If Packet Length is not set use global setting if possible
if (asf_data_container.PacketLength == 0)
{
asf_data_container.PacketLength = asf_data_container.PacketSize;
// For multiple payloads we can get away without a given
// Packet length as individual payload length are given
if (asf_data_container.PacketLength == 0 && asf_data_container.MultiplePayloads == 0)
fatal(EXIT_NOT_CLASSIFIED, "In asf_getmoredata: Cannot determine packet length. Unable to continue processing this file.\n");
}
dbg_print(CCX_DMT_PARSE, "Lengths - Packet: %d / sequence %d / Padding %d\n",
asf_data_container.PacketLength, sequence, asf_data_container.PaddingLength);
asf_data_container.PayloadLType = 0; // Payload Length Type. <>0 for multiple payloads
asf_data_container.PayloadLength = 0; // Payload Length (for multiple payloads)
asf_data_container.NumberOfPayloads = 1; // Number of payloads.
if (asf_data_container.MultiplePayloads != 0)
{
unsigned char plheader[1];
result = buffered_read(ctx->demux_ctx, plheader, 1);
ctx->demux_ctx->past += result;
asf_data_container.dobjectread += result;
if (result != 1)
{
mprint("Premature end of file!\n");
end_of_file = 1;
return payload_read;
}
data_packet_length += 1;
asf_data_container.PayloadLType = (*plheader >> 6) & 0x03;
asf_data_container.PayloadLType = ASF_TypeLength(asf_data_container.PayloadLType);
// Number of payloads
asf_data_container.NumberOfPayloads = *plheader & 0x3F;
}
asf_data_container.payloadcur = 0;
}
else
{ // Rely on
// NumberOfPayloads, payloadcur, PayloadLength, PaddingLength
// and related variables being kept as static variables to be
// able to reenter the loop here.
dbg_print(CCX_DMT_PARSE, "\nReentry into asf_get_more_data()\n");
}
// The following repeats NumberOfPayloads times
while (asf_data_container.payloadcur < asf_data_container.NumberOfPayloads && !enough)
{
// Skip reading the Payload headers the first time when reentering the loop
if (!reentry)
{
if (asf_data_container.NumberOfPayloads < 2)
dbg_print(CCX_DMT_PARSE, "\nSingle payload\n");
else
dbg_print(CCX_DMT_PARSE, "\nMultiple payloads %d/%d\n", asf_data_container.payloadcur + 1, asf_data_container.NumberOfPayloads);
int payload_header_size = 1 + asf_data_container.MediaNumberLType + asf_data_container.OffsetMediaLType + asf_data_container.ReplicatedLType;
result = buffered_read(ctx->demux_ctx, asf_data_container.parsebuf, payload_header_size); // No realloc needed
ctx->demux_ctx->past += result;
asf_data_container.dobjectread += result;
if (result != payload_header_size)
{
mprint("Premature end of file!\n");
end_of_file = 1;
return payload_read;
}
data_packet_length += payload_header_size;
asf_data_container.PayloadStreamNumber = *asf_data_container.parsebuf & 0x7F;
asf_data_container.KeyFrame = (*asf_data_container.parsebuf & 0x80) && 1;
asf_data_container.PayloadMediaNumber = asf_readval(asf_data_container.parsebuf + 1, asf_data_container.MediaNumberLType);
offset_media_length = asf_readval(asf_data_container.parsebuf + 1 + asf_data_container.MediaNumberLType, asf_data_container.OffsetMediaLType);
replicated_length = asf_readval(asf_data_container.parsebuf + 1 + asf_data_container.MediaNumberLType + asf_data_container.OffsetMediaLType, asf_data_container.ReplicatedLType);
if (replicated_length == 1)
fatal(EXIT_NOT_CLASSIFIED, "Cannot handle compressed data...\n");
if ((long)replicated_length > asf_data_container.parsebufsize)
{
asf_data_container.parsebuf = (unsigned char *)realloc(asf_data_container.parsebuf, replicated_length);
if (!asf_data_container.parsebuf)
fatal(EXIT_NOT_ENOUGH_MEMORY, "In asf_getmoredata: Not enough memory for buffer, unable to continue.\n");
asf_data_container.parsebufsize = replicated_length;
}
result = buffered_read(ctx->demux_ctx, asf_data_container.parsebuf, (long)replicated_length);
ctx->demux_ctx->past += result;
asf_data_container.dobjectread += result;
if (result != replicated_length)
{
mprint("Premature end of file!\n");
end_of_file = 1;
return payload_read;
}
// Parse Replicated data
unsigned char *replicate_position = asf_data_container.parsebuf;
int media_object_size = 0;
int presentation_time_millis = 0; //Payload ms time stamp
int extsize = 0;
// int32_t dwVersion = 0;
// int32_t unknown = 0;
int64_t rtStart = 0; // dvr-ms 100ns time stamp start
int64_t rtEnd = 0; // dvr-ms 100ns time stamp end
// Always at least 8 bytes long, see 7.3.1
media_object_size = *((uint16_t *)(asf_data_container.parsebuf));
presentation_time_millis = *((uint16_t *)(asf_data_container.parsebuf + 4));
replicate_position += 8;
dbg_print(CCX_DMT_PARSE, "Stream# %d[%d] Media# %d Offset/Size: %d/%d\n",
asf_data_container.PayloadStreamNumber, asf_data_container.KeyFrame, asf_data_container.PayloadMediaNumber,
offset_media_length, media_object_size);
// Loop over Payload Extension Systems
for (int i = 0; i < asf_data_container.PayloadExtPTSEntry[asf_data_container.PayloadStreamNumber]; i++)
{
if (asf_data_container.PayloadExtSize[asf_data_container.PayloadStreamNumber][i] == 0xffff)
{
extsize = *((uint16_t *)(replicate_position + 0));
replicate_position += 2;
}
else
{
extsize = asf_data_container.PayloadExtSize[asf_data_container.PayloadStreamNumber][i];
}
replicate_position += extsize;
//printf("%2d. Ext. System - size: %d\n", i, extsize);
}
if (asf_data_container.PayloadExtPTSEntry[asf_data_container.PayloadStreamNumber] > 0)
{
// dwVersion = *((uint32_t*)(replicate_position+0));
// unknown = *((uint32_t*)(replicate_position+4));
rtStart = *((int64_t *)(replicate_position + 8));
rtEnd = *((int64_t *)(replicate_position + 16));
//printf("dwVersion: %d unknown: 0x%04X\n", dwVersion, unknown);
}
// Avoid problems with unset PTS times
if (rtStart == -1)
{
rtStart = 0;
rtEnd = 0;
dbg_print(CCX_DMT_PARSE, "dvr-ms time not defined\n");
}
// print_mstime uses a static buffer
dbg_print(CCX_DMT_PARSE, "Stream #%d PacketTime: %s",
asf_data_container.PayloadStreamNumber, print_mstime_static(send_time));
dbg_print(CCX_DMT_PARSE, " PayloadTime: %s",
print_mstime_static(presentation_time_millis));
dbg_print(CCX_DMT_PARSE, " dvr-ms PTS: %s+%lld\n",
print_mstime_static(rtStart / 10000), (rtEnd - rtStart) / 10000);
data_packet_length += replicated_length;
// Only multiple payload packages have this value
if (asf_data_container.MultiplePayloads != 0)
{
unsigned char plheader[4];
result = buffered_read(ctx->demux_ctx, plheader, asf_data_container.PayloadLType);
ctx->demux_ctx->past += result;
asf_data_container.dobjectread += result;
if (result != asf_data_container.PayloadLType)
{
mprint("Premature end of file!\n");
end_of_file = 1;
return payload_read;
}
asf_data_container.PayloadLength = asf_readval(plheader, asf_data_container.PayloadLType);
}
else
{
asf_data_container.PayloadLength = asf_data_container.PacketLength - data_packet_length - asf_data_container.PaddingLength;
}
dbg_print(CCX_DMT_PARSE, "Size - Replicated %d + Payload %d = %d\n",
replicated_length, asf_data_container.PayloadLength, replicated_length + asf_data_container.PayloadLength);
// Remember the last video time stamp - only when captions are separate
// from video stream.
if (asf_data_container.PayloadStreamNumber == asf_data_container.StreamProperties.VideoStreamNumber && asf_data_container.StreamProperties.DecodeStreamNumber != asf_data_container.StreamProperties.VideoStreamNumber && offset_media_length == 0)
{
asf_data_container.StreamProperties.prevVideoStreamMS = asf_data_container.StreamProperties.currVideoStreamMS;
asf_data_container.StreamProperties.currVideoStreamMS = asf_data_container.StreamProperties.VideoStreamMS;
// Use presentation_time_millis (send_time might also work) when the
// dvr-ms time stamp is not present.
if (asf_data_container.PayloadExtPTSEntry[asf_data_container.PayloadStreamNumber] > 0)
{
// When rstart is not set, keep the previous value
if (rtStart > 0)
asf_data_container.StreamProperties.VideoStreamMS = (int)(rtStart / 10000);
}
else
{
// Add 1ms to avoid 0ms start times getting rejected
asf_data_container.StreamProperties.VideoStreamMS = presentation_time_millis + 1;
}
// This checks if there is a video time jump in the timeline
// between caption information.
if (abs(asf_data_container.StreamProperties.currVideoStreamMS - asf_data_container.StreamProperties.prevVideoStreamMS) > 500)
{
// Found a more than 500ms jump in the video timeline
asf_data_container.StreamProperties.VideoJump = 1;
// This is remembered until the next caption block is
// found.
}
}
// Remember the PTS values
if (asf_data_container.PayloadStreamNumber == asf_data_container.StreamProperties.DecodeStreamNumber && offset_media_length == 0)
{
asf_data_container.StreamProperties.prevDecodeStreamPTS = asf_data_container.StreamProperties.currDecodeStreamPTS;
asf_data_container.StreamProperties.currDecodeStreamPTS = asf_data_container.StreamProperties.DecodeStreamPTS;
// Use presentation_time_millis (send_time might also work) when the
// dvr-ms time stamp is not present.
if (asf_data_container.PayloadExtPTSEntry[asf_data_container.PayloadStreamNumber] > 0)
{
// When rstart is not set, keep the previous value
if (rtStart > 0)
asf_data_container.StreamProperties.DecodeStreamPTS = (int)(rtStart / 10000);
}
else
{
// Add 1ms to avoid 0ms start times getting rejected
asf_data_container.StreamProperties.DecodeStreamPTS = presentation_time_millis + 1;
}
// Check the caption stream for jumps in the timeline. This
// is needed when caption information is transmitted in a
// different stream then the video information. (For example
// NTSC recordings.) Otherwise a gap in the caption
// information would look like a jump in the timeline.
if (asf_data_container.StreamProperties.DecodeStreamNumber != asf_data_container.StreamProperties.VideoStreamNumber)
{
// Check if there is a gap larger than 500ms.
if (asf_data_container.StreamProperties.currDecodeStreamPTS - asf_data_container.StreamProperties.prevDecodeStreamPTS > 500)
{
// Found more than 500ms since the previous caption,
// now check the video timeline. If there was a also
// a jump this needs synchronizing, otherwise it was
// just a gap in the captions.
if (!asf_data_container.StreamProperties.VideoJump)
ccx_common_timing_settings.disable_sync_check = 1;
else
ccx_common_timing_settings.disable_sync_check = 0;
}
asf_data_container.StreamProperties.VideoJump = 0;
}
// Remember, we are reading the previous package.
data->pts = asf_data_container.StreamProperties.currDecodeStreamPTS * (MPEG_CLOCK_FREQ / 1000);
}
}
// A new media number. The old "object" finished, we stop here to
// continue later.
// To continue later we need to remember:
// NumberOfPayloads
// payloadcur
// PayloadLength
// PaddingLength
// Now, the next loop is no reentry anymore:
reentry = 0;
// Video streams need several packages to complete a PES. Leave
// the loop when the next package starts a new Media Object.
if (current_media_number != 0xFFFFFFFF // Is initialized
&& asf_data_container.PayloadStreamNumber == asf_data_container.StreamProperties.DecodeStreamNumber && asf_data_container.PayloadMediaNumber != current_media_number)
{
if (asf_data_container.StreamProperties.DecodeStreamNumber == asf_data_container.StreamProperties.CaptionStreamNumber)
dbg_print(CCX_DMT_PARSE, "\nCaption stream object");
else
dbg_print(CCX_DMT_PARSE, "\nVideo stream object");
dbg_print(CCX_DMT_PARSE, " read with PTS: %s\n",
print_mstime_static(asf_data_container.StreamProperties.currDecodeStreamPTS));
// Enough for now
enough = 1;
break;
}
// Read it!!
if (asf_data_container.PayloadStreamNumber == asf_data_container.StreamProperties.DecodeStreamNumber)
{
current_media_number = asf_data_container.PayloadMediaNumber; // Remember current value
// Read the data
dbg_print(CCX_DMT_PARSE, "Reading Stream #%d data ...\n", asf_data_container.PayloadStreamNumber);
data->stream_pid = asf_data_container.StreamProperties.DecodeStreamNumber;
data->program_number = 1;
data->codec = CCX_CODEC_ATSC_CC;
int want = (long)((BUFSIZE - data->len) > asf_data_container.PayloadLength ? asf_data_container.PayloadLength : (BUFSIZE - data->len));
if (want < (long)asf_data_container.PayloadLength)
fatal(CCX_COMMON_EXIT_BUG_BUG, "Buffer size too small for ASF payload!\nPlease file a bug report!\n");
result = buffered_read(ctx->demux_ctx, data->buffer + data->len, want);
payload_read += (int)result;
data->len += result;
ctx->demux_ctx->past += result;
if (result != asf_data_container.PayloadLength)
{
mprint("Premature end of file!\n");
end_of_file = 1;
return payload_read;
}
asf_data_container.dobjectread += asf_data_container.PayloadLength;
}
else
{
// Skip non-cc data
dbg_print(CCX_DMT_PARSE, "Skipping Stream #%d data ...\n", asf_data_container.PayloadStreamNumber);
result = buffered_skip(ctx->demux_ctx, (int)asf_data_container.PayloadLength);
ctx->demux_ctx->past += result;
if (result != asf_data_container.PayloadLength)
{
mprint("Premature end of file!\n");
end_of_file = 1;
return payload_read;
}
asf_data_container.dobjectread += result;
}
asf_data_container.payloadcur++;
}
if (enough)
break;
// Skip padding bytes
dbg_print(CCX_DMT_PARSE, "Skip %d padding\n", asf_data_container.PaddingLength);
result = buffered_skip(ctx->demux_ctx, (long)asf_data_container.PaddingLength);
ctx->demux_ctx->past += result;
if (result != asf_data_container.PaddingLength)
{
mprint("Premature end of file!\n");
end_of_file = 1;
return payload_read;
}
asf_data_container.dobjectread += result;
asf_data_container.datapacketcur++;
dbg_print(CCX_DMT_PARSE, "Bytes read: %lld/%lld\n", asf_data_container.dobjectread, asf_data_container.DataObjectSize);
}
if (asf_data_container.datapacketcur == asf_data_container.TotalDataPackets)
{
dbg_print(CCX_DMT_PARSE, "\nWe read the last packet!\n\n");
// Skip the rest of the file
dbg_print(CCX_DMT_PARSE, "Skip the rest: %d\n", (int)(asf_data_container.FileSize - asf_data_container.HeaderObjectSize - asf_data_container.DataObjectSize));
result = buffered_skip(ctx->demux_ctx, (int)(asf_data_container.FileSize - asf_data_container.HeaderObjectSize - asf_data_container.DataObjectSize));
ctx->demux_ctx->past += result;
		// Do not set end_of_file (although it is true) as this would
		// produce a premature end error.
//end_of_file=1;
// parsebuf is freed automatically when the program closes.
}
if (!payload_read)
return CCX_EOF;
return payload_read;
}
|
ww362034710/Gannt | gannt/lib/Scheduler/view/EventEditor.js | <filename>gannt/lib/Scheduler/view/EventEditor.js
import Popup from '../../Core/widget/Popup.js';
/**
* @module Scheduler/view/EventEditor
*/
/**
* Provided event editor dialog.
*
* @extends Core/widget/Popup
* @private
*/
export default class EventEditor extends Popup {
    // Factoryable type name
    static get type() {
        return 'eventeditor';
    }
    static get $name() {
        return 'EventEditor';
    }
    static get configurable() {
        return {
            items : [],
            draggable : {
                handleSelector : ':not(button,.b-field-inner)' // blacklist buttons and field inners
            },
            axisLock : 'flexible',
            scrollable : {
                // In case editor is very tall or window is small, make it scrollable
                overflowY : true
            },
            readOnly : null,
            /**
             * A Function (or *name* of a function) which produces a customized Panel header based upon the event being edited.
             * @config {Function|String}
             * @param {Scheduler.model.EventModel} eventRecord The record being edited
             * @returns {String} The Panel title.
             */
            titleRenderer : null
        };
    }
    // Remembers the (possibly localized) title so onBeforeShow() can restore
    // it when no titleRenderer is configured.
    updateLocalization() {
        super.updateLocalization(...arguments);
        // Use this if there's no titleRenderer
        this.initialTitle = this.title || '';
    }
    // Filters and augments each child widget's config based on the owning
    // EventEdit feature's settings (me.eventEditFeature). Returning false
    // from this method drops the widget entirely.
    // NOTE: order matters below — `fieldConfig.format` may first be set from
    // the feature's date/time formats, then the whole fieldConfig may be
    // replaced wholesale by a feature-supplied *Config object.
    processWidgetConfig(widget) {
        const
            me = this,
            eventEditFeature = me.eventEditFeature;
        // Pseudo-entry, not an actual widget — skip it
        if (widget.type === 'extraItems') {
            return false;
        }
        let fieldConfig = {};
        if (widget.ref === 'resourceField') {
            if (!eventEditFeature.showResourceField) {
                return false;
            }
            // Can't use store directly since it may be grouped and then contains irrelevant group records
            me.resourceStore = widget.store = eventEditFeature.resourceStore.makeChained(
                record => !record.meta.specialRow,
                null,
                {
                    // Need to show all records in the combo. Required in case resource store is a tree.
                    excludeCollapsedRecords : false
                }
            );
            // When events are loaded with resourceId, we should only support single select
            widget.multiSelect = !eventEditFeature.eventStore.usesSingleAssignment;
            if (eventEditFeature.resourceFieldConfig) {
                fieldConfig = eventEditFeature.resourceFieldConfig;
            }
        }
        if (widget.ref === 'nameField' && !eventEditFeature.showNameField) {
            return false;
        }
        if (widget.ref === 'deleteButton' && !eventEditFeature.showDeleteButton) {
            return false;
        }
        // Apply the feature's date/time display formats to the start/end fields
        if ((widget.name === 'startDate' || widget.name === 'endDate') && widget.type === 'date') {
            fieldConfig.format = eventEditFeature.dateFormat;
        }
        if ((widget.name === 'startDate' || widget.name === 'endDate') && widget.type === 'time') {
            fieldConfig.format = eventEditFeature.timeFormat;
        }
        // Feature-supplied per-field configs win over the format defaults above
        if (eventEditFeature.startDateConfig && widget.name === 'startDate' && widget.type === 'date') {
            fieldConfig = eventEditFeature.startDateConfig;
        }
        if (eventEditFeature.startTimeConfig && widget.name === 'startDate' && widget.type === 'time') {
            fieldConfig = eventEditFeature.startTimeConfig;
        }
        if (eventEditFeature.endDateConfig && widget.name === 'endDate' && widget.type === 'date') {
            fieldConfig = eventEditFeature.endDateConfig;
        }
        if (eventEditFeature.endTimeConfig && widget.name === 'endDate' && widget.type === 'time') {
            fieldConfig = eventEditFeature.endTimeConfig;
        }
        // Merge the overrides collected above into the widget config
        Object.assign(widget, fieldConfig);
        return super.processWidgetConfig(widget);
    }
    show(...args) {
        // Updated chained store. It is not done automatically for grouping/trees.
        if (this.resourceStore) {
            this.resourceStore.fillFromMaster();
        }
        super.show(...args);
    }
    // Syncs delete-button visibility and the panel title with the record
    // being edited just before the popup is shown.
    onBeforeShow(...args) {
        const
            me = this,
            {
                record,
                titleRenderer
            } = me,
            deleteButton = me.widgetMap.deleteButton;
        // Hide delete button if we are readOnly or the event does not belong to a store
        if (deleteButton) {
            deleteButton.hidden = me.readOnly || (!record.stores.length && !record.isOccurrence);
        }
        if (titleRenderer) {
            me.title = me.callback(titleRenderer, me, [record]);
        }
        else {
            me.title = me.initialTitle;
        }
        super.onBeforeShow && super.onBeforeShow(...args);
    }
    // Relays keystrokes as a 'keyDown' event before normal Popup key handling.
    onInternalKeyDown(event) {
        this.trigger('keyDown', { event });
        super.onInternalKeyDown(event);
    }
    // Hides all action buttons when the editor is toggled into read-only state.
    updateReadOnly(readOnly) {
        const
            {
                deleteButton,
                saveButton,
                cancelButton
            } = this.widgetMap;
        super.updateReadOnly(readOnly);
        if (deleteButton) {
            deleteButton.hidden = readOnly;
        }
        if (saveButton) {
            saveButton.hidden = readOnly;
        }
        if (cancelButton) {
            cancelButton.hidden = readOnly;
        }
    }
}
// Register this widget type with its Factory
EventEditor.initClass();
|
tradingsecret/beam_wallet | websocket/reactor.h | <filename>websocket/reactor.h
// Copyright 2018-2020 The Beam Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once
#include <memory>
#include <queue>
#include <mutex>
#include <thread>
#include "utility/io/reactor.h"
#include "utility/io/asyncevent.h"
namespace beam
{
class SafeReactor : std::enable_shared_from_this<SafeReactor>
{
public:
SafeReactor(const SafeReactor&) = delete;
SafeReactor& operator=(const SafeReactor&) = delete;
SafeReactor() = default;
using Ptr = std::shared_ptr<SafeReactor>;
using Callback = beam::io::AsyncEvent::Callback;
static Ptr create();
void callAsync(Callback cback);
void assert_thread();
io::Reactor& ref();
io::Reactor::Ptr ptr();
private:
io::Reactor::Ptr _reactor;
io::AsyncEvent::Ptr _event;
std::thread::id _reactorThread;
std::mutex _queueMutex;
std::queue<Callback> _queue;
};
}
|
megakode/MegaTinyEngine | MegaTinyEngine/Resources/JSONSerialization.h | //
// Created by <NAME> on 24/06/2020.
//
#include "Vendor/json.hpp"
#include "ResourceFile.h"
#ifndef SDLTEST_JSONSERIALIZATION_H
#define SDLTEST_JSONSERIALIZATION_H
using nlohmann::json;
namespace Engine
{
/// Sprite Frame
void to_json(json& j, const SpriteFrame& frame)
{
j = json{{"x", frame.x}, {"y", frame.y}, {"w", frame.w}, {"h", frame.h} };
}
void from_json(const json& j, SpriteFrame& frame)
{
j.at("x").get_to(frame.x);
j.at("y").get_to(frame.y);
j.at("w").get_to(frame.w);
j.at("h").get_to(frame.h);
}
/// Sprite animation
void to_json(json& j, const SpriteAnimation& spriteAnimation)
{
j = json{{"ticksPrFrame", spriteAnimation.ticksPrFrame}, {"type", spriteAnimation.type}, {"direction", spriteAnimation.direction}, {"texture", spriteAnimation.textureId}};
for( auto& frame : spriteAnimation.frames)
{
j["frames"].push_back(frame);
}
}
void from_json(const json& j, SpriteAnimation& sa)
{
j.at("ticksPrFrame").get_to(sa.ticksPrFrame);
j.at("type").get_to(sa.type);
j.at("direction").get_to(sa.direction);
j.at("texture").get_to(sa.textureId);
for( auto& element : j.at("frames") ){
auto frame = element.get<SpriteFrame>();
sa.frames.push_back(frame);
}
sa.numberOfFrames = sa.frames.size();
}
// Font resource definition
void from_json(const json& j, FontDefinition& font)
{
j.at("texture").get_to(font.texture);
j.at("charWidth").get_to(font.charWidth);
j.at("charHeight").get_to(font.charHeight);
j.at("numStyles").get_to(font.numStyles);
}
}
#endif //SDLTEST_JSONSERIALIZATION_H
|
vvd170501/ClickHouse | src/Functions/FunctionsStringSimilarity.cpp | <reponame>vvd170501/ClickHouse
#include <Functions/FunctionsStringSimilarity.h>
#include <Functions/FunctionFactory.h>
#include <Functions/FunctionsHashing.h>
#include <Common/HashTable/ClearableHashMap.h>
#include <Common/HashTable/Hash.h>
#include <Common/UTF8Helpers.h>
#include <Core/Defines.h>
#include <base/unaligned.h>
#include <algorithm>
#include <climits>
#include <cstring>
#include <limits>
#include <memory>
#include <utility>
#ifdef __SSE4_2__
# include <nmmintrin.h>
#endif
#if defined(__aarch64__) && defined(__ARM_FEATURE_CRC32)
# include <arm_acle.h>
#endif
namespace DB
{
/** Distance function implementation.
  * We calculate all the n-grams from left string and count by the index of
  * 16 bits hash of them in the map.
  * Then calculate all the n-grams from the right string and calculate
  * the n-gram distance on the flight by adding and subtracting from the hashmap.
  * Then return the map into the condition of which it was after the left string
  * calculation. If the right string size is big (more than 2**15 bytes),
  * the strings are not similar at all and we return 1.
  *
  * Template parameters:
  *   N                - n-gram length in code points (4 for byte variants, 3 for UTF-8 variants).
  *   CodePoint        - storage type of a single code point (UInt8 for bytes, UInt32 for UTF-8).
  *   UTF8             - decode input as UTF-8 sequences instead of single bytes.
  *   case_insensitive - lower-case code points before hashing (approximate for non-ASCII, see below).
  *   symmetric        - symmetric-difference metric (ngramDistance*) vs. one-sided metric (ngramSearch*).
  */
template <size_t N, class CodePoint, bool UTF8, bool case_insensitive, bool symmetric>
struct NgramDistanceImpl
{
    using ResultType = Float32;

    /// map_size for ngram difference.
    static constexpr size_t map_size = 1u << 16;

    /// If the haystack size is bigger than this, behaviour is unspecified for this function.
    static constexpr size_t max_string_size = 1u << 15;

    /// Default padding to read safely.
    static constexpr size_t default_padding = 16;

    /// Max codepoints to store at once. 16 is for batching usage and PODArray has this padding.
    static constexpr size_t simultaneously_codepoints_num = default_padding + N - 1;

    /** map_size of this fits mostly in L2 cache all the time.
      * Actually use UInt16 as addings and subtractions do not UB overflow. But think of it as a signed
      * integer array.
      */
    using NgramCount = UInt16;

    /// 16-bit hash (map index) of N consecutive byte code points.
    static ALWAYS_INLINE UInt16 calculateASCIIHash(const CodePoint * code_points)
    {
        return intHashCRC32(unalignedLoad<UInt32>(code_points)) & 0xFFFFu;
    }

    /// 16-bit hash (map index) of 3 consecutive UTF-8 code points, using a CRC32
    /// instruction where available and a portable fallback otherwise.
    static ALWAYS_INLINE UInt16 calculateUTF8Hash(const CodePoint * code_points)
    {
        UInt64 combined = (static_cast<UInt64>(code_points[0]) << 32) | code_points[1];
#ifdef __SSE4_2__
        return _mm_crc32_u64(code_points[2], combined) & 0xFFFFu;
#elif defined(__aarch64__) && defined(__ARM_FEATURE_CRC32)
        return __crc32cd(code_points[2], combined) & 0xFFFFu;
#else
        return (intHashCRC32(combined) ^ intHashCRC32(code_points[2])) & 0xFFFFu;
#endif
    }

    /// Lower-cases sizeof...(I) elements of `cont` starting at `Offset`, fully unrolled via fold expression.
    template <size_t Offset, class Container, size_t... I>
    static ALWAYS_INLINE inline void unrollLowering(Container & cont, const std::index_sequence<I...> &)
    {
        ((cont[Offset + I] = std::tolower(cont[Offset + I])), ...);
    }

    /// Refills the sliding code-point window from raw bytes; advances `pos`.
    /// Returns the number of valid entries now in `code_points`.
    static ALWAYS_INLINE size_t readASCIICodePoints(CodePoint * code_points, const char *& pos, const char * end)
    {
        /// Offset before which we copy some data.
        constexpr size_t padding_offset = default_padding - N + 1;
        /// We have an array like this for ASCII (N == 4, other cases are similar)
        /// |a0|a1|a2|a3|a4|a5|a6|a7|a8|a9|a10|a11|a12|a13|a14|a15|a16|a17|a18|
        /// And we copy ^^^^^^^^^^^^^^^ these bytes to the start
        /// Actually it is enough to copy 3 bytes, but memcpy for 4 bytes translates into 1 instruction
        memcpy(code_points, code_points + padding_offset, roundUpToPowerOfTwoOrZero(N - 1) * sizeof(CodePoint));
        /// Now we have an array
        /// |a13|a14|a15|a16|a4|a5|a6|a7|a8|a9|a10|a11|a12|a13|a14|a15|a16|a17|a18|
        ///  ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
        /// Doing unaligned read of 16 bytes and copy them like above
        /// 16 is also chosen to do two `movups`.
        /// Such copying allow us to have 3 codepoints from the previous read to produce the 4-grams with them.
        memcpy(code_points + (N - 1), pos, default_padding * sizeof(CodePoint));
        if constexpr (case_insensitive)
        {
            /// We really need template lambdas with C++20 to do it inline
            unrollLowering<N - 1>(code_points, std::make_index_sequence<padding_offset>());
        }
        pos += padding_offset;
        if (pos > end)
            return default_padding - (pos - end);
        return default_padding;
    }

    /// UTF-8 counterpart of readASCIICodePoints: decodes whole sequences into
    /// `code_points` until the window or the input is exhausted; advances `pos`.
    /// Returns the number of valid entries now in `code_points`.
    static ALWAYS_INLINE size_t readUTF8CodePoints(CodePoint * code_points, const char *& pos, const char * end)
    {
        /// The same copying as described in the function above.
        memcpy(code_points, code_points + default_padding - N + 1, roundUpToPowerOfTwoOrZero(N - 1) * sizeof(CodePoint));
        size_t num = N - 1;
        while (num < default_padding && pos < end)
        {
            size_t length = UTF8::seqLength(*pos);
            /// Truncated sequence at the end of the input: read only what is left.
            if (pos + length > end)
                length = end - pos;
            CodePoint res;
            /// This is faster than just memcpy because of compiler optimizations with moving bytes.
            switch (length)
            {
                case 1:
                    res = 0;
                    memcpy(&res, pos, 1);
                    break;
                case 2:
                    res = 0;
                    memcpy(&res, pos, 2);
                    break;
                case 3:
                    res = 0;
                    memcpy(&res, pos, 3);
                    break;
                default:
                    memcpy(&res, pos, 4);
            }
            /// This is not a really true case insensitive utf8. We zero the 5-th bit of every byte.
            /// And first bit of first byte if there are two bytes.
            /// For ASCII it works https://catonmat.net/ascii-case-conversion-trick. For most cyrillic letters also does.
            /// For others, we don't care now. Lowering UTF is not a cheap operation.
            if constexpr (case_insensitive)
            {
                switch (length)
                {
                    case 4:
                        res &= ~(1u << (5 + 3 * CHAR_BIT));
                        [[fallthrough]];
                    case 3:
                        res &= ~(1u << (5 + 2 * CHAR_BIT));
                        [[fallthrough]];
                    case 2:
                        res &= ~(1u);
                        res &= ~(1u << (5 + CHAR_BIT));
                        [[fallthrough]];
                    default:
                        res &= ~(1u << 5);
                }
            }
            pos += length;
            code_points[num++] = res;
        }
        return num;
    }

    /// Counts every n-gram of `data` into `ngram_stats` (and, when save_ngrams,
    /// records each hash into `ngram_storage` so the caller can undo the counts).
    /// Returns the total number of n-grams seen.
    template <bool save_ngrams>
    static ALWAYS_INLINE inline size_t calculateNeedleStats(
        const char * data,
        const size_t size,
        NgramCount * ngram_stats,
        [[maybe_unused]] NgramCount * ngram_storage,
        size_t (*read_code_points)(CodePoint *, const char *&, const char *),
        UInt16 (*hash_functor)(const CodePoint *))
    {
        const char * start = data;
        const char * end = data + size;
        CodePoint cp[simultaneously_codepoints_num] = {};
        /// read_code_points returns the position of cp where it stopped reading codepoints.
        size_t found = read_code_points(cp, start, end);
        /// We need to start for the first time here, because first N - 1 codepoints mean nothing.
        size_t i = N - 1;
        size_t len = 0;
        do
        {
            for (; i + N <= found; ++i)
            {
                ++len;
                UInt16 hash = hash_functor(cp + i);
                if constexpr (save_ngrams)
                    *ngram_storage++ = hash;
                ++ngram_stats[hash];
            }
            i = 0;
        } while (start < end && (found = read_code_points(cp, start, end)));
        return len;
    }

    /// Walks the haystack's n-grams against `ngram_stats` (pre-filled by the
    /// needle), updating `distance` in place: subtract on a match, and for the
    /// symmetric metric add on a miss. When reuse_stats, the touched hashes are
    /// recorded in `ngram_storage` and the map is restored before returning.
    /// Returns the number of haystack n-grams processed.
    template <bool reuse_stats>
    static ALWAYS_INLINE inline UInt64 calculateHaystackStatsAndMetric(
        const char * data,
        const size_t size,
        NgramCount * ngram_stats,
        size_t & distance,
        [[maybe_unused]] UInt16 * ngram_storage,
        size_t (*read_code_points)(CodePoint *, const char *&, const char *),
        UInt16 (*hash_functor)(const CodePoint *))
    {
        size_t ngram_cnt = 0;
        const char * start = data;
        const char * end = data + size;
        CodePoint cp[simultaneously_codepoints_num] = {};
        /// read_code_points returns the position of cp where it stopped reading codepoints.
        size_t found = read_code_points(cp, start, end);
        /// We need to start for the first time here, because first N - 1 codepoints mean nothing.
        size_t iter = N - 1;
        do
        {
            for (; iter + N <= found; ++iter)
            {
                UInt16 hash = hash_functor(cp + iter);
                /// For symmetric version we should add when we can't subtract to get symmetric difference.
                if (static_cast<Int16>(ngram_stats[hash]) > 0)
                    --distance;
                else if constexpr (symmetric)
                    ++distance;
                if constexpr (reuse_stats)
                    ngram_storage[ngram_cnt] = hash;
                ++ngram_cnt;
                --ngram_stats[hash];
            }
            iter = 0;
        } while (start < end && (found = read_code_points(cp, start, end)));
        /// Return the state of hash map to its initial.
        if constexpr (reuse_stats)
        {
            for (size_t i = 0; i < ngram_cnt; ++i)
                ++ngram_stats[ngram_storage[i]];
        }
        return ngram_cnt;
    }

    /// Dispatches `callback` with the reader/hasher pair matching the UTF8 template flag.
    template <class Callback, class... Args>
    static inline auto dispatchSearcher(Callback callback, Args &&... args)
    {
        if constexpr (!UTF8)
            return callback(std::forward<Args>(args)..., readASCIICodePoints, calculateASCIIHash);
        else
            return callback(std::forward<Args>(args)..., readUTF8CodePoints, calculateUTF8Hash);
    }

    /// Both arguments are constants: compute a single scalar result.
    static void constantConstant(std::string data, std::string needle, Float32 & res)
    {
        std::unique_ptr<NgramCount[]> common_stats{new NgramCount[map_size]{}};
        /// We use unsafe versions of getting ngrams, so I decided to use padded strings.
        const size_t needle_size = needle.size();
        const size_t data_size = data.size();
        needle.resize(needle_size + default_padding);
        data.resize(data_size + default_padding);
        size_t second_size = dispatchSearcher(calculateNeedleStats<false>, needle.data(), needle_size, common_stats.get(), nullptr);
        size_t distance = second_size;
        if (data_size <= max_string_size)
        {
            size_t first_size = dispatchSearcher(calculateHaystackStatsAndMetric<false>, data.data(), data_size, common_stats.get(), distance, nullptr);
            /// For !symmetric version we should not use first_size.
            if constexpr (symmetric)
                res = distance * 1.f / std::max(first_size + second_size, static_cast<size_t>(1));
            else
                res = 1.f - distance * 1.f / std::max(second_size, static_cast<size_t>(1));
        }
        else
        {
            if constexpr (symmetric)
                res = 1.f;
            else
                res = 0.f;
        }
    }

    /// Column haystack vs. column needle: one result per row.
    static void vectorVector(
        const ColumnString::Chars & haystack_data,
        const ColumnString::Offsets & haystack_offsets,
        const ColumnString::Chars & needle_data,
        const ColumnString::Offsets & needle_offsets,
        PaddedPODArray<Float32> & res)
    {
        const size_t haystack_offsets_size = haystack_offsets.size();
        size_t prev_haystack_offset = 0;
        size_t prev_needle_offset = 0;
        std::unique_ptr<NgramCount[]> common_stats{new NgramCount[map_size]{}};
        /// The main motivation is to not allocate more on stack because we have already allocated a lot (128Kb).
        /// And we can reuse these storages in one thread because we care only about what was written to first places.
        std::unique_ptr<UInt16[]> needle_ngram_storage(new UInt16[max_string_size]);
        std::unique_ptr<UInt16[]> haystack_ngram_storage(new UInt16[max_string_size]);
        for (size_t i = 0; i < haystack_offsets_size; ++i)
        {
            const char * haystack = reinterpret_cast<const char *>(&haystack_data[prev_haystack_offset]);
            const size_t haystack_size = haystack_offsets[i] - prev_haystack_offset - 1;
            const char * needle = reinterpret_cast<const char *>(&needle_data[prev_needle_offset]);
            const size_t needle_size = needle_offsets[i] - prev_needle_offset - 1;
            if (needle_size <= max_string_size && haystack_size <= max_string_size)
            {
                /// Get needle stats.
                const size_t needle_stats_size = dispatchSearcher(
                    calculateNeedleStats<true>,
                    needle,
                    needle_size,
                    common_stats.get(),
                    needle_ngram_storage.get());
                size_t distance = needle_stats_size;
                /// Combine with haystack stats, return to initial needle stats.
                const size_t haystack_stats_size = dispatchSearcher(
                    calculateHaystackStatsAndMetric<true>,
                    haystack,
                    haystack_size,
                    common_stats.get(),
                    distance,
                    haystack_ngram_storage.get());
                /// Return to zero array stats.
                for (size_t j = 0; j < needle_stats_size; ++j)
                    --common_stats[needle_ngram_storage[j]];
                /// For now, common stats is a zero array.
                /// For !symmetric version we should not use haystack_stats_size.
                if constexpr (symmetric)
                    res[i] = distance * 1.f / std::max(haystack_stats_size + needle_stats_size, static_cast<size_t>(1));
                else
                    res[i] = 1.f - distance * 1.f / std::max(needle_stats_size, static_cast<size_t>(1));
            }
            else
            {
                /// Strings are too big, we are assuming they are not the same. This is done because of limiting number
                /// of bigrams added and not allocating too much memory.
                if constexpr (symmetric)
                    res[i] = 1.f;
                else
                    res[i] = 0.f;
            }
            prev_needle_offset = needle_offsets[i];
            prev_haystack_offset = haystack_offsets[i];
        }
    }

    /// Constant haystack vs. column needle: one result per needle row.
    static void constantVector(
        std::string haystack,
        const ColumnString::Chars & needle_data,
        const ColumnString::Offsets & needle_offsets,
        PaddedPODArray<Float32> & res)
    {
        /// For symmetric version it is better to use vector_constant
        if constexpr (symmetric)
        {
            vectorConstant(needle_data, needle_offsets, std::move(haystack), res);
        }
        else
        {
            const size_t haystack_size = haystack.size();
            haystack.resize(haystack_size + default_padding);
            /// For logic explanation see vector_vector function.
            const size_t needle_offsets_size = needle_offsets.size();
            size_t prev_offset = 0;
            std::unique_ptr<NgramCount[]> common_stats{new NgramCount[map_size]{}};
            std::unique_ptr<UInt16[]> needle_ngram_storage(new UInt16[max_string_size]);
            std::unique_ptr<UInt16[]> haystack_ngram_storage(new UInt16[max_string_size]);
            for (size_t i = 0; i < needle_offsets_size; ++i)
            {
                const char * needle = reinterpret_cast<const char *>(&needle_data[prev_offset]);
                const size_t needle_size = needle_offsets[i] - prev_offset - 1;
                if (needle_size <= max_string_size && haystack_size <= max_string_size)
                {
                    const size_t needle_stats_size = dispatchSearcher(
                        calculateNeedleStats<true>,
                        needle,
                        needle_size,
                        common_stats.get(),
                        needle_ngram_storage.get());
                    size_t distance = needle_stats_size;
                    dispatchSearcher(
                        calculateHaystackStatsAndMetric<true>,
                        haystack.data(),
                        haystack_size,
                        common_stats.get(),
                        distance,
                        haystack_ngram_storage.get());
                    for (size_t j = 0; j < needle_stats_size; ++j)
                        --common_stats[needle_ngram_storage[j]];
                    res[i] = 1.f - distance * 1.f / std::max(needle_stats_size, static_cast<size_t>(1));
                }
                else
                {
                    res[i] = 0.f;
                }
                prev_offset = needle_offsets[i];
            }
        }
    }

    /// Column haystack vs. constant needle: the needle's stats are computed once
    /// and reused (distance is reset for each row).
    static void vectorConstant(
        const ColumnString::Chars & data,
        const ColumnString::Offsets & offsets,
        std::string needle,
        PaddedPODArray<Float32> & res)
    {
        /// zeroing our map
        std::unique_ptr<NgramCount[]> common_stats{new NgramCount[map_size]{}};
        /// We can reuse these storages in one thread because we care only about what was written to first places.
        std::unique_ptr<UInt16[]> ngram_storage(new NgramCount[max_string_size]);
        /// We use unsafe versions of getting ngrams, so I decided to use padded_data even in needle case.
        const size_t needle_size = needle.size();
        needle.resize(needle_size + default_padding);
        const size_t needle_stats_size = dispatchSearcher(calculateNeedleStats<false>, needle.data(), needle_size, common_stats.get(), nullptr);
        size_t distance = needle_stats_size;
        size_t prev_offset = 0;
        for (size_t i = 0; i < offsets.size(); ++i)
        {
            const UInt8 * haystack = &data[prev_offset];
            const size_t haystack_size = offsets[i] - prev_offset - 1;
            if (haystack_size <= max_string_size)
            {
                size_t haystack_stats_size = dispatchSearcher(
                    calculateHaystackStatsAndMetric<true>,
                    reinterpret_cast<const char *>(haystack),
                    haystack_size, common_stats.get(),
                    distance,
                    ngram_storage.get());
                /// For !symmetric version we should not use haystack_stats_size.
                if constexpr (symmetric)
                    res[i] = distance * 1.f / std::max(haystack_stats_size + needle_stats_size, static_cast<size_t>(1));
                else
                    res[i] = 1.f - distance * 1.f / std::max(needle_stats_size, static_cast<size_t>(1));
            }
            else
            {
                /// if the strings are too big, we say they are completely not the same
                if constexpr (symmetric)
                    res[i] = 1.f;
                else
                    res[i] = 0.f;
            }
            distance = needle_stats_size;
            prev_offset = offsets[i];
        }
    }
};
/// SQL-visible names under which the function variants are registered.
struct NameNgramDistance
{
    static constexpr auto name = "ngramDistance";
};
struct NameNgramDistanceCaseInsensitive
{
    static constexpr auto name = "ngramDistanceCaseInsensitive";
};
struct NameNgramDistanceUTF8
{
    static constexpr auto name = "ngramDistanceUTF8";
};
struct NameNgramDistanceUTF8CaseInsensitive
{
    static constexpr auto name = "ngramDistanceCaseInsensitiveUTF8";
};
struct NameNgramSearch
{
    static constexpr auto name = "ngramSearch";
};
struct NameNgramSearchCaseInsensitive
{
    static constexpr auto name = "ngramSearchCaseInsensitive";
};
struct NameNgramSearchUTF8
{
    static constexpr auto name = "ngramSearchUTF8";
};
struct NameNgramSearchUTF8CaseInsensitive
{
    static constexpr auto name = "ngramSearchCaseInsensitiveUTF8";
};

/// Concrete instantiations: 4-grams over bytes for the plain variants,
/// 3-grams over decoded code points for the UTF-8 variants.
using FunctionNgramDistance = FunctionsStringSimilarity<NgramDistanceImpl<4, UInt8, false, false, true>, NameNgramDistance>;
using FunctionNgramDistanceCaseInsensitive = FunctionsStringSimilarity<NgramDistanceImpl<4, UInt8, false, true, true>, NameNgramDistanceCaseInsensitive>;
using FunctionNgramDistanceUTF8 = FunctionsStringSimilarity<NgramDistanceImpl<3, UInt32, true, false, true>, NameNgramDistanceUTF8>;
using FunctionNgramDistanceCaseInsensitiveUTF8 = FunctionsStringSimilarity<NgramDistanceImpl<3, UInt32, true, true, true>, NameNgramDistanceUTF8CaseInsensitive>;
using FunctionNgramSearch = FunctionsStringSimilarity<NgramDistanceImpl<4, UInt8, false, false, false>, NameNgramSearch>;
using FunctionNgramSearchCaseInsensitive = FunctionsStringSimilarity<NgramDistanceImpl<4, UInt8, false, true, false>, NameNgramSearchCaseInsensitive>;
using FunctionNgramSearchUTF8 = FunctionsStringSimilarity<NgramDistanceImpl<3, UInt32, true, false, false>, NameNgramSearchUTF8>;
using FunctionNgramSearchCaseInsensitiveUTF8 = FunctionsStringSimilarity<NgramDistanceImpl<3, UInt32, true, true, false>, NameNgramSearchUTF8CaseInsensitive>;
/// Registers all ngramDistance*/ngramSearch* variants in the function factory.
void registerFunctionsStringSimilarity(FunctionFactory & factory)
{
    factory.registerFunction<FunctionNgramDistance>();
    factory.registerFunction<FunctionNgramDistanceCaseInsensitive>();
    factory.registerFunction<FunctionNgramDistanceUTF8>();
    factory.registerFunction<FunctionNgramDistanceCaseInsensitiveUTF8>();
    factory.registerFunction<FunctionNgramSearch>();
    factory.registerFunction<FunctionNgramSearchCaseInsensitive>();
    factory.registerFunction<FunctionNgramSearchUTF8>();
    factory.registerFunction<FunctionNgramSearchCaseInsensitiveUTF8>();
}
}
|
VijayS02/Random-Programming-Items | PythonFiles/SUWSS/Java/jexcelapi/src/jxl/write/WritableCell.java | /*********************************************************************
*
* Copyright (C) 2002 <NAME>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
***************************************************************************/
package jxl.write;
import jxl.Cell;
import jxl.format.CellFormat;
/**
 * The interface for all writable cells.
 */
public interface WritableCell extends Cell
{
  /**
   * Sets the cell format for this cell.
   *
   * @param cf the cell format
   */
  public void setCellFormat(CellFormat cf);

  /**
   * A deep copy. The returned cell still needs to be added to the sheet.
   * By not automatically adding the cell to the sheet, the client program
   * may change certain attributes, such as the value or the format.
   *
   * @param col the column which the new cell will occupy
   * @param row the row which the new cell will occupy
   * @return a copy of this cell, which can then be added to the sheet
   */
  public WritableCell copyTo(int col, int row);

  /**
   * Accessor for the cell features.
   *
   * @return the cell features, or {@code null} if this cell doesn't have any
   */
  public WritableCellFeatures getWritableCellFeatures();

  /**
   * Sets the cell features.
   *
   * @param cf the cell features
   */
  public void setCellFeatures(WritableCellFeatures cf);
}
|
cclauss/xmodaler | xmodaler/optim/rmsprop.py | <gh_stars>100-1000
# Copyright 2021 JD.com, Inc., JD AI
"""
@author: <NAME>
@contact: <EMAIL>
"""
import torch
from xmodaler.config import configurable
from .build import SOLVER_REGISTRY
@SOLVER_REGISTRY.register()
class RMSprop(torch.optim.RMSprop):
    """Registry-aware wrapper around ``torch.optim.RMSprop``.

    Adds the ``@configurable`` machinery so the optimizer can be built
    directly from an x-modaler config node via :meth:`from_config`.
    """

    @configurable
    def __init__(
        self,
        *,
        params,
        lr=1e-2,
        alpha=0.99,
        eps=1e-8,
        weight_decay=0,
        momentum=0,
        centered=False
    ):
        # Delegate everything to the torch implementation; keyword form keeps
        # the mapping to torch's signature explicit.
        super().__init__(
            params,
            lr=lr,
            alpha=alpha,
            eps=eps,
            weight_decay=weight_decay,
            momentum=momentum,
            centered=centered,
        )

    @classmethod
    def from_config(cls, cfg, params):
        """Map the ``cfg.SOLVER`` node onto RMSprop constructor kwargs."""
        solver = cfg.SOLVER
        return {
            "params": params,
            "lr": solver.BASE_LR,
            "alpha": solver.ALPHA,
            "eps": solver.EPS,
            "weight_decay": solver.WEIGHT_DECAY,
            "momentum": solver.MOMENTUM,
            "centered": solver.CENTERED,
        }
|
xiao125/o2oMaven | src/main/java/com/imooc/o2o/dao/ShopDao.java | <reponame>xiao125/o2oMaven
package com.imooc.o2o.dao;
import com.imooc.o2o.entity.Shop;
import org.apache.ibatis.annotations.Param;
import java.util.List;
/**
 * MyBatis mapper for the Shop entity.
 * Created by Administrator on 2017/11/27.
 */
public interface ShopDao {

    /**
     * Paged query over shops. Supported filter conditions on {@code shopCondition}:
     * shop name (fuzzy match), shop status, shop category, area id, owner.
     *
     * @param shopCondition example entity carrying the filter conditions
     * @param rowIndex
     *            row to start fetching from
     * @param pageSize
     *            number of rows to return
     * @return the matching page of shops
     */
    List<Shop> queryShopList(@Param("shopCondition") Shop shopCondition,@Param("rowIndex") int rowIndex,
                             @Param("pageSize") int pageSize);

    /**
     * Returns the total number of rows matching {@link #queryShopList}.
     * @param shopCondition example entity carrying the filter conditions
     * @return total match count
     */
    int queryShopCount(@Param("shopCondition") Shop shopCondition);

    /**
     * Looks up a shop by its id.
     * @param shopId the shop id
     * @return the shop, or null if none matches (TODO confirm mapper behaviour)
     */
    Shop queryByShopId(long shopId);

    /**
     * Inserts a new shop record.
     * @param shop the shop to insert
     * @return number of affected rows
     */
    int insertShop(Shop shop);

    /**
     * Updates an existing shop record.
     * @param shop the shop to update
     * @return number of affected rows
     */
    int updateShop(Shop shop);
}
|
benpolinsky/bp_custom_fields | app/helpers/bp_custom_fields/form_helper.rb | # Add a FormHelper to fetch and display our custom fields
# There's the possibility there's too much going on in the fetch dept.
# But then I'd have to ask the end user to setup something in controllers (probably)
#
# usage:
#
# form_for(@object) do |f|
# f.bp_custom_fields
# end
module BpCustomFields
module FormHelper
ActionView::Helpers::FormBuilder.class_eval do
include ActionView::Context
include ActionView::Helpers::TextHelper
include ActionView::Helpers::TagHelper
def bp_custom_fields
@object.update_custom_field_groups
if @object.groups.any?
content_tag :div, class: "custom-field-container bpcf-base-theme" do
concat content_tag(:div, custom_groups, class: "custom-groups")
end
end
end
private
# hmm.. why isn't this fields?
# and I am not using this...
# FOR DELETION
# def groups
# @object.groups.map(&:fields)
# end
# Setup each custom field's group
def custom_groups
fields_for :groups do |group_builder|
custom_group(group_builder)
end
end
# Set up each groups display
# A Label, a hidden_field to keep track of the group_template_id
# and of course the custom fields themselves
def custom_group(group_builder)
content_tag :div, class: "custom-group" do
concat content_tag(:p, "#{group_builder.object.name}", class: 'toggle-group active')
concat content_tag(:span, group_builder.hidden_field(:group_template_id))
concat content_tag(:div, custom_fields(group_builder), class: "custom-group-inner active")
end
end
# Iterate through the builder's fields
def custom_fields(group_builder)
group_builder.fields_for :fields do |fields_f|
capture do
concat custom_field(fields_f)
end
end
end
# Build the form field from the object's field_template
def custom_field(field_builder)
field_template = field_builder.object.field_template
@template.render partial: "bp_custom_fields/field_types/admin/basic",
locals: {f: field_builder, field_template: field_template}
end
end
end
end |
Nibor007/Proyectos | src/main/java/cl/bancochile/portal/common/recaudacion/converter/DetalleContratoRes.java | <reponame>Nibor007/Proyectos
package cl.bancochile.portal.common.recaudacion.converter;
import cl.bancochile.osb.bch.neg.pagos.cobranzaexterna.consultarcobranzasexternasrs.mpi.Canal;
import cl.bancochile.osb.bch.neg.pagos.cobranzaexterna.consultarcobranzasexternasrs.mpi.ConsultarCobranzasExternasRs;
import cl.bancochile.portal.common.recaudacion.domain.DetalleContrato;
import cl.bancochile.portal.common.recaudacion.utils.DateUtil;
import com.google.common.base.Joiner;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.core.convert.converter.Converter;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
/**
 * Converts the OSB {@link ConsultarCobranzasExternasRs} response into the
 * portal's {@link DetalleContrato} domain object, resolving display labels
 * through the "detalleContrato" properties bundle
 * (key prefixes: CANORI, INCOBA, CODCOB, MOANVT, TIPTAS).
 */
public class DetalleContratoRes implements Converter<ConsultarCobranzasExternasRs, DetalleContrato> {

    /** Label catalogue used to translate service codes into display text. */
    @Autowired
    @Qualifier(value = "detalleContrato")
    private Properties properties;

    /** Joins channel labels as "a - b - c", skipping nulls. */
    private static final Joiner JOINER = Joiner.on(" - ").skipNulls();

    @Override
    public DetalleContrato convert(ConsultarCobranzasExternasRs res) {
        DetalleContrato detalleContrato = new DetalleContrato();
        StringBuilder sbCanal = new StringBuilder();
        List<String> listadoCanales = new ArrayList<>();
        List<BigInteger> listadoCodigoCanales = new ArrayList<>();
        // Collect both the display label (CANORI.<code>) and the raw code of
        // every channel in the response.
        for(Canal canal: res.getServicio().getCanales().getCanal()){
            listadoCanales.add(properties.getProperty("CANORI."+ canal.getCodigo().intValue()));
            listadoCodigoCanales.add(canal.getCodigo());
        }
        detalleContrato.setRecaudacionCaja(JOINER.appendTo(sbCanal,listadoCanales).toString());
        detalleContrato.setUsoCodigoBarra(properties.getProperty("INCOBA."+ res.getServicio().getIndicadorCodigoBarra().toString()));
        detalleContrato.setCuentaCorriente(res.getPagoEmpresa().getCuentaCorriente());
        detalleContrato.setFechaInicio(DateUtil.dateToString(DateUtil.xmlGregorianCalendartoDate(res.getServicio().getFechaInicio()), DateUtil.FMT_FECHA_DD_MM_YYYY));
        detalleContrato.setCobertura(properties.getProperty("CODCOB."+ res.getServicio().getCodigoCobertura().toString()));
        detalleContrato.setMoneda(properties.getProperty("MOANVT." + res.getIntereses().getMonedaAntesVencimiento()));
        detalleContrato.setTipoInteres(properties.getProperty("TIPTAS." + res.getIntereses().getTipoTasa()));
        // NOTE(review): only the first channel's code is exposed here — confirm
        // whether multi-channel contracts should surface more than one.
        detalleContrato.setCanalRecaudacion(res.getServicio().getCanales().getCanal().get(0).getCodigo().toString());
        detalleContrato.setNombreConvenio(res.getServicio().getNombre());
        detalleContrato.setListaCanales(listadoCodigoCanales);
        return detalleContrato;
    }
}
|
DevLabsDigital/consultoria_gem | app/javascript/components/app/mock/etapasDashboard.js | const etapasDashboard = [
{
etapa: 'Apresentação do Projeto',
status: 'ok',
},
{
etapa: 'Definição dos objetivos macro do projeto',
status: 'pendente',
},
{
etapa: 'Definir equipe do projeto - LÍDER DO PROJETO',
status: 'pendente',
},
{
etapa: 'Entrevista com os principais executivos (operação)',
status: 'ok',
},
{
etapa: 'Levantamento dos dados internos - Diagnostico',
status: 'ok',
},
{
etapa: 'Análise dos dados levantados',
status: 'ok',
},
]
export default etapasDashboard |
Repast/repast.simphony | repast.simphony.data/src/repast/simphony/data2/AggregateDataSource.java | <reponame>Repast/repast.simphony
package repast.simphony.data2;
/**
 * Interface for classes that can function as the source of aggregate data to be
 * logged or charted.
 *
 * @author Nick Collier
 */
public interface AggregateDataSource extends DataSource {

  /**
   * Gets the aggregate data computed over the specified iterable.
   *
   * @param objs
   *          the iterable over objects to use in getting the data
   * @param size
   *          the number of objects in the iterable
   *
   * @return the data computed over the specified iterable.
   */
  Object get(Iterable<?> objs, int size);

  /**
   * Resets this AggregateDataSource prior to the next get call.
   */
  void reset();
}
|
albertdow/zinv-analysis | drawing/dist_facet.py | import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
#sns.set(style='ticks')
def dist_facet(df, bins, filepath, cfg):
    """Plot a facet grid of up/down systematic variations and save it as PDF.

    Columns of ``df`` are expected to come in ``<systematic>Up`` /
    ``<systematic>Down`` pairs; they are split, stacked into long form and
    drawn as one :class:`seaborn.FacetGrid` panel per (systematic, process).

    :param df: wide-form DataFrame whose columns end in "Up"/"Down"; its index
        carries a ``process`` level and a ``bin0_low`` level — TODO confirm
        against the caller.
    :param bins: currently unused; kept for interface compatibility with the
        other drawing functions.
    :param filepath: output path without extension (".pdf" is appended).
    :param cfg: drawing config providing ``sample_names``, ``process_order``,
        ``xlabel`` and ``ylabel``.
    :return: the long-form DataFrame that was plotted.
    """
    plt.rcParams['xtick.top'] = False
    plt.rcParams['ytick.right'] = False
    with sns.plotting_context(context='paper', font_scale=1.8):
        # Split the paired columns into the "Up" and "Down" variations and
        # rename them to display names.  (A previously computed `variations`
        # set was never used and has been removed.)
        df_up = df.loc[:,[c for c in df.columns if c.endswith("Up")]]
        df_down = df.loc[:,[c for c in df.columns if c.endswith("Down")]]
        df_up.columns = [cfg.sample_names[c[:-2]] for c in df_up.columns]
        df_up.columns.name = "key"
        df_down.columns = [cfg.sample_names[c[:-4]] for c in df_down.columns]
        df_down.columns.name = "key"

        # Long form: one row per (index, systematic) with a variation tag.
        df_up = df_up.stack(level="key", dropna=False).reset_index()
        df_down = df_down.stack(level="key", dropna=False).reset_index()
        df_up["variation"] = "up"
        df_down["variation"] = "down"
        df = pd.concat([df_up, df_down])

        df["process"] = df["process"].replace(cfg.sample_names)
        process_order = [cfg.sample_names.get(p, p) for p in cfg.process_order]
        df = df.rename({
            "key": "Systematic",
            "process": "Process",
            "variation": "Variation",
            "bin0_low": cfg.xlabel,
            0: cfg.ylabel,
        }, axis='columns')

        g = sns.FacetGrid(
            df, row='Systematic', col='Process', hue='Variation',
            margin_titles=True, legend_out=True, col_order=process_order,
        )
        g.map(plt.step, cfg.xlabel, cfg.ylabel, where='post').add_legend()
        g.set(xlim=(0, 1000), ylim=(0.9, 1.1))
        g.fig.text(0.0, 1, r'$\mathbf{CMS}\ \mathit{Preliminary}$',
                   ha='left', va='bottom', fontsize='large')
        g.fig.text(0.9, 1, r'$35.9\ \mathrm{fb}^{-1}(13\ \mathrm{TeV})$',
                   ha='right', va='bottom', fontsize='large')

        # Report
        print("Creating {}.pdf".format(filepath))

        # Actually save the figure
        g.fig.savefig(filepath+".pdf", format="pdf", bbox_inches="tight")
        plt.close(g.fig)

    plt.rcParams['xtick.top'] = True
    plt.rcParams['ytick.right'] = True
    return df
iam-Legend/Project-Assembly | Source/FactoryGame/FGNobeliskDetonator.cpp | <gh_stars>0
// This file has been automatically generated by the Unreal Header Implementation tool
// NOTE: all bodies below are intentionally empty stubs; value-returning stubs
// return value-initialized defaults (e.g. `return bool();` yields false).
// Do not edit by hand — regenerate with the tool instead.
#include "FGNobeliskDetonator.h"

AFGNobeliskDetonator::AFGNobeliskDetonator(){ }
void AFGNobeliskDetonator::PostLoadGame_Implementation( int32 saveVersion, int32 gameVersion){ }
bool AFGNobeliskDetonator::ShouldSaveState() const{ return bool(); }
void AFGNobeliskDetonator::BeginPrimaryFire(){ }
void AFGNobeliskDetonator::EndPrimaryFire(){ }
void AFGNobeliskDetonator::Equip( AFGCharacterPlayer* character){ }
void AFGNobeliskDetonator::UnEquip(){ }
float AFGNobeliskDetonator::GetChargePct(){ return float(); }
void AFGNobeliskDetonator::SpawnExplosive( FTransform spawnTransform, int32 throwForce){ }
void AFGNobeliskDetonator::Server_SpawnExplosive_Implementation( FTransform spawnTransform, int32 throwForce){ }
bool AFGNobeliskDetonator::Server_SpawnExplosive_Validate( FTransform spawnTransform, int32 throwForce){ return bool(); }
void AFGNobeliskDetonator::StartDetonations(){ }
void AFGNobeliskDetonator::Server_StartDetonations_Implementation(){ }
bool AFGNobeliskDetonator::Server_StartDetonations_Validate(){ return bool(); }
void AFGNobeliskDetonator::ExecutePrimaryFire(){ }
void AFGNobeliskDetonator::Server_ExecutePrimaryFire_Implementation(){ }
bool AFGNobeliskDetonator::Server_ExecutePrimaryFire_Validate(){ return bool(); }
void AFGNobeliskDetonator::SecondaryFirePressed(){ }
void AFGNobeliskDetonator::OnSecondaryFirePressed_Implementation(){ }
void AFGNobeliskDetonator::ExecuteSecondaryFire(){ }
void AFGNobeliskDetonator::Server_ExecuteSecondaryFire_Implementation(){ }
bool AFGNobeliskDetonator::Server_ExecuteSecondaryFire_Validate(){ return bool(); }
void AFGNobeliskDetonator::OnViewportFocusChanged(bool isOpen, TSubclassOf< UUserWidget > interactionClass){ }
void AFGNobeliskDetonator::AddEquipmentActionBindings(){ }
|
sunxuia/leetcode-solution-java | src/main/java/q200/Q188_BestTimeToBuyAndSellStockIV.java | <filename>src/main/java/q200/Q188_BestTimeToBuyAndSellStockIV.java
package q200;
import java.util.Arrays;
import org.junit.runner.RunWith;
import q150.Q122_BestTimeToBuyAndSellStockII;
import q150.Q123_BestTimeToBuyAndSellStockIII;
import q350.Q309_BestTimeToBuyAndSellStockWithCooldown;
import util.runner.Answer;
import util.runner.LeetCodeRunner;
import util.runner.TestData;
import util.runner.data.DataExpectation;
import util.runner.data.TestDataFileHelper;
/**
 * [Hard] 188. Best Time to Buy and Sell Stock IV
 * https://leetcode.com/problems/best-time-to-buy-and-sell-stock-iv/
 *
 * Say you have an array for which the i-th element is the price of a given stock on day i.
 *
 * Design an algorithm to find the maximum profit. You may complete at most k transactions.
 *
 * Note:
 * You may not engage in multiple transactions at the same time (ie, you must sell the stock before you buy again).
 *
 * Example 1:
 *
 * Input: [2,4,1], k = 2
 * Output: 2
 * Explanation: Buy on day 1 (price = 2) and sell on day 2 (price = 4), profit = 4-2 = 2.
 *
 * Example 2:
 *
 * Input: [3,2,6,5,0,3], k = 2
 * Output: 7
 * Explanation: Buy on day 2 (price = 2) and sell on day 3 (price = 6), profit = 6-2 = 4.
 * Then buy on day 5 (price = 0) and sell on day 6 (price = 3), profit = 3-0 = 3.
 *
 * Related problems:
 * previous: {@link Q123_BestTimeToBuyAndSellStockIII}
 * next: {@link Q309_BestTimeToBuyAndSellStockWithCooldown}
 *
 * Notes:
 * Problem 123 fixes the number of transactions at 2; here at most k transactions
 * are allowed (one buy plus one sell together count as a single transaction).
 */
@RunWith(LeetCodeRunner.class)
public class Q188_BestTimeToBuyAndSellStockIV {

    /**
     * Fairly easy-to-understand solution (following the Geek Time video),
     * extending the idea of {@link Q122_BestTimeToBuyAndSellStockII#maxProfit2(int[])}
     * with a limit of k transactions.
     * Define dp[i][j][k] as the maximum profit on day i while holding j shares
     * after k transactions (buying increments the transaction count, selling does not).
     * Then dp[i][0][k] = max of dp[i-1][0][k] (nothing to sell) and dp[i-1][1][k] + prices[i] (sell);
     * dp[i][1][k] = max of dp[i-1][1][k] (cannot buy) and dp[i-1][0][k-1] - prices[i] (buy).
     * Mind the boundary conditions; the answer is the maximum of dp[n-1][0][0..K].
     */
    @Answer
    public int maxProfit(int K, int[] prices) {
        final int n = prices.length;
        if (n < 2 || K == 0) {
            return 0;
        }
        if (K >= prices.length) {
            return solveMaxProfit(prices);
        }
        int[][][] dp = new int[n][2][K + 1];
        for (int i = 0; i < n; i++) {
            for (int j = 0; j < 2; j++) {
                Arrays.fill(dp[i][j], Integer.MIN_VALUE / 2);
            }
        }
        // (boundary conditions for day 0)
        dp[0][0][0] = 0;
        dp[0][1][1] = -prices[0];
        for (int i = 1; i < n; i++) {
            int[][] curr = dp[i], prev = dp[i - 1];
            // (boundary condition for zero transactions)
            curr[0][0] = 0;
            for (int k = 1; k <= K; k++) {
                // either do not sell today, or sell
                curr[0][k] = Math.max(prev[0][k], prev[1][k] + prices[i]);
                // either do not buy today, or buy
                curr[1][k] = Math.max(prev[1][k], prev[0][k - 1] - prices[i]);
            }
        }
        return Arrays.stream(dp[n - 1][0]).max().getAsInt();
    }

    /**
     * Handles the case where the transaction limit exceeds the maximum possible
     * number of transactions; this avoids blowing up memory for extremely large k.
     * Same as {@link Q122_BestTimeToBuyAndSellStockII#maxProfit(int[])}:
     * take every positive day-over-day difference.
     */
    private int solveMaxProfit(int[] prices) {
        int res = 0;
        for (int i = 1; i < prices.length; i++) {
            res += Math.max(0, prices[i] - prices[i - 1]);
        }
        return res;
    }

    /**
     * Alternative solution adapted from a solution found online
     * (local/global DP over the transaction count):
     */
    @Answer
    public int maxProfit2(int k, int[] prices) {
        if (prices == null || prices.length < 2) {
            return 0;
        }
        // avoid the memory blow-up on the overTime test case
        if (k >= prices.length) {
            return solveMaxProfit(prices);
        }
        int[] local = new int[k + 1];
        int[] global = new int[k + 1];
        for (int i = 0; i < prices.length - 1; i++) {
            int diff = prices[i + 1] - prices[i];
            for (int j = k; j >= 1; j--) {
                local[j] = Math.max(global[j - 1] + (diff > 0 ? diff : 0), local[j] + diff);
                global[j] = Math.max(local[j], global[j]);
            }
        }
        return global[k];
    }

    @TestData
    public DataExpectation example1 = DataExpectation.createWith(2, new int[]{2, 4, 1}).expect(2);

    @TestData
    public DataExpectation example2 = DataExpectation.createWith(2, new int[]{3, 2, 6, 5, 0, 3}).expect(7);

    @TestData
    public DataExpectation border0 = DataExpectation.createWith(0, new int[]{1, 3}).expect(0);

    @TestData
    public DataExpectation border1 = DataExpectation.createWith(1, new int[]{}).expect(0);

    @TestData
    public DataExpectation normal1 = DataExpectation.createWith(2, new int[]{1, 2, 4, 2, 5, 7, 2, 4, 9, 0}).expect(13);

    @TestData
    public DataExpectation normal2 = DataExpectation.createWith(1, new int[]{1, 2}).expect(1);

    @TestData
    public DataExpectation normal3 = DataExpectation.createWith(1, new int[]{2, 1}).expect(0);

    // An array with 10,000 elements.
    @TestData
    public DataExpectation overMemory = DataExpectation
            .createWith(1000000000, TestDataFileHelper.read("Q188_LongTestData", int[].class))
            .expect(1648961);
}
|
SimiaCryptus/mindseye-core | src/main/java/com/simiacryptus/mindseye/opt/orient/ValidatingOrientationWrapper.java | /*
* Copyright (c) 2019 by <NAME>.
*
* The author licenses this file to you under the
* Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance
* with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.simiacryptus.mindseye.opt.orient;
import com.simiacryptus.mindseye.eval.Trainable;
import com.simiacryptus.mindseye.lang.DeltaSet;
import com.simiacryptus.mindseye.lang.PointSample;
import com.simiacryptus.mindseye.opt.TrainingMonitor;
import com.simiacryptus.mindseye.opt.line.LineSearchCursor;
import com.simiacryptus.mindseye.opt.line.LineSearchCursorBase;
import com.simiacryptus.mindseye.opt.line.LineSearchPoint;
import com.simiacryptus.ref.wrappers.RefString;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.Arrays;
import java.util.UUID;
/**
* The type Validating orientation wrapper.
*/
public class ValidatingOrientationWrapper extends OrientationStrategyBase<LineSearchCursor> {

  // Wrapped strategy; this wrapper holds its own reference (released in _free()).
  @Nullable
  private final OrientationStrategy<? extends LineSearchCursor> inner;

  /**
   * Instantiates a new Validating orientation wrapper.
   *
   * @param inner the strategy to validate; the caller's reference is consumed
   *              (freed here) and the wrapper keeps its own reference
   */
  public ValidatingOrientationWrapper(@Nullable final OrientationStrategy<? extends LineSearchCursor> inner) {
    // Reference-counting dance: take a temporary reference, derive the stored
    // reference from it, then release both the temporary and the caller's.
    OrientationStrategy<? extends LineSearchCursor> temp_26_0001 = inner == null ? null : inner.addRef();
    this.inner = temp_26_0001 == null ? null : temp_26_0001.addRef();
    if (null != temp_26_0001)
      temp_26_0001.freeRef();
    if (null != inner)
      inner.freeRef();
  }

  /**
   * Delegates orientation to the wrapped strategy and wraps the resulting
   * cursor so every line-search step is numerically checked.
   */
  @Nonnull
  @Override
  public LineSearchCursor orient(@Nullable final Trainable subject, @Nullable final PointSample measurement,
      final TrainingMonitor monitor) {
    assert inner != null;
    final LineSearchCursor cursor = inner.orient(subject, measurement, monitor);
    return new ValidatingLineSearchCursor(cursor);
  }

  @Override
  public void reset() {
    assert inner != null;
    inner.reset();
  }

  // Releases the reference to the wrapped strategy.
  public void _free() {
    super._free();
    if (null != inner)
      inner.freeRef();
  }

  @Nonnull
  public @Override
  @SuppressWarnings("unused")
  ValidatingOrientationWrapper addRef() {
    return (ValidatingOrientationWrapper) super.addRef();
  }

  /**
   * Cursor decorator that, on each step(), probes the objective at nearby
   * alphas and logs the measured (finite-difference) derivative next to the
   * analytic one, so gradient errors show up in the training log.
   */
  private static class ValidatingLineSearchCursor extends LineSearchCursorBase {
    @Nullable
    private final LineSearchCursor cursor;

    /**
     * Instantiates a new Validating line search cursor.
     *
     * @param cursor the cursor to decorate (reference is taken over as-is)
     */
    public ValidatingLineSearchCursor(@Nullable final LineSearchCursor cursor) {
      this.cursor = cursor;
    }

    @Override
    public CharSequence getDirectionType() {
      assert cursor != null;
      return cursor.getDirectionType();
    }

    /**
     * Filters null entries out of the given array.
     *
     * NOTE(review): despite the name, surviving elements are NOT addRef'd,
     * and {@code x.freeRef()} is invoked before the null check, so a null
     * entry would throw NPE here — confirm intent before relying on this.
     *
     * @param array the array (may be null)
     * @return the filtered array, or null when the input was null
     */
    @Nullable
    public static @SuppressWarnings("unused")
    ValidatingLineSearchCursor[] addRef(@Nullable ValidatingLineSearchCursor[] array) {
      if (array == null)
        return null;
      return Arrays.stream(array).filter(x -> {
        boolean notNull = x != null;
        x.freeRef();
        return notNull;
      }).toArray(ValidatingLineSearchCursor[]::new);
    }

    @Override
    public PointSample afterStep(@Nonnull PointSample step) {
      // Run (and discard) the base-class hook, then delegate.
      super.afterStep(step.addRef()).freeRef();
      assert cursor != null;
      return cursor.afterStep(step);
    }

    @Override
    public DeltaSet<UUID> position(final double alpha) {
      assert cursor != null;
      return cursor.position(alpha);
    }

    @Override
    public void reset() {
      assert cursor != null;
      cursor.reset();
    }

    @javax.annotation.Nullable
    @Override
    public LineSearchPoint step(final double alpha, @Nonnull final TrainingMonitor monitor) {
      assert cursor != null;
      final LineSearchPoint primaryPoint = cursor.step(alpha, monitor);
      //monitor.log(String.format("f(%s) = %s",alphaList, primaryPoint.point.value));
      // Validate the analytic derivative at three probe scales.
      // NOTE(review): a null primaryPoint is forwarded into test(), whose
      // parameter is declared @Nonnull — would NPE on getPointRate(); confirm.
      test(monitor, primaryPoint == null ? null : primaryPoint.addRef(), 1e-3);
      test(monitor, primaryPoint == null ? null : primaryPoint.addRef(), 1e-4);
      test(monitor, primaryPoint == null ? null : primaryPoint.addRef(), 1e-6);
      return primaryPoint;
    }

    /**
     * Probes the objective near the primary point and logs the measured
     * finite-difference derivative against the analytic derivatives.
     * Frees both the primary and the probe point before returning.
     *
     * @param monitor      the monitor receiving the log line
     * @param primaryPoint the point to validate (consumed)
     * @param probeSize    relative probe offset
     */
    public void test(@Nonnull final TrainingMonitor monitor, @Nonnull final LineSearchPoint primaryPoint,
        final double probeSize) {
      final double alpha = primaryPoint.getPointRate();
      // Choose a probe offset proportional to the point's value over its
      // derivative; fall back to an absolute offset when that degenerates.
      double probeAlpha = alpha + primaryPoint.getPointSum() * probeSize / primaryPoint.derivative;
      if (!Double.isFinite(probeAlpha) || probeAlpha == alpha) {
        probeAlpha = alpha + probeSize;
      }
      assert cursor != null;
      final LineSearchPoint probePoint = cursor.step(probeAlpha, monitor);
      assert probePoint != null;
      final double dy = probePoint.getPointSum() - primaryPoint.getPointSum();
      final double dx = probeAlpha - alpha;
      final double measuredDerivative = dy / dx;
      monitor.log(RefString.format("%s vs (%s, %s); probe=%s", measuredDerivative, primaryPoint.derivative,
          probePoint.derivative, probeSize));
      primaryPoint.freeRef();
      probePoint.freeRef();
    }

    // Releases the reference to the decorated cursor.
    public void _free() {
      super._free();
      if (null != cursor)
        cursor.freeRef();
    }

    @Nonnull
    public @Override
    @SuppressWarnings("unused")
    ValidatingLineSearchCursor addRef() {
      return (ValidatingLineSearchCursor) super.addRef();
    }
  }
}
|
whble/trunk | target/linux/ar71xx/files/arch/mips/ath79/mach-wnr2200.c | /*
* NETGEAR WNR2200 board support
*
* Copyright (C) 2013 <NAME> <<EMAIL>>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 as published
* by the Free Software Foundation.
*/
#include <linux/gpio.h>
#include <linux/mtd/mtd.h>
#include <linux/mtd/partitions.h>
#include <asm/mach-ath79/ath79.h>
#include "dev-ap9x-pci.h"
#include "dev-eth.h"
#include "dev-gpio-buttons.h"
#include "dev-leds-gpio.h"
#include "dev-m25p80.h"
#include "dev-usb.h"
#include "machtypes.h"
#define WNR2200_GPIO_LED_LAN2_AMBER 0
#define WNR2200_GPIO_LED_LAN4_AMBER 1
#define WNR2200_GPIO_LED_WPS 5
#define WNR2200_GPIO_LED_WAN_GREEN 7
#define WNR2200_GPIO_LED_USB 8
#define WNR2200_GPIO_LED_LAN3_AMBER 11
#define WNR2200_GPIO_LED_WAN_AMBER 12
#define WNR2200_GPIO_LED_LAN1_GREEN 13
#define WNR2200_GPIO_LED_LAN2_GREEN 14
#define WNR2200_GPIO_LED_LAN3_GREEN 15
#define WNR2200_GPIO_LED_LAN4_GREEN 16
#define WNR2200_GPIO_LED_PWR_AMBER 21
#define WNR2200_GPIO_LED_PWR_GREEN 22
#define WNR2200_GPIO_USB_POWER 24
#define WNR2200_KEYS_POLL_INTERVAL 20 /* msecs */
#define WNR2200_KEYS_DEBOUNCE_INTERVAL (3 * WNR2200_KEYS_POLL_INTERVAL)
#define WNR2200_MAC0_OFFSET 0
#define WNR2200_MAC1_OFFSET 6
#define WNR2200_PCIE_CALDATA_OFFSET 0x1000
/*
 * LED table for the WNR2200.  Every LED on this board is wired active-low;
 * names follow the "vendor:colour:function" convention.
 */
static struct gpio_led wnr2200_leds_gpio[] __initdata = {
	{
		.name = "netgear:amber:lan2",
		.gpio = WNR2200_GPIO_LED_LAN2_AMBER,
		.active_low = 1,
	}, {
		.name = "netgear:amber:lan4",
		.gpio = WNR2200_GPIO_LED_LAN4_AMBER,
		.active_low = 1,
	}, {
		.name = "netgear:green:wps",
		.gpio = WNR2200_GPIO_LED_WPS,
		.active_low = 1,
	}, {
		.name = "netgear:green:wan",
		.gpio = WNR2200_GPIO_LED_WAN_GREEN,
		.active_low = 1,
	}, {
		.name = "netgear:green:usb",
		.gpio = WNR2200_GPIO_LED_USB,
		.active_low = 1,
	}, {
		.name = "netgear:amber:lan3",
		.gpio = WNR2200_GPIO_LED_LAN3_AMBER,
		.active_low = 1,
	}, {
		.name = "netgear:amber:wan",
		.gpio = WNR2200_GPIO_LED_WAN_AMBER,
		.active_low = 1,
	}, {
		.name = "netgear:green:lan1",
		.gpio = WNR2200_GPIO_LED_LAN1_GREEN,
		.active_low = 1,
	}, {
		.name = "netgear:green:lan2",
		.gpio = WNR2200_GPIO_LED_LAN2_GREEN,
		.active_low = 1,
	}, {
		.name = "netgear:green:lan3",
		.gpio = WNR2200_GPIO_LED_LAN3_GREEN,
		.active_low = 1,
	}, {
		.name = "netgear:green:lan4",
		.gpio = WNR2200_GPIO_LED_LAN4_GREEN,
		.active_low = 1,
	}, {
		.name = "netgear:amber:power",
		.gpio = WNR2200_GPIO_LED_PWR_AMBER,
		.active_low = 1,
	}, {
		.name = "netgear:green:power",
		.gpio = WNR2200_GPIO_LED_PWR_GREEN,
		.active_low = 1,
	}
};
/*
 * Board init: read MAC addresses from the calibration (ART) area of flash,
 * then register MDIO, ethernet, SPI flash, the PCIe wireless card, LEDs
 * and USB.
 */
static void __init wnr2200_setup(void)
{
	/* calibration/ART data is mapped at the top 64 KiB of flash */
	u8 *art = (u8 *) KSEG1ADDR(0x1fff0000);

	ath79_register_mdio(0, 0x0);

	/* the two ethernet MACs are stored at offsets 0 and 6 of the ART data */
	ath79_init_mac(ath79_eth0_data.mac_addr, art+WNR2200_MAC0_OFFSET, 0);
	ath79_eth0_data.phy_if_mode = PHY_INTERFACE_MODE_RMII;
	ath79_eth0_data.speed = SPEED_100;
	ath79_eth0_data.duplex = DUPLEX_FULL;

	ath79_init_mac(ath79_eth1_data.mac_addr, art+WNR2200_MAC1_OFFSET, 0);
	ath79_eth1_data.phy_if_mode = PHY_INTERFACE_MODE_RMII;
	ath79_eth1_data.phy_mask = 0x10;

	ath79_register_eth(0);
	ath79_register_eth(1);
	ath79_register_m25p80(NULL);
	ap91_pci_init(art + WNR2200_PCIE_CALDATA_OFFSET, NULL);

	ath79_register_leds_gpio(-1, ARRAY_SIZE(wnr2200_leds_gpio),
				 wnr2200_leds_gpio);

	/* enable power for the USB port */
	/* NOTE(review): the return value of gpio_request_one() is ignored;
	 * if the request fails, USB power silently stays unconfigured. */
	gpio_request_one(WNR2200_GPIO_USB_POWER,
			 GPIOF_OUT_INIT_HIGH | GPIOF_EXPORT_DIR_FIXED,
			 "USB power");
	ath79_register_usb();
}

MIPS_MACHINE(ATH79_MACH_WNR2200, "WNR2200", "NETGEAR WNR2200", wnr2200_setup);
|
mcraken/spring-scaffy | src/main/java/com/scaffy/weave/PreAuthorizeBuilder.java | package com.scaffy.weave;
import org.springframework.security.access.prepost.PreAuthorize;
import javassist.bytecode.ConstPool;
import javassist.bytecode.annotation.Annotation;
/**
 * Builds a javassist bytecode {@link Annotation} for Spring Security's
 * {@link PreAuthorize}, placing the given privilege expression in the
 * annotation's {@code value} member so it can be woven onto generated methods.
 */
public class PreAuthorizeBuilder extends AnnotationBuilder {

	// The Spring Security SpEL expression to embed; immutable after construction.
	private final String priv;

	/**
	 * @param priv the SpEL expression for {@code @PreAuthorize}'s {@code value}
	 */
	public PreAuthorizeBuilder(String priv) {
		super(PreAuthorize.class.getName());
		this.priv = priv;
	}

	@Override
	protected void execute(Annotation annot, ConstPool cpool) {
		// Copy the privilege expression into the annotation's "value" member.
		addStringMemberVariable(annot, cpool, "value", priv);
	}
}
|
edellano/Adenita-SAMSON-Edition-Win- | AdenitaCoreSE/source/SEAdenitaVisualModelProperties.cpp | <gh_stars>1-10
#include "SEAdenitaVisualModelProperties.hpp"
#include "SEAdenitaVisualModel.hpp"
#include "SAMSON.hpp"
#include "SBGWindow.hpp"
// Construct the property window: build the Qt UI, create the signal observer,
// and hide the highlight range group until the matching highlight mode is picked.
SEAdenitaVisualModelProperties::SEAdenitaVisualModelProperties() {
	visualModel = 0;
	ui.setupUi( this );
	observer = new Observer(this);
	ui.gboHighlight->hide();
}

// Disconnect the observer from the visual model's signals; skip when the
// model was never attached or has already been deleted.
SEAdenitaVisualModelProperties::~SEAdenitaVisualModelProperties() {
	if (!visualModel.isValid()) return;
	visualModel->disconnectBaseSignalFromSlot(observer(), SB_SLOT(&SEAdenitaVisualModelProperties::Observer::onBaseEvent));
	visualModel->disconnectVisualSignalFromSlot(observer(), SB_SLOT(&SEAdenitaVisualModelProperties::Observer::onVisualEvent));
}
// Restore persisted GUI state; currently a no-op (nothing is persisted yet).
void SEAdenitaVisualModelProperties::loadSettings( SBGSettings *settings ) {
	if ( settings == 0 ) return;
	// SAMSON Element generator pro tip: complete this function so this property window can save its GUI state from one session to the next
}

// Persist GUI state; currently a no-op (nothing is persisted yet).
void SEAdenitaVisualModelProperties::saveSettings( SBGSettings *settings ) {
	if ( settings == 0 ) return;
	// SAMSON Element generator pro tip: complete this function so this property window can save its GUI state from one session to the next
}
// Stable UUID identifying this property window class within SAMSON.
SBCContainerUUID SEAdenitaVisualModelProperties::getUUID() const { return SBCContainerUUID( "D4A5CB4D-13E4-9C27-AE35-B6F690D17E3E" );}

QPixmap SEAdenitaVisualModelProperties::getLogo() const {

	// SAMSON Element generator pro tip: this icon will be visible in the GUI title bar.
	// Modify it to better reflect the purpose of your visual model.

	return QPixmap(QString::fromStdString(SB_ELEMENT_PATH + "/Resource/Icons/SEAdenitaVisualModelPropertiesIcon.png"));
}

QString SEAdenitaVisualModelProperties::getName() const {

	// SAMSON Element generator pro tip: this string will be the GUI title.
	// Modify this function to have a user-friendly description of your visual model inside SAMSON

	return "Adenita Visual Model";
}

int SEAdenitaVisualModelProperties::getFormat() const {

	// SAMSON Element generator pro tip: modify these default settings to configure the window
	//
	// SBGWindow::Savable : let users save and load interface settings (implement loadSettings and saveSettings)
	// SBGWindow::Lockable : let users lock the window on top
	// SBGWindow::Resizable : let users resize the window
	// SBGWindow::Citable : let users obtain citation information (implement getCitation)

	return (SBGWindow::Savable | SBGWindow::Lockable | SBGWindow::Resizable | SBGWindow::Citable);
}

QString SEAdenitaVisualModelProperties::getCitation() const {

	// SAMSON Element generator pro tip: modify this function to add citation information

	return ADNAuxiliary::AdenitaCitation();
}
// Attach to the (single) Adenita visual model found among the currently
// selected document nodes; returns false when the selection does not contain
// exactly one such model.
// NOTE(review): this duplicates the wiring done in setup(SBNode*) below —
// keep the connect/signal code in both overloads in sync.
bool SEAdenitaVisualModelProperties::setup() {

	SBNodeIndexer nodeIndexer;
	SB_FOR(SBNode* node, *SAMSON::getActiveDocument()->getSelectedNodes()) {
		node->getNodes(nodeIndexer, SBNode::GetClass() == std::string("SEAdenitaVisualModel") && SBNode::GetElement() == std::string("SEAdenitaCoreSE") && SBNode::GetElementUUID() == SBUUID(SB_ELEMENT_UUID));
	}

	if (nodeIndexer.size() == 1) {
		visualModel = static_cast<SEAdenitaVisualModel*>((nodeIndexer)[0]);
		visualModel->connectBaseSignalToSlot(observer(), SB_SLOT(&SEAdenitaVisualModelProperties::Observer::onBaseEvent));
		visualModel->connectVisualSignalToSlot(observer(), SB_SLOT(&SEAdenitaVisualModelProperties::Observer::onVisualEvent));

		connect(ui.hslScale, SIGNAL(sliderMoved(int)), this, SLOT(onSliderScaleChanged(int)));
		return true;
	}

	return false;
}

// Attach to an explicitly given node after verifying it really is an Adenita
// visual model from this element (class name, element name and element UUID).
bool SEAdenitaVisualModelProperties::setup(SBNode* node) {

	if (node->getProxy()->getName() != "SEAdenitaVisualModel") return false;
	if (node->getProxy()->getElement() != "SEAdenitaCoreSE") return false;
	if (node->getProxy()->getElementUUID() != SBUUID(SB_ELEMENT_UUID)) return false;

	visualModel = static_cast<SEAdenitaVisualModel*>(node);
	visualModel->connectBaseSignalToSlot(observer(), SB_SLOT(&SEAdenitaVisualModelProperties::Observer::onBaseEvent));
	visualModel->connectVisualSignalToSlot(observer(), SB_SLOT(&SEAdenitaVisualModelProperties::Observer::onVisualEvent));

	connect(ui.hslScale, SIGNAL(sliderMoved(int)), this, SLOT(onSliderScaleChanged(int)));
	return true;
}
// Slider value is in tenths: map 0..N slider ticks to a 0.0..N/10 scale,
// update the model, the label, and redraw.
void SEAdenitaVisualModelProperties::onSliderScaleChanged(int val)
{
	double scale = (double)val / 10.0f;
	visualModel->changeScale(scale, true);
	ui.lblScale->setText(QString::number(scale));
	SAMSON::requestViewportUpdate();
}

// Visibility controls are expressed as percentages (0..100) in the UI.
void SEAdenitaVisualModelProperties::onSliderVisibilityChanged(int val)
{
	visualModel->changeVisibility(val / 100.0f);
	SAMSON::requestViewportUpdate();
}

void SEAdenitaVisualModelProperties::onSpinboxVisibilityChanged(double val)
{
	visualModel->changeVisibility(val / 100.0f);
	SAMSON::requestViewportUpdate();
}

// Dimension slider is in hundredths.
void SEAdenitaVisualModelProperties::onSliderDimensionChanged(int val)
{
	float dim = (float)val / 100.0f;
	visualModel->changeDimension(dim);
	ui.lblDimension->setText(QString::number(dim));
	SAMSON::requestViewportUpdate();
}

// Combine the chosen property with the currently selected colour scheme.
void SEAdenitaVisualModelProperties::onPropertyColorsChanged(int propertyIdx)
{
	visualModel->changePropertyColors(propertyIdx, ui.cbbPropertyColorSchemes->currentIndex());
	SAMSON::requestViewportUpdate();
}

// Index 4 is the strand-length-range highlight mode, which needs the extra
// min/max controls in the highlight group box.
void SEAdenitaVisualModelProperties::onHighlightChanged(int highlightIdx)
{
	visualModel->changeHighlight(highlightIdx);

	if (highlightIdx == 4) {
		ui.gboHighlight->show();
	}
	else {
		ui.gboHighlight->hide();
	}

	SAMSON::requestViewportUpdate();
}

// Mirror of onPropertyColorsChanged: scheme changed, property kept.
void SEAdenitaVisualModelProperties::onPropertyColorSchemeChanged(int colorSchemeIdx)
{
	visualModel->changePropertyColors(ui.cbbPropertyColors->currentIndex(), colorSchemeIdx);
	SAMSON::requestViewportUpdate();
}

void SEAdenitaVisualModelProperties::onSingleStrandColorSchemeChanged(int index)
{
	visualModel->setSingleStrandColors(index);
	SAMSON::requestViewportUpdate();
}

void SEAdenitaVisualModelProperties::onNucleotideColorSchemeChanged(int index)
{
	visualModel->setNucleotideColors(index);
	SAMSON::requestViewportUpdate();
}

void SEAdenitaVisualModelProperties::onDoubleStrandColorSchemeChanged(int index)
{
	visualModel->setDoubleStrandColors(index);
	SAMSON::requestViewportUpdate();
}

// No viewport update requested here — the model handles its own redraw state.
void SEAdenitaVisualModelProperties::onShowBasePairing(bool show)
{
	visualModel->showBasePairing(show);
}

// Length-range highlight bounds; free-text fields parsed as unsigned ints
// (QString::toUInt() yields 0 on invalid input).
void SEAdenitaVisualModelProperties::onMinLenChanged(QString text)
{
	unsigned int val = text.toUInt();
	visualModel->setHighlightMinLen(val);
}

void SEAdenitaVisualModelProperties::onMaxLenChanged(QString text)
{
	unsigned int val = text.toUInt();
	visualModel->setHighlightMaxLen(val);
}

void SEAdenitaVisualModelProperties::onNotWithinRangeChanged(bool c)
{
	visualModel->setNotWithinRange(c);
}

void SEAdenitaVisualModelProperties::onNotScaffoldChanged(bool c)
{
	visualModel->setNotScaffold(c);
}
// The observer relays model events back to the owning property window.
SEAdenitaVisualModelProperties::Observer::Observer(SEAdenitaVisualModelProperties* properties) { this->properties = properties; }
SEAdenitaVisualModelProperties::Observer::~Observer() {}

// Hide the property window as soon as the observed model starts being erased.
void SEAdenitaVisualModelProperties::Observer::onBaseEvent(SBBaseEvent* baseEvent) {
	if (baseEvent->getType() == SBBaseEvent::NodeEraseBegin) properties->hide();
}

// Visual-model change events are currently ignored.
void SEAdenitaVisualModelProperties::Observer::onVisualEvent(SBVisualEvent* visualEvent) {
	if (visualEvent->getType() == SBVisualEvent::VisualModelChanged) {
		// SAMSON Element generator pro tip: modify this function if the property window
		// needs to be updated when the VisualModelChanged event is sent
	}
}
|
winksaville/sel4-min-sel4 | kernel/include/kernel/boot.h | /*
* Copyright 2014, General Dynamics C4 Systems
*
* This software may be distributed and modified according to the terms of
* the GNU General Public License version 2. Note that NO WARRANTY is provided.
* See "LICENSE_GPLv2.txt" for details.
*
* @TAG(GD_GPL)
*/
#ifndef __KERNEL_BOOT_H
#define __KERNEL_BOOT_H
#include <bootinfo.h>
#define MAX_NUM_FREEMEM_REG 2
/*
* Resolve naming differences between the abstract specifications
* of the bootstrapping phase and the runtime phase of the kernel.
*/
/* During bootstrapping, CNode entries are referred to as "slots" to match
 * the abstract specification; at runtime the same objects are ctes. */
typedef cte_t slot_t;
typedef cte_t* slot_ptr_t;
#define SLOT_PTR(pptr, pos) (((slot_ptr_t)(pptr)) + (pos))
#define pptr_of_cap (pptr_t)cap_get_capPtr

/* (node-local) state accessed only during bootstrapping */
typedef struct ndks_boot {
    region_t freemem[MAX_NUM_FREEMEM_REG]; /* free memory regions still available */
    bi_t* bi_frame;                        /* bootinfo frame (see allocate_bi_frame) */
    slot_pos_t slot_pos_cur;               /* next slot position to hand out */
    slot_pos_t slot_pos_max;               /* upper limit for slot positions */
} ndks_boot_t;

extern ndks_boot_t ndks_boot;

/* function prototypes */

/* A region is empty exactly when its start and end addresses coincide. */
static inline bool_t
is_reg_empty(region_t reg)
{
    return reg.start == reg.end;
}
pptr_t alloc_region(uint32_t size_bits);
bool_t insert_region(region_t reg);
void write_slot(slot_ptr_t slot_ptr, cap_t cap);
cap_t create_root_cnode(void);
bool_t provide_cap(cap_t root_cnode_cap, cap_t cap);
cap_t create_it_asid_pool(cap_t root_cnode_cap);
void write_it_pd_pts(cap_t root_cnode_cap, cap_t it_pd_cap);
bool_t create_idle_thread(void);
bool_t create_untypeds(cap_t root_cnode_cap, region_t boot_mem_reuse_reg);
void bi_finalise(void);
bool_t create_irq_cnode(void);
void create_domain_cap(cap_t root_cnode_cap);
cap_t create_ipcbuf_frame(cap_t root_cnode_cap, cap_t pd_cap, vptr_t vptr);
pptr_t allocate_bi_frame(node_id_t node_id, uint32_t num_nodes, vptr_t ipcbuf_vptr);
void create_bi_frame_cap(cap_t root_cnode_cap, cap_t pd_cap, pptr_t pptr, vptr_t vptr);
typedef struct create_frames_of_region_ret {
slot_region_t region;
bool_t success;
} create_frames_of_region_ret_t;
create_frames_of_region_ret_t
create_frames_of_region(
cap_t root_cnode_cap,
cap_t pd_cap,
region_t reg,
bool_t do_map,
int32_t pv_offset
);
cap_t
create_it_pd_pts(
cap_t root_cnode_cap,
v_region_t ui_v_reg,
vptr_t ipcbuf_vptr,
vptr_t bi_frame_vptr
);
bool_t
create_initial_thread(
cap_t root_cnode_cap,
cap_t it_pd_cap,
vptr_t ui_v_entry,
vptr_t bi_frame_vptr,
vptr_t ipcbuf_vptr,
cap_t ipcbuf_cap
);
#endif
|
fzk466569/flask_fishbook | app/__init__.py | <reponame>fzk466569/flask_fishbook
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Author: fzk
# @Time 10:53
from flask import Flask
from flask_login import LoginManager
from app.models.base import db
login_manager = LoginManager()
def create_app():
    """Application factory: create and configure the Flask app.

    Order matters here: configuration is loaded first, blueprints are
    registered next, and the login manager and database are initialised
    against the fully configured app.
    """
    app = Flask(__name__)
    app.config.from_object('app.config')
    register_blueprint(app)

    login_manager.init_app(app)
    login_manager.login_view = 'web.login'
    login_manager.login_message = '请先登录'

    db.init_app(app)
    db.create_all(app=app)  # create the database tables
    return app
def register_blueprint(app):
    """Attach the web blueprint to *app*.

    The import is deliberately local — presumably to avoid a circular
    import with this package at module load time (TODO: confirm).
    """
    from app.web.blueprint import web
    app.register_blueprint(web)
|
cjd8363/Global-Illum | src/netpbm/10.27/netpbm-10.27/urt/rle_global.c | <reponame>cjd8363/Global-Illum<filename>src/netpbm/10.27/netpbm-10.27/urt/rle_global.c<gh_stars>1-10
/*
* This software is copyrighted as noted below. It may be freely copied,
* modified, and redistributed, provided that the copyright notice is
* preserved on all copies.
*
* There is no warranty or other guarantee of fitness for this software,
* it is provided solely "as is". Bug reports or fixes may be sent
* to the author, who may or may not act on them as he desires.
*
* You may not include this software in a program or other software product
* without supplying the source, or without informing the end-user that the
* source is available for no extra charge.
*
* If you modify this software, you should include a notice giving the
* name of the person performing the modification, the date of modification,
* and the reason for such modification.
*
* Modified at BRL 16-May-88 by <NAME> to avoid Alliant STDC desire
* to have all "void" functions so declared.
*/
/*
* rle_global.c - Global variable initialization for rle routines.
*
* Author: <NAME>
* Computer Science Dept.
* University of Utah
* Date: Thu Apr 25 1985
* Copyright (c) 1985,1986 <NAME>
*
* $Id: rle_global.c,v 3.0.1.1 1992/01/28 18:23:03 spencer Exp $
*/
#include <stdio.h>
#include "rle_put.h"
#include "rle.h"
#include "Runput.h"
/*
 * Dispatch table for the RLE output routines (implementations in Runput.c);
 * the single entry provides the standard run-length encoding, tagged " OB".
 */
struct rle_dispatch_tab rle_DTable[] = {
    {
	" OB",
	RunSetup,
	RunSkipBlankLines,
	RunSetColor,
	RunSkipPixels,
	RunNewScanLine,
	Runputdata,
	Runputrun,
	DefaultBlockHook,
	RunputEof
    },
};
/* Default background colour: black. */
static int bg_color[3] = { 0, 0, 0 };

/* Template header used to initialise new RLE images; see the per-field
 * comments below for the defaults (512x512, 3 RGB channels, no alpha). */
rle_hdr rle_dflt_hdr = {
    RUN_DISPATCH,		/* dispatch value */
    3,				/* 3 colors */
    bg_color,			/* background color */
    0,				/* (alpha) if 1, save alpha channel */
    2,				/* (background) 0->just save pixels, */
				/* 1->overlay, 2->clear to bg first */
    0, 511,			/* (xmin, xmax) X bounds to save */
    0, 511,			/* (ymin, ymax) Y bounds to save */
    0,				/* ncmap (if != 0, save color map) */
    8,				/* cmaplen (log2 of length of color map) */
    NULL,			/* pointer to color map */
    NULL,			/* pointer to comment strings */
    NULL,			/* output file -- must be set dynamically */
    { 7 },			/* RGB channels only */
    0L,				/* Can't free name and file fields. */
    "Urt",			/* Default "program name". */
    "no file",			/* No file name given. */
    0				/* First image. */
    /* Can't initialize the union */
};

/* Disabled: a no-op EOF handler retained for reference only. */
#if 0
/* ARGSUSED */
void
NullputEof(the_hdr)
rle_hdr * the_hdr;
{
	/* do nothing */
}
#endif
|
honeytavis/cpp | Thinking_in_Cpp/I/C03/function_pointer.cc | #include <iostream>
// Target of the function-pointer demo in main(); writes one fixed line to stdout.
void func() {
  const char* message = "func() called...";
  std::cout << message << '\n';
}
// Demonstrates two ways of binding a pointer to func():
// declare-then-assign, and direct initialization.  func() is called twice.
int main()
{
  void (*assigned)();        // declared first, bound afterwards
  assigned = func;
  (*assigned)();

  void (*initialized)() = func;  // bound at the point of declaration
  (*initialized)();

  return 0;
}
|
JustDoom/riotspigot | riotspigot-library/src/main/java/de/dytanic/log/DytanicAsyncPrintStream.java | /*
* Copyright (c) <NAME> 2017
*/
package de.dytanic.log;
import java.io.OutputStream;
import java.io.PrintStream;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
/**
* Created by Tareko on 23.09.2017.
*/
public class DytanicAsyncPrintStream extends PrintStream {
static final BlockingQueue<Runnable> asyncQueue = new LinkedBlockingQueue<>();
private static final Thread WORKER = new Thread() {
{
setPriority(Thread.MIN_PRIORITY);
setDaemon(true);
start();
}
@Override
public void run()
{
while (!isInterrupted())
{
try
{
asyncQueue.take().run();
} catch (InterruptedException e)
{
e.printStackTrace();
}
}
}
};
public DytanicAsyncPrintStream(OutputStream out) throws UnsupportedEncodingException
{
super(out, true, StandardCharsets.UTF_8.name());
}
private void println0()
{
super.println();
}
@Override
public void println()
{
asyncQueue.offer(new Runnable() {
@Override
public void run()
{
println0();
}
});
}
private void println0(int x)
{
super.println(x);
}
@Override
public void println(int x)
{
asyncQueue.offer(new Runnable() {
@Override
public void run()
{
println0(x);
}
});
}
private void println0(String x)
{
super.println(x);
}
@Override
public void println(String x)
{
asyncQueue.offer(new Runnable() {
@Override
public void run()
{
println0(x);
}
});
}
private void println0(long x)
{
super.println(x);
}
@Override
public void println(long x)
{
asyncQueue.offer(new Runnable() {
@Override
public void run()
{
println0(x);
}
});
}
private void println0(char x)
{
super.println(x);
}
@Override
public void println(char x)
{
asyncQueue.offer(new Runnable() {
@Override
public void run()
{
println0(x);
}
});
}
private void println0(double x)
{
super.println(x);
}
@Override
public void println(double x)
{
asyncQueue.offer(new Runnable() {
@Override
public void run()
{
println0(x);
}
});
}
private void println0(float x)
{
super.println(x);
}
@Override
public void println(float x)
{
asyncQueue.offer(new Runnable() {
@Override
public void run()
{
println0(x);
}
});
}
private void println0(Object x)
{
super.println(x);
}
@Override
public void println(Object x)
{
asyncQueue.offer(new Runnable() {
@Override
public void run()
{
println0(x);
}
});
}
private void println0(char[] x)
{
super.println(x);
}
@Override
public void println(char[] x)
{
asyncQueue.offer(new Runnable() {
@Override
public void run()
{
println0(x);
}
});
}
private void println0(boolean x)
{
super.println(x);
}
@Override
public void println(boolean x)
{
asyncQueue.offer(new Runnable() {
@Override
public void run()
{
println0(x);
}
});
}
/* ============================================== */
private void print0(int x)
{
super.print(x);
}
@Override
public void print(int x)
{
if (Thread.currentThread() != WORKER)
{
asyncQueue.offer(new Runnable() {
@Override
public void run()
{
print0(x);
}
});
} else
{
super.print(x);
}
}
private void print0(String x)
{
super.print(x);
}
@Override
public void print(String x)
{
if (Thread.currentThread() != WORKER)
{
asyncQueue.offer(new Runnable() {
@Override
public void run()
{
print0(x);
}
});
} else
{
super.print(x);
}
}
private void print0(long x)
{
super.print(x);
}
@Override
public void print(long x)
{
if (Thread.currentThread() != WORKER)
{
asyncQueue.offer(new Runnable() {
@Override
public void run()
{
print0(x);
}
});
} else
{
super.print(x);
}
}
private void print0(char x)
{
super.print(x);
}
@Override
public void print(char x)
{
if (Thread.currentThread() != WORKER)
{
asyncQueue.offer(new Runnable() {
@Override
public void run()
{
print0(x);
}
});
} else
{
super.print(x);
}
}
private void print0(double x)
{
super.print(x);
}
@Override
public void print(double x)
{
if (Thread.currentThread() != WORKER)
{
asyncQueue.offer(new Runnable() {
@Override
public void run()
{
print0(x);
}
});
} else
{
super.print(x);
}
}
private void print0(float x)
{
super.print(x);
}
@Override
public void print(float x)
{
if (Thread.currentThread() != WORKER)
{
asyncQueue.offer(new Runnable() {
@Override
public void run()
{
print0(x);
}
});
} else
{
super.print(x);
}
}
private void print0(Object x)
{
super.print(x);
}
@Override
public void print(Object x)
{
if (Thread.currentThread() != WORKER)
{
asyncQueue.offer(new Runnable() {
@Override
public void run()
{
print0(x);
}
});
} else
{
super.print(x);
}
}
private void print0(char[] x)
{
super.print(x);
}
@Override
public void print(char[] x)
{
if (Thread.currentThread() != WORKER)
{
asyncQueue.offer(new Runnable() {
@Override
public void run()
{
print0(x);
}
});
} else
{
super.print(x);
}
}
private void print0(boolean x)
{
super.print(x);
}
@Override
public void print(boolean x)
{
if (Thread.currentThread() != WORKER)
{
asyncQueue.offer(new Runnable() {
@Override
public void run()
{
print0(x);
}
});
} else
{
super.print(x);
}
}
} |
urvashijain18/Bet-On-Better | Bet_On_Better/src/UserInterface/UserLogin.java | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package UserInterface;
import Business.EndUser.AccountDetails;
import UserInterface.UserRole.CreateNewUser;
import Business.AdvertisingEmployee.AdvertisingEmployeeAccountDirectory;
import Business.BankEmployee.BankEmployeeAccountDirectory;
import Business.DB4OUtil.DB4OUtil;
import Business.EcoSystem;
import Business.Enterprise.Enterprise;
import Business.FundRaisingEmployee.FundRaisingEmployeeAccountDirectory;
import Business.FundRaiserEvents.EventDirectory;
import Business.Network.Network;
import Business.Organization.Organization;
import Business.UserAccount.UserAccountDirectory;
import Business.UserAccount.UserAccount;
import java.awt.CardLayout;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
/**
*
* @author @author Urvashi
*/
public class UserLogin extends javax.swing.JPanel {
private JPanel leftContainer;
private JPanel rightContainer;
private UserAccountDirectory userAccountDirectory;
private EventDirectory eventdirectory;
private FundRaisingEmployeeAccountDirectory fundraisingemployeeAccountDirectory;
private AdvertisingEmployeeAccountDirectory advertisingemployeeAccountDirectory;
private BankEmployeeAccountDirectory bankemployeeAccountDirectory;
private EcoSystem system;
private DB4OUtil dB4OUtil;
//Enterprise enterprise;
//private AccountDetails accdetails;
/**
* Creates new form UserLogin /**
* Creates new form UserLogin
*/
public UserLogin(JPanel leftContainer, JPanel rightContainer, UserAccountDirectory userAccountDirectory,
EventDirectory eventdirectory, FundRaisingEmployeeAccountDirectory fundraisingemployeeAccountDirectory,
AdvertisingEmployeeAccountDirectory advertisingemployeeAccountDirectory,
BankEmployeeAccountDirectory bankemployeeAccountDirectory, EcoSystem system, DB4OUtil dB4OUtil) {
initComponents();
this.leftContainer = leftContainer;
this.rightContainer = rightContainer;
this.userAccountDirectory = userAccountDirectory;
this.eventdirectory = eventdirectory;
this.fundraisingemployeeAccountDirectory = fundraisingemployeeAccountDirectory;
this.bankemployeeAccountDirectory = bankemployeeAccountDirectory;
this.system = system;
this.dB4OUtil = dB4OUtil;
}
public void setLoginFieldEnabled(){
txtUserName.setEnabled(true);
txtPassword.setEnabled(true);
btnLogin.setEnabled(true);
btnNewUSer.setEnabled(true);
}
public void setLoginFieldDisabled(){
btnLogin.setEnabled(false);
btnNewUSer.setEnabled(false);
txtUserName.setEnabled(false);
txtPassword.setEnabled(false);
}
    // Remove this login panel from the left container and flip its CardLayout
    // back to the previously shown card.
    private void refreshLeftContainer(){
        leftContainer.remove(this);
        CardLayout layout= (CardLayout) leftContainer.getLayout();
        layout.previous(leftContainer);
    }
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {

    jLabel1 = new javax.swing.JLabel();
    txtUserName = new javax.swing.JTextField();
    // FIX: restored the concrete component type; the previous text
    // ("new <PASSWORD>Field()") was not valid Java. The field is declared
    // as javax.swing.JPasswordField in the GEN variables section.
    txtPassword = new javax.swing.JPasswordField();
    jLabel2 = new javax.swing.JLabel();
    btnLogin = new javax.swing.JButton();
    btnNewUSer = new javax.swing.JButton();

    setBackground(new java.awt.Color(0, 153, 153));

    jLabel1.setText("User Name");

    jLabel2.setText("Password");

    btnLogin.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
    btnLogin.setText("Login");
    btnLogin.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            btnLoginActionPerformed(evt);
        }
    });

    btnNewUSer.setFont(new java.awt.Font("Tahoma", 1, 11)); // NOI18N
    btnNewUSer.setText("New User?");
    btnNewUSer.addActionListener(new java.awt.event.ActionListener() {
        public void actionPerformed(java.awt.event.ActionEvent evt) {
            btnNewUSerActionPerformed(evt);
        }
    });

    javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
    this.setLayout(layout);
    layout.setHorizontalGroup(
        layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addGroup(layout.createSequentialGroup()
            .addContainerGap()
            .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                .addGroup(layout.createSequentialGroup()
                    .addComponent(btnNewUSer)
                    .addContainerGap(javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
                .addGroup(layout.createSequentialGroup()
                    .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                        .addComponent(btnLogin, javax.swing.GroupLayout.PREFERRED_SIZE, 78, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addComponent(txtPassword, javax.swing.GroupLayout.PREFERRED_SIZE, 100, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addComponent(jLabel2)
                        .addComponent(txtUserName, javax.swing.GroupLayout.PREFERRED_SIZE, 100, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addComponent(jLabel1))
                    .addGap(0, 30, Short.MAX_VALUE))))
    );
    layout.setVerticalGroup(
        layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
        .addGroup(layout.createSequentialGroup()
            .addGap(24, 24, 24)
            .addComponent(jLabel1)
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
            .addComponent(txtUserName, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
            .addGap(32, 32, 32)
            .addComponent(jLabel2)
            .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
            .addComponent(txtPassword, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
            .addGap(73, 73, 73)
            .addComponent(btnLogin)
            .addGap(33, 33, 33)
            .addComponent(btnNewUSer)
            .addContainerGap(142, Short.MAX_VALUE))
    );
}// </editor-fold>//GEN-END:initComponents
// Authenticates the entered credentials against the system-level account
// directory first, then falls back to every enterprise and, below that,
// every organization. On success, swaps both containers to the role-specific
// work areas; on failure, shows an error dialog.
// CLEANUP: removed the unused local `flag` and its dead commented assignment.
private void btnLoginActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnLoginActionPerformed
    String userName = txtUserName.getText();
    char[] passwordCharArray = txtPassword.getPassword();
    String password = String.valueOf(passwordCharArray);
    UserAccount userAccount = system.getUserAccountDirectory().authenticateUser(userName, password);
    Enterprise inEnterprise = null;
    Organization inOrganization = null;
    if (userAccount == null) {
        // Search each network's enterprises, then each enterprise's organizations.
        for (Network network : system.getNetworkList()) {
            for (Enterprise enterprise : network.getEnterpriseDirectory().getEnterpriseList()) {
                userAccount = enterprise.getUserAccountDirectory().authenticateUser(userName, password);
                if (userAccount == null) {
                    for (Organization organization : enterprise.getOrganizationDirectory().getOrganizationList()) {
                        userAccount = organization.getUserAccountDirectory().authenticateUser(userName, password);
                        if (userAccount != null) {
                            inEnterprise = enterprise;
                            inOrganization = organization;
                            break;
                        }
                    }
                } else {
                    inEnterprise = enterprise;
                    break;
                }
                if (inOrganization != null) {
                    break;
                }
            }
            if (inEnterprise != null) {
                break;
            }
        }
    }
    if (userAccount == null) {
        JOptionPane.showMessageDialog(null, "Invalid UserName/Password");
        return;
    } else {
        // Flip both CardLayouts to the work areas created for the user's role.
        CardLayout rightLayout = (CardLayout) rightContainer.getLayout();
        rightContainer.add("workArea", userAccount.getRole().createRightWorkArea(rightContainer,
                userAccount, inOrganization, inEnterprise, system, eventdirectory));
        rightLayout.next(rightContainer);
        CardLayout leftLayout = (CardLayout) leftContainer.getLayout();
        leftContainer.add("workArea", userAccount.getRole().createLeftWorkArea(leftContainer, rightContainer,
                userAccountDirectory, eventdirectory, bankemployeeAccountDirectory,
                fundraisingemployeeAccountDirectory, advertisingemployeeAccountDirectory, system, dB4OUtil,
                inEnterprise, userAccount));
        leftLayout.next(leftContainer);
    }
    setLoginFieldDisabled();
}//GEN-LAST:event_btnLoginActionPerformed
// Opens the registration form in the right-hand container and resets the
// left-hand container to its previous card.
private void btnNewUSerActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnNewUSerActionPerformed
    CreateNewUser registrationPanel = new CreateNewUser(leftContainer, rightContainer, userAccountDirectory,
            fundraisingemployeeAccountDirectory, advertisingemployeeAccountDirectory, bankemployeeAccountDirectory,
            system, dB4OUtil);
    rightContainer.add("CreateNewUser", registrationPanel);
    ((CardLayout) rightContainer.getLayout()).next(rightContainer);
    refreshLeftContainer();
}//GEN-LAST:event_btnNewUSerActionPerformed
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JButton btnLogin;
private javax.swing.JButton btnNewUSer;
private javax.swing.JLabel jLabel1;
private javax.swing.JLabel jLabel2;
private javax.swing.JPasswordField txtPassword;
private javax.swing.JTextField txtUserName;
// End of variables declaration//GEN-END:variables
}
|
jskirst/edward | db/migrate/20171026162501_add_workflow_id_to_options.rb | <filename>db/migrate/20171026162501_add_workflow_id_to_options.rb
# Adds workflow linkage columns to the options table:
# an integer foreign-key-style column and a string token.
class AddWorkflowIdToOptions < ActiveRecord::Migration[5.1]
  def change
    change_table :options do |t|
      t.integer :workflow_id
      t.string :token
    end
  end
end
|
rita0222/FK | CLI/FK_CLI/cpp/DList_CLI.cpp | <filename>CLI/FK_CLI/cpp/DList_CLI.cpp
#include "DList_CLI.h"
namespace FK_CLI {
// Downcasts the inherited native base pointer to the concrete display-link type.
::FK::fk_DisplayLink * fk_DisplayLink::GetP(void)
{
	return (::FK::fk_DisplayLink *)(pBase);
}

// Rebuilds the cached managed camera wrapper from the native camera model.
void fk_DisplayLink::CameraUpdate(void)
{
	_camera = gcnew fk_Model(const_cast<::FK::fk_Model *>(GetP()->getCamera()));
}
// Maps the CLI stereo-channel enum onto the native FK enum.
::FK::fk_StereoChannel fk_DisplayLink::GetStereo(FK_CLI::fk_StereoChannel argC)
{
	return (argC == fk_StereoChannel::STEREO_LEFT) ?
		::FK::fk_StereoChannel::LEFT : ::FK::fk_StereoChannel::RIGHT;
}
// Constructs the display link with empty model/overlay lists; all managed
// camera/projection wrapper caches start out null and are filled lazily.
// NOTE(review): argNewFlg is not used in this translation unit — presumably
// consumed by a base-class convention; confirm against fk_BaseObject.
fk_DisplayLink::fk_DisplayLink(bool argNewFlg)
	: fk_BaseObject(false), modelList(gcnew LinkedList<fk_Model^>()),
	overlayList(gcnew LinkedList<fk_Model^>()),
	_camera(nullptr), _rCamera(nullptr), _lCamera(nullptr),
	_proj(nullptr), _rProj(nullptr), _lProj(nullptr)
{
}

fk_DisplayLink::~fk_DisplayLink()
{
}
// Main camera accessor: re-syncs the wrapper from the native model on every read.
fk_Model^ fk_DisplayLink::Camera::get()
{
	CameraUpdate();
	return _camera;
}

// Registers the given model as the scene camera (null is ignored).
void fk_DisplayLink::Camera::set(fk_Model^ argM)
{
	if(!argM) return;
	GetP()->entryCamera(argM->GetP());
	_camera = argM;
}

// Left-eye stereo camera; unlike Camera, the cached handle is returned as-is.
fk_Model^ fk_DisplayLink::LeftCamera::get()
{
	return _lCamera;
}

void fk_DisplayLink::LeftCamera::set(fk_Model^ argM)
{
	if(!argM) return;
	GetP()->entryStereoCamera(::FK::fk_StereoChannel::LEFT, argM->GetP());
	_lCamera = argM;
}

// Right-eye stereo camera.
fk_Model^ fk_DisplayLink::RightCamera::get()
{
	return _rCamera;
}

void fk_DisplayLink::RightCamera::set(fk_Model^ argM)
{
	if(!argM) return;
	GetP()->entryStereoCamera(::FK::fk_StereoChannel::RIGHT, argM->GetP());
	_rCamera = argM;
}
// File-local factory shared by the three projection getters below.
// DEDUP: the same mode switch was previously copied verbatim in Projection,
// LeftProjection and RightProjection. Wraps a native projection pointer in
// the matching managed subclass; returns nullptr for unknown modes.
static fk_ProjectBase^ WrapProjection(const ::FK::fk_ProjectBase *argCP)
{
	::FK::fk_ProjectBase *proj = const_cast<::FK::fk_ProjectBase *>(argCP);
	fk_ProjectBase^ wrapper = nullptr;

	switch(proj->getMode()) {
	  case ::FK::fk_ProjectMode::PERSPECTIVE:
		wrapper = gcnew fk_Perspective(false);
		break;

	  case ::FK::fk_ProjectMode::FRUSTUM:
		wrapper = gcnew fk_Frustum(false);
		break;

	  case ::FK::fk_ProjectMode::ORTHO:
		wrapper = gcnew fk_Ortho(false);
		break;

	  default:
		return nullptr;
	}
	wrapper->pProj = proj;
	return wrapper;
}

// Lazily builds and caches the managed wrapper for the mono projection.
fk_ProjectBase^ fk_DisplayLink::Projection::get()
{
	if(!_proj) {
		_proj = WrapProjection(GetP()->getProjection());
	}
	return _proj;
}

void fk_DisplayLink::Projection::set(fk_ProjectBase^ argP)
{
	if(!argP) return;
	GetP()->setProjection(argP->GetP());
	_proj = argP;
}

// Left-eye stereo projection, same lazy-wrapping scheme as Projection.
fk_ProjectBase^ fk_DisplayLink::LeftProjection::get()
{
	if(!_lProj) {
		_lProj = WrapProjection(GetP()->getStereoProjection(::FK::fk_StereoChannel::LEFT));
	}
	return _lProj;
}

void fk_DisplayLink::LeftProjection::set(fk_ProjectBase^ argP)
{
	if(!argP) return;
	GetP()->setStereoProjection(::FK::fk_StereoChannel::LEFT, argP->GetP());
	_lProj = argP;
}

// Right-eye stereo projection.
fk_ProjectBase^ fk_DisplayLink::RightProjection::get()
{
	if(!_rProj) {
		_rProj = WrapProjection(GetP()->getStereoProjection(::FK::fk_StereoChannel::RIGHT));
	}
	return _rProj;
}

void fk_DisplayLink::RightProjection::set(fk_ProjectBase^ argP)
{
	if(!argP) return;
	GetP()->setStereoProjection(::FK::fk_StereoChannel::RIGHT, argP->GetP());
	_rProj = argP;
}
// Whether overlay models are also drawn during stereo rendering.
bool fk_DisplayLink::StereoOverlayMode::get()
{
	return GetP()->getStereoOverlayMode();
}

void fk_DisplayLink::StereoOverlayMode::set(bool argMode)
{
	GetP()->setStereoOverlayMode(argMode);
}

// Clears everything registered on the native side, empties the managed mirror
// lists, and re-syncs the cached camera wrapper.
void fk_DisplayLink::ClearDisplay(void)
{
	GetP()->clearDisplay();
	modelList->Clear();
	overlayList->Clear();
	CameraUpdate();
}

// Registers a model for normal drawing; the managed list mirrors the native
// registration and avoids duplicate entries.
void fk_DisplayLink::EntryModel(fk_Model ^argM)
{
	if(!argM) return;
	GetP()->entryModel(argM->GetP());
	if(modelList->Contains(argM) == false) modelList->AddLast(argM);
}

// Unregisters a model; the loop removes every occurrence from the mirror list.
void fk_DisplayLink::RemoveModel(fk_Model ^argM)
{
	if(!argM) return;
	GetP()->removeModel(argM->GetP());
	while(modelList->Contains(argM) == true) {
		modelList->Remove(argM);
	}
}

void fk_DisplayLink::ClearModel(void)
{
	GetP()->clearModel();
	modelList->Clear();
}

// Overlay models are drawn on top of the normal scene; same mirroring rules
// as EntryModel/RemoveModel above.
void fk_DisplayLink::EntryOverlayModel(fk_Model ^argM)
{
	if(!argM) return;
	GetP()->entryOverlayModel(argM->GetP());
	if(overlayList->Contains(argM) == false) overlayList->AddLast(argM);
}

void fk_DisplayLink::RemoveOverlayModel(fk_Model^ argM)
{
	if(!argM) return;
	GetP()->removeOverlayModel(argM->GetP());
	while(overlayList->Contains(argM) == true) {
		overlayList->Remove(argM);
	}
}

void fk_DisplayLink::ClearOverlayModel(void)
{
	GetP()->clearOverlayModel();
	overlayList->Clear();
}

// Resets the stereo-specific camera/projection settings on the native side.
void fk_DisplayLink::ClearStereo(void)
{
	GetP()->clearStereo();
}
}
/****************************************************************************
*
* Copyright (c) 1999-2020, Fine Kernel Project, All rights reserved.
*
* Redistribution and use in source and binary forms,
* with or without modification, are permitted provided that the
* following conditions are met:
*
* - Redistributions of source code must retain the above
* copyright notice, this list of conditions and the
* following disclaimer.
*
* - Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the
* following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* - Neither the name of the copyright holders nor the names
* of its contributors may be used to endorse or promote
* products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
* IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
****************************************************************************/
/****************************************************************************
*
* Copyright (c) 1999-2020, Fine Kernel Project, All rights reserved.
*
* 本ソフトウェアおよびソースコードのライセンスは、基本的に
* 「修正 BSD ライセンス」に従います。以下にその詳細を記します。
*
* ソースコード形式かバイナリ形式か、変更するかしないかを問わず、
* 以下の条件を満たす場合に限り、再頒布および使用が許可されます。
*
* - ソースコードを再頒布する場合、上記の著作権表示、本条件一覧、
* および下記免責条項を含めること。
*
* - バイナリ形式で再頒布する場合、頒布物に付属のドキュメント等の
* 資料に、上記の著作権表示、本条件一覧、および下記免責条項を
* 含めること。
*
* - 書面による特別の許可なしに、本ソフトウェアから派生した製品の
* 宣伝または販売促進に、本ソフトウェアの著作権者の名前または
* コントリビューターの名前を使用してはならない。
*
* 本ソフトウェアは、著作権者およびコントリビューターによって「現
* 状のまま」提供されており、明示黙示を問わず、商業的な使用可能性、
* および特定の目的に対する適合性に関す暗黙の保証も含め、またそれ
* に限定されない、いかなる保証もないものとします。著作権者もコン
* トリビューターも、事由のいかんを問わず、損害発生の原因いかんを
* 問わず、かつ責任の根拠が契約であるか厳格責任であるか(過失その
* 他の)不法行為であるかを問わず、仮にそのような損害が発生する可
* 能性を知らされていたとしても、本ソフトウェアの使用によって発生
* した(代替品または代用サービスの調達、使用の喪失、データの喪失、
* 利益の喪失、業務の中断も含め、またそれに限定されない)直接損害、
* 間接損害、偶発的な損害、特別損害、懲罰的損害、または結果損害に
* ついて、一切責任を負わないものとします。
*
****************************************************************************/
|
# Load shared defaults for the standalone reStructuredText functional tests.
# FIX: close the defaults file explicitly instead of leaking the handle
# (exec(open(...).read()) never closed the file).
with open('functional/tests/_standalone_rst_defaults.py') as _defaults_file:
    exec(_defaults_file.read())

# Source and destination file names:
test_source = 'standalone_rst_s5_html.txt'
test_destination = 'standalone_rst_s5_html_1.html'

# Keyword parameters passed to publish_file:
writer_name = 's5_html'

# Settings:
settings_overrides['theme'] = 'small-black'
# local copy of default stylesheet:
settings_overrides['stylesheet_path'] = (
    'functional/input/data/html4css1.css')

# Extra functional tests.
# Prefix all names with '_' to avoid confusing `docutils.core.publish_file`.
import filecmp as _filecmp
def _test_more(expected_dir, output_dir, test_case, parameters):
    """Compare ``ui/<theme>`` directories.

    Recursively diffs the theme support files written next to the generated
    slide show against the checked-in expected copies, and fails the test
    case with actionable shell commands when they differ.
    """
    theme = settings_overrides.get('theme', 'default')
    expected = '%s/%s/%s' % (expected_dir, 'ui', theme)
    output = '%s/%s/%s' % (output_dir, 'ui', theme)
    differences, uniques = _compare_directories(expected, output)
    parts = []
    if differences:
        parts.append('The following files differ from the expected output:')
        parts.extend(differences)
        # Map output paths to their expected counterparts for the hints below.
        expected = [path.replace('functional/output/', 'functional/expected/')
                    for path in differences]
        parts.append('Please compare the expected and actual output files:')
        parts.extend([' diff %s %s' % tup
                      for tup in zip(expected, differences)])
        parts.append('If the actual output is correct, please replace the '
                     'expected output files:')
        parts.extend([' mv %s %s' % tup
                      for tup in zip(differences, expected)])
        parts.append('and check them in to Subversion:')
        parts.extend([' svn commit -m "<comment>" %s' % path
                      for path in expected])
    if uniques:
        parts.append('The following paths are unique:')
        parts.extend(uniques)
    # An empty `parts` means the directories matched.
    test_case.assertTrue(not parts, '\n'.join(parts))
def _compare_directories(expected, output):
dircmp = _filecmp.dircmp(expected, output, ['.svn', 'CVS'])
differences = ['%s/%s' % (output, name) for name in dircmp.diff_files]
uniques = (['%s/%s' % (expected, name) for name in dircmp.left_only]
+ ['%s/%s' % (output, name) for name in dircmp.right_only])
for subdir in dircmp.common_dirs:
diffs, uniqs = _compare_directories('%s/%s' % (expected, subdir),
'%s/%s' % (output, subdir))
differences.extend(diffs)
uniques.extend(uniqs)
return differences, uniques
|
FENIX-Platform/fenix-commons | fenix-commons-search/src/main/java/org/fao/fenix/commons/msd/dto/templates/export/metadata/SeGridSpatialRepresentation.java | package org.fao.fenix.commons.msd.dto.templates.export.metadata;
import com.fasterxml.jackson.annotation.JsonProperty;
import org.fao.fenix.commons.msd.dto.templates.ResponseHandler;
import org.fao.fenix.commons.msd.dto.type.CellGeometry;
import org.fao.fenix.commons.msd.dto.type.CellOfOrigin;
import org.fao.fenix.commons.msd.dto.type.XYPosition;
import java.util.Map;
/**
 * Export template for the grid spatial representation section of a metadata
 * document. Every annotated getter returns {@code null} here; NOTE(review):
 * the base {@link ResponseHandler} presumably supplies the real values from
 * the wrapped source object at serialization time — confirm against
 * ResponseHandler before relying on this.
 */
public class SeGridSpatialRepresentation extends ResponseHandler {

    public SeGridSpatialRepresentation() {}

    public SeGridSpatialRepresentation(Object source) {
        super(source);
    }

    @JsonProperty
    public Integer getNumberOfDimensions() {
        return null;
    }

    @JsonProperty
    public org.fao.fenix.commons.msd.dto.templates.export.metadata.OjAxis getAxisDimensionProperties() {
        return null;
    }

    @JsonProperty
    public CellGeometry getCellGeometry() {
        return null;
    }

    @JsonProperty
    public CellOfOrigin getCellOfOrigin() {
        return null;
    }

    @JsonProperty
    public XYPosition getXyPosition() {
        return null;
    }
}
|
kyowill/derby-10.0.2.1 | java/engine/org/apache/derby/iapi/error/PublicAPI.java | /*
Derby - Class org.apache.derby.iapi.error.PublicAPI
Copyright 1999, 2004 The Apache Software Foundation or its licensors, as applicable.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.iapi.error;
import java.sql.SQLException;
import org.apache.derby.impl.jdbc.EmbedSQLException;
/**
Class that wraps StandardExceptions in a SQLException.
This is used to make any public API methods always
throw SQLException rather than a random collection.
This wrapping is also special cased by TypeStatementException
to avoid double wrapping of some errors.
<P>
This will get cleaned up in main.
*/
public class PublicAPI
{
    /**
     * Generates a SQLException for signalling that the
     * operation failed due to a database error.
     *
     * @param se the internal engine exception to wrap
     * @return a SQLException carrying the original message, message id,
     *         severity, and the original exception as its cause
     */
    public static SQLException wrapStandardException(StandardException se) {
        return EmbedSQLException.wrapStandardException(se.getMessage(),
            se.getMessageId(), se.getSeverity(), se);
    }
}
|
logginghub/core | logginghub-client/src/main/java/com/logginghub/logging/LogEventGenerator.java | <gh_stars>0
package com.logginghub.logging;
import com.logginghub.logging.interfaces.LogEventSource;
import com.logginghub.logging.listeners.LogEventListener;
/**
* Interface for objects that receive log events and may generate new events as
* result. The events are fired back through the listener added, and
* maybe fired on any thread including the one that calls onNewLogEvent.
*
* @author admin
*/
/**
 * Interface for objects that receive log events and may generate new events
 * as a result. Generated events are fired back through the listeners added
 * via {@code addLogEventListener}, and may be fired on any thread, including
 * the one that calls {@code onNewLogEvent}.
 *
 * <p>No methods are declared here: the contract is exactly the union of
 * {@link LogEventListener} (consume events) and {@code LogEventSource}
 * (emit events to registered listeners).
 *
 * @author admin
 */
public interface LogEventGenerator extends LogEventListener, LogEventSource
{
    // CLEANUP: removed a stale commented-out abstract-class sketch that
    // duplicated the inherited method signatures.
}
|
abhiisheek/react-chatbot | src/docs/examples/ChatMsg/PlainTextChatMsg.js | import React from 'react';
import ChatMsg from 'react-chatbot/ChatMsg';
import PlainText from 'react-chatbot/PlainText';
import TextWithLink from 'react-chatbot/TextWithLink';
import types from 'react-chatbot/types';
import styles from './ChatMsg.css';
// Registry mapping a chat message type to the component that renders it.
const chatMsgTypesMap = {
  [types.TEXT]: PlainText,
  [types.TEXTWITHLINK]: TextWithLink
};

// Resolves the renderer for a message, defaulting the type to TEXT and
// falling back to PlainText for unregistered types.
const chatMsgConditionFunc = ({ type = types.TEXT }) => chatMsgTypesMap[type] || PlainText;
// Example props for a message authored by the user ("self" side of the thread).
const props = {
  self: true,
  data: 'What is the time now',
  selfIcon: styles.selfIcon,
  botIcon: styles.botIcon,
  type: types.TEXT,
  showSelfAvatar: false,
  determineTypeComp: chatMsgConditionFunc
};

/** PlainText Self ChatMsg */
const WrappedPlainTextSelfMsg = () => <ChatMsg {...props} />;

export default WrappedPlainTextSelfMsg;
|
406345/leetcode | 611_triangleNumber/main.cpp | #include "stdio.h"
#include "vector"
#include "unordered_map"
#include "set"
#include "algorithm"
using namespace std;
class Solution
{
public:
    // Counts index triples whose values can form a valid triangle
    // (each pair of sides strictly exceeds the third).
    //
    // PERF: replaces the previous O(n^3) triple loop with the standard
    // two-pointer scan: sort ascending, fix the longest side nums[k],
    // then count pairs (lo, hi) with nums[lo] + nums[hi] > nums[k]
    // in O(n) per k, O(n^2) overall.
    //
    // Note: still mutates the caller's vector by sorting it (the original
    // sorted it too, just in descending order).
    int triangleNumber(std::vector<int> &nums)
    {
        std::sort(nums.begin(), nums.end());
        const int size = static_cast<int>(nums.size());
        int count = 0;
        for (int k = size - 1; k >= 2; --k)
        {
            int lo = 0;
            int hi = k - 1;
            while (lo < hi)
            {
                if (nums[lo] + nums[hi] > nums[k])
                {
                    // Every index in [lo, hi) also pairs with hi
                    // to exceed nums[k], since the array is sorted.
                    count += hi - lo;
                    --hi;
                }
                else
                {
                    ++lo;
                }
            }
        }
        return count;
    }
};
int main(int argc, char const *argv[])
{
Solution s;
s.triangleNumber(vector<int>({2, 3, 2, 4}));
return 0;
}
|
sirghiny/Real-Estate-Manager | api/views/auth.py | <filename>api/views/auth.py
"""Authorization functionality."""
from flask import request
from flask_restful import Resource
from api.helpers.auth import create_token
from api.helpers.general import digest
from api.helpers.validation import validate_json
from api.models import User
# pylint:disable=no-self-use
class AuthResource(Resource):
    """Resource to handle authorization (sign-in)."""

    def post(self):
        """Sign a user in.

        Expects a JSON payload with ``email`` and ``password``. Returns a
        token on success (200), 404 for an unknown user, 400 for a wrong
        password or missing fields.
        """
        payload = request.get_json()
        required = ['email', 'password']
        result = validate_json(required, payload)
        # validate_json returns True when all fields are present, otherwise
        # the list of missing field names.
        if isinstance(result, bool):
            # FIX: hash the submitted password with the same digest helper
            # used at registration before comparing (the call had been
            # mangled to invalid syntax).
            password = digest(payload['password'])
            user = User.get(email=payload['email'])
            if isinstance(user, dict):
                return {
                    'status': 'fail',
                    'message': 'The user does not exist.',
                    'help': 'Ensure arguments are of existent object.'
                }, 404
            if user.password != password:
                return {
                    'status': 'fail',
                    'message': 'Wrong password.',
                    'help': 'Recover the password if necessary.'
                }, 400
            token = create_token(payload['email'])
            return {
                'status': 'success',
                'data': {
                    'message': 'Welcome to Real Estate Manager.',
                    'token': token
                }
            }, 200
        return {
            'status': 'fail',
            'message': 'Not all fields were provided.',
            'missing': result
        }, 400
|
bkaid/project-euler | problems/problem-0010/index.js | 'use strict';
const problem10 = require('./problem-0010');
let n = 2000000;
module.exports = {
description: `Find the sum of all the primes below ${n}.`,
result: () => problem10.sumOfPrimes(n)
};
|
dkBrazz/zserio | test/language/functions/java/functions/structure_parent_child_value/StructureParentChildValueTest.java | package functions.structure_parent_child_value;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.Arrays;
import org.junit.Test;
import zserio.runtime.io.ByteArrayBitStreamReader;
import zserio.runtime.io.ByteArrayBitStreamWriter;
public class StructureParentChildValueTest
{
    /**
     * Round-trip test for ParentValue: checks the child-provided function
     * value, the serialized bit layout, and read-back equality.
     */
    @Test
    public void checkParentValue() throws IOException
    {
        final ParentValue parentValue = createParentValue();
        assertEquals(CHILD_VALUE, parentValue.funcGetValue());

        final ByteArrayBitStreamWriter writer = new ByteArrayBitStreamWriter();
        parentValue.write(writer);
        final byte[] writtenByteArray = writer.toByteArray();
        writer.close();

        final byte[] expectedByteArray = writeParentValueToByteArray();
        assertTrue(Arrays.equals(expectedByteArray, writtenByteArray));

        final ByteArrayBitStreamReader reader = new ByteArrayBitStreamReader(writtenByteArray);
        final ParentValue readParentValue = new ParentValue(reader);
        assertEquals(parentValue, readParentValue);
    }

    /** Serializes the expected wire format by hand: the child value as 32 bits. */
    private byte[] writeParentValueToByteArray() throws IOException
    {
        ByteArrayBitStreamWriter writer = new ByteArrayBitStreamWriter();
        writer.writeBits(CHILD_VALUE, 32);
        writer.close();
        return writer.toByteArray();
    }

    private ParentValue createParentValue()
    {
        final ChildValue childValue = new ChildValue(CHILD_VALUE);
        return new ParentValue(childValue);
    }

    // FIX: made final — this is a constant and was previously a mutable static field.
    private static final int CHILD_VALUE = 0xABCD;
}
|
banbao990/Java | Learning/Thinking_in_Java_4th_Edition/Chapter_17/TestCollections.java | <gh_stars>1-10
/**
* @author banbao
* @comment 修改自示例代码
*/
import java.util.Collections;
import java.util.List;
import java.util.ArrayList;
public class TestCollections {
    public static void main(String... args) {
        // nCopies yields two references to ONE shared instance.
        List<TestCollections> list = new ArrayList<TestCollections>();
        list.addAll(Collections.nCopies(2, new TestCollections()));
        System.out.println(list);
        // fill() overwrites every slot with a second shared instance.
        Collections.fill(list, new TestCollections());
        System.out.println(list);
    }
}
/* Output
[TestCollections@15db9742, TestCollections@15db9742]
[TestCollections@6d06d69c, TestCollections@6d06d69c]
*/
|
msmygit/nosqlbench | nb/src/test/resources/scripts/async/cocycledelay_bursty.js | /*
*
* Copyright 2016 jshook
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* /
*/
// Activity deliberately throttled (diagrate < cyclerate) so coordinated-omission
// wait time builds up; the script then raises diagrate and checks recovery.
co_cycle_delay = {
    "alias": "co_cycle_delay",
    "type": "diag",
    "diagrate": "500",
    "cycles": "0..1000000",
    "threads": "10",
    "cyclerate": "1000,1.5",
    "modulo": "1000",
    "async" : "1000"
};

print('starting activity co_cycle_delay');
scenario.start(co_cycle_delay);

// Phase 1: let wait time accumulate while the activity is backlogged.
// FIX: loop counters declared with `var` (they previously leaked into
// the global scope as implicit globals).
for (var i = 0; i < 5; i++) {
    scenario.waitMillis(1000);
    if (!scenario.isRunningActivity('co_cycle_delay')) {
        print("scenario exited prematurely, aborting.");
        break;
    }
    print("backlogging, cycles=" + metrics.co_cycle_delay.cycles.servicetime.count +
        " waittime=" + metrics.co_cycle_delay.cycles.waittime.value +
        " diagrate=" + activities.co_cycle_delay.diagrate +
        " cyclerate=" + activities.co_cycle_delay.cyclerate
    );
}
print('step1 metrics.waittime=' + metrics.co_cycle_delay.cycles.waittime.value);

// Phase 2: raise the consumption rate and verify wait time trends back down.
activities.co_cycle_delay.diagrate = "10000";

for (var i = 0; i < 10; i++) {
    if (!scenario.isRunningActivity('co_cycle_delay')) {
        print("scenario exited prematurely, aborting.");
        break;
    }
    print("recovering, cycles=" + metrics.co_cycle_delay.cycles.servicetime.count +
        " waittime=" + metrics.co_cycle_delay.cycles.waittime.value +
        " diagrate=" + activities.co_cycle_delay.diagrate +
        " cyclerate=" + activities.co_cycle_delay.cyclerate
    );
    scenario.waitMillis(1000);
    if (metrics.co_cycle_delay.cycles.waittime.value < 50000000) {
        print("waittime trended back down as expected, exiting on iteration " + i);
        break;
    }
}

//scenario.awaitActivity("co_cycle_delay");
print('step2 metrics.waittime=' + metrics.co_cycle_delay.cycles.waittime.value);
scenario.stop(co_cycle_delay);
print("stopped activity co_cycle_delay");
|
mcx/opensim-core | OpenSim/Common/APDMDataReader.cpp | #include <fstream>
#include "Simbody.h"
#include "Exception.h"
#include "FileAdapter.h"
#include "TimeSeriesTable.h"
#include "APDMDataReader.h"
namespace OpenSim {
// Column-label suffixes used to locate each sensor's data columns in the
// older (pre-"Format=7") APDM CSV header; full labels are "<sensorName><suffix>".
const std::vector<std::string> APDMDataReader::acceleration_labels{
    "/Acceleration/X", "/Acceleration/Y", "/Acceleration/Z"
};
const std::vector<std::string> APDMDataReader::angular_velocity_labels{
    "/Angular Velocity/X", "/Angular Velocity/Y","/Angular Velocity/Z"
};
const std::vector<std::string> APDMDataReader::magnetic_heading_labels{
    "/Magnetic Field/X", "/Magnetic Field/Y","/Magnetic Field/Z"
};
// Quaternion columns: scalar (W) component first, then X, Y, Z.
const std::vector<std::string> APDMDataReader::orientation_labels{
    "/Orientation/Scalar", "/Orientation/X", "/Orientation/Y","/Orientation/Z"
};
// Header label of the time column in the old format.
const std::string APDMDataReader::TimeLabel{ "Time" };

// Virtual copy constructor idiom: heap-allocated copy of this reader.
APDMDataReader* APDMDataReader::clone() const {
    return new APDMDataReader{*this};
}
DataAdapter::OutputTables
APDMDataReader::extendRead(const std::string& fileName) const {
OPENSIM_THROW_IF(fileName.empty(),
EmptyFileName);
std::ifstream in_stream{ fileName };
OPENSIM_THROW_IF(!in_stream.good(),
FileDoesNotExist,
fileName);
OPENSIM_THROW_IF(in_stream.peek() == std::ifstream::traits_type::eof(),
FileIsEmpty,
fileName);
std::vector<std::string> labels; // will be written to output tables
double dataRate = SimTK::NaN;
std::vector<int> accIndex;
std::vector<int> gyroIndex;
std::vector<int> magIndex;
std::vector<int> orientationsIndex;
int n_imus = _settings.getProperty_ExperimentalSensors().size();
int last_size = 1024;
// Will read data into pre-allocated Matrices in-memory rather than appendRow
// on the fly which copies the whole table on every call.
SimTK::Matrix_<SimTK::Quaternion> rotationsData{ last_size, n_imus };
SimTK::Matrix_<SimTK::Vec3> linearAccelerationData{ last_size, n_imus };
SimTK::Matrix_<SimTK::Vec3> magneticHeadingData{ last_size, n_imus };
SimTK::Matrix_<SimTK::Vec3> angularVelocityData{ last_size, n_imus };
std::vector<double> times;
times.resize(last_size);
// We support two formats, they contain similar data but headers are different
std::string line;
// Line 1
std::getline(in_stream, line);
std::vector<std::string> tokens = FileAdapter::tokenize(line, ",");
bool newFormat = false;
if (tokens[0] == "Format=7") {
newFormat = true;
dataRate = 128; // Will fix after reading computing it from time column
// Header Line 1:Format=7, [I1,,,$IMU1,,,,,,,,,,,]*
// Header Line 2: Time,[Accelerometer,,,Gyroscope,,,Magnetometer,,,Barometer,Orientation,,,]*
// Header Line 3: ,[X,Y,Z,X,Y,Z,X,Y,Z,,S,X,Y,Z]*
std::vector<std::string> emptyLabels;
emptyLabels.push_back("");
// In this format there's no dataRate, either assumed or computed from Time column
for (int imu_index = 0; imu_index < n_imus; ++imu_index) {
std::string sensorName = _settings.get_ExperimentalSensors(imu_index).getName();
labels.push_back(_settings.get_ExperimentalSensors(imu_index).get_name_in_model());
find_start_column(tokens, emptyLabels, sensorName, accIndex, newFormat);
if (accIndex[imu_index] != -1) {
gyroIndex.push_back(accIndex[imu_index] + 3);
magIndex.push_back(accIndex[imu_index] + 6);
orientationsIndex.push_back(accIndex[imu_index] + 10);
}
else
OPENSIM_THROW(Exception, "Data for sensor:" +sensorName + "was not found in data file "+ fileName+".");
}
// Line 2 unused
std::getline(in_stream, line);
}
else {
// Older Format looks like this:
// Header Line 1: Test Name:, $String,,,,,..
// Header Line 2: Sample Rate:, $Value, Hz,,,,,
// Labels Line 3: Time {SensorName/Acceleration/X,SensorName/Acceleration/Y,SensorName/Acceleration/Z,....} repeated per sensor
// Units Line 4: s,{m/s^2,m/s^2,m/s^2....} repeated
int header_lines = 4;
std::string trialName = tokens[1]; // May contain spaces
// Line 2
std::getline(in_stream, line);
tokens = FileAdapter::tokenize(line, ",");
dataRate = std::stod(tokens[1]);
// Line 3, find columns for IMUs
std::getline(in_stream, line);
tokens = FileAdapter::tokenize(line, ",");
OPENSIM_THROW_IF((tokens[0] != TimeLabel), UnexpectedColumnLabel,
fileName,
TimeLabel,
tokens[0]);
for (int imu_index = 0; imu_index < n_imus; ++imu_index) {
std::string sensorName = _settings.get_ExperimentalSensors(imu_index).getName();
labels.push_back(_settings.get_ExperimentalSensors(imu_index).get_name_in_model());
find_start_column(tokens, APDMDataReader::acceleration_labels, sensorName, accIndex);
find_start_column(tokens, APDMDataReader::angular_velocity_labels, sensorName, gyroIndex);
find_start_column(tokens, APDMDataReader::magnetic_heading_labels, sensorName, magIndex);
find_start_column(tokens, APDMDataReader::orientation_labels, sensorName, orientationsIndex);
}
}
// Will create a table to map
// internally keep track of what data was found in input files
bool foundLinearAccelerationData = accIndex.size()>0;
bool foundMagneticHeadingData = magIndex.size()>0;
bool foundAngularVelocityData = gyroIndex.size()>0;
// If no Orientation data is available we'll abort
OPENSIM_THROW_IF((orientationsIndex.size() == 0),
TableMissingHeader);
// Line 4, Units unused
std::getline(in_stream, line);
// For all tables, will create row, stitch values from different sensors then append
bool done = false;
double time = 0.0;
double timeIncrement = 1 / dataRate;
int rowNumber = 0;
while (!done){
// Make vectors one per table
TimeSeriesTableQuaternion::RowVector
orientation_row_vector{ n_imus, SimTK::Quaternion() };
TimeSeriesTableVec3::RowVector
accel_row_vector{ n_imus, SimTK::Vec3(SimTK::NaN) };
TimeSeriesTableVec3::RowVector
magneto_row_vector{ n_imus, SimTK::Vec3(SimTK::NaN) };
TimeSeriesTableVec3::RowVector
gyro_row_vector{ n_imus, SimTK::Vec3(SimTK::NaN) };
std::vector<std::string> nextRow = FileAdapter::getNextLine(in_stream, ",");
if (nextRow.empty()) {
done = true;
break;
}
// Cycle through the imus collating values
for (int imu_index = 0; imu_index < n_imus; ++imu_index) {
// parse gyro info from in_stream
if (foundLinearAccelerationData)
accel_row_vector[imu_index] = SimTK::Vec3(std::stod(nextRow[accIndex[imu_index]]),
std::stod(nextRow[accIndex[imu_index] + 1]), std::stod(nextRow[accIndex[imu_index] + 2]));
if (foundMagneticHeadingData)
magneto_row_vector[imu_index] = SimTK::Vec3(std::stod(nextRow[magIndex[imu_index]]),
std::stod(nextRow[magIndex[imu_index] + 1]), std::stod(nextRow[magIndex[imu_index] + 2]));
if (foundAngularVelocityData)
gyro_row_vector[imu_index] = SimTK::Vec3(std::stod(nextRow[gyroIndex[imu_index]]),
std::stod(nextRow[gyroIndex[imu_index] + 1]), std::stod(nextRow[gyroIndex[imu_index] + 2]));
// Create Quaternion from values in file, assume order in file W, X, Y, Z
orientation_row_vector[imu_index] =
SimTK::Quaternion(std::stod(nextRow[orientationsIndex[imu_index]]),
std::stod(nextRow[orientationsIndex[imu_index] + 1]),
std::stod(nextRow[orientationsIndex[imu_index] + 2]),
std::stod(nextRow[orientationsIndex[imu_index] + 3]));
}
// append to the tables
times[rowNumber] = time;
if (foundLinearAccelerationData)
linearAccelerationData[rowNumber] = accel_row_vector;
if (foundMagneticHeadingData)
magneticHeadingData[rowNumber] = magneto_row_vector;
if (foundAngularVelocityData)
angularVelocityData[rowNumber] = gyro_row_vector;
rotationsData[rowNumber] = orientation_row_vector;
// We could get some indication of time from file or generate time based on rate
// Here we use the latter mechanism.
time += timeIncrement;
rowNumber++;
if (std::remainder(rowNumber, last_size) == 0) {
// resize all Data/Matrices, double the size while keeping data
int newSize = last_size*2;
times.resize(newSize);
// Repeat for Data matrices in use
if (foundLinearAccelerationData) linearAccelerationData.resizeKeep(newSize, n_imus);
if (foundMagneticHeadingData) magneticHeadingData.resizeKeep(newSize, n_imus);
if (foundAngularVelocityData) angularVelocityData.resizeKeep(newSize, n_imus);
rotationsData.resizeKeep(newSize, n_imus);
last_size = newSize;
}
}
// Trim Matrices in use to actual data and move into tables
times.resize(rowNumber);
// Repeat for Data matrices in use and create Tables from them or size 0 for empty
linearAccelerationData.resizeKeep(foundLinearAccelerationData? rowNumber : 0,
n_imus);
magneticHeadingData.resizeKeep(foundMagneticHeadingData? rowNumber : 0,
n_imus);
angularVelocityData.resizeKeep(foundAngularVelocityData? rowNumber :0,
n_imus);
rotationsData.resizeKeep(rowNumber, n_imus);
// Now create the tables from matrices
// Create 4 tables for Rotations, LinearAccelerations, AngularVelocity, MagneticHeading
// Tables could be empty if data is not present in file(s)
DataAdapter::OutputTables tables = createTablesFromMatrices(dataRate, labels, times,
rotationsData, linearAccelerationData, magneticHeadingData, angularVelocityData);
return tables;
}
// Locate the starting column of one sensor's channel block in the header row.
//
// Searches `tokens` for the contiguous run of labels
// "<sensorName><search_labels[0]>", "<sensorName><search_labels[1]>", ...
// and, on success, appends the run's starting column to `indices`.
// For the newer file format the header carries three extra comma-separated
// fields before the sensor name, so the recorded index is shifted left by 3.
// Out-of-order labels are not supported: if the first label is present but
// the rest do not follow it consecutively, the header is considered
// malformed and an Exception is thrown. If the first label is absent the
// sensor simply has no data of this kind and nothing is recorded.
void APDMDataReader::find_start_column(std::vector<std::string> tokens,
    std::vector<std::string> search_labels, const std::string& sensorName,
    std::vector<int>& indices, bool newFormat) const {
    const std::string leadLabel = sensorName + search_labels[0];
    auto hit = std::find(tokens.begin(), tokens.end(), leadLabel);
    if (hit == tokens.end())
        return; // this sensor/channel is not present in the header
    const int startColumn =
        static_cast<int>(std::distance(tokens.begin(), hit));
    // Verify the remaining labels appear consecutively after the first one.
    bool allMatched = true;
    for (size_t k = 1; k < search_labels.size() && allMatched; ++k) {
        allMatched = (tokens[startColumn + static_cast<int>(k)] ==
                      sensorName + search_labels[k]);
    }
    if (!allMatched) {
        // First label found but the rest are missing or out of order.
        throw Exception{"Expected labels for sensor " + sensorName +
                        " were not found."};
    }
    // Newer-format headers prepend three extra fields before the per-sensor
    // columns; compensate so the index refers to the data columns.
    indices.push_back(newFormat ? startColumn - 3 : startColumn);
}
} // namespace OpenSim
|
danjung/sparsemapcontent | src/test/java/org/sakaiproject/nakamura/lite/soak/AbstractScalingClient.java | <reponame>danjung/sparsemapcontent<gh_stars>1-10
/*
* Licensed to the Sakai Foundation (SF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The SF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package org.sakaiproject.nakamura.lite.soak;
import org.sakaiproject.nakamura.api.lite.ClientPoolException;
import org.sakaiproject.nakamura.api.lite.Configuration;
import org.sakaiproject.nakamura.api.lite.StorageClientException;
import org.sakaiproject.nakamura.api.lite.accesscontrol.AccessDeniedException;
import org.sakaiproject.nakamura.lite.authorizable.AuthorizableActivator;
import org.sakaiproject.nakamura.lite.storage.StorageClient;
import org.sakaiproject.nakamura.lite.storage.StorageClientPool;
/**
 * Base class for clients used in multi-threaded soak tests; an instance of
 * a subclass is the unit of work executed by a single soak-test thread.
 *
 * @author ieb
 *
 */
public abstract class AbstractScalingClient implements Runnable {

    // Pool from which a storage client is borrowed in setup().
    protected StorageClientPool clientPool;

    // The client obtained from the pool; null until setup() has run.
    protected StorageClient client;

    // Sparse map content configuration shared by all soak clients.
    protected Configuration configuration;

    /**
     * Records the pool and configuration; no storage access happens here —
     * the client itself is only obtained later in {@link #setup()}.
     *
     * @param clientPool pool to borrow a storage client from
     * @param configuration configuration handed to the authorizable activator
     */
    public AbstractScalingClient(StorageClientPool clientPool, Configuration configuration) throws ClientPoolException,
            StorageClientException, AccessDeniedException {
        this.clientPool = clientPool;
        this.configuration = configuration;
    }

    /**
     * Borrows a client from the pool and runs the
     * {@code AuthorizableActivator} setup step against it before the soak
     * run starts.
     */
    public void setup() throws ClientPoolException, StorageClientException, AccessDeniedException {
        this.client = this.clientPool.getClient();
        AuthorizableActivator activator = new AuthorizableActivator(this.client,
                this.configuration);
        activator.setup();
    }
}
|
elliotwms/benthos | lib/input/package.go | <filename>lib/input/package.go
// Package input defines consumers for aggregating data from a variety of
// sources. All consumer types must implement interface input.Type.
//
// If the source of an input consumer supports acknowledgements then the
// implementation of the input will wait for each message to reach a permanent
// destination before acknowledging it.
package input
|
cserverpaasshow/smart-OA | src/main/flow/cn/com/smart/flow/helper/FlowFormUploadFileHelper.java | package cn.com.smart.flow.helper;
import java.util.Map;
import org.springframework.web.multipart.MultipartFile;
import org.springframework.web.multipart.MultipartHttpServletRequest;
import cn.com.smart.bean.SmartResponse;
import cn.com.smart.constant.IConstant;
import cn.com.smart.flow.bean.SubmitFormData;
import cn.com.smart.flow.service.FlowService;
import cn.com.smart.form.helper.AbstractFormUploadFileHelper;
import cn.com.smart.service.SmartContextService;
import cn.com.smart.web.bean.entity.TNAttachment;
/**
 * Upload-file helper for workflow forms: stores each uploaded file as an
 * attachment record and links it to the submitted flow form data.
 *
 * @author lmq 2017-04-12
 * @version 1.0
 * @since 1.0
 */
public class FlowFormUploadFileHelper extends AbstractFormUploadFileHelper {

    // Service used to persist/delete the attachment-to-flow link.
    private FlowService flowServ;

    // The form submission the uploaded attachments are associated with.
    private SubmitFormData submitFormData;

    public FlowFormUploadFileHelper(MultipartHttpServletRequest multiRequest,
            Map<String, Object> formArgs, SubmitFormData submitFormData, String userId) {
        super(multiRequest, formArgs, userId);
        this.submitFormData = submitFormData;
        flowServ = SmartContextService.find(FlowService.class);
    }

    /**
     * Uploads a single file and records it as a flow attachment.
     * If linking the attachment to the flow fails, the orphaned attachment
     * is deleted again so no dangling record remains.
     *
     * @param file the uploaded multipart file
     * @return the attachment id on success, or {@code null} if the upload
     *         or the database save failed
     * @throws Exception propagated from the underlying upload handler
     */
    @Override
    protected String upload(MultipartFile file) throws Exception {
        String id = null;
        // Try-with-resources: closing the stream returned by
        // getInputStream() is the caller's responsibility per the Spring
        // MultipartFile contract; the original code leaked it.
        try (java.io.InputStream in = file.getInputStream()) {
            TNAttachment att = uploadHandler.fileUpload(in, file.getContentType(),
                    file.getOriginalFilename(), file.getSize(), userId);
            if (null != att) {
                SmartResponse<String> saveRes = flowServ.saveAttachment(att, this.submitFormData);
                if (IConstant.OP_SUCCESS.equals(saveRes.getResult())) {
                    id = att.getId();
                } else {
                    // Saving the link failed: remove the orphaned attachment.
                    flowServ.deleteAttachment(att.getId());
                }
            }
        }
        return id;
    }
}
|
sbnair/PolkaJS | node_modules/dmg-license/lib/index.js | "use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const Plist = require("plist");
const assembleLicenses_1 = require("./assembleLicenses");
const BodySpec_1 = require("./BodySpec");
exports.BodySpec = BodySpec_1.default;
const Context_1 = require("./Context");
const Labels_1 = require("./Labels");
exports.Labels = Labels_1.Labels;
const makeLicensePlist_1 = require("./makeLicensePlist");
const specFromJSON_1 = require("./specFromJSON");
const writePlistToDmg_1 = require("./writePlistToDmg");
var Language_1 = require("./Language");
exports.Language = Language_1.Language;
var specFromJSON_2 = require("./specFromJSON");
exports.BadJSONLicenseSpecError = specFromJSON_2.BadJSONLicenseSpecError;
var Labels_2 = require("./Labels");
exports.LabelEncodingError = Labels_2.LabelEncodingError;
exports.NoDefaultLabelsError = Labels_2.NoDefaultLabelsError;
async function dmgLicense(imagePath, spec, options) {
return await writePlistToDmg_1.default(imagePath, (await dmgLicensePlist(spec, options)).plist);
}
exports.dmgLicense = dmgLicense;
exports.default = dmgLicense;
async function dmgLicensePlist(spec, options) {
const context = new Context_1.default(options);
const plist = makeLicensePlist_1.default(await assembleLicenses_1.default(spec, context), context);
return {
plist,
get plistText() {
return Plist.build(plist);
}
};
}
exports.dmgLicensePlist = dmgLicensePlist;
async function dmgLicenseFromJSON(imagePath, specJSON, options) {
return await dmgLicense(imagePath, specFromJSON_1.default(specJSON, options), options);
}
exports.dmgLicenseFromJSON = dmgLicenseFromJSON;
async function dmgLicensePlistFromJSON(specJSON, options) {
return await dmgLicensePlist(specFromJSON_1.default(specJSON, options), options);
}
exports.dmgLicensePlistFromJSON = dmgLicensePlistFromJSON;
//# sourceMappingURL=index.js.map |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.