repo_name stringlengths 6 101 | path stringlengths 4 300 | text stringlengths 7 1.31M |
|---|---|---|
VoigtSebastian/de.htwg.se.Gladiators | src/test/scala/de/htwg/se/gladiators/controller/ControllerSpec.scala | <reponame>VoigtSebastian/de.htwg.se.Gladiators<gh_stars>1-10
package de.htwg.se.gladiators.controller
import de.htwg.se.gladiators.aview.TestImplementation.EventQueue
import de.htwg.se.gladiators.controller.BaseImplementation.Controller
import de.htwg.se.gladiators.controller.BaseImplementation.ControllerJson._
import de.htwg.se.gladiators.controller.GameState._
import de.htwg.se.gladiators.model.Board
import de.htwg.se.gladiators.model.Player
import de.htwg.se.gladiators.model.TileType.Mine
import de.htwg.se.gladiators.model.TileType.Sand
import de.htwg.se.gladiators.util.Command._
import de.htwg.se.gladiators.util.Configuration
import de.htwg.se.gladiators.util.Coordinate
import de.htwg.se.gladiators.util.Events._
import de.htwg.se.gladiators.util.Factories.BoardFactory
import de.htwg.se.gladiators.util.Factories.GladiatorFactory
import de.htwg.se.gladiators.util.Factories.ShopFactory
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import play.api.libs.json.Json
class ControllerSpec extends AnyWordSpec with Matchers {
"A controller" when {
val baseConfig = Configuration(5, 15)
// Verifies a freshly constructed controller: initial game state, board/shop
// string rendering, gladiator reset, and JSON serialization.
"created" should {
val controller = Controller(baseConfig)
// NOTE(review): playerTwo is given id 1 and playerOne id 2 — looks swapped;
// confirm the ids are irrelevant for these assertions.
controller.playerTwo = Some(Player(1, "", 0, 0, 100, false, Vector()))
controller.playerOne = Some(Player(2, "", 0, 0, 100, false, Vector()))
"be in the NamingPlayerOne state" in {
controller.gameState should be(GameState.NamingPlayerOne)
}
"have an initialized board" in {
controller.board.isInstanceOf[Board] should be(true)
}
"return the board as a simple string" in {
controller.boardToString should not be (empty)
}
"return the board as a colored string" in {
controller.boardToColoredString should not be (empty)
}
"return the shop as a string" in {
controller.shopToString should not be (empty)
}
"be able to reset Gladiators" in {
// Give player one 11 random gladiators, reset their `moved` flags via the
// controller, then check every flag is back to false.
controller.playerOne = Some(controller
.playerOne
.get
.copy(gladiators = (0 to 10).map(_ => GladiatorFactory.initRandomGladiator).toVector))
controller.playerOne = (controller
.resetGladiatorsMoved(controller.playerOne.get))
controller
.playerOne
.get
.gladiators
.foreach(_.moved should be(false))
}
"have a json representation" in {
Json.toJson(controller).toString should not be (empty)
}
}
// currentPlayer must resolve from the game state: TurnPlayerOne -> playerOne,
// TurnPlayerTwo -> playerTwo, and None in any naming state.
"returning the current player" should {
"return Player One" in {
val controller = Controller(baseConfig)
controller.playerOne = Some(Player(1, "", 0, 0, 100, false, Vector()))
controller.currentGameState = TurnPlayerOne
controller.currentPlayer should be(controller.playerOne)
}
"return Player Two" in {
val controller = Controller(baseConfig)
controller.playerTwo = Some(Player(2, "", 0, 0, 100, false, Vector()))
controller.currentGameState = TurnPlayerTwo
controller.currentPlayer should be(controller.playerTwo)
}
"return None" in {
val controller = Controller(baseConfig)
controller.currentGameState = NamingPlayerOne
controller.currentPlayer should be(None)
controller.currentGameState = NamingPlayerTwo
controller.currentPlayer should be(None)
}
}
// enemyPlayer is the mirror of currentPlayer: TurnPlayerOne -> playerTwo,
// TurnPlayerTwo -> playerOne, None in naming states.
"returning the enemy player" should {
"return Player One" in {
val controller = Controller(baseConfig)
controller.playerTwo = Some(Player(1, "", 0, 0, 100, false, Vector()))
controller.currentGameState = TurnPlayerOne
controller.enemyPlayer should be(controller.playerTwo)
}
"return Player Two" in {
val controller = Controller(baseConfig)
controller.playerOne = Some(Player(2, "", 0, 0, 100, false, Vector()))
controller.currentGameState = TurnPlayerTwo
controller.enemyPlayer should be(controller.playerOne)
}
"return None" in {
val controller = Controller(baseConfig)
controller.currentGameState = NamingPlayerOne
controller.enemyPlayer should be(None)
controller.currentGameState = NamingPlayerTwo
controller.enemyPlayer should be(None)
}
}
// updateCurrentPlayer writes through to whichever player is on turn, and must
// throw (with uncheckedStateMessage) when called outside a turn state.
"updating the current player" should {
"throw an exception" in {
val controller = Controller(baseConfig)
the[Exception] thrownBy controller.updateCurrentPlayer(None) should have message controller.uncheckedStateMessage
}
"update player one correctly" in {
val controller = Controller(baseConfig)
controller.currentGameState = GameState.TurnPlayerOne
controller.playerOne = Some(Player(1, "", 0, 0, 100, false, Vector()))
controller.updateCurrentPlayer(None)
controller.playerOne should be(None)
}
"update player two correctly" in {
val controller = Controller(baseConfig)
controller.currentGameState = GameState.TurnPlayerTwo
controller.playerTwo = Some(Player(2, "", 0, 0, 100, false, Vector()))
controller.updateCurrentPlayer(None)
controller.playerTwo should be(None)
}
}
// updateEnemyPlayer writes through to the player NOT on turn, and throws the
// same unchecked-state message outside a turn state.
"updating the enemy player" should {
"throw an exception" in {
val controller = Controller(baseConfig)
the[Exception] thrownBy controller.updateEnemyPlayer(None) should have message controller.uncheckedStateMessage
}
"update player one correctly" in {
val controller = Controller(baseConfig)
controller.currentGameState = GameState.TurnPlayerTwo
controller.playerOne = Some(Player(1, "", 0, 0, 100, false, Vector()))
controller.updateEnemyPlayer(None)
controller.playerOne should be(None)
}
"update player two correctly" in {
val controller = Controller(baseConfig)
controller.currentGameState = GameState.TurnPlayerOne
controller.playerTwo = Some(Player(2, "", 0, 0, 100, false, Vector()))
controller.updateEnemyPlayer(None)
controller.playerTwo should be(None)
}
}
// In the initial (naming) state the controller forwards Init events but
// rejects turn commands with an ErrorMessage. Note: `controller` and
// `eventQueue` are shared across the two tests, so ordering matters.
"receiving commands in init state" should {
val controller = Controller(baseConfig)
val eventQueue = EventQueue(controller)
"send out an init Event" in {
controller.publish(Init)
eventQueue.events.dequeue() should be(Init)
}
"send out an Error message" in {
controller.inputCommand(EndTurn)
eventQueue.events.dequeue().isInstanceOf[ErrorMessage] should be(true)
}
}
// Drives the state machine NamingPlayerOne -> NamingPlayerTwo -> turns via
// commands. The controller/eventQueue pair is shared, so these three tests
// depend on running in declaration order.
"receiving commands to switch through states" should {
val controller = Controller(baseConfig)
val eventQueue = EventQueue(controller)
"send out player named events" in {
controller.inputCommand(NamePlayerOne("helmut"))
eventQueue.events.dequeue() should be(PlayerOneNamed("helmut"))
controller.inputCommand(NamePlayerTwo("herman"))
eventQueue.events.dequeue() should be(PlayerTwoNamed("herman"))
eventQueue.events.dequeue() should be(Turn(controller.playerOne.get))
}
"send out error messages" in {
// Renaming after the game has started must be rejected.
controller.inputCommand(NamePlayerOne("helmut"))
controller.inputCommand(NamePlayerTwo("herman"))
eventQueue.events.dequeue().isInstanceOf[ErrorMessage] should be(true)
eventQueue.events.dequeue().isInstanceOf[ErrorMessage] should be(true)
}
"end the current turn" in {
controller.inputCommand(EndTurn)
controller.inputCommand(EndTurn)
eventQueue.events.dequeue() should be(Turn(controller.playerTwo.get))
eventQueue.events.dequeue() should be(Turn(controller.playerOne.get))
}
}
// Covers the BuyUnit command: every failure mode (already bought this turn,
// no free placement tile, wrong state, unknown unit, insufficient credits,
// out-of-bounds coordinate) plus the happy path with credit bookkeeping.
// NOTE(review): some tests call `dequeue` without parens while others use
// `dequeue()` — the side-effecting form with parens would be consistent.
"receiving buy commands" should {
"fail because the Player already bought a gladiator from the shop this turn" in {
// Player constructed with boughtGladiator = true (presumably — the 6th
// Player argument; confirm against the Player definition).
val (controller, eventQueue) = createControllerEventQueue(shopStockSize = Some(5))
controller.board = BoardFactory.initRandomBoard(percentageSand = 0)
controller.playerOne = Some(Player(1, "", controller.board.tiles.size - 1, Int.MaxValue, 100, true, Vector()))
controller.currentGameState = TurnPlayerOne
controller.buyUnit(1, Coordinate((controller.board.tiles.size / 2), 1))
eventQueue.events.dequeue.isInstanceOf[ErrorMessage] should be(true)
}
"fail because all tiles are blocked" in {
// percentageSand = 0 means no placeable tiles exist on the board.
val initialCredits = 1000
val (controller, eventQueue) = createControllerEventQueue(shopStockSize = Some(5))
controller.board = BoardFactory.initRandomBoard(percentageSand = 0)
controller.playerOne = Some(Player(1, "", controller.board.tiles.size - 1, initialCredits, 100, false, Vector()))
controller.currentGameState = TurnPlayerOne
controller.inputCommand(BuyUnit(1, Coordinate((controller.board.tiles.size / 2), 1)))
controller.playerTwo = Some(Player(2, "", 0, initialCredits, 100, false, Vector()))
controller.currentGameState = TurnPlayerTwo
controller.inputCommand(BuyUnit(1, Coordinate((controller.board.tiles.size / 2), controller.board.tiles.size - 2)))
eventQueue.events.dequeue.isInstanceOf[ErrorMessage] should be(true)
eventQueue.events.dequeue.isInstanceOf[ErrorMessage] should be(true)
}
"fail because of the wrong controller-state" in {
val (controller, eventQueue) = createControllerEventQueue(shopStockSize = Some(5))
controller.currentGameState = NamingPlayerOne
controller.inputCommand(BuyUnit(1, Coordinate(0, 0)))
eventQueue.events.dequeue().isInstanceOf[ErrorMessage] should be(true)
}
"fail because the requested Unit does not exist" in {
// Shop stock is 5, so index 10 is invalid.
val (controller, eventQueue) = createControllerEventQueue(shopStockSize = Some(5))
controller.currentGameState = TurnPlayerOne
controller.inputCommand(BuyUnit(10, Coordinate(0, 0)))
eventQueue.events.dequeue().isInstanceOf[ErrorMessage] should be(true)
}
"fail because of insufficient credits" in {
val (controller, eventQueue) = createControllerEventQueue(shopStockSize = Some(5))
controller.board = BoardFactory.initRandomBoard(percentageSand = 100)
controller.playerOne = Some(Player(1, "", controller.board.tiles.size - 1, 0, 100, false, Vector()))
controller.currentGameState = TurnPlayerOne
controller.inputCommand(BuyUnit(1, Coordinate((controller.board.tiles.size / 2), 1)))
eventQueue.events.dequeue().isInstanceOf[ErrorMessage] should be(true)
}
"fail because the coordinate is out of bounds" in {
// Negative, y-overflow and x-overflow coordinates must each yield an error.
val (controller, eventQueue) = createControllerEventQueue(shopStockSize = Some(5))
controller.currentGameState = TurnPlayerOne
controller.playerOne = Some(Player(2, "", 1000, controller.board.tiles.size, 100, false, Vector()))
controller.inputCommand(BuyUnit(1, Coordinate(-1, -1)))
controller.inputCommand(BuyUnit(1, Coordinate(0, controller.board.tiles.size)))
controller.inputCommand(BuyUnit(1, Coordinate(controller.board.tiles.size, 0)))
eventQueue.events.dequeue().isInstanceOf[ErrorMessage] should be(true)
eventQueue.events.dequeue().isInstanceOf[ErrorMessage] should be(true)
eventQueue.events.dequeue().isInstanceOf[ErrorMessage] should be(true)
}
"send out successful messages" in {
// Happy path for both players: event carries the buying player and the
// gladiator's cost is deducted from that player's credits.
val initialCredits = 1000
val (controller, eventQueue) = createControllerEventQueue(shopStockSize = Some(5))
controller.board = BoardFactory.initRandomBoard(percentageSand = 100)
controller.playerOne = Some(Player(1, "", controller.board.tiles.size - 1, initialCredits, 100, false, Vector()))
controller.currentGameState = TurnPlayerOne
controller.inputCommand(BuyUnit(1, Coordinate((controller.board.tiles.size / 2), 1)))
eventQueue.events.dequeue().asInstanceOf[SuccessfullyBoughtGladiator].player should be(controller.playerOne.get)
controller.playerOne.get.credits should be(initialCredits - controller.playerOne.get.gladiators(0).cost)
controller.playerTwo = Some(Player(2, "", 0, initialCredits, 100, false, Vector()))
controller.currentGameState = TurnPlayerTwo
controller.inputCommand(BuyUnit(1, Coordinate((controller.board.tiles.size / 2), controller.board.tiles.size - 2)))
eventQueue.events.dequeue().asInstanceOf[SuccessfullyBoughtGladiator].player should be(controller.playerTwo.get)
controller.playerTwo.get.credits should be(initialCredits - controller.playerTwo.get.gladiators(0).cost)
controller.playerOne.get.credits should be >= 0
controller.playerTwo.get.credits should be >= 0
}
}
// Covers the Move command: wrong state, out-of-bounds target, and successful
// moves for both players (which set the gladiator's `moved` flag).
"moving a unit" should {
"publish an error because of the wrong currentGameState" in {
val (controller, eventQueue) = createControllerEventQueue(shopStockSize = Some(5))
controller.currentGameState = NamingPlayerOne
controller.inputCommand(Move(Coordinate(0, 0), Coordinate(1, 1)))
eventQueue.events.dequeue().isInstanceOf[ErrorMessage] should be(true)
eventQueue.events.isEmpty should be(true)
}
"publish an error because the move is out of bounds" in {
val (controller, eventQueue) = createControllerEventQueue(shopStockSize = Some(5))
controller.currentGameState = TurnPlayerOne
controller.playerOne = Some(Player(1, "", 0, 0, 100, false, Vector(GladiatorFactory.createGladiator(position = Some(Coordinate(0, 0)), moved = Some(false)))))
controller.playerTwo = Some(Player(2, "", 0, 0, 100, false, Vector()))
controller.inputCommand(Move(Coordinate(0, 0), Coordinate(-1, -1)))
eventQueue.events.dequeue().isInstanceOf[ErrorMessage] should be(true)
eventQueue.events.isEmpty should be(true)
}
"publish a successful move for player one" in {
val (controller, eventQueue) = createControllerEventQueue(shopStockSize = Some(5))
controller.currentGameState = TurnPlayerOne
controller.board = BoardFactory.initRandomBoard(15, 100)
controller.playerOne = Some(Player(1, "", controller.board.tiles.size - 1, 0, 100, false, Vector(
GladiatorFactory
.createGladiator(
position = Some(Coordinate(0, 0)),
moved = Some(false),
movementPoints = Some(4)))))
controller.playerTwo = Some(Player(2, "", 0, 0, 100, false, Vector()))
controller.inputCommand(Move(Coordinate(0, 0), Coordinate(1, 0)))
eventQueue.events.dequeue().isInstanceOf[Moved] should be(true)
// A successful move marks the gladiator as having moved this turn.
controller.playerOne.get.gladiators.head.moved should be(true)
eventQueue.events.isEmpty should be(true)
}
"publish a successful move for player two" in {
val (controller, eventQueue) = createControllerEventQueue(shopStockSize = Some(5))
controller.currentGameState = TurnPlayerTwo
controller.board = BoardFactory.initRandomBoard(15, 100)
controller.playerOne = Some(Player(1, "", controller.board.tiles.size - 1, 0, 100, false, Vector()))
controller.playerTwo = Some(Player(2, "", 0, 0, 100, false, Vector(
GladiatorFactory
.createGladiator(
position = Some(Coordinate(0, 0)),
moved = Some(false),
movementPoints = Some(4)))))
controller.inputCommand(Move(Coordinate(0, 0), Coordinate(1, 0)))
eventQueue.events.dequeue().isInstanceOf[Moved] should be(true)
controller.playerTwo.get.gladiators.head.moved should be(true)
eventQueue.events.isEmpty should be(true)
}
}
// Moving onto an enemy-occupied tile is an attack: verifies the error path,
// the Attacked event, and removal of a gladiator whose HP drops to zero.
"used to attack" should {
"return an error message" in {
val (controller, eventQueue) = createControllerEventQueue()
controller.currentGameState = Finished
controller.move(Coordinate(0, 0), Coordinate(0, 1)).isInstanceOf[ErrorMessage] should be(true)
eventQueue.events.dequeue.isInstanceOf[ErrorMessage] should be(true)
eventQueue.events.isEmpty should be(true)
}
"return an attacked message" in {
val (controller, eventQueue) = createControllerEventQueue()
controller.namePlayerOne("One")
controller.namePlayerTwo("Two")
controller.board = BoardFactory.createSandBoard3x3
controller.playerOne = Some(controller
.playerOne
.get
.copy(gladiators = Vector(GladiatorFactory.createGladiator(position = Some(Coordinate(0, 0)), moved = Some(false), movementPoints = Some(2)))))
controller.playerTwo = Some(controller
.playerTwo
.get
.copy(gladiators = Vector(GladiatorFactory.createGladiator(position = Some(Coordinate(0, 1))))))
controller.currentGameState = TurnPlayerOne
// Drain the three naming/turn events emitted during setup.
(1 to 3).foreach(_ => eventQueue.events.dequeue)
controller.move(Coordinate(0, 0), Coordinate(0, 1)).isInstanceOf[Attacked] should be(true)
eventQueue.events.dequeue.isInstanceOf[Attacked] should be(true)
eventQueue.events.isEmpty should be(true)
}
"return that there is one unit less" in {
// 9000 attack vs 1 HP: the defending gladiator must be removed.
val (controller, eventQueue) = createControllerEventQueue()
controller.namePlayerOne("One")
controller.namePlayerTwo("Two")
controller.board = BoardFactory.createSandBoard3x3
controller.playerOne = Some(controller
.playerOne
.get
.copy(gladiators = Vector(GladiatorFactory.createGladiator(
position = Some(Coordinate(0, 0)),
moved = Some(false),
movementPoints = Some(2),
attackPoints = Some(9000)))))
controller.playerTwo = Some(controller
.playerTwo
.get
.copy(gladiators = Vector(GladiatorFactory.createGladiator(
position = Some(Coordinate(0, 1)),
healthPoints = Some(1)))))
controller.currentGameState = TurnPlayerOne
(1 to 3).foreach(_ => eventQueue.events.dequeue)
controller.move(Coordinate(0, 0), Coordinate(0, 1)).isInstanceOf[Attacked] should be(true)
eventQueue.events.dequeue.isInstanceOf[Attacked] should be(true)
controller.playerTwo.get.gladiators should be(empty)
eventQueue.events.isEmpty should be(true)
}
}
// Moving onto the enemy base line attacks the base: verifies health reduction
// (BaseAttacked) and the win condition when enemy health reaches zero (Won).
"used to do base attacks" should {
"return a BaseAttacked Event" in {
val (controller, eventQueue) = createControllerEventQueue()
controller.board = BoardFactory.createNormalBoard3x3
controller.namePlayerOne("One")
controller.namePlayerTwo("Two")
controller.playerOne = Some(controller
.playerOne
.get
.copy(gladiators = Vector(GladiatorFactory.createGladiator(
position = Some(Coordinate(1, 1)),
moved = Some(false),
movementPoints = Some(2),
attackPoints = Some(1)))))
// Drain the three naming/turn events emitted during setup.
(1 to 3).foreach(_ => eventQueue.events.dequeue)
controller.currentGameState = TurnPlayerOne
val healthBeforeAttack = controller.playerTwo.get.health
controller.move(Coordinate(1, 1), Coordinate(1, controller.playerOne.get.enemyBaseLine)).isInstanceOf[BaseAttacked] should be(true)
eventQueue.events.dequeue.isInstanceOf[BaseAttacked] should be(true)
// Base health drops by exactly the attacker's attack points.
controller.playerTwo.get.health should be(healthBeforeAttack - controller.playerOne.get.gladiators.head.attackPoints)
eventQueue.events.isEmpty should be(true)
}
"return the Won event" in {
val (controller, eventQueue) = createControllerEventQueue()
controller.board = BoardFactory.createNormalBoard3x3
controller.namePlayerOne("One")
controller.namePlayerTwo("Two")
controller.playerOne = Some(controller
.playerOne
.get
.copy(gladiators = Vector(GladiatorFactory.createGladiator(
position = Some(Coordinate(1, 1)),
moved = Some(false),
movementPoints = Some(2),
attackPoints = Some(100)))))
(1 to 3).foreach(_ => eventQueue.events.dequeue)
controller.currentGameState = TurnPlayerOne
// 100 attack vs 1 remaining base health ends the game.
controller.playerTwo = Some(controller.playerTwo.get.copy(health = 1))
controller.move(Coordinate(1, 1), Coordinate(1, controller.playerOne.get.enemyBaseLine)).isInstanceOf[Won] should be(true)
eventQueue.events.dequeue.isInstanceOf[Won] should be(true)
eventQueue.events.isEmpty should be(true)
}
}
// Moving onto a Mine tile mines it: a rich mine loses goldPerHit, and a mine
// with exactly the hit amount left turns back into a Sand tile.
"used to mine a mine" should {
"not deplete the mine" in {
val mine = Mine(100)
val (controller, eventQueue) = createControllerEventQueue()
controller.board = BoardFactory
.createNormalBoard3x3
.updateTile(Coordinate(0, 1), mine)
controller.namePlayerOne("One")
controller.namePlayerTwo("Two")
controller.playerOne = Some(controller
.playerOne
.get
.copy(gladiators = Vector(GladiatorFactory.createGladiator(
position = Some(Coordinate(0, 0)),
moved = Some(false),
movementPoints = Some(2),
attackPoints = Some(100)))))
// Drain the three naming/turn events emitted during setup.
(1 to 3).foreach(_ => eventQueue.events.dequeue)
controller.currentGameState = TurnPlayerOne
controller.move(Coordinate(0, 0), Coordinate(0, 1))
eventQueue.events.dequeue.isInstanceOf[Mined] should be(true)
// tiles are indexed (row)(column), i.e. tiles(y)(x) for Coordinate(0, 1).
controller.board.tiles(1)(0) should be(Mine(100 - mine.goldPerHit))
eventQueue.events.isEmpty should be(true)
}
"deplete the mine" in {
val (controller, eventQueue) = createControllerEventQueue()
controller.board = BoardFactory
.createNormalBoard3x3
.updateTile(Coordinate(0, 1), Mine(1))
controller.namePlayerOne("One")
controller.namePlayerTwo("Two")
controller.playerOne = Some(controller
.playerOne
.get
.copy(gladiators = Vector(GladiatorFactory.createGladiator(
position = Some(Coordinate(0, 0)),
moved = Some(false),
movementPoints = Some(2),
attackPoints = Some(100)))))
(1 to 3).foreach(_ => eventQueue.events.dequeue)
controller.currentGameState = TurnPlayerOne
controller
.move(Coordinate(0, 0), Coordinate(0, 1))
.asInstanceOf[Mined].amount should be(1)
eventQueue.events.dequeue.isInstanceOf[Mined] should be(true)
// An exhausted mine reverts to a plain Sand tile.
controller.board.tiles(1)(0) should be(Sand)
eventQueue.events.isEmpty should be(true)
}
}
// The Quit command must publish a Shutdown event and flip the shutdown flag.
"receiving commands" should {
"publish a Shutdown event" in {
val (controller, eventQueue) = createControllerEventQueue()
controller.inputCommand(Quit)
// Bug fix: the original line read `eventQueue.events.dequeue() == Shutdown`,
// which computes a Boolean and discards it — the event was never asserted.
eventQueue.events.dequeue() should be(Shutdown)
controller.shouldShutdown.get should be(true)
}
}
"being asked if the current player occupies a tile" should {
"return false" in {
val (controller, _) = createControllerEventQueue()
val length = controller.board.tiles.length - 1
(0 to length).zip(0 to length).foreach({ case (x, y) => controller.tileOccupiedByCurrentPlayer(Coordinate(x, y)) should be(false) })
}
"return true" in {
val (controller, _) = createControllerEventQueue()
controller.namePlayerOne("jürgen")
controller.playerOne = Some(controller
.playerOne
.get
.copy(gladiators = Vector(GladiatorFactory.createGladiator(position = Some(Coordinate(0, 0))))))
controller.currentGameState = TurnPlayerOne
controller.tileOccupiedByCurrentPlayer(Coordinate(0, 0)) should be(true)
}
}
"being asked for attack tiles from a certain position" should {
"return false because there is no current Player" in {
val (controller, _) = createControllerEventQueue()
val length = controller.board.tiles.length - 1
(0 to length).zip(0 to length).foreach({ case (x, y) => controller.attackTiles(Coordinate(x, y)) should be(None) })
}
"return true" in {
val (controller, _) = createControllerEventQueue()
controller.board = BoardFactory.createSandBoard3x3
controller.namePlayerOne("jürgen")
controller.playerOne = Some(controller
.playerOne
.get
.copy(gladiators = Vector(GladiatorFactory
.createGladiator(
position = Some(Coordinate(0, 0)),
movementPoints = Some(1)))))
controller.currentGameState = TurnPlayerOne
controller.attackTiles(Coordinate(0, 0)) should not be empty
controller.attackTiles(Coordinate(0, 0)) should not(contain(Coordinate(0, 0)))
}
"return None because the tile is not occupied" in {
val (controller, _) = createControllerEventQueue()
controller.board = BoardFactory.createSandBoard3x3
controller.namePlayerOne("jürgen")
controller.playerOne = Some(controller
.playerOne
.get
.copy(gladiators = Vector(GladiatorFactory
.createGladiator(
position = Some(Coordinate(0, 0)),
movementPoints = Some(1)))))
controller.currentGameState = TurnPlayerOne
controller
.attackTiles(Coordinate(1, 0)) should be(None)
}
}
"being asked for move tiles from a certain position" should {
"return false because there is no current Player" in {
val (controller, _) = createControllerEventQueue()
val length = controller.board.tiles.length - 1
(0 to length).zip(0 to length).foreach({ case (x, y) => controller.moveTiles(Coordinate(x, y)) should be(None) })
}
"return true" in {
val (controller, _) = createControllerEventQueue()
controller.board = BoardFactory.createSandBoard3x3
controller.namePlayerOne("jürgen")
controller.playerOne = Some(controller
.playerOne
.get
.copy(gladiators = Vector(GladiatorFactory
.createGladiator(
position = Some(Coordinate(0, 0)),
movementPoints = Some(1)))))
controller.currentGameState = TurnPlayerOne
controller.moveTiles(Coordinate(0, 0)) should not be empty
controller.moveTiles(Coordinate(0, 0)) should not(contain(Coordinate(0, 0)))
}
"return None because the tile is not occupied" in {
val (controller, _) = createControllerEventQueue()
controller.board = BoardFactory.createSandBoard3x3
controller.namePlayerOne("jürgen")
controller.playerOne = Some(controller
.playerOne
.get
.copy(gladiators = Vector(GladiatorFactory
.createGladiator(
position = Some(Coordinate(0, 0)),
movementPoints = Some(1)))))
controller.currentGameState = TurnPlayerOne
controller
.moveTiles(Coordinate(1, 0)) should be(None)
}
}
// newUnitPlacementTiles: None outside a turn, non-empty once a player is on
// turn with a placeable board.
// NOTE(review): the description "being tiles to place new units" reads like a
// typo for "being asked for tiles to place new units" — it is a runtime test
// name, so left unchanged here.
"being tiles to place new units" should {
"return false" in {
val (controller, _) = createControllerEventQueue()
controller.newUnitPlacementTiles should be(None)
}
"return true" in {
val (controller, _) = createControllerEventQueue()
controller.board = BoardFactory.createSandBoard3x3
controller.namePlayerOne("jürgen")
controller.currentGameState = TurnPlayerOne
controller
.newUnitPlacementTiles should not be empty
}
}
}
"being asked for the TileTypes of the board" should {
"return them" in {
val (controller, _) = createControllerEventQueue()
controller.boardTiles should be(controller.board.tiles)
}
}
// gladiatorsPlayerOne/Two: None before players exist, Some(...) once each
// player owns at least one gladiator.
"being asked for gladiators" should {
"return None" in {
val controller = Controller(Configuration(5, 15))
controller.gladiatorsPlayerOne should be(None)
controller.gladiatorsPlayerTwo should be(None)
}
"return Some" in {
val (controller, _) = createControllerEventQueue()
controller.namePlayerOne("one")
controller.namePlayerTwo("two")
controller.playerOne = Some(controller.playerOne.get.copy(gladiators = Vector(GladiatorFactory.initRandomGladiator)))
controller.playerTwo = Some(controller.playerTwo.get.copy(gladiators = Vector(GladiatorFactory.initRandomGladiator)))
controller.gladiatorsPlayerOne.get should not be (empty)
controller.gladiatorsPlayerTwo.get should not be (empty)
}
}
// stock is a plain accessor delegating to shop.stock.
"being asked for the Gladiators in stock" should {
"return the shops stock" in {
val controller = Controller(Configuration(5, 15))
controller.stock should be(controller.shop.stock)
}
}
/**
 * Test fixture helper: builds a fresh controller with an attached event queue
 * and a randomized shop.
 *
 * @param shopStockSize optional shop size; defaults to the controller's
 *                      current stock length
 * @return the controller paired with its subscribed event queue
 */
def createControllerEventQueue(shopStockSize: Option[Int] = None) = {
val controller = Controller(Configuration(5, 15))
// EventQueue is attached before the shop is replaced so any events from the
// shop assignment would be captured — keep this ordering.
val eventQueue = EventQueue(controller)
controller.shop = ShopFactory.initRandomShop(shopStockSize.getOrElse(controller.shop.stock.length))
(controller, eventQueue)
}
}
|
jizongFox/deep-clustering-toolbox | deepclustering/decorator/__init__.py | from .cache_decorator import SingleProcessCache, MultiProcessCache
from .decorator import *
from .lazy_load_checkpoint import lazy_load_checkpoint
|
daponi/springboot2 | my_boot05_web_admin/src/main/java/com/atguigu/www/config/MyBatisConfig.java | package com.atguigu.www.config;
import com.baomidou.mybatisplus.annotation.DbType;
import com.baomidou.mybatisplus.extension.plugins.MybatisPlusInterceptor;
import com.baomidou.mybatisplus.extension.plugins.inner.PaginationInnerInterceptor;
import com.baomidou.mybatisplus.extension.plugins.pagination.optimize.JsqlParserCountOptimize;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
 * MyBatis-Plus pagination configuration.
 * Registers a {@link MybatisPlusInterceptor} carrying a single
 * {@link PaginationInnerInterceptor} that rewrites queries into paged SQL.
 */
@Configuration
public class MyBatisConfig {
@Bean
public MybatisPlusInterceptor paginationInterceptor() {
MybatisPlusInterceptor interceptor = new MybatisPlusInterceptor();
PaginationInnerInterceptor paginationInnerInterceptor = new PaginationInnerInterceptor();
// When the requested page exceeds the last page: true = fall back to the
// first page, false = execute the request as-is (false is already the default).
paginationInnerInterceptor.setOverflow(false);
// Maximum rows per single page; default is 500, -1 means unlimited.
paginationInnerInterceptor.setMaxLimit(500L);
// NOTE(review): the original comment claimed the count-query join
// optimization is enabled here, but nothing is configured for it — the
// imported JsqlParserCountOptimize (and DbType) are never used; confirm
// whether optimizeJoin was meant to be set on the inner interceptor.
interceptor.addInnerInterceptor(paginationInnerInterceptor);
return interceptor;
}
}
|
asedl/XpaPDPM_Calc | TestData/HomeHealthGrouper/HH_PPS_Java_source/com/mmm/cms/homehealth/v3110_1/HomeHealthRecordClinicalValidator_v3110_1.java | <reponame>asedl/XpaPDPM_Calc<filename>TestData/HomeHealthGrouper/HH_PPS_Java_source/com/mmm/cms/homehealth/v3110_1/HomeHealthRecordClinicalValidator_v3110_1.java
/*
* Home Health Grouper
* Developer for the Center for Medicare and Medicaid Services CMS
* by 3M Health Information Systems for CMS Home Health.
*
* All code is provided as is.
*/
package com.mmm.cms.homehealth.v3110_1;
import com.mmm.cms.homehealth.proto.HomeHealthGrouperIF;
import com.mmm.cms.homehealth.proto.record.HomeHealthRecordIF;
import com.mmm.cms.homehealth.proto.DiagnosisCodeIF;
import com.mmm.cms.homehealth.proto.HomeHealthEventListenerIF;
import com.mmm.cms.util.ScoringEventFormatter;
import java.util.Collection;
/**
* This builds on the abstract validator for the OASIS-C formatted record and
* provides specific validation for the diagnosis related columns as they
* pertain to the Clinical/Functional scoring model.
*
* @author 3M Health Information Systems for CMS Home Health
*/
public class HomeHealthRecordClinicalValidator_v3110_1 extends AbstractBaseValidator_v3110 {
/**
* constructs the validator with grouping engine as a reference
*
* @param grouper
*/
public HomeHealthRecordClinicalValidator_v3110_1(HomeHealthGrouperIF grouper) {
super(grouper);
}
/**
*
* Ensure a valid set of codes, and determines which codes to use in
* scoring.
*
* Psuedo code lines: 553 thru 665
*
* @param record
* @return true if the PDX is an allowable Principal code
*/
@Override
public boolean validateDiagnosisCodes(HomeHealthRecordIF record, Collection<HomeHealthEventListenerIF> listeners) {
boolean valid = true;
int idx;
DiagnosisCodeIF code;
DiagnosisCodeIF tmpCode;
code = record.getPRIMARY_DIAG_ICD();
if (code.isSecondaryOnly()) {
code.setValidForScoring(false);
valid = false;
}
//------------------------------
// Part 1: Determine if the code is valid or an
// optional payment code in order to determine which codes
// to use for scoring (pseudo code lines 595 thru 618)
//------------------------------
for (idx = 0; idx < 6; idx++) {
code = record.getDiagnosisCode(idx);
if (code.isValidForScoring()) {
// set the options codes as invalid for scoring
tmpCode = record.getOptionalDiagnosisCode3(idx);
if (!tmpCode.isEmpty()) {
tmpCode.setValidForScoring(false);
ScoringEventFormatter.fireIssueEvent(listeners, grouper, null,
"Diagnosis Code '" + tmpCode.getCode() + "' at position "
+ (idx + 7) + " is NOT valid for scoring because code in position "
+ (idx + 1) + " is valid for scoring.");
}
tmpCode = record.getOptionalDiagnosisCode4(idx);
if (!tmpCode.isEmpty()) {
tmpCode.setValidForScoring(false);
ScoringEventFormatter.fireIssueEvent(listeners, grouper, null,
"Diagnosis Code '" + tmpCode.getCode() + "' at position "
+ (idx + 13) + " is NOT valid for scoring because code in position "
+ (idx + 1) + " is valid for scoring.");
}
} else {
// check for being an optional VCode
if (!code.isOptionalPaymentCode()) {
// set the payment codes to not be scorable
tmpCode = record.getOptionalDiagnosisCode3(idx);
if (!tmpCode.isEmpty()) {
tmpCode.setValidForScoring(false);
ScoringEventFormatter.fireIssueEvent(listeners, grouper, null,
"Diagnosis Code '" + tmpCode.getCode() + "' at position "
+ (idx + 7) + " is NOT valid for scoring because code in position "
+ (idx + 1) + " is NOT an optional payment code.");
}
tmpCode = record.getOptionalDiagnosisCode4(idx);
if (!tmpCode.isEmpty()) {
tmpCode.setValidForScoring(false);
ScoringEventFormatter.fireIssueEvent(listeners, grouper, null,
"Diagnosis Code '" + tmpCode.getCode() + "' at position "
+ (idx + 13) + " is NOT valid for scoring because code in position "
+ (idx + 1) + " is NOT an optional payment code.");
}
} else if (record.getOptionalDiagnosisCode4(idx).isValidCode()
&& !record.getOptionalDiagnosisCode4(idx).isSecondaryOnly()) {
// the optional column 4 code is not valid
// so skip it - this is the default indicator on any
// invalid codes, so nothting to really do
tmpCode = record.getOptionalDiagnosisCode4(idx);
if (!tmpCode.isEmpty()) {
tmpCode.setValidForScoring(false);
ScoringEventFormatter.fireIssueEvent(listeners, grouper, null,
"Diagnosis Code '" + tmpCode.getCode() + "' at position "
+ (idx + 13) + " is NOT valid for scoring because it is NOT a manifestation code");
}
}
// pseudo code line 614 - 616
if (record.getOptionalDiagnosisCode3(idx).isValidCode()
&& record.getOptionalDiagnosisCode3(idx).isSecondaryOnly()) {
// the options column 3 code is valid
code.setValidForScoring(false);
record.getOptionalDiagnosisCode3(idx).setValidForScoring(false);
valid = false;
}
}
}
//------------------------------
// Part 2: Flag acceptable pairing of manifestion diagnosis
// in M0240 with etiologies; disqualify others
// (pseudo code lines 619 thru 641)
//------------------------------
for (idx = 1; idx < 6; idx++) {
code = record.getDiagnosisCode(idx);
// only check codes that are still valid to score
if (code.isValidCode() && code.isSecondaryOnly()) {
tmpCode = record.getDiagnosisCode(idx - 1);
// if the previous code is not valid or is an E-code, optional payment code
// or secondary only code, then this secondary is not valid
// for scoring.
if (!tmpCode.isValidCode() || tmpCode.isVCode()
|| tmpCode.isExternalCauseCode()
|| tmpCode.isSecondaryOnly()) {
// do not score this code
code.setValidForScoring(false);
valid = false;
} else {
// check if the previous code is a valid
// etiology pair for the current code
if ("785.4".equals(code.getCode())) {
// make sure the previous code is valid
// and not on the exclusion list
if (tmpCode.isValidCode() && !code.isEtiologyInPairingList(tmpCode)) {
code.setValidForScoring(true);
} else {
code.setValidForScoring(false);
valid = false;
}
} else if (code.isEtiologyInPairingList(tmpCode)) {
// the codes on this list are a list of inclusions.
// so, if the code is found, then it is an acceptable
// pair and this secondary-only code can be
// included in the scoring
code.setValidForScoring(true);
// the previous code may not be a scorable code
// unless it is a paired code
tmpCode.setValidForScoring(true);
} else {
// The code does not have an acceptable code pair
// so don't score this code
code.setValidForScoring(false);
valid = false;
}
}
}
}
//------------------------------
// Part 3: Flag acceptable pairing of manifestion diagnosis
// in m0246x4 with etiologies; disqualify others
// (pseudo code lines 643 thru 665)
//------------------------------
for (idx = 0; idx < 6; idx++) {
code = record.getOptionalDiagnosisCode4(idx);
// only check codes that are still valid to score
if (code.isValidForScoring()
&& code.isSecondaryOnly()) {
tmpCode = record.getOptionalDiagnosisCode3(idx);
// if the previous code is an E, V, or secondary only
if (tmpCode.isVCode()
|| tmpCode.isExternalCauseCode()
|| tmpCode.isSecondaryOnly()) {
// do not score this code
code.setValidForScoring(false);
valid = false;
} else {
// check if the previous code is a valid
// etiology pair for the current code
if ("785.4".equals(code.getCode())) {
// make sure the previous code is valid
// and not on the exclusion list
if (tmpCode.isValidCode() && !code.isEtiologyInPairingList(tmpCode)) {
code.setValidForScoring(true);
} else {
code.setValidForScoring(false);
valid = false;
}
} else if (code.isEtiologyInPairingList(tmpCode)) {
// the codes on this list are a list of inclusions.
// so, if the code is found, then it is an acceptable
// pair and this secondary-only code can be
// included in the scoring
code.setValidForScoring(true);
// Although pseudo code line 655 does not make this
// check, it seems appropriate
// only score valid casemix codes that are paired
if (tmpCode.isValidCode()) {
tmpCode.setValidForScoring(true);
}
} else {
// The code does not have an acceptable code pair
code.setValidForScoring(false);
valid = false;
}
}
}
}
//------------------------------
// Part 4: ensure that there are no manifestation codes in
// column 3
//------------------------------
for (idx = 0; idx < 6; idx++) {
code = record.getOptionalDiagnosisCode3(idx);
if (code.isSecondaryOnly()) {
code.setValidForScoring(false);
}
}
return valid;
}
}
|
weucode/COMFORT | artifact_evaluation/data/codeCoverage/fuzzilli_generate/899.js | <reponame>weucode/COMFORT<filename>artifact_evaluation/data/codeCoverage/fuzzilli_generate/899.js
// Fuzzilli-generated JavaScript test case (artifact-evaluation code-coverage corpus).
// The statement sequence itself is the fuzzer's input, so the code is intentionally
// odd: dead stores, an unused Object reference, and an empty loop body.
function main() {
    // v1: array of doubles (Fuzzilli inferred this as an Array object).
    var v1 = [1000000000000.0,1000000000000.0,1000000000000.0,1000000000000.0];
    // v3: array of integers (Fuzzilli inferred this as an Array object).
    var v3 = [1337,1337,1337,1337,1337];
    var v4 = v3; // alias of v3
    var v5 = Object; // unused; exercises a global builtin lookup
    // Array + number triggers ToPrimitive: v4 becomes "1337,1337,1337,1337,1337"
    // and the result is string concatenation (Fuzzilli annotation: .primitive).
    var v7 = v4 + 1337;
    // for-in over v1's indices with an intentionally empty body (coverage probe).
    for (var v8 in v1) {
    }
}
main();
|
MirahImage/bosh-bootloader | gcp/client_provider.go | package gcp
import (
"context"
"fmt"
"net/http"
"github.com/cloudfoundry/bosh-bootloader/storage"
compute "google.golang.org/api/compute/v1"
"golang.org/x/oauth2/google"
"golang.org/x/oauth2/jwt"
)
// gcpHTTPClientFunc builds an OAuth2-authenticated HTTP client from the given
// JWT service-account config, using a background context for token fetches.
// It backs the gcpHTTPClient variable so tests can substitute a fake.
func gcpHTTPClientFunc(config *jwt.Config) *http.Client {
	ctx := context.Background()
	return config.Client(ctx)
}
var gcpHTTPClient = gcpHTTPClientFunc
// NewClient builds a GCP compute Client from the given service-account
// configuration. A non-empty basePath overrides both the OAuth token URL and
// the compute API endpoint (used to point at test servers). The final region
// lookup doubles as a credential/connectivity check before returning.
func NewClient(gcpConfig storage.GCP, basePath string) (Client, error) {
	jwtConfig, err := google.JWTConfigFromJSON([]byte(gcpConfig.ServiceAccountKey), compute.ComputeScope)
	if err != nil {
		return Client{}, fmt.Errorf("parse service account key: %s", err)
	}
	if basePath != "" {
		jwtConfig.TokenURL = basePath
	}

	computeService, err := compute.New(gcpHTTPClient(jwtConfig))
	if err != nil {
		return Client{}, fmt.Errorf("create gcp client: %s", err)
	}
	if basePath != "" {
		computeService.BasePath = basePath
	}

	gcpClient := Client{
		computeClient: gcpComputeClient{service: computeService},
		projectID:     gcpConfig.ProjectID,
		zone:          gcpConfig.Zone,
	}

	if _, err = gcpClient.GetRegion(gcpConfig.Region); err != nil {
		return Client{}, fmt.Errorf("get region: %s", err)
	}
	return gcpClient, nil
}
|
morecar/azure-libraries-for-java | azure-mgmt-network/src/main/java/com/microsoft/azure/management/network/model/HasBackendPort.java | /**
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*/
package com.microsoft.azure.management.network.model;
import com.microsoft.azure.management.apigeneration.Fluent;
/**
 * An interface representing a model's ability to reference a backend port.
 * <p>
 * The three nested stage groupings expose the same {@code toBackendPort}
 * operation at different points of the resource lifecycle: initial definition,
 * update of an existing resource, and definition nested inside a parent update.
 */
@Fluent
public interface HasBackendPort {
    /**
     * @return the backend port number the network traffic is sent to
     */
    int backendPort();

    /**
     * Grouping of definition stages involving specifying a backend port.
     */
    interface DefinitionStages {
        /**
         * The stage of a definition allowing to specify the backend port.
         * @param <ReturnT> the next stage of the definition
         */
        interface WithBackendPort<ReturnT> {
            /**
             * Specifies a backend port to send network traffic to.
             * <p>
             * If not specified, the same backend port number is assumed as that used by the frontend.
             * @param port a port number
             * @return the next stage of the definition
             */
            ReturnT toBackendPort(int port);
        }
    }

    /**
     * Grouping of update stages involving modifying a backend port.
     */
    interface UpdateStages {
        /**
         * The stage of an update allowing to modify the backend port.
         * @param <ReturnT> the next stage of the update
         */
        interface WithBackendPort<ReturnT> {
            /**
             * Specifies a backend port to send network traffic to.
             * @param port a port number
             * @return the next stage of the update
             */
            ReturnT toBackendPort(int port);
        }
    }

    /**
     * Grouping of definition stages applicable as part of a resource update, involving modifying the backend port.
     */
    interface UpdateDefinitionStages {
        /**
         * The stage of a definition allowing to specify the backend port.
         * @param <ReturnT> the next stage of the definition
         */
        interface WithBackendPort<ReturnT> {
            /**
             * Specifies a backend port to send network traffic to.
             * <p>
             * If not specified, the same backend port number is assumed as that used by the frontend.
             * @param port a port number
             * @return the next stage of the definition
             */
            ReturnT toBackendPort(int port);
        }
    }
}
|
BhasherBEL/LEPL1503 | S3/global_local_variables.c | <reponame>BhasherBEL/LEPL1503
int result;
/*
 * Pedagogical example: C passes parameters BY VALUE, so the assignment below
 * modifies only the local copies — the caller's variables are untouched
 * (which is why main prints 5, not 11, for "sum1").
 */
void sum1(int a1, int b1) {
    a1 = a1 + b1;
}
/*
 * Exercise driver demonstrating three ways of "returning" a value:
 *   sum1 — pass by value (caller unchanged, prints 5),
 *   sum2 — result stored in the global `result`,
 *   sum3 — result written through an out-pointer.
 *
 * Fix: the original relied on implicit declarations of printf/sum2/sum3
 * (no #include <stdio.h>, sum2/sum3 defined after main), which is invalid
 * since C99 and a hard error on modern compilers.  Local prototypes are
 * declared below; in real code, prefer #include <stdio.h> plus file-scope
 * prototypes at the top of the file.
 */
void main(int argc, char **argv) {
    int printf(const char *format, ...);
    void sum1(int a1, int b1);
    void sum2(int a, int b);
    void sum3(int *a, int *b, int *r);

    int a1 = 5, b1 = 6;
    sum1(a1, b1);
    printf("sum1: %d\n", a1); /* still 5: sum1 got copies */

    int a2 = 3, b2 = 7;
    sum2(a2, b2);
    printf("sum2: %d\n", result); /* read back from the global */

    int a3 = 1, b3 = 8;
    int r;
    sum3(&a3, &b3, &r);
    printf("sum3: %d\n", r); /* written through the out-pointer */
}
// ANSWER
// QUESTION 1
// 5
// QUESTION 2
/*
 * QUESTION 2 answer: returns the sum through the file-scope global `result`
 * (declared at the top of this file) instead of a return value.
 * Not reentrant: concurrent or nested calls would clobber each other.
 */
void sum2(int a, int b)
{
    result = a + b;
}
/*
 * Writes the sum of the values pointed to by `a` and `b` into the integer
 * pointed to by `r` (out-parameter idiom: the "return value" travels
 * through the third pointer).
 */
void sum3 (int *a, int *b, int *r)
{
    int total = *a + *b;
    *r = total;
}
bdshadow/kubernetes-client-android | kubernetes-model/vendor/github.com/openshift/origin/tools/junitreport/pkg/parser/stack/interfaces.go | <gh_stars>0
/**
* Copyright (C) 2015 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package stack
import "github.com/openshift/origin/tools/junitreport/pkg/api"
// TestDataParser knows how to take raw test data and extract the useful
// information from it.  All Extract* methods follow the Go "comma ok"
// convention: the boolean reports whether the line actually contained the
// requested datum.
type TestDataParser interface {
	// MarksBeginning determines if the line marks the beginning of a test case
	MarksBeginning(line string) bool

	// ExtractName extracts the name of the test case from test output lines
	ExtractName(line string) (name string, succeeded bool)

	// ExtractResult extracts the test result from a test output line
	ExtractResult(line string) (result api.TestResult, succeeded bool)

	// ExtractDuration extracts the test duration from a test output line
	ExtractDuration(line string) (duration string, succeeded bool)

	// ExtractMessage extracts a message (e.g. for signalling why a failure or skip occurred) from a test output line
	ExtractMessage(line string) (message string, succeeded bool)

	// MarksCompletion determines if the line marks the completion of a test case
	MarksCompletion(line string) bool
}
// TestSuiteDataParser knows how to take raw test suite data and extract the
// useful information from it.  Like TestDataParser, extraction methods use
// the "comma ok" convention for signalling whether a datum was found.
type TestSuiteDataParser interface {
	// MarksBeginning determines if the line marks the beginning of a test suite
	MarksBeginning(line string) bool

	// ExtractName extracts the name of the test suite from a test output line
	ExtractName(line string) (name string, succeeded bool)

	// ExtractProperties extracts any metadata properties of the test suite from a test output line
	ExtractProperties(line string) (properties map[string]string, succeeded bool)

	// MarksCompletion determines if the line marks the completion of a test suite
	MarksCompletion(line string) bool
}
|
SENA-CEET/1262154G1G2-Trimestre-3 | Programming/src/main/java/co/edu/sena/programming/ejemplo03/Cheking.java | <reponame>SENA-CEET/1262154G1G2-Trimestre-3<filename>Programming/src/main/java/co/edu/sena/programming/ejemplo03/Cheking.java
package co.edu.sena.programming.ejemplo03;
/**
 * Created by Enrique on 13/06/2017.
 *
 * Empty subclass used in the inheritance example: it inherits all behaviour
 * from its parent {@code Acount} unchanged.
 *
 * NOTE(review): "Cheking" and "Acount" look like misspellings of
 * "Checking"/"Account"; renaming would break existing references, so the
 * typos are only flagged here.
 */
public class Cheking extends Acount {
}
|
carvalhoandre/ws_legio_app | legioApp/src/screens/founders/frank.js | import React, { Component } from "react";
import { Text, View, StyleSheet, ScrollView } from 'react-native'
import commonStyles from '../../styles/commonStyles'
// Screen displaying the prayer for the beatification of the Legion of Mary's
// founder.  Static content only: no props, no state.
// NOTE(review): contentContainerStyle references styles.scrollView, which is
// not defined in the StyleSheet below, so the prop resolves to undefined —
// confirm whether a scrollView style was intended.
// (The <NAME> placeholders come from the corpus anonymization, not the app.)
export default class Frank extends Component {
    render() {
        return (
            <ScrollView
                contentContainerStyle={styles.scrollView}>
                <View style={styles.container}>
                    <Text style={styles.title}>Oração para pedir a beatificação de <NAME></Text>
                    <Text style={styles.paragraph}>
                        Deus Pai, Vós inspirastes ao vosso servo <NAME>, um profundo discernimento do mistério de vossa Igreja, Corpo de Cristo, e do lugar de Maria, Mãe de Jesus, nesse mistério. Em seu imenso desejo de compartilhar esse discernimento com outros e, com filial confiança em Maria, ele fundou uma Legião, para ser um sinal do maternal Amor da Virgem pelo mundo e um meio de engajar todos os seus filhos no trabalho de evangelização da Igreja. Nós vos agradecemos, Pai, pelas graças a ele concedidas e pelos benefícios advindos à Igreja, por sua corajosa e radiante fé. Agora, confiadamente, rogamos que, por sua intercessão, nos concedais a graça que agora vos suplicamos...
                    </Text>
                    <Text style={styles.paragraph}>
                        Humildemente vos pedimos, também que, se de acordo com Vossa Vontade, a santidade de sua vida possa ser reconhecida pela Igreja, o mais breve possível, para a glória de Vosso Nome. É o que vos pedimos, por Cristo, Nosso Senhor, Amém!
                    </Text>
                    <Text style={styles.observation}>(Com aprovação eclesiástica)</Text>
                </View>
            </ScrollView>
        )
    }
}
// Styles for the Frank screen.  Shared fonts/colors/sizes come from
// commonStyles so typography stays consistent across screens.
// NOTE(review): the component above also references styles.scrollView,
// which is not defined here — confirm whether it should be added.
const styles = StyleSheet.create({
    // Centered page with generous padding so the prayer text breathes.
    container: {
        flex: 1,
        paddingTop: 80,
        paddingEnd: 50,
        paddingLeft: 20,
        paddingRight: 20,
        alignItems: 'center',
        justifyContent: 'center',
    },
    // Screen heading.
    title: {
        fontFamily: commonStyles.fontFamily.title,
        color: commonStyles.colors.primaryColor,
        fontSize: commonStyles.fontSize.medium,
        marginBottom: 20,
        textAlign: 'center'
    },
    // Body text of the prayer, justified for readability.
    paragraph: {
        lineHeight: 30,
        textAlign: 'justify',
        fontFamily: commonStyles.fontFamily.text,
        marginBottom: 10,
        fontSize: commonStyles.fontSize.small,
    },
    // Small footnote ("with ecclesiastical approval").
    observation: {
        fontFamily: commonStyles.fontFamily.light,
        color: commonStyles.colors.textColorLight,
        marginBottom: 15,
    }
})
trevorHsu/docxload | src/Complier/parser/toComponentConf.js | <reponame>trevorHsu/docxload
import { syncMap, flatArray } from '@src/utils/functional'
import { imgSrcToBase64 } from '@src/utils/parse'
import { COMPONENT_TYPES } from '../types'
// Component type constants used by the docx builder.
const { SECTION, TITLE, TABLE, PARAGRAPH, TEXT, ROW, CELL, IMAGE, BREAK, TEMPLATE } = COMPONENT_TYPES

// Maps supported template tag names to their docx component types.
// Tags absent from this map are treated as plain text (or ignored).
const TAG_MAP = {
    page: SECTION,
    title: TITLE,
    p: PARAGRAPH,
    table: TABLE,
    row: ROW,
    cell: CELL,
    span: TEXT,
    img: IMAGE,
    br: BREAK,
    template: TEMPLATE,
}
/**
 * Builds a component configuration object from a parsed tag node:
 * recursively converts the children, then applies per-type fix-ups.
 * Returns null when the node must be dropped (e.g. an <img> without a src).
 */
async function componentConfFactory(tagData) {
    let result = {
        type: TAG_MAP[tagData.tag],
        // Recursively convert children; filter(item => item) drops the nulls
        // returned for unusable nodes.
        children: (await syncMap(tagData.children, item => toComponentConf(item))).filter(item => item),
        attrs: tagData.attrs
    }
    switch (TAG_MAP[tagData.tag]) {
        case IMAGE:
            if (!tagData.attrs || !tagData.attrs.src) { // no image source: drop this config entirely
                result = null
            } else if (tagData.attrs.src) {
                // Inline the image as base64 so the generated document is self-contained.
                // (result.attrs aliases tagData.attrs, so this mutation is visible in result.)
                tagData.attrs.src = await imgSrcToBase64(tagData.attrs.src)
            }
            break
        case TEXT:
            // A text node carries its value directly instead of children.
            delete result.children
            result.value = tagData.children && tagData.children[0] ? tagData.children[0].text : ''
            break
    }
    return result
}
/**
 * Converts a parsed tag tree into a component configuration tree.
 *
 * @param tagData parsed node: { tag, attrs, children } or { text } for bare text.
 * @param rootTag optional tag name treated as the document root.
 * @returns a config object, an array (for transparent <template> wrappers),
 *          or null for unsupported nodes.
 */
async function toComponentConf(tagData, rootTag) {
    let result = null
    if (typeof rootTag !== 'undefined' && tagData.tag === rootTag) {
        result = {
            type: rootTag,
            children: (await syncMap(tagData.children, item => toComponentConf(item))).filter(item => item)
        }
    } else if (TAG_MAP[tagData.tag] === TEMPLATE) { // a <template> is transparent: return its children as an array, effectively ignoring the wrapper
        result = (await syncMap(tagData.children, item => toComponentConf(item))).filter(item => item)
    } else if (TAG_MAP[tagData.tag]) {
        result = await componentConfFactory(tagData)
    } else if (typeof tagData.text !== 'undefined') {
        // Bare text node: wrap it in a TEXT component.
        result = {
            type: TEXT,
            value: tagData.text,
            attrs: {}
        }
    }
    if (
        Object.prototype.toString.call(result) === '[object Object]'
        && result.children
    ) { // flatten the array entries that <template> children produced back into a flat child list
        result.children = flatArray(result.children)
    }
    return result
}
export default toComponentConf
|
rslakra/Java | java/src/main/java/com/rslakra/jdk8/OptionalDemo.java | /**
*
*/
package com.rslakra.jdk8;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
/**
 * Small demo of {@link IntStream} factory methods (range, rangeClosed,
 * iterate) and of collecting streams into lists.
 *
 * @author <NAME> (<EMAIL>)
 * @Created Mar 8, 2019 9:49:32 PM
 * @version 1.0.0
 */
public class OptionalDemo<T> {

    /** Backing list populated by the {@code initOptional} overloads. */
    private List<T> listOfOptional;

    /**
     * Prints several IntStream construction examples up to {@code limit}.
     *
     * @param limit upper bound / element count for the demo streams
     * @param value unused; kept only for backward compatibility with callers
     */
    public void initOptional(int limit, boolean value) {
        System.out.println("IntStream.range:");
        IntStream.range(1, limit).forEach(System.out::println);
        System.out.println("IntStream.rangeClosed:");
        IntStream.rangeClosed(1, limit).forEach(System.out::println);
        System.out.println("IntStream.iterate-Even:");
        IntStream.iterate(2, i -> i + 2).limit(limit).forEach(System.out::println);
        System.out.println("IntStream.iterate-Odd:");
        IntStream.iterate(1, i -> i + 2).limit(limit).forEach(System.out::println);
        System.out.println("IntStream.iterate-Counting:");
        IntStream.iterate(1, i -> i + 1).limit(limit).forEach(System.out::println);
    }

    /**
     * Prints the numbers 1..limit inclusive.
     *
     * @param limit inclusive upper bound
     */
    public void counting(int limit) {
        System.out.println("Counting [1-" + limit + "]");
        IntStream.rangeClosed(1, limit).forEach(System.out::println);
        System.out.println();
    }

    /**
     * Prints the first {@code limit} even numbers (2, 4, ...).
     *
     * @param limit how many numbers to print
     */
    public void even(int limit) {
        System.out.println(limit + " even numbers:");
        IntStream.iterate(2, i -> i + 2).limit(limit).forEach(System.out::println);
        System.out.println();
    }

    /**
     * Prints the squares of 1..limit inclusive.
     *
     * @param limit inclusive upper bound of the numbers being squared
     */
    public void squared(int limit) {
        System.out.println(limit + " squared numbers:");
        IntStream.rangeClosed(1, limit).map(i -> i * i).forEach(System.out::println);
        System.out.println();
    }

    /**
     * Prints the first {@code limit} Fibonacci numbers (0, 1, 1, 2, 3, ...).
     *
     * <p>Fixed: the original used {@code IntStream.iterate(0, i -> i + i)},
     * which yields 0, 0, 0, ... (i + i doubles a single value instead of
     * summing the two previous terms) and printed an "even numbers" header.
     *
     * @param limit how many Fibonacci numbers to print
     */
    public void fibonacci(int limit) {
        System.out.println(limit + " fibonacci numbers:");
        int previous = 0;
        int current = 1;
        for (int i = 0; i < limit; i++) {
            System.out.println(previous);
            int next = previous + current;
            previous = current;
            current = next;
        }
        System.out.println();
    }

    /**
     * Prints the first {@code limit} odd numbers (1, 3, ...).
     *
     * @param limit how many numbers to print
     */
    public void odd(int limit) {
        System.out.println(limit + " odd numbers:");
        IntStream.iterate(1, i -> i + 2).limit(limit).forEach(System.out::println);
        System.out.println();
    }

    /**
     * Fills the backing list with the boxed integers 1..limit.
     * The unchecked cast mirrors the original demo code (T is assumed
     * compatible with Integer by the caller).
     *
     * @param limit inclusive upper bound
     */
    @SuppressWarnings("unchecked")
    public void initOptional(int limit) {
        listOfOptional = (List<T>) IntStream.rangeClosed(1, limit).boxed().collect(Collectors.toList());
    }

    /**
     * Copies the given list into the backing list.
     *
     * @param list source elements
     */
    public void initOptional(List<T> list) {
        listOfOptional = list.stream().collect(Collectors.toList());
    }

    /** Prints each element of the backing list on its own line. */
    public void print() {
        listOfOptional.forEach(System.out::println);
    }

    /**
     * Demo entry point.
     *
     * @param args ignored
     */
    public static void main(String[] args) {
        OptionalDemo<Integer> demoOptional = new OptionalDemo<>();
        demoOptional.initOptional(Arrays.asList());
        demoOptional.print();
        demoOptional.initOptional(10);
        demoOptional.print();
        demoOptional.counting(10);
        demoOptional.even(10);
        demoOptional.odd(10);
        demoOptional.squared(5);
    }
}
|
deniscostadsc/playground | solutions/beecrowd/1963/1963.cpp | <filename>solutions/beecrowd/1963/1963.cpp<gh_stars>10-100
#include <cstdint>
#include <iomanip>
#include <iostream>
// beecrowd 1963: for every pair of prices read from stdin, print the
// percentage growth from the first to the second with two decimal places.
int main() {
    double oldPrice = 0.0;
    double newPrice = 0.0;
    // fixed/setprecision are sticky stream flags, so set them once up front.
    std::cout << std::fixed << std::setprecision(2);
    while (std::cin >> oldPrice >> newPrice) {
        const double growthPercent = 100 * (newPrice - oldPrice) / oldPrice;
        std::cout << growthPercent << "%" << std::endl;
    }
    return 0;
}
|
crashvb/docker-sign-verify | tests/test_archiverepositories.py | <reponame>crashvb/docker-sign-verify
#!/usr/bin/env python
# pylint: disable=redefined-outer-name
"""Manifest tests."""
import pytest
from docker_registry_client_async import FormattedSHA256, ImageName
from docker_sign_verify import ArchiveRepositories
from .testutils import get_test_data
@pytest.fixture
def archive_repositories(archive_repositories_raw: bytes) -> ArchiveRepositories:
    """Provides an ArchiveRepositories built fresh from the raw sample manifest bytes."""
    # Do not use caching; get a new instance for each test so no state leaks
    # between tests.
    return ArchiveRepositories(archive_repositories_raw)
@pytest.fixture
def archive_repositories_raw(request) -> bytes:
    """Provides the raw bytes of the sample archive manifest (archive_repositories.json test asset)."""
    return get_test_data(request, __name__, "archive_repositories.json")
@pytest.fixture(
    params=[
        # A tagged busybox reference with an explicit digest; must exist in
        # the sample manifest fixture.
        "busybox:1.30.1@sha256:a57c26390d4b78fd575fac72ed31f16a7a2fa3ebdccae4598513e8964dace9b2"
    ]
)
def image_name(request) -> ImageName:
    """Provides a 'known good' image name parsed from the parameterized reference."""
    yield ImageName.parse(request.param)
def test___init__(archive_repositories: ArchiveRepositories):
    """Test that an ArchiveRepositories can be instantiated from raw bytes."""
    assert archive_repositories
def test___bytes__(
    archive_repositories: ArchiveRepositories, archive_repositories_raw: bytes
):
    """Test that __bytes__ round-trips the raw manifest bytes unchanged."""
    assert bytes(archive_repositories) == archive_repositories_raw
def test___str__(
    archive_repositories: ArchiveRepositories, archive_repositories_raw: bytes
):
    """Test that __str__ returns the raw manifest decoded as UTF-8."""
    assert str(archive_repositories) == archive_repositories_raw.decode("utf-8")
def test_get_tag(archive_repositories: ArchiveRepositories, image_name: ImageName):
    """Test repository tag retrieval: a known image resolves to its digest, unknown names return a falsy value."""
    tag = archive_repositories.get_tag(image_name)
    assert tag
    # The stored tag is the image's digest in the sample manifest.
    assert FormattedSHA256(tag) == image_name.digest
    assert not archive_repositories.get_tag(ImageName("does_not_exist"))
    assert not archive_repositories.get_tag(ImageName("does_not", tag="exist"))
@pytest.mark.parametrize(
    "name", ["image1", "image2:tag2", "library/image3", "library/image4:tag4"]
)
def test_set_tag(
    archive_repositories: ArchiveRepositories, image_name: ImageName, name: str
):
    """Test repository tag assignment for bare, tagged, and namespaced names."""
    # Sanity-check retrieval against the known-good fixture image first.
    tag = archive_repositories.get_tag(image_name)
    assert tag
    assert FormattedSHA256(tag) == image_name.digest

    # Assign a synthetic digest to the parameterized name and read it back.
    digest = FormattedSHA256.calculate(name.encode("utf-8"))
    name = ImageName.parse(name)
    archive_repositories.set_tag(name, digest)
    assert FormattedSHA256(archive_repositories.get_tag(name)) == digest
|
Niekvdplas/ktrain | ktrain/text/ner/predictor.py | <filename>ktrain/text/ner/predictor.py
from ... import utils as U
from ...imports import *
from ...predictor import Predictor
from .. import textutils as TU
from .preprocessor import NERPreprocessor
class NERPredictor(Predictor):
    """
    Predicts NER classes (token-level tags) for string representations of
    sentences, using a trained keras sequence-labeling model and the
    NERPreprocessor it was trained with.
    """

    def __init__(self, model, preproc, batch_size=U.DEFAULT_BS):
        # model: trained keras sequence-labeling model
        # preproc: the NERPreprocessor used during training
        # batch_size: number of sentences fed to the model per batch
        if not isinstance(model, keras.Model):
            raise ValueError("model must be of instance keras.Model")
        if not isinstance(preproc, NERPreprocessor):
            # if type(preproc).__name__ != 'NERPreprocessor':
            raise ValueError("preproc must be a NERPreprocessor object")
        self.model = model
        self.preproc = preproc
        self.c = self.preproc.get_classes()
        self.batch_size = batch_size

    def get_classes(self):
        # Label set (tag classes) captured from the preprocessor at init time.
        return self.c

    def predict(
        self,
        sentences,
        return_proba=False,
        merge_tokens=False,
        custom_tokenizer=None,
        return_offsets=False,
    ):
        """
        ```
        Makes predictions for a string-representation of a sentence
        Args:
            sentences(list|str): either a single sentence as a string or a list of sentences
            return_proba(bool): If return_proba is True, returns probability distribution for each token
            merge_tokens(bool): If True, tokens will be merged together by the entity
                                to which they are associated:
                                ('Paul', 'B-PER'), ('Newman', 'I-PER') becomes ('<NAME>', 'PER')
            custom_tokenizer(Callable): If specified, sentence will be tokenized based on custom tokenizer
            return_offsets(bool): If True, will return the character offsets in the results [experimental]
        Returns:
            list: If sentences is a string representation of single sentence:
                  list containing a tuple for each token in sentence
                  IF sentences is a list of sentences:
                  list of lists: Each inner list represents a sentence and contains a tuple for each token in sentence
            If return_proba and return_offsets are both True, then tuples are of the form: (token, label, probability, character offsets)
        ```
        """
        is_array = not isinstance(sentences, str)
        if not isinstance(sentences, (str, list)):
            raise ValueError(
                "Param sentence must be either string-representation of a sentence or a list of sentence strings."
            )
        # if return_proba and merge_tokens:
        # raise ValueError(
        # "return_proba and merge_tokens are mutually exclusive with one another."
        # )
        if isinstance(sentences, str):
            sentences = [sentences]
        # Language is detected once over all inputs and drives tokenization
        # and token-merge separator selection.
        lang = TU.detect_lang(sentences)

        # batchify
        num_chunks = math.ceil(len(sentences) / self.batch_size)
        batches = U.list2chunks(sentences, n=num_chunks)

        # process batches
        results = []
        for batch in batches:
            nerseq = self.preproc.preprocess(
                batch, lang=lang, custom_tokenizer=custom_tokenizer
            )
            if not nerseq.prepare_called:
                nerseq.prepare()
            nerseq.batch_size = len(batch)
            x_true, _ = nerseq[0]
            lengths = nerseq.get_lengths(0)
            y_pred = self.model.predict_on_batch(x_true)
            y_labels = self.preproc.p.inverse_transform(y_pred, lengths)
            # TODO: clean this up
            if return_proba:
                try:
                    probs = np.max(y_pred, axis=2)
                except Exception:  # narrowed from bare except; see #316
                    probs = (
                        y_pred[0].numpy().tolist()
                    )  # TODO: remove after confirmation (#316)
                for i, (x, y, prob) in enumerate(zip(nerseq.x, y_labels, probs)):
                    if return_offsets:
                        # Fixed: the original indexed `sentences[i]` with the
                        # batch-local index, which paired offsets with the
                        # wrong sentence whenever there was more than one batch.
                        offsets = TU.extract_offsets(
                            batch[i], tokens=[entry[0] for entry in x]
                        )
                        result = [
                            (
                                x[j],
                                y[j],
                                prob[j],
                                (offsets[j]["start"], offsets[j]["end"]),
                            )
                            for j in range(len(x))
                        ]
                    else:
                        result = [(x[j], y[j], prob[j]) for j in range(len(x))]
                    if merge_tokens:
                        result = self.merge_tokens(result, lang, True)
                    results.append(result)
            else:
                for i, (x, y) in enumerate(zip(nerseq.x, y_labels)):
                    if return_offsets:
                        # Fixed: batch[i] (not sentences[i]) — see note above.
                        offsets = TU.extract_offsets(
                            batch[i], tokens=[entry[0] for entry in x]
                        )
                        result = list(
                            zip(x, y, [(o["start"], o["end"]) for o in offsets])
                        )
                    else:
                        result = list(zip(x, y))
                    if merge_tokens:
                        result = self.merge_tokens(result, lang, False)
                    results.append(result)
        if not is_array:
            # Single-sentence input: unwrap the outer list.
            results = results[0]
        return results

    def merge_tokens(self, annotated_sentence, lang, return_proba):
        """
        Merges consecutive (token, BIO-tag, ...) tuples of one sentence into
        (entity_text, tag[, offsets][, mean_probability]) tuples.
        annotated_sentence tuples are (token, label[, prob][, offsets]) when
        return_proba is True, else (token, label[, offsets]).
        """
        if TU.is_chinese(
            lang, strict=False
        ):  # strict=False: workaround for langdetect bug on short chinese texts
            # Chinese text is not space-delimited, so merge without separator.
            sep = ""
        else:
            sep = " "

        current_token = ""
        current_tag = ""
        prob_list = []
        entities = []
        start = None
        last_end = None

        for tup in annotated_sentence:
            token = tup[0]
            entity = tup[1]
            if return_proba:
                prob = tup[2]
                offsets = tup[3] if len(tup) > 3 else None
            else:
                offsets = tup[2] if len(tup) > 2 else None
            # BIO labels look like "B-PER"/"I-PER"; "O" has no dash.
            tag = entity.split("-")[1] if "-" in entity else None
            prefix = entity.split("-")[0] if "-" in entity else None

            # not within entity
            if tag is None and not current_token:
                continue
            # beginning of entity
            # (precedence: B, or an I with no open entity, starts a new span)
            elif tag and (prefix == "B" or prefix == "I" and not current_token):
                if current_token:  # consecutive entities: flush the open one
                    entities.append(
                        self._build_merge_tuple(
                            current_token, current_tag, start, last_end, prob_list
                        )
                    )
                    prob_list = []
                    current_token = ""
                    current_tag = None
                    start, end = None, None
                current_token = token
                current_tag = tag
                start = offsets[0] if offsets else None
                last_end = offsets[1] if offsets else None
                if return_proba:
                    prob_list.append(prob)
            # end of entity ("O" label while an entity is open)
            elif tag is None and current_token:
                entities.append(
                    self._build_merge_tuple(
                        current_token, current_tag, start, last_end, prob_list
                    )
                )
                prob_list = []
                current_token = ""
                current_tag = None
                continue
            # within entity (I-prefix continuing the open span)
            elif tag and current_token:
                current_token = current_token + sep + token
                current_tag = tag
                last_end = offsets[1] if offsets else None
                if return_proba:
                    prob_list.append(prob)

        # Flush an entity that runs to the end of the sentence.
        if current_token and current_tag:
            entities.append(
                self._build_merge_tuple(
                    current_token, current_tag, start, last_end, prob_list
                )
            )
        return entities

    def _build_merge_tuple(self, current_token, current_tag, start=None, end=None, prob_list=()):
        # Builds (text, tag[, (start, end)][, mean_prob]).
        # Fixed: default for prob_list was a mutable [] (classic Python
        # pitfall); it was never mutated here, but an immutable () is safer.
        entry = [current_token, current_tag]
        if start is not None and end is not None:
            entry.append((start, end))
        if prob_list:
            entry.append(np.mean(prob_list))
        return tuple(entry)

    def _save_preproc(self, fpath):
        # ensure transformers embedding model is saved in a subdirectory,
        # so the pickled preprocessor can reload it by path
        p = self.preproc.p
        hf_dir = os.path.join(fpath, "hf")
        if p.te is not None:
            os.makedirs(hf_dir, exist_ok=True)
            p.te.model.save_pretrained(hf_dir)
            p.te.tokenizer.save_pretrained(hf_dir)
            p.te.config.save_pretrained(hf_dir)
            p.te_model = hf_dir
        # save preproc
        with open(os.path.join(fpath, U.PREPROC_NAME), "wb") as f:
            pickle.dump(self.preproc, f)
        return
|
enimamms/aoi.js | package/functions/funcs/disableRoleMentions.js | module.exports = async (d) => {
return {
disabledMentions: d.disabledMentions.filter((f) => f !== "roles"),
code: d.command.code.replaceLast(`$disableRoleMentions`, ""),
};
};
|
jkdubr/Proj4 | Pod/Classes/Projection/MOBProjectionEPSG2355.h | <reponame>jkdubr/Proj4
#import "MOBProjection.h"

// Map projection for EPSG:2355 (a concrete MOBProjection subclass).
// No additional public API; the projection parameters are presumably set up
// in the implementation file — not visible from this header.
@interface MOBProjectionEPSG2355 : MOBProjection

@end
|
kakashidinho/HQEngine | ThirdParty-mod/java2cpp/java/util/concurrent/atomic/AtomicReference.hpp | <reponame>kakashidinho/HQEngine<gh_stars>1-10
/*================================================================================
code generated by: java2cpp
author: <NAME>, mailto://<EMAIL>
class: java.util.concurrent.atomic.AtomicReference
================================================================================*/
#ifndef J2CPP_INCLUDE_IMPLEMENTATION
#ifndef J2CPP_JAVA_UTIL_CONCURRENT_ATOMIC_ATOMICREFERENCE_HPP_DECL
#define J2CPP_JAVA_UTIL_CONCURRENT_ATOMIC_ATOMICREFERENCE_HPP_DECL
namespace j2cpp { namespace java { namespace io { class Serializable; } } }
namespace j2cpp { namespace java { namespace lang { class Object; } } }
namespace j2cpp { namespace java { namespace lang { class String; } } }
#include <java/io/Serializable.hpp>
#include <java/lang/Object.hpp>
#include <java/lang/String.hpp>
namespace j2cpp {
namespace java { namespace util { namespace concurrent { namespace atomic {
class AtomicReference;
// Generated JNI proxy for java.util.concurrent.atomic.AtomicReference
// (produced by java2cpp — do not edit by hand).  Each member forwards to the
// corresponding Java method through the j2cpp call helpers.
class AtomicReference
	: public object<AtomicReference>
{
public:
	J2CPP_DECLARE_CLASS
	// One NAME/SIGNATURE pair per proxied constructor/method, indexed in
	// declaration order: 0 = ctor(Object), 1 = ctor(), 2 = get, 3 = set,
	// 4 = compareAndSet, 5 = weakCompareAndSet, 6 = getAndSet, 7 = toString.
	J2CPP_DECLARE_METHOD(0)
	J2CPP_DECLARE_METHOD(1)
	J2CPP_DECLARE_METHOD(2)
	J2CPP_DECLARE_METHOD(3)
	J2CPP_DECLARE_METHOD(4)
	J2CPP_DECLARE_METHOD(5)
	J2CPP_DECLARE_METHOD(6)
	J2CPP_DECLARE_METHOD(7)

	// Wraps an existing JNI object reference without taking ownership checks.
	explicit AtomicReference(jobject jobj)
	: object<AtomicReference>(jobj)
	{
	}

	// Upcasts mirroring the Java type hierarchy (Object, Serializable).
	operator local_ref<java::lang::Object>() const;
	operator local_ref<java::io::Serializable>() const;

	AtomicReference(local_ref< java::lang::Object > const&);
	AtomicReference();
	local_ref< java::lang::Object > get();
	void set(local_ref< java::lang::Object > const&);
	jboolean compareAndSet(local_ref< java::lang::Object > const&, local_ref< java::lang::Object > const&);
	jboolean weakCompareAndSet(local_ref< java::lang::Object > const&, local_ref< java::lang::Object > const&);
	local_ref< java::lang::Object > getAndSet(local_ref< java::lang::Object > const&);
	local_ref< java::lang::String > toString();
}; //class AtomicReference
} //namespace atomic
} //namespace concurrent
} //namespace util
} //namespace java
} //namespace j2cpp
#endif //J2CPP_JAVA_UTIL_CONCURRENT_ATOMIC_ATOMICREFERENCE_HPP_DECL
#else //J2CPP_INCLUDE_IMPLEMENTATION
#ifndef J2CPP_JAVA_UTIL_CONCURRENT_ATOMIC_ATOMICREFERENCE_HPP_IMPL
#define J2CPP_JAVA_UTIL_CONCURRENT_ATOMIC_ATOMICREFERENCE_HPP_IMPL
// j2cpp-generated JNI glue implementing the C++ proxy for
// java.util.concurrent.atomic.AtomicReference. Every member forwards to the
// JVM through call_new_object/call_method, resolved via the class/method
// name+signature tables registered by the J2CPP_DEFINE_* macros at the bottom.
// The numeric method indices here must stay in sync with those macro slots.
namespace j2cpp {

// Implicit up-casts to the Java supertypes of AtomicReference; they simply
// re-wrap the underlying jobject handle.
java::util::concurrent::atomic::AtomicReference::operator local_ref<java::lang::Object>() const
{
	return local_ref<java::lang::Object>(get_jobject());
}
java::util::concurrent::atomic::AtomicReference::operator local_ref<java::io::Serializable>() const
{
	return local_ref<java::io::Serializable>(get_jobject());
}

// AtomicReference(Object initialValue) -- method-table slot 0.
java::util::concurrent::atomic::AtomicReference::AtomicReference(local_ref< java::lang::Object > const &a0)
: object<java::util::concurrent::atomic::AtomicReference>(
	call_new_object<
		java::util::concurrent::atomic::AtomicReference::J2CPP_CLASS_NAME,
		java::util::concurrent::atomic::AtomicReference::J2CPP_METHOD_NAME(0),
		java::util::concurrent::atomic::AtomicReference::J2CPP_METHOD_SIGNATURE(0)
	>(a0)
)
{
}

// AtomicReference() -- method-table slot 1 (initial value is null on the Java side).
java::util::concurrent::atomic::AtomicReference::AtomicReference()
: object<java::util::concurrent::atomic::AtomicReference>(
	call_new_object<
		java::util::concurrent::atomic::AtomicReference::J2CPP_CLASS_NAME,
		java::util::concurrent::atomic::AtomicReference::J2CPP_METHOD_NAME(1),
		java::util::concurrent::atomic::AtomicReference::J2CPP_METHOD_SIGNATURE(1)
	>()
)
{
}

// Object get() -- slot 2.
local_ref< java::lang::Object > java::util::concurrent::atomic::AtomicReference::get()
{
	return call_method<
		java::util::concurrent::atomic::AtomicReference::J2CPP_CLASS_NAME,
		java::util::concurrent::atomic::AtomicReference::J2CPP_METHOD_NAME(2),
		java::util::concurrent::atomic::AtomicReference::J2CPP_METHOD_SIGNATURE(2),
		local_ref< java::lang::Object >
	>(get_jobject());
}

// void set(Object) -- slot 3. Returning a void expression is legal C++ and
// keeps the generator's uniform "return call_method<...>" shape.
void java::util::concurrent::atomic::AtomicReference::set(local_ref< java::lang::Object > const &a0)
{
	return call_method<
		java::util::concurrent::atomic::AtomicReference::J2CPP_CLASS_NAME,
		java::util::concurrent::atomic::AtomicReference::J2CPP_METHOD_NAME(3),
		java::util::concurrent::atomic::AtomicReference::J2CPP_METHOD_SIGNATURE(3),
		void
	>(get_jobject(), a0);
}

// boolean compareAndSet(Object expect, Object update) -- slot 4.
jboolean java::util::concurrent::atomic::AtomicReference::compareAndSet(local_ref< java::lang::Object > const &a0, local_ref< java::lang::Object > const &a1)
{
	return call_method<
		java::util::concurrent::atomic::AtomicReference::J2CPP_CLASS_NAME,
		java::util::concurrent::atomic::AtomicReference::J2CPP_METHOD_NAME(4),
		java::util::concurrent::atomic::AtomicReference::J2CPP_METHOD_SIGNATURE(4),
		jboolean
	>(get_jobject(), a0, a1);
}

// boolean weakCompareAndSet(Object expect, Object update) -- slot 5.
jboolean java::util::concurrent::atomic::AtomicReference::weakCompareAndSet(local_ref< java::lang::Object > const &a0, local_ref< java::lang::Object > const &a1)
{
	return call_method<
		java::util::concurrent::atomic::AtomicReference::J2CPP_CLASS_NAME,
		java::util::concurrent::atomic::AtomicReference::J2CPP_METHOD_NAME(5),
		java::util::concurrent::atomic::AtomicReference::J2CPP_METHOD_SIGNATURE(5),
		jboolean
	>(get_jobject(), a0, a1);
}

// Object getAndSet(Object newValue) -- slot 6.
local_ref< java::lang::Object > java::util::concurrent::atomic::AtomicReference::getAndSet(local_ref< java::lang::Object > const &a0)
{
	return call_method<
		java::util::concurrent::atomic::AtomicReference::J2CPP_CLASS_NAME,
		java::util::concurrent::atomic::AtomicReference::J2CPP_METHOD_NAME(6),
		java::util::concurrent::atomic::AtomicReference::J2CPP_METHOD_SIGNATURE(6),
		local_ref< java::lang::Object >
	>(get_jobject(), a0);
}

// String toString() -- slot 7.
local_ref< java::lang::String > java::util::concurrent::atomic::AtomicReference::toString()
{
	return call_method<
		java::util::concurrent::atomic::AtomicReference::J2CPP_CLASS_NAME,
		java::util::concurrent::atomic::AtomicReference::J2CPP_METHOD_NAME(7),
		java::util::concurrent::atomic::AtomicReference::J2CPP_METHOD_SIGNATURE(7),
		local_ref< java::lang::String >
	>(get_jobject());
}

// Registration tables: JVM class name plus the JNI name/signature for each
// method slot referenced above.
J2CPP_DEFINE_CLASS(java::util::concurrent::atomic::AtomicReference,"java/util/concurrent/atomic/AtomicReference")
J2CPP_DEFINE_METHOD(java::util::concurrent::atomic::AtomicReference,0,"<init>","(Ljava/lang/Object;)V")
J2CPP_DEFINE_METHOD(java::util::concurrent::atomic::AtomicReference,1,"<init>","()V")
J2CPP_DEFINE_METHOD(java::util::concurrent::atomic::AtomicReference,2,"get","()Ljava/lang/Object;")
J2CPP_DEFINE_METHOD(java::util::concurrent::atomic::AtomicReference,3,"set","(Ljava/lang/Object;)V")
J2CPP_DEFINE_METHOD(java::util::concurrent::atomic::AtomicReference,4,"compareAndSet","(Ljava/lang/Object;Ljava/lang/Object;)Z")
J2CPP_DEFINE_METHOD(java::util::concurrent::atomic::AtomicReference,5,"weakCompareAndSet","(Ljava/lang/Object;Ljava/lang/Object;)Z")
J2CPP_DEFINE_METHOD(java::util::concurrent::atomic::AtomicReference,6,"getAndSet","(Ljava/lang/Object;)Ljava/lang/Object;")
J2CPP_DEFINE_METHOD(java::util::concurrent::atomic::AtomicReference,7,"toString","()Ljava/lang/String;")
} //namespace j2cpp
#endif //J2CPP_JAVA_UTIL_CONCURRENT_ATOMIC_ATOMICREFERENCE_HPP_IMPL
#endif //J2CPP_INCLUDE_IMPLEMENTATION
|
incidincer/twice | ServerPush/src/main/java/ch/unifr/pai/twice/comm/serverPush/server/AtmosphereHandler.java | <gh_stars>0
package ch.unifr.pai.twice.comm.serverPush.server;
/*
* Copyright 2013 <NAME>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.List;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.atmosphere.config.service.AtmosphereHandlerService;
import org.atmosphere.cpr.Broadcaster;
import org.atmosphere.cpr.BroadcasterFactory;
import org.atmosphere.gwt.server.AtmosphereGwtHandler;
import org.atmosphere.gwt.server.GwtAtmosphereResource;
/**
 * Server-side Atmosphere/GWT comet handler.
 *
 * On connect ({@link #doComet}) it attaches the resource to two broadcasters:
 * a per-HTTP-session broadcaster (cached in the session) and a shared global
 * broadcaster. Incoming client posts ({@link #doPost}) are handed to
 * {@link EventProcessing} one message at a time.
 *
 * @author <NAME>
 *
 */
@AtmosphereHandlerService
public class AtmosphereHandler extends AtmosphereGwtHandler {

	/** Session attribute key under which the per-session broadcaster is cached. */
	public static final String BROADCASTERSESSIONKEY = "ch.unifr.pai.mice.comm.atmosphere.broadcaster";
	/** Lookup id of the single broadcaster shared by all connected clients. */
	public static final String GLOBALBROADCASTERID = "ch.unifr.pai.mice.comm.atmosphere.globalBroadcaster";

	private final EventProcessing eventProcessing = new EventProcessing();

	/*
	 * (non-Javadoc)
	 * @see org.atmosphere.gwt.server.AtmosphereGwtHandler#doComet(org.atmosphere.gwt.server.GwtAtmosphereResource)
	 */
	@Override
	public int doComet(GwtAtmosphereResource resource) throws ServletException, IOException {
		HttpSession session = resource.getAtmosphereResource().getRequest().getSession();
		// Lazily create one broadcaster per HTTP session and make it this
		// resource's primary broadcaster.
		if (session.getAttribute(BROADCASTERSESSIONKEY) == null)
			session.setAttribute(BROADCASTERSESSIONKEY, BroadcasterFactory.getDefault().get());
		resource.getAtmosphereResource().setBroadcaster((Broadcaster) session.getAttribute(BROADCASTERSESSIONKEY));
		// Additionally register the resource with the lazily-created global
		// broadcaster so application-wide messages reach every client.
		Broadcaster b = BroadcasterFactory.getDefault().lookup(GLOBALBROADCASTERID);
		if (b == null) {
			b = BroadcasterFactory.getDefault().get(GLOBALBROADCASTERID);
		}
		b.addAtmosphereResource(resource.getAtmosphereResource());
		if (logger.isDebugEnabled()) {
			logger.debug("Url: " + resource.getAtmosphereResource().getRequest().getRequestURL() + "?"
					+ resource.getAtmosphereResource().getRequest().getQueryString());
		}
		// Keep the comet connection open indefinitely.
		return NO_TIMEOUT;
	}

	/*
	 * (non-Javadoc)
	 * @see org.atmosphere.gwt.server.AtmosphereGwtHandler#doPost(javax.servlet.http.HttpServletRequest, javax.servlet.http.HttpServletResponse, java.util.List,
	 * org.atmosphere.gwt.server.GwtAtmosphereResource)
	 */
	@Override
	public void doPost(HttpServletRequest postRequest, HttpServletResponse postResponse, List<?> messages, GwtAtmosphereResource cometResource) {
		// Dispatch each posted message individually to the event processor.
		for (final Object s : messages) {
			eventProcessing.processMessage(s, cometResource.getAtmosphereResource());
		}
	}
}
|
tmtbb/adstar | app/src/main/java/com/cloudTop/starshare/been/EventBusMessage.java | package com.cloudTop.starshare.been;
/**
 * Message object posted on the EventBus.
 * (Header comment translated from Chinese: "EventBus message class".)
 * Created by Administrator on 2017/4/4.
 */
public class EventBusMessage {

    /** Numeric message code; public for legacy field access, prefer the accessors. */
    public int Message;

    /** Optional update-check payload carried along with this message. */
    public CheckUpdateInfoEntity checkUpdateInfoEntity;

    /**
     * Creates a message carrying the given numeric code.
     *
     * @param message the message code to transport
     */
    public EventBusMessage(int message) {
        this.Message = message;
    }

    /** @return the numeric message code */
    public int getMessage() {
        return this.Message;
    }

    /** @param message the numeric message code to set */
    public void setMessage(int message) {
        this.Message = message;
    }

    /** @return the attached update-check payload, or {@code null} if none */
    public CheckUpdateInfoEntity getCheckUpdateInfoEntity() {
        return this.checkUpdateInfoEntity;
    }

    /** @param checkUpdateInfoEntity the update-check payload to attach */
    public void setCheckUpdateInfoEntity(CheckUpdateInfoEntity checkUpdateInfoEntity) {
        this.checkUpdateInfoEntity = checkUpdateInfoEntity;
    }
}
|
sumedhpb/testrunner | pytests/transfer/conversionusetransfer.py | <reponame>sumedhpb/testrunner
from transfer.transfer_base import TransferBaseTest
from membase.api.rest_client import RestConnection, Bucket
from scripts.install import InstallerJob
from testconstants import COUCHBASE_DATA_PATH, WIN_COUCHBASE_DATA_PATH
from remote.remote_util import RemoteMachineShellConnection
import time
import sys
class ConversionUseTransfer(TransferBaseTest):
    """Tests cbtransfer-based conversion of pre-2.0 (sqlite) Couchbase data
    to a 2.0+ (couchstore) server.
    """

    def setUp(self):
        # Counts tearDown invocations so the shell is only disconnected after
        # the first call (see tearDown).
        self.times_teardown_called = 1
        super(ConversionUseTransfer, self).setUp()
        self.command_options = self.input.param("command_options", '-x rehash=1')
        # Target server version to upgrade to; mandatory for this test.
        self.latest_version = self.input.param('latest_version', None)
        if self.latest_version is None:
            self.fail("for the test you need to specify 'latest_version'")
        self.openssl = self.input.param('openssl', '')

    def tearDown(self):
        # NOTE(review): both branches below perform the same cleanup; they
        # differ only in the counter condition. Presumably one branch was meant
        # for the "keep cluster" case -- confirm against TransferBaseTest.
        if not self.input.param("skip_cleanup", True):
            if self.times_teardown_called > 1 :
                self.shell.disconnect()
                del self.buckets
        if self.input.param("skip_cleanup", True):
            if self.case_number > 1 or self.times_teardown_called > 1:
                self.shell.disconnect()
                del self.buckets
        self.times_teardown_called += 1
        super(ConversionUseTransfer, self).tearDown()

    def _install(self, servers, version='1.8.1-937-rel', vbuckets=1024):
        """Install couchbase-server of the given version on all servers in
        parallel and abort the whole run if any install failed."""
        params = {}
        params['num_nodes'] = len(servers)
        params['product'] = 'couchbase-server'
        params['version'] = version
        params['vbuckets'] = [vbuckets]
        params['openssl'] = self.openssl
        InstallerJob().parallel_install(servers, params)
        success = True
        for server in servers:
            success &= RemoteMachineShellConnection(server).is_couchbase_installed()
        if not success:
            sys.exit("some nodes were not install successfully!")

    def convert_sqlite_to_couchstore(self):
        """Convert data with 181 sqlite files to a 2.0+ online server
        We load a number of items to one 181 node first and then do some mutation on these items.
        Later we use cbtranfer to transfer the sqlite files we have on this
        node to a new node. We verify the data by comparison between the items in KVStore
        and items in the new node."""
        # Step 1: install the old (sqlite-based) version and create buckets.
        self._install([self.server_origin])
        if self.default_bucket:
            bucket_params = self._create_bucket_params(server=self.server_origin, size=self.bucket_size,
                                                       replicas=self.num_replicas)
            self.cluster.create_default_bucket(bucket_params)
            self.buckets.append(Bucket(name="default", num_replicas=self.num_replicas, bucket_size=self.bucket_size))
        self._create_sasl_buckets(self.server_origin, self.sasl_buckets)
        self._create_standard_buckets(self.server_origin, self.standard_buckets)
        self.load_data()
        # Step 2: snapshot the sqlite data files before upgrading the node.
        if self.os == 'windows':
            # Kill any running cbtransfer first so the files are not locked.
            output, error = self.shell.execute_command("taskkill /F /T /IM cbtransfer.exe")
            self.shell.log_command_output(output, error)
            self.shell.delete_files("/cygdrive/c%s" % self.win_data_location)
            self.shell.execute_command("mkdir /cygdrive/c%s" % self.win_data_location)
            self.shell.execute_command("cp -rf %s /cygdrive/c/tmp/" % (WIN_COUCHBASE_DATA_PATH))
        else:
            self.shell.delete_files(self.backup_location)
            self.shell.execute_command("mkdir %s" % self.backup_location)
            self.shell.execute_command("cp -rf %s %s" % (COUCHBASE_DATA_PATH, self.backup_location))
        # Step 3: install the new version on both origin and recovery nodes.
        self._install([self.server_origin], version=self.latest_version)
        self._install([self.server_recovery], version=self.latest_version)
        # Preserve the in-memory KVStore state so the reloaded buckets can be
        # verified against the originally loaded items.
        kvs_before = {}
        bucket_names = []
        for bucket in self.buckets:
            kvs_before[bucket.name] = bucket.kvs[1]
            bucket_names.append(bucket.name)
        del self.buckets
        self.buckets = []
        if self.default_bucket:
            bucket_params = self._create_bucket_params(server=self.server_recovery, size=self.bucket_size,
                                                       replicas=self.num_replicas)
            self.cluster.create_default_bucket(bucket_params)
            self.buckets.append(Bucket(name="default", num_replicas=self.num_replicas, bucket_size=self.bucket_size))
        self._create_sasl_buckets(self.server_recovery, self.sasl_buckets)
        self._create_standard_buckets(self.server_recovery, self.standard_buckets)
        # Step 4: transfer each bucket's saved sqlite files into the new node.
        for bucket in self.buckets:
            bucket.kvs[1] = kvs_before[bucket.name]
            transfer_source = "%s/data/%s-data/%s" % (self.backup_location, bucket.name, bucket.name)
            if self.os == 'windows':
                transfer_source = "C:%s/%s-data/%s" % (self.win_data_location, bucket.name, bucket.name)
            transfer_destination = "http://%s@%s:%s" % (self.couchbase_login_info,
                                                        self.server_recovery.ip,
                                                        self.server_recovery.port)
            self.shell.execute_cbtransfer(transfer_source, transfer_destination, "-b %s -B %s %s" % (bucket.name, bucket.name, self.command_options))
        del kvs_before
        # Step 5: let expired items lapse, force the expiry pager, then verify.
        time.sleep(self.expire_time + 1)
        shell_server_recovery = RemoteMachineShellConnection(self.server_recovery)
        for bucket in self.buckets:
            shell_server_recovery.execute_cbepctl(bucket, "", "set flush_param", "exp_pager_stime", 5)
        shell_server_recovery.disconnect()
        time.sleep(self.wait_timeout)
        self._wait_for_stats_all_buckets([self.server_recovery])
        self._verify_stats_all_buckets([self.server_recovery])
        self._verify_all_buckets(self.server_recovery, 1, self.wait_timeout * 50, self.max_verify, True, 1)
|
krasnopv/das | businesslogic/reference_test.go | <filename>businesslogic/reference_test.go
package businesslogic_test
import (
"errors"
"github.com/DancesportSoftware/das/businesslogic"
"github.com/DancesportSoftware/das/mock/businesslogic"
"github.com/golang/mock/gomock"
"github.com/stretchr/testify/assert"
"testing"
)
// TestCountry_GetStates verifies that Country.GetStates forwards an empty
// search criteria to the state repository and returns every state the
// repository yields.
func TestCountry_GetStates(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()

	// Repository stub: empty criteria returns two states.
	mockRepo := mock_businesslogic.NewMockIStateRepository(mockCtrl)
	mockRepo.EXPECT().SearchState(businesslogic.SearchStateCriteria{}).Return([]businesslogic.State{
		{ID: 1, Name: "Alaska", Abbreviation: "AK"},
		{ID: 2, Name: "Michigan", Abbreviation: "MI"},
	}, nil)

	country := businesslogic.Country{}
	states, err := country.GetStates(mockRepo)
	assert.Nil(t, err, "search states of a Country should not return errors")
	assert.EqualValues(t, len(states), 2, "should return all states when search with empty criteria")
}
// TestCountry_GetFederations verifies that Country.GetFederations forwards an
// empty search criteria to the federation repository and returns all
// federations it yields.
func TestCountry_GetFederations(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	// Repository stub: empty criteria returns two federations.
	mockFederationRepo := mock_businesslogic.NewMockIFederationRepository(ctrl)
	mockFederationRepo.EXPECT().SearchFederation(businesslogic.SearchFederationCriteria{}).Return(
		[]businesslogic.Federation{
			{ID: 1, Name: "WDSF"},
			{ID: 2, Name: "WDC"},
		}, nil,
	)

	country := businesslogic.Country{}
	federations, err := country.GetFederations(mockFederationRepo)
	assert.Nil(t, err)
	assert.EqualValues(t, len(federations), 2, "search federation with empty criteria should return all federations")
}
// TestState_GetCities covers the three outcomes of State.GetCities:
// a successful lookup, a repository error, and a nil repository.
// Local names use Go's MixedCaps convention (no underscores).
func TestState_GetCities(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()

	mockRepo := mock_businesslogic.NewMockICityRepository(mockCtrl)
	// behavior 1: state 1 resolves to two cities
	mockRepo.EXPECT().SearchCity(businesslogic.SearchCityCriteria{StateID: 1}).Return([]businesslogic.City{
		{ID: 1, Name: "City of ID 1", StateID: 1},
		{ID: 2, Name: "City of ID 2", StateID: 1},
	}, nil)
	// behavior 2: state 2 makes the repository fail
	mockRepo.EXPECT().SearchCity(businesslogic.SearchCityCriteria{StateID: 2}).Return(nil,
		errors.New("state does not exist"))

	// successful lookup returns every city of the state
	stateOne := businesslogic.State{ID: 1}
	citiesOne, errOne := stateOne.GetCities(mockRepo)
	assert.EqualValues(t, 2, len(citiesOne))
	assert.Nil(t, errOne)

	// a repository error is propagated and no cities are returned
	stateTwo := businesslogic.State{ID: 2}
	citiesTwo, errTwo := stateTwo.GetCities(mockRepo)
	assert.Nil(t, citiesTwo)
	assert.NotNil(t, errTwo)

	// a nil repository must be rejected with an error, not dereferenced
	citiesThree, errThree := stateOne.GetCities(nil)
	assert.Nil(t, citiesThree)
	assert.NotNil(t, errThree)
}
// TestFederation_GetDivisions covers the three outcomes of
// Federation.GetDivisions: a successful lookup, a repository error, and a nil
// repository. Local names use Go's MixedCaps convention (no underscores).
func TestFederation_GetDivisions(t *testing.T) {
	mockCtrl := gomock.NewController(t)
	defer mockCtrl.Finish()

	mockRepo := mock_businesslogic.NewMockIDivisionRepository(mockCtrl)
	// behavior 1: federation 1 resolves to two divisions
	mockRepo.EXPECT().SearchDivision(businesslogic.SearchDivisionCriteria{FederationID: 1}).Return([]businesslogic.Division{
		{ID: 1, Name: "Correct Division 1", FederationID: 1},
		{ID: 2, Name: "Correct Division 2", FederationID: 2},
	}, nil)
	// behavior 2: federation 2 makes the repository fail
	mockRepo.EXPECT().SearchDivision(businesslogic.SearchDivisionCriteria{FederationID: 2}).Return(nil, errors.New("invalid search"))

	federationOne := businesslogic.Federation{ID: 1}
	federationTwo := businesslogic.Federation{ID: 2}

	// successful lookup returns every division of the federation
	resultOne, errOne := federationOne.GetDivisions(mockRepo)
	assert.EqualValues(t, 2, len(resultOne))
	assert.Nil(t, errOne)

	// a repository error is propagated and no divisions are returned
	resultTwo, errTwo := federationTwo.GetDivisions(mockRepo)
	assert.Nil(t, resultTwo)
	assert.NotNil(t, errTwo)

	// a nil repository must be rejected with an error, not dereferenced
	resultThree, errThree := federationOne.GetDivisions(nil)
	assert.Nil(t, resultThree)
	assert.NotNil(t, errThree)
}
|
Team2168/2014_Main_Robot | src/org/team2168/PIDController/sensors/AverageEncoder.java | <filename>src/org/team2168/PIDController/sensors/AverageEncoder.java
package org.team2168.PIDController.sensors;
import edu.wpi.first.wpilibj.Encoder;
/**
 * This class extends the basic WPI encoder class. Its purpose is to provide a
 * smoother rate output by averaging the rate of N samples. It implements the
 * PIDSensorInterface for use with our custom PID controller. Encoder with N
 * point averager. Misspelling ("averagor") intentional.
 *
 * @author <NAME>, Team 2168 <NAME>
 *
 */
public class AverageEncoder extends Encoder implements PIDSensorInterface {

	// Circular buffer of the last N rate samples and its size/write cursor.
	private int averagorSize;
	private double[] averagorArray;
	private int arrayPos = 0; // Next array position to put values to be
								// averaged

	// Timestamp (ms) and tick count captured at the current and previous
	// getRate() calls; the rate is the delta of counts over the delta of time.
	long timeNow;
	long oldTime;
	double countNow;
	double countBefore;
	double rate;

	// Selected output units for getRate()/getPos().
	private SpeedReturnType speedReturnType;
	private PositionReturnType posReturnType;

	int PPR;          // encoder pulses per revolution
	double distPerTick; // linear distance travelled per encoder tick

	/**
	 * Constructor for end point average class
	 *
	 * @param n
	 *            the size of end point average
	 */
	public AverageEncoder(int channelA, int channelB, int PPR,
			double distPerTick, boolean reverseDirection,
			EncodingType encoderType, int averageN) {
		super(channelA, channelB, reverseDirection, encoderType);
		this.averagorSize = averageN;
		this.averagorArray = new double[averagorSize];
		this.timeNow = 0;
		this.oldTime = 0;
		this.countNow = 0;
		this.countBefore = 0;
		this.rate = 0;
		this.PPR = PPR;
		this.distPerTick = distPerTick;
		// Default output units: degrees for position, RPM for speed.
		this.posReturnType = PositionReturnType.DEGREE;
		this.speedReturnType = SpeedReturnType.RPM;
		super.setDistancePerPulse(distPerTick);
	}

	/**
	 * Same as the base constructor, but with explicit output units for
	 * position and speed.
	 */
	public AverageEncoder(int channelA, int channelB, int PPR,
			double distPerTick, boolean reverseDirection,
			EncodingType encoderType, SpeedReturnType speedReturnType,
			PositionReturnType posReturnType, int averageN) {
		this(channelA, channelB, PPR, distPerTick, reverseDirection,
				encoderType, averageN);
		this.speedReturnType = speedReturnType;
		this.posReturnType = posReturnType;
	}

	/**
	 * returns (gets) Average of last n values sent, as name says.
	 *
	 * @return the Average
	 */
	private double getAverage() {
		double sum = 0;
		for (int i = 0; i < averagorSize; i++)
			sum += averagorArray[i];
		return sum / averagorSize;
	}

	/**
	 * puts data in to array to be averaged, hence the class name and method
	 * name. Its like magic but cooler.
	 *
	 * @param value
	 *            the value being inserted into the array to be averaged.
	 */
	private void putData(double value) {
		averagorArray[arrayPos] = value;
		arrayPos++;
		if (arrayPos >= averagorSize) // Is equal or greater to averagorSize
										// because array is zero indexed. Rolls
										// over index position.
			arrayPos = 0;
	}

	/**
	 * Samples the raw rate as (ticks since last call) / (ms since last call),
	 * converts it to the configured speed unit, pushes it into the N-point
	 * buffer, and returns the buffer average.
	 *
	 * NOTE(review): on the first call oldTime is 0 (epoch), so the first
	 * sample's time delta is enormous and the sample is ~0. Two calls in the
	 * same millisecond would divide by zero (yielding Infinity/NaN) --
	 * callers are expected to invoke this periodically; confirm call cadence.
	 */
	public double getRate() {
		// getRate
		timeNow = System.currentTimeMillis();
		countNow = super.get();
		rate = (countNow - countBefore) / (timeNow - oldTime); // counts per
																// millisecond
		oldTime = timeNow;
		countBefore = countNow;

		// Convert counts/ms to the requested unit before averaging.
		switch (speedReturnType.value) {
		case SpeedReturnType.IPS_val:
			putData(rate * distPerTick * 1000); // distance units per second
			break;
		case SpeedReturnType.FPS_val:
			putData(rate * distPerTick * 1000 / 12); // feet per second
			break;
		case SpeedReturnType.RPM_val:
			putData(rate * 1000 * 60 / PPR); // ticks per minute... rpm
			break;
		case SpeedReturnType.PERIOD_val:
			putData(super.getPeriod()); // raw period from the base encoder
			break;
		default:
			// should be unreachable
			putData(0);
			break;
		}
		return getAverage(); // average in the configured unit
	}

	/**
	 * Returns the current position converted to the configured position unit
	 * (raw ticks, linear distance, degrees, or radians).
	 */
	public double getPos() {
		switch (posReturnType.value) {
		case PositionReturnType.TICKS_val:
			return get();
		case PositionReturnType.INCH_val:
			return super.getDistance();
		case PositionReturnType.DEGREE_val:
			return (double) (super.get()) / PPR * 360;
		case PositionReturnType.RADIANS_val:
			return (double) (super.get()) / PPR * (2 * Math.PI);
		default:
			// should be unreachable
			return 0;
		}
	}

	// public double getPos()
	// {
	// return super.get();
	// }

	/**
	 * Type-safe enumeration (pre-Java-5 style) of the units getRate() can
	 * report in.
	 */
	public static class SpeedReturnType {
		/**
		 * The integer value representing this enumeration
		 */
		static final int IPS_val = 0;
		static final int RPM_val = 1;
		static final int FPS_val = 2;
		static final int PERIOD_val = 3;
		final int value;

		/** Distance units (per distPerTick) per second. */
		public static final SpeedReturnType IPS = new SpeedReturnType(IPS_val);
		/** Revolutions per minute. */
		public static final SpeedReturnType RPM = new SpeedReturnType(RPM_val);
		/** Feet per second (assumes distPerTick is in inches). */
		public static final SpeedReturnType FPS = new SpeedReturnType(FPS_val);
		/** Raw period reported by the underlying WPI encoder. */
		public static final SpeedReturnType PERIOD = new SpeedReturnType(
				PERIOD_val);

		private SpeedReturnType(int value) {
			this.value = value;
		}
	}

	/**
	 * Type-safe enumeration (pre-Java-5 style) of the units getPos() can
	 * report in.
	 */
	public static class PositionReturnType {
		static final int TICKS_val = 0;
		static final int INCH_val = 1;
		static final int DEGREE_val = 2;
		static final int RADIANS_val = 3;
		public final int value;

		/** Raw encoder ticks. */
		public static final PositionReturnType TICKS = new PositionReturnType(
				TICKS_val);
		/** Linear distance as reported by Encoder.getDistance(). */
		public static final PositionReturnType INCH = new PositionReturnType(
				INCH_val);
		/** Rotation in degrees (ticks / PPR * 360). */
		public static final PositionReturnType DEGREE = new PositionReturnType(
				DEGREE_val);
		/** Rotation in radians (ticks / PPR * 2*pi). */
		public static final PositionReturnType RADIANS = new PositionReturnType(
				RADIANS_val);

		private PositionReturnType(int value) {
			this.value = value;
		}
	}

	/** Selects the unit subsequently returned by getPos(). */
	public void setPosReturnType(PositionReturnType value) {
		this.posReturnType = value;
	}
}
|
dolio/TemplePlus | tpdatasrc/co8fixes/scr/py00281Yvy.py | from toee import *
from utilities import *
from combat_standard_routines import *
def san_dialog( attachee, triggerer ):
    """Dialog hook: refuse conversation (float line 11004) if the party leader
    carries any of reputations 29, 30 or 32 (game-defined ids -- presumably
    hostile reputations; confirm against the campaign's reputation table),
    otherwise start dialog at line 1 facing the triggerer."""
    if (game.leader.reputation_has(32) == 1 or game.leader.reputation_has(30) == 1 or game.leader.reputation_has(29) == 1):
        attachee.float_line(11004,triggerer)
    else:
        triggerer.begin_dialog( attachee, 1 )
        attachee.turn_towards(triggerer)
    return SKIP_DEFAULT
def san_dying( attachee, triggerer ):
    """Death hook: rescale the creature's CR to the party's average level
    when the engine says it should be modified."""
    if should_modify_CR( attachee ):
        modify_CR( attachee, get_av_level() )
    return RUN_DEFAULT
def san_insert_item( attachee, triggerer ):
    """Item-insert hook: intentionally a no-op."""
    # far shot feature now implemented in far shot feat
    return RUN_DEFAULT
aitalshashank2/Floo | backend/code/Floo/models/topic.py | <filename>backend/code/Floo/models/topic.py
from django.db import models
from django.conf import settings
from Floo.models.team import Team
class Topic(models.Model):
    """
    A class representing a Topic Instance

    Attributes
    ----------
    title : CharField
        A field storing the title of the topic instance (required, max 1023 chars)
    description : CharField
        A field storing the description of the topic instance (optional, max 2055 chars)
    creator : ForeignKey
        A field which maps the topic to the user who has created the topic;
        deleting the user cascades to their topics
    team : ForeignKey
        A field which maps the topic to the team in which it was published;
        deleting the team cascades to its topics
    publish_time : DateTimeField
        A field which stores the time at which the topic was published
        (set automatically on creation)
    """

    title = models.CharField(
        max_length=1023,
        blank=False,
        null=False
    )

    description = models.CharField(
        max_length=2055,
        blank=True,
        null=True
    )

    # Reverse accessor on the user model is `user.topics`.
    creator = models.ForeignKey(
        settings.AUTH_USER_MODEL,
        null=False,
        on_delete=models.CASCADE,
        related_name="topics"
    )

    # Reverse accessor on Team is `team.topics`.
    team = models.ForeignKey(
        Team,
        null=False,
        on_delete=models.CASCADE,
        related_name="topics"
    )

    publish_time = models.DateTimeField(
        auto_now_add=True
    )

    def __str__(self):
        # Human-readable form used in the admin and shell.
        return f"Team: {self.team.name}, Title: {self.title}"
|
jfnavarro/st_misc | python_scripts/old_unsorted/misc_rel_0.1/checkResultIndexes.py | <reponame>jfnavarro/st_misc
import sys
import os
def usage():
    """Print command-line usage (Python 2 print statements)."""
    print "Usage:"
    print "python checkResultIndexes.py testResults.fastq mismatches_out.txt"
def main(result, mismatches_out):
    """Scan a results fastq-like file, compare the mapped barcode id embedded
    in each record's optional line against the correct id embedded in the
    record name, and write every mismatch to `mismatches_out`.

    Each record is 4 lines: name ("@x\\tcorrect_id\\tx\\ty"), sequence,
    optional line ("+ id=found_id\\tox\\toy"), quality. Records whose optional
    line carries no "id=" token are counted as missed mappings.
    Prints the wrong/missed totals when done.
    """
    mismatch_format = "{}\n" \
                      "Quality: {}\n" \
                      "Observed: {}\n" \
                      "Wrong: {}\n" \
                      "Correct: {}\n" \
                      "Shift: {} -> {}\n\n"

    wrongMappings = 0
    missedMappings = 0
    # Context managers guarantee both files are closed even if parsing raises
    # (the original leaked both handles on any exception).
    with open(result) as res, open(mismatches_out, "w") as mismatches:
        while True:
            name = res.readline().rstrip()
            if not name:
                # EOF: readline() returns the empty string.
                break
            seq = res.readline().rstrip()
            optional = res.readline().rstrip()
            qual = res.readline()
            try:
                # Optional line looks like "+ id=<mapped_id>..."; no second
                # space-token or no "=" means the read was not mapped.
                foundId = optional.split(' ')[1].split('=')[1]
            except IndexError:
                missedMappings += 1
                continue
            # Name carries the ground truth: "@x\t<id>\t<x>\t<y>".
            correct_id, c_x, c_y = name.split("\t")[1:4]
            if foundId != correct_id:
                wrongMappings += 1
                o_x, o_y = optional.split("\t")[1:]
                # The barcode occupies seq[10:10+len(id)]; report the observed
                # bases/quality there plus the coordinate shift.
                mismatches.write(mismatch_format.format(seq,
                                                        qual[10:10 + len(correct_id)],
                                                        seq[10:10 + len(correct_id)],
                                                        foundId,
                                                        correct_id,
                                                        (c_x, c_y),
                                                        (o_x, o_y)))
    # Parenthesized single-argument print: identical output under Python 2,
    # and also valid Python 3.
    print('Wrong: ' + str(wrongMappings))
    print('Missed: ' + str(missedMappings))
print 'Missed: ' + str(missedMappings)
if __name__ == "__main__":
    # Expect exactly two arguments: the results fastq and the mismatch output
    # path (argv also contains the script name, hence length 3).
    if len(sys.argv) != 3:
        usage()
        sys.exit(1)
    main(sys.argv[1], sys.argv[2])
|
CURSORSoftwareAG/radiance | demos/substance-demo/src/main/java/org/pushingpixels/demo/substance/main/check/CellsPanel.java | <gh_stars>0
/*
* Copyright (c) 2005-2019 <NAME>. All Rights Reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* o Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* o Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* o Neither the name of the copyright holder nor the names of
* its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.pushingpixels.demo.substance.main.check;
import com.jgoodies.forms.factories.Paddings;
import org.pushingpixels.demo.substance.main.check.command.*;
import org.pushingpixels.substance.api.SubstanceCortex;
import org.pushingpixels.substance.api.SubstanceCortex.ComponentOrParentChainScope;
import org.pushingpixels.substance.api.SubstanceSlices.DecorationAreaType;
import javax.swing.*;
import javax.swing.tree.*;
import java.awt.*;
import java.util.Enumeration;
/**
 * Demo panel exercising Substance rendering of cell-based components (lists,
 * tables, trees) across the four decoration areas NONE / GENERAL / HEADER /
 * FOOTER, each in plain, watermark-visible, and disabled variants.
 * Construction of the actual UI is deferred to {@link #initialize()}.
 */
public class CellsPanel extends JPanel implements Deferrable {

	// Flipped to true once initialize() has built the UI.
	private boolean isInitialized;

	@Override
	public boolean isInitialized() {
		return this.isInitialized;
	}

	public CellsPanel() {
	}

	/**
	 * Builds the whole panel: a form with one row per component variant and
	 * one column per decoration area, wrapped in a transparent scroll pane.
	 */
	@Override
	public synchronized void initialize() {
		// 4 content columns (one per decoration area) kept equal-width via
		// the column-group declaration.
		TestFormLayoutBuilder builderCells = new TestFormLayoutBuilder(
				"right:pref, 10dlu, fill:pref:grow(1), 4dlu,"
						+ "fill:pref:grow(1), 4dlu, fill:pref:grow(1), 4dlu, fill:pref:grow(1)",
				5, 13).columnGroups(new int[][] { { 3, 5, 7, 9 } }).
				padding(Paddings.DIALOG);

		// Header row naming the four decoration areas.
		builderCells.append("");
		builderCells.append(new JLabel("NONE"), new JLabel("GENERAL"));
		builderCells.append(new JLabel("HEADER"), new JLabel("FOOTER"));

		builderCells.appendSeparator("Lists");
		addControlRow(builderCells, "List", new CreationCommand<JComponent>() {
			public JComponent create() {
				JList list = new JList(new Object[] { "entry1", "entry2", "entry3" });
				return list;
			}
		}, null);
		addControlRow(builderCells, "List watermark", new CreationCommand<JComponent>() {
			public JComponent create() {
				JList list = new JList(new Object[] { "entry1", "entry2", "entry3" });
				return list;
			}
		}, (JComponent jc) -> SubstanceCortex.ComponentOrParentChainScope.setWatermarkVisible(jc,
				true));
		addControlRow(builderCells, "List disabled", new CreationCommand<JComponent>() {
			public JComponent create() {
				JList list = new JList(new Object[] { "entry1", "entry2", "entry3" });
				return list;
			}
		}, new DisableCommand());

		// Shared factory for the table rows: a 3x3 table inside a scroll pane
		// that mirrors background/foreground changes onto table and header.
		CreationCommand<JComponent> tableCreationCmd = new CreationCommand<JComponent>() {
			public JComponent create() {
				final JTable table = new JTable(
						new Object[][] { { "entry11", "entry12", "entry13" },
								{ "entry21", "entry22", "entry23" },
								{ "entry31", "entry32", "entry33" } },
						new Object[] { "Column1", "Column2", "Column3" });
				table.setName("Table ");
				JScrollPane tableScroll = new JScrollPane(table) {
					@Override
					public void setBackground(Color bg) {
						super.setBackground(bg);
						table.setBackground(bg);
						table.getTableHeader().setBackground(bg);
					}

					@Override
					public void setForeground(Color fg) {
						super.setForeground(fg);
						table.setForeground(fg);
						table.getTableHeader().setForeground(fg);
					}
				};
				// Extra height so the column header is visible.
				Dimension prefTable = table.getPreferredSize();
				tableScroll.setPreferredSize(new Dimension(prefTable.width, prefTable.height + 25));
				return tableScroll;
			}
		};
		builderCells.appendSeparator("Tables");
		addControlRow(builderCells, "Table", tableCreationCmd, null);
		addControlRow(builderCells, "Table watermark", tableCreationCmd,
				(JComponent jc) -> SubstanceCortex.ComponentOrParentChainScope
						.setWatermarkVisible(jc, true));
		addControlRow(builderCells, "Table disabled", tableCreationCmd,
				new DisableViewportCommand());

		// Shared factory for the tree rows: a fixed 3-level tree, fully
		// expanded after construction.
		CreationCommand<JComponent> treeCreationCmd = new CreationCommand<JComponent>() {
			public void expandAll(JTree tree, boolean expand) {
				TreeNode root = (TreeNode) tree.getModel().getRoot();

				// Traverse tree from root
				expandAll(tree, new TreePath(root), expand);
			}

			private void expandAll(JTree tree, TreePath parent, boolean expand) {
				// Traverse children
				TreeNode node = (TreeNode) parent.getLastPathComponent();
				if (node.getChildCount() >= 0) {
					for (Enumeration e = node.children(); e.hasMoreElements();) {
						TreeNode n = (TreeNode) e.nextElement();
						TreePath path = parent.pathByAddingChild(n);
						expandAll(tree, path, expand);
					}
				}

				// Expansion or collapse must be done bottom-up
				if (expand) {
					tree.expandPath(parent);
				} else {
					tree.collapsePath(parent);
				}
			}

			public JComponent create() {
				DefaultMutableTreeNode root = new DefaultMutableTreeNode("root");
				DefaultMutableTreeNode son1 = new DefaultMutableTreeNode("son1");
				DefaultMutableTreeNode son2 = new DefaultMutableTreeNode("son2");
				DefaultMutableTreeNode son3 = new DefaultMutableTreeNode("son3");
				DefaultMutableTreeNode gson11 = new DefaultMutableTreeNode("gson11");
				DefaultMutableTreeNode gson12 = new DefaultMutableTreeNode("gson12");
				DefaultMutableTreeNode gson21 = new DefaultMutableTreeNode("gson21");
				DefaultMutableTreeNode gson22 = new DefaultMutableTreeNode("gson22");
				DefaultMutableTreeNode gson31 = new DefaultMutableTreeNode("gson31");
				DefaultMutableTreeNode gson32 = new DefaultMutableTreeNode("gson32");
				DefaultMutableTreeNode ggson111 = new DefaultMutableTreeNode("ggson111");
				DefaultMutableTreeNode ggson112 = new DefaultMutableTreeNode("ggson112");
				DefaultMutableTreeNode ggson113 = new DefaultMutableTreeNode("ggson113");

				gson11.add(ggson111);
				gson11.add(ggson112);
				gson11.add(ggson113);

				son1.add(gson11);
				son1.add(gson12);
				son2.add(gson21);
				son2.add(gson22);
				son3.add(gson31);
				son3.add(gson32);

				root.add(son1);
				root.add(son2);
				root.add(son3);

				JTree tree = new JTree(root);
				expandAll(tree, true);
				return tree;
			}
		};
		builderCells.appendSeparator("Trees");
		addControlRow(builderCells, "Tree", treeCreationCmd, null);
		addControlRow(builderCells, "Tree watermark", treeCreationCmd,
				(JComponent jc) -> SubstanceCortex.ComponentOrParentChainScope
						.setWatermarkVisible(jc, true));
		addControlRow(builderCells, "Tree disabled", treeCreationCmd, new DisableCommand());

		// Transparent scroll pane so the decoration-area painting shows through.
		JPanel panelCells = builderCells.build();
		JScrollPane jspCells = new JScrollPane(panelCells);
		panelCells.setOpaque(false);
		jspCells.setOpaque(false);
		jspCells.getViewport().setOpaque(false);

		this.setLayout(new BorderLayout());
		this.add(jspCells);

		this.isInitialized = true;
	}

	/**
	 * Adds a row of components configured with the specified configuration command.
	 * Creates four fresh instances via the creation command, assigns each one
	 * to a different decoration area (NONE, GENERAL, HEADER, FOOTER), applies
	 * the optional configuration command, and appends the row to the builder.
	 *
	 * @param builder
	 *            Form builder.
	 * @param label
	 *            Text to set.
	 * @param creationCmd
	 *            Creation command.
	 * @param configurationCmd
	 *            Configuration command to apply.
	 */
	private void addControlRow(TestFormLayoutBuilder builder, String label,
			CreationCommand<JComponent> creationCmd,
			ConfigurationCommand<JComponent> configurationCmd) {
		JComponent[] row = new JComponent[4];

		row[0] = creationCmd.create();
		ComponentOrParentChainScope.setDecorationType(((JComponent) row[0]),
				DecorationAreaType.NONE);
		row[0].setName(row[0].getName() + ": NONE");

		row[1] = creationCmd.create();
		ComponentOrParentChainScope.setDecorationType(((JComponent) row[1]),
				DecorationAreaType.GENERAL);
		row[1].setName(row[1].getName() + ": GENERAL");

		row[2] = creationCmd.create();
		ComponentOrParentChainScope.setDecorationType(((JComponent) row[2]),
				DecorationAreaType.HEADER);
		row[2].setName(row[2].getName() + ": HEADER");

		row[3] = creationCmd.create();
		ComponentOrParentChainScope.setDecorationType(((JComponent) row[3]),
				DecorationAreaType.FOOTER);
		row[3].setName(row[3].getName() + ": FOOTER");

		if (configurationCmd != null) {
			for (JComponent comp : row) {
				configurationCmd.configure(comp);
			}
		}

		JLabel jl = new JLabel(label);
		builder.append(jl);
		for (Component comp : row)
			builder.append(comp);
	}
}
|
datenstrudel/bulbs-core | src/main/java/net/datenstrudel/bulbs/core/application/messaging/eventStore/PublishedMessageTrackerStore.java | <reponame>datenstrudel/bulbs-core
package net.datenstrudel.bulbs.core.application.messaging.eventStore;
import net.datenstrudel.bulbs.core.domain.model.infrastructure.BCoreBaseRepository;
import org.springframework.stereotype.Repository;
/**
*
* @author <NAME>
*/
@Repository
public interface PublishedMessageTrackerStore extends BCoreBaseRepository<PublishedMessageTracker, String> {
//~ Member(s) //////////////////////////////////////////////////////////////
//~ Method(s) //////////////////////////////////////////////////////////////
/**
 * Looks up the tracker that records which events of the given type have
 * already been published.
 *
 * @param type event/message type identifier the tracker was stored under
 * @return the matching tracker; presumably {@code null} when none exists
 *         yet (Spring Data derived-query convention -- TODO confirm)
 */
public PublishedMessageTracker findByType(String type);
}
|
Cosium/vet | src/main/java/com/cosium/vet/git/GitClientFactory.java | <filename>src/main/java/com/cosium/vet/git/GitClientFactory.java
package com.cosium.vet.git;
/**
* Created on 16/02/18.
*
* @author Reda.Housni-Alaoui
*/
public interface GitClientFactory {
/**
 * Creates a new {@link GitClient} instance.
 *
 * @return A new Git client
 */
GitClient build();
}
|
real-fakeuser/ChaynsTemplateClone | node_modules/chayns-components/lib/react-chayns-selectbutton/component/SelectButton.js | var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
var _class, _temp;
function _defineProperty(obj, key, value) { if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
/* eslint-disable jsx-a11y/click-events-have-key-events */
import React, { Component } from 'react';
import PropTypes from 'prop-types';
import classnames from 'classnames';
// Transpiled (Babel) React component: a button that opens a chayns select
// dialog and reports the chosen list items through the onSelect prop.
var SelectButton = (_temp = _class = function (_Component) {
_inherits(SelectButton, _Component);
function SelectButton(props) {
_classCallCheck(this, SelectButton);
var _this = _possibleConstructorReturn(this, (SelectButton.__proto__ || Object.getPrototypeOf(SelectButton)).call(this, props));
_this.state = {
// eslint-disable-next-line react/no-unused-state
selected: []
};
_this.onClick = _this.onClick.bind(_this);
_this.onSelect = _this.onSelect.bind(_this);
return _this;
}
_createClass(SelectButton, [{
key: 'onSelect',
// Handles the dialog result: for a single selection the button label is
// updated, then the caller's onSelect prop (if any) receives the
// selection mapped back to the original list items.
value: function onSelect(selected) {
var onSelect = this.props.onSelect;
var selection = selected.selection;
if (selection.length === 1) {
this.setLabel(selection[0].name);
}
if (onSelect) {
onSelect(this.getReturnList(selected));
}
}
}, {
key: 'onClick',
// Opens the chayns select dialog built from the list/listKey/listValue
// props; dialog errors are only logged.
value: function onClick() {
var _this2 = this;
var _props = this.props,
quickFind = _props.quickFind,
multiSelect = _props.multiSelect,
title = _props.title,
description = _props.description,
list = _props.list,
listKey = _props.listKey,
listValue = _props.listValue;
var _list = SelectButton.getDialogList(list, listKey, listValue);
chayns.dialog.select({
title: title,
message: description,
quickfind: quickFind,
multiselect: multiSelect,
list: _list
}).then(function (selected) {
_this2.onSelect(selected);
}).catch(function (e) {
console.error(e);
});
}
}, {
key: 'getReturnList',
// Maps the dialog's { name, value } entries back to the original items
// from the list prop (matched on listKey).
value: function getReturnList(selected) {
var _props2 = this.props,
list = _props2.list,
listKey = _props2.listKey;
var buttonType = selected.buttonType,
selectedItems = selected.selection;
var result = [];
selectedItems.map(function (item) {
list.map(function (listItem) {
if (listItem[listKey] === item.value) result.push(listItem);
});
});
return { buttonType: buttonType, selection: result };
}
}, {
key: 'setLabel',
// Replaces the rendered button text directly on the DOM node when the
// showSelection prop is enabled.
value: function setLabel(text) {
var showSelection = this.props.showSelection;
if (showSelection) {
this._btn.innerText = text;
}
}
}, {
key: 'render',
value: function render() {
var _this3 = this;
var _props3 = this.props,
className = _props3.className,
label = _props3.label;
var classNames = classnames(_defineProperty({
choosebutton: true
}, className, className));
return React.createElement(
'div',
{
className: classNames,
onClick: this.onClick,
ref: function ref(_ref) {
_this3._btn = _ref;
}
},
label
);
}
}], [{
key: 'getDialogList',
// Static: converts the list prop into the { name, value, isSelected }
// shape expected by chayns.dialog.select; falls back to the array index
// when no listKey is given.
value: function getDialogList(_list, listKey, listValue) {
var list = [];
if (_list) {
_list.map(function (item, i) {
var curListKey = listKey || i;
if (item[curListKey] && item[listValue]) {
list.push({ name: item[listValue], value: item[curListKey], isSelected: !!item.isSelected });
}
});
}
return list;
}
}]);
return SelectButton;
// Default prop values (emitted by Babel's class-properties transform).
}(Component), _class.defaultProps = {
quickFind: false,
multiSelect: false,
title: 'Select Dialog',
description: 'Please select an item',
label: 'Select',
showSelection: true,
className: null,
onSelect: null
}, _temp);
export { SelectButton as default }; |
libremoney/main | Lm/Modules/UserServer/Api/UnlockAccount.js | /**!
* LibreMoney 0.2
* Copyright (c) LibreMoney Team <<EMAIL>>
* CC0 license
*/
/*
import nxt.Account;
import nxt.Block;
import nxt.Nxt;
import nxt.Transaction;
import nxt.util.Convert;
import nxt.util.DbIterator;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.JSONStreamAware;
*/
/*
private static final Comparator<JSONObject> myTransactionsComparator = new Comparator<JSONObject>() {
public int compare(JSONObject o1, JSONObject o2) {
int t1 = ((Number)o1.get("timestamp")).intValue();
int t2 = ((Number)o2.get("timestamp")).intValue();
if (t1 < t2) {
return 1;
}
if (t1 > t2) {
return -1;
}
String id1 = (String)o1.get("id");
String id2 = (String)o2.get("id");
return id2.compareTo(id1);
}
};
*/
// res = user
/**
 * Handler stub for the "unlock account" user-API request.
 *
 * Currently a no-op: the entire body is the reference Java implementation
 * from the NXT project (kept below, commented out, as a porting guide).
 *
 * @param {object} req incoming request (expected to carry a secretPhrase
 *                     parameter -- see the reference code; TODO confirm)
 * @param {object} res per the comment above, this is the user object, not
 *                     an HTTP response
 */
function UnlockAccount(req, res) {
// TODO: port the reference implementation below to JavaScript.
//return UserRequestHandler.Create();
/*
String secretPhrase = req.getParameter("secretPhrase");
// lock all other instances of this account being unlocked
for (User u : Users.getAllUsers()) {
if (secretPhrase.equals(u.getSecretPhrase())) {
u.lockAccount();
if (! u.isInactive()) {
u.enqueue(JsonResponses.LockAccount);
}
}
}
Long accountId = user.unlockAccount(secretPhrase);
JSONObject response = new JSONObject();
response.put("response", "unlockAccount");
response.put("account", Convert.toUnsignedLong(accountId));
if (secretPhrase.length() < 30) {
response.put("secretPhraseStrength", 1);
} else {
response.put("secretPhraseStrength", 5);
}
Account account = Account.getAccount(accountId);
if (account == null) {
response.put("balanceNQT", 0);
} else {
response.put("balanceNQT", account.getUnconfirmedBalanceNQT());
JSONArray myTransactions = new JSONArray();
byte[] accountPublicKey = account.getPublicKey();
for (Transaction transaction : Nxt.getTransactionProcessor().getAllUnconfirmedTransactions()) {
if (Arrays.equals(transaction.getSenderPublicKey(), accountPublicKey)) {
JSONObject myTransaction = new JSONObject();
myTransaction.put("index", Users.getIndex(transaction));
myTransaction.put("transactionTimestamp", transaction.getTimestamp());
myTransaction.put("deadline", transaction.getDeadline());
myTransaction.put("account", Convert.toUnsignedLong(transaction.getRecipientId()));
myTransaction.put("sentAmountNQT", transaction.getAmountNQT());
if (accountId.equals(transaction.getRecipientId())) {
myTransaction.put("receivedAmountNQT", transaction.getAmountNQT());
}
myTransaction.put("feeNQT", transaction.getFeeNQT());
myTransaction.put("numberOfConfirmations", -1);
myTransaction.put("id", transaction.getStringId());
myTransactions.add(myTransaction);
} else if (accountId.equals(transaction.getRecipientId())) {
JSONObject myTransaction = new JSONObject();
myTransaction.put("index", Users.getIndex(transaction));
myTransaction.put("transactionTimestamp", transaction.getTimestamp());
myTransaction.put("deadline", transaction.getDeadline());
myTransaction.put("account", Convert.toUnsignedLong(transaction.getSenderId()));
myTransaction.put("receivedAmountNQT", transaction.getAmountNQT());
myTransaction.put("feeNQT", transaction.getFeeNQT());
myTransaction.put("numberOfConfirmations", -1);
myTransaction.put("id", transaction.getStringId());
myTransactions.add(myTransaction);
}
}
SortedSet<JSONObject> myTransactionsSet = new TreeSet<>(myTransactionsComparator);
int blockchainHeight = Nxt.getBlockchain().getLastBlock().getHeight();
try (DbIterator<? extends Block> blockIterator = Nxt.getBlockchain().getBlocks(account, 0)) {
while (blockIterator.hasNext()) {
Block block = blockIterator.next();
if (block.getTotalFeeNQT() > 0) {
JSONObject myTransaction = new JSONObject();
myTransaction.put("index", "block" + Users.getIndex(block));
myTransaction.put("blockTimestamp", block.getTimestamp());
myTransaction.put("block", block.getStringId());
myTransaction.put("earnedAmountNQT", block.getTotalFeeNQT());
myTransaction.put("numberOfConfirmations", blockchainHeight - block.getHeight());
myTransaction.put("id", "-");
myTransaction.put("timestamp", block.getTimestamp());
myTransactionsSet.add(myTransaction);
}
}
}
try (DbIterator<? extends Transaction> transactionIterator = Nxt.getBlockchain().getTransactions(account, (byte) -1, (byte) -1, 0)) {
while (transactionIterator.hasNext()) {
Transaction transaction = transactionIterator.next();
if (transaction.getSenderId().equals(accountId)) {
JSONObject myTransaction = new JSONObject();
myTransaction.put("index", Users.getIndex(transaction));
myTransaction.put("blockTimestamp", transaction.getBlockTimestamp());
myTransaction.put("transactionTimestamp", transaction.getTimestamp());
myTransaction.put("account", Convert.toUnsignedLong(transaction.getRecipientId()));
myTransaction.put("sentAmountNQT", transaction.getAmountNQT());
if (accountId.equals(transaction.getRecipientId())) {
myTransaction.put("receivedAmountNQT", transaction.getAmountNQT());
}
myTransaction.put("feeNQT", transaction.getFeeNQT());
myTransaction.put("numberOfConfirmations", blockchainHeight - transaction.getHeight());
myTransaction.put("id", transaction.getStringId());
myTransaction.put("timestamp", transaction.getTimestamp());
myTransactionsSet.add(myTransaction);
} else if (transaction.getRecipientId().equals(accountId)) {
JSONObject myTransaction = new JSONObject();
myTransaction.put("index", Users.getIndex(transaction));
myTransaction.put("blockTimestamp", transaction.getBlockTimestamp());
myTransaction.put("transactionTimestamp", transaction.getTimestamp());
myTransaction.put("account", Convert.toUnsignedLong(transaction.getSenderId()));
myTransaction.put("receivedAmountNQT", transaction.getAmountNQT());
myTransaction.put("feeNQT", transaction.getFeeNQT());
myTransaction.put("numberOfConfirmations", blockchainHeight - transaction.getHeight());
myTransaction.put("id", transaction.getStringId());
myTransaction.put("timestamp", transaction.getTimestamp());
myTransactionsSet.add(myTransaction);
}
}
}
Iterator<JSONObject> iterator = myTransactionsSet.iterator();
while (myTransactions.size() < 1000 && iterator.hasNext()) {
myTransactions.add(iterator.next());
}
if (myTransactions.size() > 0) {
JSONObject response2 = new JSONObject();
response2.put("response", "processNewData");
response2.put("addedMyTransactions", myTransactions);
user.enqueue(response2);
}
}
return response;
*/
}
module.exports = UnlockAccount;
|
brtjkzl/games-tracker | app/client/state/collection/index.test.js | <gh_stars>1-10
import { Status } from "../../constants";
import {
resolveGameUpdate,
resolveGames,
resolveGamesBulkUpdate
} from "./actionCreators";
import reducer, { initialState } from ".";
describe("Collection", () => {
// Resolving the games fetch replaces the games list wholesale.
test("GAMES_RESOLVED", () => {
const action = resolveGames([{ name: "Foo" }]);
const returnedState = reducer(initialState, action);
expect(returnedState.games).toEqual([{ name: "Foo" }]);
});
// A single-game update replaces the matching game's fields (here: status).
test("GAME_UPDATE_RESOLVED", () => {
const action = resolveGameUpdate({
id: "foo",
status: Status.COMPLETED
});
const returnedState = reducer(
{
...initialState,
games: [
{
id: "foo",
status: Status.BACKLOG
}
]
},
action
);
expect(returnedState.games).toContainEqual({
id: "foo",
status: Status.COMPLETED
});
});
// A bulk update merges new fields (rating) into the existing game while
// preserving fields the update did not mention (status stays BACKLOG).
test("GAMES_BULK_UPDATE_RESOLVED", () => {
const action = resolveGamesBulkUpdate([{ id: "foo", rating: 8 }]);
const returnedState = reducer(
{
...initialState,
games: [
{
id: "foo",
status: Status.BACKLOG
}
]
},
action
);
expect(returnedState.games).toContainEqual({
id: "foo",
status: Status.BACKLOG,
rating: 8
});
});
});
|
CSSSR/csssr.blog | components/Post/Body/utils/compilerMdx.js | import { Link } from '@csssr/core-design'
import { compiler } from 'markdown-to-jsx'
import { Fragment } from 'react'
import Subtitle from '../../Subtitle'
import Blockquote from '../Blockquote'
import Caption from '../Caption'
import Heading from '../Heading'
import Img from '../Img'
import List from '../List'
import ListItem from '../ListItem'
import Note from '../Note'
import ParagraphWithImage from '../ParagraphWithImage'
import Quote from '../Quote'
import Separator from '../Separator'
import Table from '../Table'
import Text from '../Text'
import VerticalGap from '../VerticalGap'
import Video from '../Video'
/**
 * Compiles post markdown into React elements via markdown-to-jsx, mapping
 * both standard HTML tags (h1-h6, p, a, lists, ...) and custom MDX-style
 * components (Quote, Img, Note, ...) onto the blog's own components.
 *
 * @param content  raw markdown/MDX source of the post
 * @param images   map of image name -> responsive sources, consumed by the
 *                 Img and Separator overrides
 */
export const compilerMdx = ({ content, images }) =>
compiler(content, {
wrapper: Fragment,
forceWrapper: true,
overrides: {
h1: {
component: Heading,
props: {
type: 'regular',
size: 'l',
},
},
h2: {
component: Heading.H2,
props: {
type: 'regular',
size: 'm',
},
},
h3: {
component: Heading.H3,
props: {
type: 'regular',
size: 's',
},
},
h4: {
component: Heading.H4,
props: {
type: 'regular',
size: 's',
},
},
h5: {
component: Heading.H5,
props: {
type: 'regular',
size: 's',
},
},
h6: {
component: Heading.H6,
props: {
type: 'regular',
size: 's',
},
},
hr: {
component: VerticalGap,
},
p: {
// Paragraphs that directly contain an Img are unwrapped (a <p> may not
// contain the block-level picture markup).
// https://github.com/probablyup/markdown-to-jsx/issues/209
component: function ParagraphWrapper(props) {
return props.children.some((child) => child && child.type && child.type === Img) ? (
<>{props.children}</>
) : (
<Text {...props} />
)
},
props: {
type: 'regular',
size: 'm',
},
},
a: {
// Links wrapping an image stay plain <a> tags (the design-system Link
// styles text only); external URLs open in a new tab.
component: function LinkWrapper(props) {
const isExternal = !props.href.startsWith('/')
if (props.children.some((child) => child && child.componentID === Img.componentID)) {
return isExternal ? (
// eslint-disable-next-line jsx-a11y/anchor-has-content
<a {...props} target="_blank" rel="noopener noreferrer" />
) : (
// eslint-disable-next-line jsx-a11y/anchor-has-content
<a {...props} />
)
}
// eslint-disable-next-line jsx-a11y/anchor-has-content
return isExternal ? <Link {...props} external /> : <Link {...props} />
},
props: {
'data-testid': 'Post:link',
},
},
blockquote: {
component: Blockquote,
},
ul: {
component: List,
},
ol: {
component: List,
props: {
isOrdered: true,
},
},
li: {
component: ListItem,
},
Quote: {
component: Quote,
props: {
className: 'quote-wrapper',
testId: 'Post:link',
},
},
code: {
props: {
// Enables Prism-style line numbering on fenced code blocks.
className: 'line-numbers',
},
},
Img: {
// Custom <Img imageName="..."/>: resolves responsive sources from the
// images map passed to compilerMdx.
component: function ImgWrapper({ imageName, ...rest }) {
return <Img className="picture" sources={images[imageName]} {...rest} />
},
},
img: {
// Plain markdown images bypass the responsive-source pipeline.
component: Img,
props: {
className: 'picture',
withOutProcessing: true,
},
},
ParagraphWithImage: {
component: ParagraphWithImage,
},
Separator: {
component: function SeparatorWrapper({ imageName, ...rest }) {
return <Separator imageName={imageName} sources={images[imageName]} {...rest} />
},
},
Note: {
component: Note,
props: {
className: 'note',
},
},
Subtitle: {
component: Subtitle,
},
Video: {
component: Video,
},
Caption: {
component: Caption,
},
Table: {
component: Table,
},
},
})
|
ninnghazad/moGL | example/ModelLoader.hh | <gh_stars>10-100
#ifndef MODELLOADER_HH
#define MODELLOADER_HH
#include <fstream>
#include <string>
#include <map>
#include "Model.hh"
/// Loads 3D models from disk, dispatching to a format-specific parser
/// looked up in the _parsers map.
class ModelLoader
{
public:
ModelLoader();
~ModelLoader();
public:
/// Loads the model stored in `filename`.
/// NOTE(review): the raw Model* suggests ownership passes to the caller
/// -- confirm against the implementation.
Model* load(std::string filename);
private:
/// Parses a Wavefront OBJ stream into a Model.
Model* loadOBJ(std::ifstream& src);
private:
// Pointer-to-member type for format parsers; the map key is presumably
// the file extension -- TODO confirm in the .cpp.
using MLF = Model* (ModelLoader::*)(std::ifstream&);
std::map<std::string, MLF> _parsers;
};
#endif // MODELLOADER_HH
|
npocmaka/Windows-Server-2003 | multimedia/directx/gamectrl/default/resource.h | <reponame>npocmaka/Windows-Server-2003
//{{NO_DEPENDENCIES}}
// Microsoft Developer Studio generated include file.
// Used by cplsvr1.rc
//
#define IDS_SHEETCAPTION 1
#define IDS_ERROR 2
#define IDS_NOFORCEFEEDBACK 3
#define IDS_ACQUIRED 4
#define IDS_UNACQUIRED 5
#define IDS_DEFAULT 6
#define IDD_INITIAL 10
#define IDD_XY 11
#define IDD_SLIDER 12
#define IDD_POV 13
//#define IDC_POVLABEL 14
#define IDI_WINFLAG 105
#define IDI_CONFIG 108
#define IDC_STOP 1001
#define IDC_XDATA 1002
#define IDC_YDATA 1003
#define IDC_ZDATA 1004
#define IDC_RXDATA 1005
#define IDC_RYDATA 1006
#define IDC_RZDATA 1007
#define IDC_S0DATA 1008
#define IDC_S1DATA 1009
#define IDC_POV0DATA 1010
#define IDC_POV1DATA 1011
#define IDC_POV2DATA 1012
#define IDC_POV3DATA 1013
#define IDC_BUTTONSDOWN 1014
#define IDC_EFFECTLIST 1018
#define IDC_NUMAXES 1019
#define IDC_NUMBUTTONS 1020
#define IDC_NUMPOVS 1021
#define IDC_AXISLIST 1022
#define IDC_BUTTONLIST 1023
#define IDC_POVLIST 1024
#define IDC_DISPNAME 1025
#define IDC_TYPENAME 1026
#define IDC_CLSID 1027
#define IDC_CALLOUT 1028
#define IDC_DEVSTATUS 1029
// blj: Calibration ID's
#define IDD_CALIBRATE 1032
#define IDD_CALIBRATE1 1033
#define IDC_CALIBRATE 1034
#define IDC_GROUPBOX 1035
#define IDC_JOYLIST1 1036
#define IDC_JOYLIST1_LABEL 1037
// THESE IDs MUST REMAIN UNDISTURBED!
#define IDC_JOYLIST2 1038
#define IDC_JOYLIST3 IDC_JOYLIST2+1
#define IDC_JOYLIST4 IDC_JOYLIST2+2
#define IDC_JOYLIST5 IDC_JOYLIST2+3
#define IDC_JOYLIST6 IDC_JOYLIST2+4
#define IDC_JOYLIST7 IDC_JOYLIST2+5
//******************************************************
#define IDC_JOYLIST2_LABEL 1071
#define IDC_JOYLIST3_LABEL IDC_JOYLIST2_LABEL+1
#define IDC_JOYLIST4_LABEL IDC_JOYLIST2_LABEL+2
#define IDC_JOYLIST5_LABEL IDC_JOYLIST2_LABEL+3
#define IDC_JOYLIST6_LABEL IDC_JOYLIST2_LABEL+4
#define IDC_JOYLIST7_LABEL IDC_JOYLIST2_LABEL+5
#define IDC_JOYPOV_LABEL IDC_JOYLIST2_LABEL+7
// ID's related to items on the IDD_CALIBRATE page
#define IDC_JOYPICKPOV 1101
#define IDC_JOYCALBACK 1102
#define IDC_JOYCALNEXT 1103
#define IDC_JOYCALDONE 1104
#define IDC_JOYCALMSG 1105
#define IDC_JOYPOV 1106
// IDS's related to items on the IDD_CALIBRATE page
#define IDS_JOYCALCAPN 1200
#define IDS_JOYCALBACK 1201
#define IDS_JOYCALNEXT 1202
#define IDS_JOYCALDONE 1203
#define IDS_JOYCALXY_CENTERYOKE 1205
#define IDS_JOYCALXY_CENTERCAR 1206
#define IDS_JOYCALXY_CENTERGAMEPAD 1207
#define IDS_JOYCALXY_CENTER 1208
#define IDS_JOYCALXY_MOVEYOKE 1209
#define IDS_JOYCALXY_MOVECAR 1210
#define IDS_JOYCALXY_MOVEGAMEPAD 1211
#define IDS_JOYCALXY_MOVE 1212
#define IDS_JOYCALXY_CENTERYOKE2 1213
#define IDS_JOYCALXY_CENTERCAR2 1214
#define IDS_JOYCALXY_CENTERGAMEPAD2 1215
#define IDS_JOYCALXY_CENTER2 1216
#ifdef DEADZONE
// String defines!
#define IDS_DEADZONE_TITLE 2000
#define IDS_JOYSTICK_DEADZONE 2001
#define IDS_GAMEPAD_DEADZONE 2002
#define IDS_CAR_DEADZONE 2003
#define IDS_YOKE_DEADZONE 2004
// Spin Control defines!
#define IDC_X_DEADZONE_SPIN 2005
#define IDC_Y_DEADZONE_SPIN 2006
#define IDC_X_SATURATION_SPIN 2007
#define IDC_Y_SATURATION_SPIN 2008
// Edit Controls associated with the
// Spin controls above!
#define IDC_X_DEADZONE 2010
#define IDC_Y_DEADZONE 2011
#define IDC_X_SATURATION 2015
#define IDC_Y_SATURATION 2016
// Static text field defines!
#define IDC_DEADZONE_TITLE 2009
#define IDC_SATURATION_TITLE 2014
#define IDC_X_AXIS_LABEL_DEADZONE 2012
#define IDC_Y_AXIS_LABEL_DEADZONE 2013
#define IDC_X_AXIS_LABEL_SATURATION 2017
#define IDC_Y_AXIS_LABEL_SATURATION 2018
#endif // DEADZONE
// Next default values for new objects
//
#ifdef APSTUDIO_INVOKED
#ifndef APSTUDIO_READONLY_SYMBOLS
#define _APS_NEXT_RESOURCE_VALUE 110
#define _APS_NEXT_COMMAND_VALUE 40001
#define _APS_NEXT_CONTROL_VALUE 1030
#define _APS_NEXT_SYMED_VALUE 101
#endif
#endif
|
work-mohit/Placement-Practice | Love Babber OnGoing Placement/Queue/Reverse Queue using Recursion.cpp | void reverseQ(queue<int>& q){
if(q.empty()){
return;
}
int ele = q.front();
q.pop();
reverseQ(q);
q.push(ele);
}
queue<int> rev(queue<int> q)
{
reverseQ(q);
return q;
}
////////////////////////////////////////////////////////////////
// Reverses q in place: pop the front element, reverse the rest
// recursively, then push the element at the back.
// NOTE(review): this is NOT tail recursion -- the push happens after the
// recursive call, so each element keeps a live stack frame.
void reverseQ(queue<int>& q){
if(q.empty()){
return;
}
int ele = q.front();
q.pop();
reverseQ(q);
q.push(ele);
}
// Forwarding wrapper around reverseQ. (The original comment labelled this
// "tail call optimization", but a plain wrapper does not make the
// recursion above tail-recursive.)
void auxReverseQ(queue<int>& q){
reverseQ(q);
}
// Returns a reversed copy of q; taken by value so the caller's queue is
// unchanged.
queue<int> rev(queue<int> q)
{
auxReverseQ(q);
return q;
}
JE-Chen/je_old_repo | OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/GL/ARB/half_float_pixel.py | <reponame>JE-Chen/je_old_repo<filename>OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/GL/ARB/half_float_pixel.py<gh_stars>0
'''OpenGL extension ARB.half_float_pixel
This module customises the behaviour of the
OpenGL.raw.GL.ARB.half_float_pixel to provide a more
Python-friendly API
Overview (from the spec)
This extension introduces a new data type for half-precision (16-bit)
floating-point quantities. The floating-point format is very similar
to the IEEE single-precision floating-point standard, except that it
has only 5 exponent bits and 10 mantissa bits. Half-precision floats
are smaller than full precision floats and provide a larger dynamic
range than similarly sized normalized scalar data types.
This extension allows applications to use half-precision floating-
point data when specifying pixel data. It extends the existing image
specification commands to accept the new data type.
Floating-point data is clamped to [0, 1] at various places in the
GL unless clamping is disabled with the ARB_color_buffer_float
extension.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/ARB/half_float_pixel.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GL import _types, _glgets
from OpenGL.raw.GL.ARB.half_float_pixel import *
from OpenGL.raw.GL.ARB.half_float_pixel import _EXTENSION_NAME
def glInitHalfFloatPixelARB():
    '''Return boolean indicating whether this extension is available.

    Checks the runtime extension registry for the ARB_half_float_pixel
    extension (the module-level _EXTENSION_NAME constant).
    '''
    # Local import, matching the autogenerated PyOpenGL extension modules;
    # presumably avoids import cycles at module load time -- TODO confirm.
    from OpenGL import extensions
    return extensions.hasGLExtension( _EXTENSION_NAME )
### END AUTOGENERATED SECTION |
HedgehogCode/javacpp-presets | llvm/samples/llvm/EmitBitcode.java | <filename>llvm/samples/llvm/EmitBitcode.java
/*
* Copyright (C) 2021 <NAME>
*
* Licensed either under the Apache License, Version 2.0, or (at your option)
* under the terms of the GNU General Public License as published by
* the Free Software Foundation (subject to the "Classpath" exception),
* either version 2, or any later version (collectively, the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* http://www.gnu.org/licenses/
* http://www.gnu.org/software/classpath/license.html
*
* or as provided in the LICENSE.txt file that accompanied this code.
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.bytedeco.javacpp.BytePointer;
import org.bytedeco.javacpp.Pointer;
import org.bytedeco.javacpp.PointerPointer;
import org.bytedeco.llvm.LLVM.LLVMBasicBlockRef;
import org.bytedeco.llvm.LLVM.LLVMBuilderRef;
import org.bytedeco.llvm.LLVM.LLVMContextRef;
import org.bytedeco.llvm.LLVM.LLVMExecutionEngineRef;
import org.bytedeco.llvm.LLVM.LLVMGenericValueRef;
import org.bytedeco.llvm.LLVM.LLVMMemoryBufferRef;
import org.bytedeco.llvm.LLVM.LLVMModuleRef;
import org.bytedeco.llvm.LLVM.LLVMTargetMachineRef;
import org.bytedeco.llvm.LLVM.LLVMTargetRef;
import org.bytedeco.llvm.LLVM.LLVMTypeRef;
import org.bytedeco.llvm.LLVM.LLVMValueRef;
import static org.bytedeco.llvm.global.LLVM.*;
/**
* This file contains two samples related to emitting LLVM bitcode and interpreting it.
* <p>
* - EmitBitcodeAndRelocatableObject
* - EvaluateBitcode
* <p>
* The EvaluateBitcode sample depends on EmitBitcodeAndRelocatableObject
* <p>
* The samples should be ran in declaration order, meaning EmitBitcodeAndRelocatableObject
* should run before EvaluateBitcode.
*/
public class EmitBitcode {
// Shared out-parameter reused by every LLVM call that reports an error string.
public static final BytePointer error = new BytePointer();
/**
* Sample for generating both LLVM bitcode and relocatable object file from an LLVM module
* <p>
* The generated module (and object file) will have a single sum function, which returns
* the sum of two integers.
* <p>
* declare i32 @sum(i32 %lhs, i32 %rhs)
* <p>
* This sample contains code for the following steps
* <p>
* 1. Initializing required LLVM components
* 2. Generating LLVM IR for a sum function
* 3. Write the LLVM bitcode to a file on disk
* 4. Write the relocatable object file to a file on disk
* 5. Dispose of the allocated resources
*/
public static void EmitBitcodeAndRelocatableObject() {
// Stage 1: Initialize LLVM components
LLVMInitializeCore(LLVMGetGlobalPassRegistry());
LLVMInitializeNativeAsmPrinter();
LLVMInitializeNativeAsmParser();
LLVMInitializeNativeDisassembler();
LLVMInitializeNativeTarget();
// Stage 2: Build the sum function
LLVMContextRef context = LLVMContextCreate();
LLVMModuleRef module = LLVMModuleCreateWithNameInContext("sum", context);
LLVMBuilderRef builder = LLVMCreateBuilderInContext(context);
LLVMTypeRef i32Type = LLVMInt32TypeInContext(context);
PointerPointer<Pointer> sumArgumentTypes = new PointerPointer<>(2)
.put(0, i32Type)
.put(1, i32Type);
LLVMTypeRef sumType = LLVMFunctionType(i32Type, sumArgumentTypes, /* argumentCount */ 2, /* isVariadic */ 0);
LLVMValueRef sum = LLVMAddFunction(module, "sum", sumType);
LLVMSetFunctionCallConv(sum, LLVMCCallConv);
LLVMValueRef lhs = LLVMGetParam(sum, 0);
LLVMValueRef rhs = LLVMGetParam(sum, 1);
LLVMBasicBlockRef entry = LLVMAppendBasicBlockInContext(context, sum, "entry");
LLVMPositionBuilderAtEnd(builder, entry);
LLVMValueRef result = LLVMBuildAdd(builder, lhs, rhs, "result = lhs + rhs");
LLVMBuildRet(builder, result);
LLVMDumpModule(module);
// NOTE(review): the early returns below skip Stage 5, leaking builder,
// module and context -- acceptable for a sample, not for production code.
if (LLVMVerifyModule(module, LLVMPrintMessageAction, error) != 0) {
System.out.println("Failed to validate module: " + error.getString());
return;
}
// Stage 3: Dump the module to file
if (LLVMWriteBitcodeToFile(module, "./sum.bc") != 0) {
System.err.println("Failed to write bitcode to file");
return;
}
// Stage 4: Create the relocatable object file
BytePointer triple = LLVMGetDefaultTargetTriple();
LLVMTargetRef target = new LLVMTargetRef();
if (LLVMGetTargetFromTriple(triple, target, error) != 0) {
System.out.println("Failed to get target from triple: " + error.getString());
LLVMDisposeMessage(error);
return;
}
String cpu = "generic";
String cpuFeatures = "";
int optimizationLevel = 0;
LLVMTargetMachineRef tm = LLVMCreateTargetMachine(
target, triple.getString(), cpu, cpuFeatures, optimizationLevel,
LLVMRelocDefault, LLVMCodeModelDefault
);
BytePointer outputFile = new BytePointer("./sum.o");
if (LLVMTargetMachineEmitToFile(tm, module, outputFile, LLVMObjectFile, error) != 0) {
System.err.println("Failed to emit relocatable object file: " + error.getString());
LLVMDisposeMessage(error);
return;
}
// Stage 5: Dispose of allocated resources
outputFile.deallocate();
LLVMDisposeMessage(triple);
LLVMDisposeBuilder(builder);
LLVMDisposeModule(module);
LLVMContextDispose(context);
}
/**
* Sample code for importing a LLVM bitcode file and running a function
* inside of the imported module
* <p>
* This sample depends on EmitBitcode to produce the bitcode file. Make sure
* you've ran the EmitBitcode sample and have the 'sum.bc' bitcode file.
* <p>
* This sample contains code for the following steps:
* <p>
* 1. Initializing required LLVM components
* 2. Load and parse the bitcode
* 3. Run the 'sum' function inside the module
* 4. Dispose of the allocated resources
*/
public static void EvaluateBitcode() {
// Stage 1: Initialize LLVM components
LLVMInitializeCore(LLVMGetGlobalPassRegistry());
LLVMInitializeNativeAsmPrinter();
LLVMInitializeNativeAsmParser();
LLVMInitializeNativeTarget();
// Stage 2: Load and parse bitcode
LLVMContextRef context = LLVMContextCreate();
LLVMTypeRef i32Type = LLVMInt32TypeInContext(context);
LLVMModuleRef module = new LLVMModuleRef();
LLVMMemoryBufferRef membuf = new LLVMMemoryBufferRef();
BytePointer inputFile = new BytePointer("./sum.bc");
if (LLVMCreateMemoryBufferWithContentsOfFile(inputFile, membuf, error) != 0) {
System.err.println("Failed to read file into memory buffer: " + error.getString());
LLVMDisposeMessage(error);
return;
}
if (LLVMParseBitcodeInContext2(context, membuf, module) != 0) {
System.err.println("Failed to parser module from bitcode");
return;
}
// Stage 3: Run the 'sum' function via the LLVM interpreter.
LLVMExecutionEngineRef engine = new LLVMExecutionEngineRef();
if (LLVMCreateInterpreterForModule(engine, module, error) != 0) {
System.err.println("Failed to create LLVM interpreter: " + error.getString());
LLVMDisposeMessage(error);
return;
}
LLVMValueRef sum = LLVMGetNamedFunction(module, "sum");
PointerPointer<Pointer> arguments = new PointerPointer<>(2)
.put(0, LLVMCreateGenericValueOfInt(i32Type, 42, /* signExtend */ 0))
.put(1, LLVMCreateGenericValueOfInt(i32Type, 30, /* signExtend */ 0));
LLVMGenericValueRef result = LLVMRunFunction(engine, sum, 2, arguments);
System.out.println();
System.out.print("The result of add(42, 30) imported from bitcode and executed with LLVM interpreter is: ");
System.out.println(LLVMGenericValueToInt(result, /* signExtend */ 0));
// Stage 4: Dispose of the allocated resources
LLVMDisposeModule(module);
LLVMContextDispose(context);
}
public static void main(String[] args) {
if (args.length == 1) switch (args[0]) {
// Each case exits the JVM, so the missing `break`s are unreachable;
// only an unrecognized flag falls through to the help text below.
case "-emit":
EmitBitcodeAndRelocatableObject();
System.exit(0);
case "-evaluate":
EvaluateBitcode();
System.exit(0);
default:
// Display help
}
System.err.println("Pass `-emit` or `-evaluate`.");
System.exit(1);
}
}
|
aHenryJard/jetty.project | jetty-util/src/main/java/org/eclipse/jetty/util/annotation/ManagedOperation.java | //
// ========================================================================
// Copyright (c) 1995-2020 Mort Bay Consulting Pty Ltd and others.
//
// This program and the accompanying materials are made available under
// the terms of the Eclipse Public License 2.0 which is available at
// https://www.eclipse.org/legal/epl-2.0
//
// This Source Code may also be made available under the following
// Secondary Licenses when the conditions for such availability set
// forth in the Eclipse Public License, v. 2.0 are satisfied:
// the Apache License v2.0 which is available at
// https://www.apache.org/licenses/LICENSE-2.0
//
// SPDX-License-Identifier: EPL-2.0 OR Apache-2.0
// ========================================================================
//
package org.eclipse.jetty.util.annotation;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* The <code>@ManagedOperation</code> annotation is used to indicate that a given method
* should be considered a JMX operation.
*/
@Retention(RetentionPolicy.RUNTIME)
@Documented
@Target({ElementType.METHOD})
public @interface ManagedOperation
{
    /**
     * Description of the Managed Operation.
     *
     * @return the operation description; "Not Specified" when omitted
     */
    String value() default "Not Specified";

    /**
     * The impact of an operation.
     *
     * NOTE: Valid values are UNKNOWN, ACTION, INFO, ACTION_INFO
     *
     * NOTE: applies to METHOD
     *
     * @return String representing the impact of the operation
     */
    String impact() default "UNKNOWN";

    /**
     * Does the managed operation exist on a proxy object?
     *
     * @return true if a proxy object is involved
     */
    boolean proxied() default false;
}
|
hrjaco/mcrouter | mcrouter/standalone_options.h | <reponame>hrjaco/mcrouter<gh_stars>1-10
/**
* Copyright (c) 2014, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
#pragma once
#include <sys/resource.h>
#include "mcrouter/lib/fbi/debug.h"
#include "mcrouter/options.h"
namespace facebook { namespace memcache { namespace mcrouter {
/*
** Observation of mcrouter in production indicates that
** that delta(RSS) / delta(outstanding reqs) is just under
** 3K.
*/
#define OUTSTANDING_REQ_BYTES (3 * 1024)
#define DEFAULT_MAX_GLOBAL_OUTSTANDING_REQS \
(uint32_t)((1024 * 1024 * 200) / OUTSTANDING_REQ_BYTES)
#define DEFAULT_MAX_CLIENT_OUTSTANDING_REQS \
(uint32_t)((1024 * 1024 * 100) / OUTSTANDING_REQ_BYTES)
#define DEFAULT_FDLIMIT 65535
#define OPTIONS_FILE "mcrouter/standalone_options_list.h"
#define OPTIONS_NAME McrouterStandaloneOptions
#include "mcrouter/options-template.h"
#undef OPTIONS_FILE
#undef OPTIONS_NAME
namespace options {
McrouterStandaloneOptions substituteTemplates(McrouterStandaloneOptions opts);
} // facebook::memcache::mcrouter::options
}}} // facebook::memcache::mcrouter
|
reels-research/iOS-Private-Frameworks | LoggingSupport.framework/_OSLogEventStoreMetadata.h | /* Generated by RuntimeBrowser
Image: /System/Library/PrivateFrameworks/LoggingSupport.framework/LoggingSupport
*/
@interface _OSLogEventStoreMetadata : NSObject {
NSUUID * _archiveUUID;
_OSLogEventStoreTimeRef * _end;
_OSLogEventStoreTimeRef * _oldestHighVolume;
_OSLogEventStoreTimeRef * _oldestLive;
_OSLogEventStoreTimeRef * _oldestPersist;
_OSLogEventStoreTimeRef * _oldestSignpost;
_OSLogEventStoreTimeRef * _oldestSpecial;
NSUUID * _sourceUUID;
_OSLogEventStoreTimeRef * _ttls;
}
@property (nonatomic, readonly) NSUUID *archiveUUID;
@property (nonatomic, readonly) _OSLogEventStoreTimeRef *end;
@property (nonatomic, readonly) _OSLogEventStoreTimeRef *oldestHighVolume;
@property (nonatomic, readonly) _OSLogEventStoreTimeRef *oldestLive;
@property (nonatomic, readonly) _OSLogEventStoreTimeRef *oldestPersist;
@property (nonatomic, readonly) _OSLogEventStoreTimeRef *oldestSignpost;
@property (nonatomic, readonly) _OSLogEventStoreTimeRef *oldestSpecial;
@property (nonatomic, readonly) NSUUID *sourceUUID;
- (void).cxx_destruct;
- (id)archiveUUID;
- (id)end;
- (id)initWithCollection:(id)arg1 localStorePlist:(id)arg2 liveDataDescriptor:(int)arg3;
- (id)initWithDictionary:(id)arg1;
- (id)oldestHighVolume;
- (id)oldestLive;
- (id)oldestPersist;
- (id)oldestSignpost;
- (id)oldestSpecial;
- (id)sourceUUID;
- (id)timeRefForTTLClass:(unsigned char)arg1;
@end
|
gclm/RESTKit | src/main/java/io/github/newhoo/restkit/toolwindow/action/toolbar/EnableLibraryAction.java | package io.github.newhoo.restkit.toolwindow.action.toolbar;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.ToggleAction;
import com.intellij.openapi.project.Project;
import io.github.newhoo.restkit.config.CommonSettingComponent;
import io.github.newhoo.restkit.toolwindow.RestServiceToolWindow;
import io.github.newhoo.restkit.toolwindow.RestToolWindowFactory;
import org.jetbrains.annotations.NotNull;
/**
* EnableLibraryAction
*/
/**
 * Toolbar toggle controlling whether REST services are also scanned from
 * project libraries. The flag is persisted via {@link CommonSettingComponent};
 * flipping it schedules a refresh of the service tree.
 */
public class EnableLibraryAction extends ToggleAction {

    @Override
    public boolean isSelected(@NotNull AnActionEvent e) {
        final Project project = e.getProject();
        // No project context (e.g. welcome screen): report unselected.
        return project != null
                && CommonSettingComponent.getInstance(project).getState().isScanServiceWithLib();
    }

    @Override
    public void setSelected(@NotNull AnActionEvent e, boolean state) {
        final Project project = e.getProject();
        if (project == null) {
            return;
        }
        CommonSettingComponent.getInstance(project).getState().setScanServiceWithLib(state);
        // Re-scan so the tool window immediately reflects the new setting.
        RestToolWindowFactory.getRestServiceToolWindow(project, RestServiceToolWindow::scheduleUpdateTree);
    }
}
|
StephanErb/pants | tests/python/pants_test/scm/test_git.py | <gh_stars>0
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
import os
import unittest
from builtins import open
from contextlib import contextmanager
from textwrap import dedent
from unittest import skipIf
from pants.scm.git import Git
from pants.scm.scm import Scm
from pants.util.contextutil import environment_as, pushd, temporary_dir
from pants.util.dirutil import chmod_plus_x, safe_mkdir, safe_mkdtemp, safe_open, safe_rmtree, touch
from pants.util.process_handler import subprocess
from pants_test.testutils.git_util import MIN_REQUIRED_GIT_VERSION, git_version
@skipIf(git_version() < MIN_REQUIRED_GIT_VERSION,
'The GitTest requires git >= {}.'.format(MIN_REQUIRED_GIT_VERSION))
class GitTest(unittest.TestCase):
  @staticmethod
  def init_repo(remote_name, remote):
    """Initialize a git repo in the current working directory and register
    `remote` under `remote_name`, configuring a throwaway user identity so
    commits succeed on machines without global git config."""
    # TODO (peiyu) clean this up, use `git_util.initialize_repo`.
    subprocess.check_call(['git', 'init'])
    subprocess.check_call(['git', 'config', 'user.email', '<EMAIL>'])
    subprocess.check_call(['git', 'config', 'user.name', '<NAME>'])
    subprocess.check_call(['git', 'remote', 'add', remote_name, remote])
def setUp(self):
self.origin = safe_mkdtemp()
with pushd(self.origin):
subprocess.check_call(['git', 'init', '--bare'])
self.gitdir = safe_mkdtemp()
self.worktree = safe_mkdtemp()
self.readme_file = os.path.join(self.worktree, 'README')
with environment_as(GIT_DIR=self.gitdir, GIT_WORK_TREE=self.worktree):
self.init_repo('depot', self.origin)
touch(self.readme_file)
subprocess.check_call(['git', 'add', 'README'])
safe_mkdir(os.path.join(self.worktree, 'dir'))
with open(os.path.join(self.worktree, 'dir', 'f'), 'w') as f:
f.write("file in subdir")
# Make some symlinks
os.symlink('f', os.path.join(self.worktree, 'dir', 'relative-symlink'))
os.symlink('no-such-file', os.path.join(self.worktree, 'dir', 'relative-nonexistent'))
os.symlink('dir/f', os.path.join(self.worktree, 'dir', 'not-absolute\u2764'))
os.symlink('../README', os.path.join(self.worktree, 'dir', 'relative-dotdot'))
os.symlink('dir', os.path.join(self.worktree, 'link-to-dir'))
os.symlink('README/f', os.path.join(self.worktree, 'not-a-dir'))
os.symlink('loop1', os.path.join(self.worktree, 'loop2'))
os.symlink('loop2', os.path.join(self.worktree, 'loop1'))
subprocess.check_call(['git', 'add', 'README', 'dir', 'loop1', 'loop2',
'link-to-dir', 'not-a-dir'])
subprocess.check_call(['git', 'commit', '-am', 'initial commit with decode -> \x81b'])
self.initial_rev = subprocess.check_output(['git', 'rev-parse', 'HEAD']).strip()
subprocess.check_call(['git', 'tag', 'first'])
subprocess.check_call(['git', 'push', '--tags', 'depot', 'master'])
subprocess.check_call(['git', 'branch', '--set-upstream-to', 'depot/master'])
with safe_open(self.readme_file, 'wb') as readme:
readme.write('Hello World.\u2764'.encode('utf-8'))
subprocess.check_call(['git', 'commit', '-am', 'Update README.'])
self.current_rev = subprocess.check_output(['git', 'rev-parse', 'HEAD']).strip()
self.clone2 = safe_mkdtemp()
with pushd(self.clone2):
self.init_repo('origin', self.origin)
subprocess.check_call(['git', 'pull', '--tags', 'origin', 'master:master'])
with safe_open(os.path.realpath('README'), 'a') as readme:
readme.write('--')
subprocess.check_call(['git', 'commit', '-am', 'Update README 2.'])
subprocess.check_call(['git', 'push', '--tags', 'origin', 'master'])
self.git = Git(gitdir=self.gitdir, worktree=self.worktree)
  @contextmanager
  def mkremote(self, remote_name):
    """Context manager: registers a temporary git remote named `remote_name`
    backed by a fresh temp dir, yields the remote's URI, and removes the
    remote again on exit (even if the body raises)."""
    with temporary_dir() as remote_uri:
      subprocess.check_call(['git', 'remote', 'add', remote_name, remote_uri])
      try:
        yield remote_uri
      finally:
        subprocess.check_call(['git', 'remote', 'remove', remote_name])
  def tearDown(self):
    # Remove every temp dir created in setUp: the bare origin repo, the
    # detached gitdir, its worktree, and the secondary clone.
    safe_rmtree(self.origin)
    safe_rmtree(self.gitdir)
    safe_rmtree(self.worktree)
    safe_rmtree(self.clone2)
def test_listdir(self):
reader = self.git.repo_reader(self.initial_rev)
for dirname in '.', './.':
results = reader.listdir(dirname)
self.assertEqual([b'README',
b'dir',
b'link-to-dir',
b'loop1',
b'loop2',
b'not-a-dir'],
sorted(results))
for dirname in 'dir', './dir':
results = reader.listdir(dirname)
self.assertEqual([b'f',
'not-absolute\u2764'.encode('utf-8'),
b'relative-dotdot',
b'relative-nonexistent',
b'relative-symlink'],
sorted(results))
results = reader.listdir('link-to-dir')
self.assertEqual([b'f',
'not-absolute\u2764'.encode('utf-8'),
b'relative-dotdot',
b'relative-nonexistent',
b'relative-symlink'],
sorted(results))
with self.assertRaises(reader.MissingFileException):
with reader.listdir('bogus'):
pass
def test_lstat(self):
reader = self.git.repo_reader(self.initial_rev)
def lstat(*components):
return type(reader.lstat(os.path.join(*components)))
self.assertEqual(reader.Symlink, lstat('dir', 'relative-symlink'))
self.assertEqual(reader.Symlink, lstat('not-a-dir'))
self.assertEqual(reader.File, lstat('README'))
self.assertEqual(reader.Dir, lstat('dir'))
self.assertEqual(type(None), lstat('nope-not-here'))
def test_readlink(self):
reader = self.git.repo_reader(self.initial_rev)
def readlink(*components):
return reader.readlink(os.path.join(*components))
self.assertEqual('dir/f', readlink('dir', 'relative-symlink'))
self.assertEqual(None, readlink('not-a-dir'))
self.assertEqual(None, readlink('README'))
self.assertEqual(None, readlink('dir'))
self.assertEqual(None, readlink('nope-not-here'))
def test_open(self):
reader = self.git.repo_reader(self.initial_rev)
with reader.open('README') as f:
self.assertEqual(b'', f.read())
with reader.open('dir/f') as f:
self.assertEqual(b'file in subdir', f.read())
with self.assertRaises(reader.MissingFileException):
with reader.open('no-such-file') as f:
self.assertEqual(b'', f.read())
with self.assertRaises(reader.MissingFileException):
with reader.open('dir/no-such-file') as f:
pass
with self.assertRaises(reader.IsDirException):
with reader.open('dir') as f:
self.assertEqual(b'', f.read())
current_reader = self.git.repo_reader(self.current_rev)
with current_reader.open('README') as f:
self.assertEqual('Hello World.\u2764'.encode('utf-8'), f.read())
with current_reader.open('link-to-dir/f') as f:
self.assertEqual(b'file in subdir', f.read())
with current_reader.open('dir/relative-symlink') as f:
self.assertEqual(b'file in subdir', f.read())
with self.assertRaises(current_reader.SymlinkLoopException):
with current_reader.open('loop1') as f:
pass
with self.assertRaises(current_reader.MissingFileException):
with current_reader.open('dir/relative-nonexistent') as f:
pass
with self.assertRaises(current_reader.NotADirException):
with current_reader.open('not-a-dir') as f:
pass
with self.assertRaises(current_reader.MissingFileException):
with current_reader.open('dir/not-absolute\u2764') as f:
pass
with self.assertRaises(current_reader.MissingFileException):
with current_reader.open('dir/relative-nonexistent') as f:
pass
with current_reader.open('dir/relative-dotdot') as f:
self.assertEqual('Hello World.\u2764'.encode('utf-8'), f.read())
def test_integration(self):
self.assertEqual(set(), self.git.changed_files())
self.assertEqual({'README'}, self.git.changed_files(from_commit='HEAD^'))
tip_sha = self.git.commit_id
self.assertTrue(tip_sha)
self.assertTrue(tip_sha in self.git.changelog())
merge_base = self.git.merge_base()
self.assertTrue(merge_base)
self.assertTrue(merge_base in self.git.changelog())
with self.assertRaises(Scm.LocalException):
self.git.server_url
with environment_as(GIT_DIR=self.gitdir, GIT_WORK_TREE=self.worktree):
with self.mkremote('origin') as origin_uri:
# We shouldn't be fooled by remotes with origin in their name.
with self.mkremote('temp_origin'):
origin_url = self.git.server_url
self.assertEqual(origin_url, origin_uri)
self.assertTrue(self.git.tag_name.startswith('first-'), msg='un-annotated tags should be found')
self.assertEqual('master', self.git.branch_name)
def edit_readme():
with open(self.readme_file, 'a') as fp:
fp.write('More data.')
edit_readme()
with open(os.path.join(self.worktree, 'INSTALL'), 'w') as untracked:
untracked.write('make install')
self.assertEqual({'README'}, self.git.changed_files())
self.assertEqual({'README', 'INSTALL'}, self.git.changed_files(include_untracked=True))
# Confirm that files outside of a given relative_to path are ignored
self.assertEqual(set(), self.git.changed_files(relative_to='non-existent'))
self.git.commit('API Changes.')
try:
# These changes should be rejected because our branch point from origin is 1 commit behind
# the changes pushed there in clone 2.
self.git.push()
except Scm.RemoteException:
with environment_as(GIT_DIR=self.gitdir, GIT_WORK_TREE=self.worktree):
subprocess.check_call(['git', 'reset', '--hard', 'depot/master'])
self.git.refresh()
edit_readme()
self.git.commit('''API '"' " Changes.''')
self.git.push()
# HEAD is merged into master
self.assertEqual(self.git.commit_date(self.git.merge_base()), self.git.commit_date('HEAD'))
self.assertEqual(self.git.commit_date('HEAD'), self.git.commit_date('HEAD'))
self.git.tag('second', message='''Tagged ' " Changes''')
with temporary_dir() as clone:
with pushd(clone):
self.init_repo('origin', self.origin)
subprocess.check_call(['git', 'pull', '--tags', 'origin', 'master:master'])
with open(os.path.realpath('README'), 'r') as readme:
self.assertEqual('--More data.', readme.read())
git = Git()
# Check that we can pick up committed and uncommitted changes.
with safe_open(os.path.realpath('CHANGES'), 'w') as changes:
changes.write('none')
subprocess.check_call(['git', 'add', 'CHANGES'])
self.assertEqual({'README', 'CHANGES'}, git.changed_files(from_commit='first'))
self.assertEqual('master', git.branch_name)
self.assertEqual('second', git.tag_name, msg='annotated tags should be found')
def test_detect_worktree(self):
with temporary_dir() as _clone:
with pushd(_clone):
clone = os.path.realpath(_clone)
self.init_repo('origin', self.origin)
subprocess.check_call(['git', 'pull', '--tags', 'origin', 'master:master'])
def worktree_relative_to(cwd, expected):
# Given a cwd relative to the worktree, tests that the worktree is detected as 'expected'.
orig_cwd = os.getcwd()
try:
abs_cwd = os.path.join(clone, cwd)
if not os.path.isdir(abs_cwd):
os.mkdir(abs_cwd)
os.chdir(abs_cwd)
actual = Git.detect_worktree()
self.assertEqual(expected, actual)
finally:
os.chdir(orig_cwd)
worktree_relative_to('..', None)
worktree_relative_to('.', clone)
worktree_relative_to('is', clone)
worktree_relative_to('is/a', clone)
worktree_relative_to('is/a/dir', clone)
def test_detect_worktree_no_cwd(self):
with temporary_dir() as _clone:
with pushd(_clone):
clone = os.path.realpath(_clone)
self.init_repo('origin', self.origin)
subprocess.check_call(['git', 'pull', '--tags', 'origin', 'master:master'])
def worktree_relative_to(some_dir, expected):
# Given a directory relative to the worktree, tests that the worktree is detected as 'expected'.
subdir = os.path.join(clone, some_dir)
if not os.path.isdir(subdir):
os.mkdir(subdir)
actual = Git.detect_worktree(subdir=subdir)
self.assertEqual(expected, actual)
worktree_relative_to('..', None)
worktree_relative_to('.', clone)
worktree_relative_to('is', clone)
worktree_relative_to('is/a', clone)
worktree_relative_to('is/a/dir', clone)
@property
def test_changes_in(self):
"""Test finding changes in a diffspecs
To some extent this is just testing functionality of git not pants, since all pants says
is that it will pass the diffspec to git diff-tree, but this should serve to at least document
the functionality we belive works.
"""
with environment_as(GIT_DIR=self.gitdir, GIT_WORK_TREE=self.worktree):
def commit_contents_to_files(content, *files):
for path in files:
with safe_open(os.path.join(self.worktree, path), 'w') as fp:
fp.write(content)
subprocess.check_call(['git', 'add', '.'])
subprocess.check_call(['git', 'commit', '-m', 'change {}'.format(files)])
return subprocess.check_output(['git', 'rev-parse', 'HEAD']).strip()
# We can get changes in HEAD or by SHA
c1 = commit_contents_to_files('1', 'foo')
self.assertEqual({'foo'}, self.git.changes_in('HEAD'))
self.assertEqual({'foo'}, self.git.changes_in(c1))
# Changes in new HEAD, from old-to-new HEAD, in old HEAD, or from old-old-head to new.
commit_contents_to_files('2', 'bar')
self.assertEqual({'bar'}, self.git.changes_in('HEAD'))
self.assertEqual({'bar'}, self.git.changes_in('HEAD^..HEAD'))
self.assertEqual({'foo'}, self.git.changes_in('HEAD^'))
self.assertEqual({'foo'}, self.git.changes_in('HEAD~1'))
self.assertEqual({'foo', 'bar'}, self.git.changes_in('HEAD^^..HEAD'))
# New commit doesn't change results-by-sha
self.assertEqual({'foo'}, self.git.changes_in(c1))
# Files changed in multiple diffs within a range
c3 = commit_contents_to_files('3', 'foo')
self.assertEqual({'foo', 'bar'}, self.git.changes_in('{}..{}'.format(c1, c3)))
# Changes in a tag
subprocess.check_call(['git', 'tag', 'v1'])
self.assertEqual({'foo'}, self.git.changes_in('v1'))
# Introduce a new filename
c4 = commit_contents_to_files('4', 'baz')
self.assertEqual({'baz'}, self.git.changes_in('HEAD'))
# Tag-to-sha
self.assertEqual({'baz'}, self.git.changes_in('{}..{}'.format('v1', c4)))
# We can get multiple changes from one ref
commit_contents_to_files('5', 'foo', 'bar')
self.assertEqual({'foo', 'bar'}, self.git.changes_in('HEAD'))
self.assertEqual({'foo', 'bar', 'baz'}, self.git.changes_in('HEAD~4..HEAD'))
self.assertEqual({'foo', 'bar', 'baz'}, self.git.changes_in('{}..HEAD'.format(c1)))
self.assertEqual({'foo', 'bar', 'baz'}, self.git.changes_in('{}..{}'.format(c1, c4)))
def test_changelog_utf8(self):
with environment_as(GIT_DIR=self.gitdir, GIT_WORK_TREE=self.worktree):
def commit_contents_to_files(message, encoding, content, *files):
for path in files:
with safe_open(os.path.join(self.worktree, path), 'w') as fp:
fp.write(content)
subprocess.check_call(['git', 'add', '.'])
subprocess.check_call(['git', 'config', '--local', '--add', 'i18n.commitencoding',
encoding])
try:
subprocess.check_call(['git', 'commit', '-m', message.encode(encoding)])
finally:
subprocess.check_call(['git', 'config', '--local', '--unset-all', 'i18n.commitencoding'])
return subprocess.check_output(['git', 'rev-parse', 'HEAD']).strip()
# Mix in a non-UTF-8 author to all commits to exercise the corner described here does not
# adversely impact the ability to render the changelog (even if rendering for certain
# characters is incorrect): http://comments.gmane.org/gmane.comp.version-control.git/262685
# NB: This method of override requires we include `user.name` and `user.email` even though we
# only use `user.name` to exercise non-UTF-8. Without `user.email`, it will be unset and
# commits can then fail on machines without a proper hostname setup for git to fall back to
# when concocting a last-ditch `user.email`.
non_utf8_config = dedent("""
[user]
name = <NAME>
email = <EMAIL>
""").encode('iso-8859-1')
with open(os.path.join(self.gitdir, 'config'), 'wb') as fp:
fp.write(non_utf8_config)
# Note the copyright symbol is used as the non-ascii character in the next 3 commits
commit_contents_to_files('START1 © END', 'iso-8859-1', '1', 'foo')
commit_contents_to_files('START2 © END', 'latin1', '1', 'bar')
commit_contents_to_files('START3 © END', 'utf-8', '1', 'baz')
commit_contents_to_files('START4 ~ END', 'us-ascii', '1', 'bip')
# Prove our non-utf-8 encodings were stored in the commit metadata.
log = subprocess.check_output(['git', 'log', '--format=%e'])
self.assertEqual([b'us-ascii', b'latin1', b'iso-8859-1'], [_f for _f in log.strip().splitlines() if _f])
# And show that the git log successfully transcodes all the commits none-the-less to utf-8
changelog = self.git.changelog()
# The ascii commit should combine with the iso-8859-1 author an fail to transcode the
# o-with-stroke character, and so it should be replaced with the utf-8 replacement character
# \uFFF or �.
self.assertIn('<NAME>', changelog)
self.assertIn('<NAME>\uFFFDnnes', changelog)
# For the other 3 commits, each of iso-8859-1, latin1 and utf-8 have an encoding for the
# o-with-stroke character - \u00F8 or ø - so we should find it;
self.assertIn('<NAME>', changelog)
self.assertIn('<NAME>\u00F8nnes', changelog)
self.assertIn('START1 © END', changelog)
self.assertIn('START2 © END', changelog)
self.assertIn('START3 © END', changelog)
self.assertIn('START4 ~ END', changelog)
def test_refresh_with_conflict(self):
with environment_as(GIT_DIR=self.gitdir, GIT_WORK_TREE=self.worktree):
self.assertEqual(set(), self.git.changed_files())
self.assertEqual({'README'}, self.git.changed_files(from_commit='HEAD^'))
self.assertEqual({'README'}, self.git.changes_in('HEAD'))
# Create a change on this branch that is incompatible with the change to master
with open(self.readme_file, 'w') as readme:
readme.write('Conflict')
subprocess.check_call(['git', 'commit', '-am', 'Conflict'])
self.assertEqual(set(), self.git.changed_files(include_untracked=True, from_commit='HEAD'))
with self.assertRaises(Scm.LocalException):
self.git.refresh(leave_clean=False)
# The repo is dirty
self.assertEqual({'README'},
self.git.changed_files(include_untracked=True, from_commit='HEAD'))
with environment_as(GIT_DIR=self.gitdir, GIT_WORK_TREE=self.worktree):
subprocess.check_call(['git', 'reset', '--hard', 'HEAD'])
# Now try with leave_clean
with self.assertRaises(Scm.LocalException):
self.git.refresh(leave_clean=True)
# The repo is clean
self.assertEqual(set(), self.git.changed_files(include_untracked=True, from_commit='HEAD'))
def test_commit_with_new_untracked_file_adds_file(self):
new_file = os.path.join(self.worktree, 'untracked_file')
touch(new_file)
self.assertEqual({'untracked_file'}, self.git.changed_files(include_untracked=True))
self.git.add(new_file)
self.assertEqual({'untracked_file'}, self.git.changed_files())
self.git.commit('API Changes.')
self.assertEqual(set(), self.git.changed_files(include_untracked=True))
class DetectWorktreeFakeGitTest(unittest.TestCase):
  """Exercises Git.detect_worktree against fake `git` binaries placed on a
  controlled PATH: missing, present-but-unexecutable, executable-but-invalid,
  failing, and working stubs."""

  @contextmanager
  def empty_path(self):
    # Yield a PATH consisting solely of a fresh empty temp dir, so no real
    # `git` binary is reachable.
    with temporary_dir() as path:
      with environment_as(PATH=path):
        yield path

  @contextmanager
  def unexecutable_git(self):
    # A zero-byte, non-executable file named `git` on an otherwise empty PATH.
    with self.empty_path() as path:
      git = os.path.join(path, 'git')
      touch(git)
      yield git

  @contextmanager
  def executable_git(self):
    # Same stub, but with the executable bit set (still not a valid program).
    with self.unexecutable_git() as git:
      chmod_plus_x(git)
      yield git

  def test_detect_worktree_no_git(self):
    with self.empty_path():
      self.assertIsNone(Git.detect_worktree())

  # NOTE(review): method name contains a typo ("unexectuable"); left as-is
  # since renaming is outside the scope of a documentation pass.
  def test_detect_worktree_unexectuable_git(self):
    with self.unexecutable_git() as git:
      self.assertIsNone(Git.detect_worktree())
      self.assertIsNone(Git.detect_worktree(binary=git))

  def test_detect_worktree_invalid_executable_git(self):
    with self.executable_git() as git:
      self.assertIsNone(Git.detect_worktree())
      self.assertIsNone(Git.detect_worktree(binary=git))

  def test_detect_worktree_failing_git(self):
    # A git stub that exits 1: detection must treat this as "no worktree".
    with self.executable_git() as git:
      with open(git, 'w') as fp:
        fp.write('#!/bin/sh\n')
        fp.write('exit 1')
      self.assertIsNone(Git.detect_worktree())
      self.assertIsNone(Git.detect_worktree(git))

  def test_detect_worktree_working_git(self):
    # A git stub that echoes a fixed path: detection should return it verbatim.
    expected_worktree_dir = '/a/fake/worktree/dir'
    with self.executable_git() as git:
      with open(git, 'w') as fp:
        fp.write('#!/bin/sh\n')
        fp.write('echo ' + expected_worktree_dir)
      self.assertEqual(expected_worktree_dir, Git.detect_worktree())
      self.assertEqual(expected_worktree_dir, Git.detect_worktree(binary=git))
|
JinmingHu-MSFT/azure-sdk-for-cpp | sdk/identity/azure-identity/inc/azure/identity/client_certificate_credential.hpp | <gh_stars>1-10
// Copyright (c) Microsoft Corporation. All rights reserved.
// SPDX-License-Identifier: MIT
/**
* @file
* @brief Client Certificate Credential and options.
*/
#pragma once
#include "azure/identity/dll_import_export.hpp"
#include <azure/core/credentials/credentials.hpp>
#include <azure/core/credentials/token_credential_options.hpp>
#include <azure/core/url.hpp>
#include <memory>
#include <string>
namespace Azure { namespace Identity {
namespace _detail {
class TokenCredentialImpl;
} // namespace _detail
/**
* @brief Options for client certificate authentication.
*
*/
struct ClientCertificateCredentialOptions final : public Core::Credentials::TokenCredentialOptions
{
};
/**
* @brief Client Certificate Credential authenticates with the Azure services using a Tenant ID,
* Client ID and a client certificate.
*
*/
class ClientCertificateCredential final : public Core::Credentials::TokenCredential {
private:
  std::unique_ptr<_detail::TokenCredentialImpl> m_tokenCredentialImpl;
  Core::Url m_requestUrl;
  std::string m_requestBody;
  std::string m_tokenHeaderEncoded;
  std::string m_tokenPayloadStaticPart;
  // NOTE(review): raw pointer to the private key, presumably kept as void* to
  // avoid exposing the crypto library's types in this public header; its
  // lifetime is managled by the constructor/destructor pair below — confirm
  // release happens in ~ClientCertificateCredential.
  void* m_pkey;

public:
  /**
   * @brief Constructs a Client Certificate Credential.
   *
   * @param tenantId Tenant ID.
   * @param clientId Client ID.
   * @param clientCertificatePath Client certificate path.
   * @param options Options for token retrieval.
   */
  explicit ClientCertificateCredential(
      std::string const& tenantId,
      std::string const& clientId,
      std::string const& clientCertificatePath,
      Core::Credentials::TokenCredentialOptions const& options
      = Core::Credentials::TokenCredentialOptions());

  /**
   * @brief Constructs a Client Certificate Credential.
   *
   * @param tenantId Tenant ID.
   * @param clientId Client ID.
   * @param clientCertificatePath Client certificate path.
   * @param options Options for token retrieval.
   */
  explicit ClientCertificateCredential(
      std::string const& tenantId,
      std::string const& clientId,
      std::string const& clientCertificatePath,
      ClientCertificateCredentialOptions const& options);

  /**
   * @brief Destructs `%ClientCertificateCredential`.
   *
   */
  ~ClientCertificateCredential() override;

  /**
   * @brief Gets an authentication token.
   *
   * @param tokenRequestContext A context to get the token in.
   * @param context A context to control the request lifetime.
   *
   * @throw Azure::Core::Credentials::AuthenticationException Authentication error occurred.
   */
  Core::Credentials::AccessToken GetToken(
      Core::Credentials::TokenRequestContext const& tokenRequestContext,
      Core::Context const& context) const override;
};
}} // namespace Azure::Identity
|
onezens/QQTweak | qqtw/qqheaders7.2/QQCustomTipsPbProxy.h | <reponame>onezens/QQTweak<gh_stars>1-10
//
// Generated by class-dump 3.5 (64 bit).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by <NAME>.
//
#import "NSObject.h"
#import "IPacketSendResultHandler.h"
@class NSMutableArray, NSString;
@interface QQCustomTipsPbProxy : NSObject <IPacketSendResultHandler>
{
NSString *_readConfirmLicense;
NSMutableArray *_pushSeqs;
}
+ (id)sharedInstance;
- (void)dealloc;
- (_Bool)didRecievedCustomTipsRsp:(char *)arg1 bufferLen:(unsigned int)arg2 seq:(unsigned int)arg3 readConfirmParams:(id)arg4;
- (_Bool)didRecievedDiscussGroupCustomTipsRsp:(char *)arg1 bufferLen:(unsigned int)arg2 seq:(unsigned int)arg3 readConfirmParams:(id)arg4;
- (id)getReadConfirmLicense;
- (id)init;
- (void)notifyMsgSentResult:(id)arg1 result:(int)arg2;
- (void)readedConfirmReport:(id)arg1 seq:(unsigned int)arg2;
// Remaining properties
@property(readonly, copy) NSString *debugDescription;
@property(readonly, copy) NSString *description;
@property(readonly) unsigned long long hash;
@property(readonly) Class superclass;
@end
|
cybersamx/long-poll | di/pkg/models.go | package pkg
// City is a minimal city record serialized over JSON as {"id": ..., "name": ...}.
type City struct {
	ID   int    `json:"id"`
	Name string `json:"name"`
}
// NewCity constructs a *City populated with the given id and name.
func NewCity(id int, name string) *City {
	return &City{ID: id, Name: name}
}
|
christabor/etude | 11-15-2013/void.js | <reponame>christabor/etude
var canvas;
var height = $(window).height();
var width = $(window).width();
var animation_opts;
$('canvas').attr({
'width': width,
'height': height
});
canvas = new fabric.Canvas('void');
// performance tweak
canvas.renderOnAddRemove = false;
// get visible window for canvas size
animation_opts = {
duration: 20,
onChange: canvas.renderAll.bind(canvas),
easing: fabric.util.ease.easeOut
};
// Adds a circle of opts.radius to the canvas, centered in the viewport, with
// a random two-stop gradient fill.
// NOTE(review): only opts.radius is read — opts.left/opts.top passed by
// recursiveShapes() are ignored (the circle is always centered); confirm
// whether that is intentional.
// NOTE(review): relies on globals rando() and randomColor() defined outside
// this file chunk.
function addShape(opts) {
    var circle = new fabric.Circle({
        radius: opts.radius,
        left: width / 2,
        top: height / 2,
        selectable: false
    });
    circle.setGradient('fill', {
        x1: 0,
        y1: rando(circle.height),
        x2: rando(circle.width),
        y2: rando(circle.width),
        colorStops: {
            0: randomColor(255),
            1: randomColor(255)
        }
    });
    canvas.add(circle);
    return;
}
// Animation tick: grows every circle's radius by 10px; once a circle is as
// wide as the viewport it is removed and replaced with a fresh 10px circle,
// then the canvas is re-rendered (renderOnAddRemove is disabled globally).
// NOTE(review): iterates fabric's private canvas._objects array and removes
// elements during iteration — appears to work with jQuery's snapshot-style
// $.each, but verify against the fabric.js version in use.
function wiggleCircles() {
    $(canvas._objects).each(function(k, object){
        if(object.radius >= width) {
            object.remove();
            addShape({
                radius: 10
            });
        } else {
            object.animate('radius', '+=10', animation_opts);
        }
    });
    canvas.renderAll();
    return;
}
// Seeds the canvas with up to `times` concentric circles, halving (then
// shrinking by 10) the size each step; stops early once num reaches 0.
// NOTE(review): the left/top values passed here are ignored by addShape(),
// which always centers the circle — confirm intent.
function recursiveShapes(num, times) {
    if(times !== 0 && num > 0) {
        num = Math.floor(num/2);
        addShape({
            left: num,
            top: num,
            radius: num
        });
        return recursiveShapes(num - 10, times - 1);
    } else {
        return;
    }
}
recursiveShapes(width, 10);
setInterval(wiggleCircles, 10);
|
dmj/jing-trang | mod/convert-to-dtd/src/main/com/thaiopensource/relaxng/output/dtd/NamespaceManager.java | <reponame>dmj/jing-trang<gh_stars>100-1000
package com.thaiopensource.relaxng.output.dtd;
import com.thaiopensource.relaxng.edit.NameClass;
import com.thaiopensource.relaxng.edit.NameNameClass;
import com.thaiopensource.xml.util.WellKnownNamespaces;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
/**
 * Assigns prefixes to namespace URIs when converting RELAX NG to a DTD:
 * honors explicitly requested prefixes where possible, tracks which
 * namespace (if any) becomes the default, and generates ns1, ns2, ...
 * prefixes for the remainder in assignPrefixes().
 */
class NamespaceManager {
  // map namespace URIs to non-empty prefix
  private final Map<String, String> namespaceUriMap = new HashMap<String, String>();
  // URI of the default (unprefixed) namespace; null until decided.
  private String defaultNamespaceUri = null;
  // Prefixes already claimed, so generated prefixes never collide.
  private final Set<String> usedPrefixes = new HashSet<String>();
  // Namespace URIs seen without a usable prefix; resolved in assignPrefixes().
  private final Set<String> unassignedNamespaceUris = new HashSet<String>();

  NamespaceManager() {
    // "xml" is predeclared and permanently bound to the XML namespace.
    usedPrefixes.add("xml");
    namespaceUriMap.put(WellKnownNamespaces.XML, "xml");
  }

  /** Returns the prefix assigned to {@code ns}, or null if none (yet). */
  String getPrefixForNamespaceUri(String ns) {
    return namespaceUriMap.get(ns);
  }

  /** Returns the default namespace URI; may be null before assignPrefixes(). */
  String getDefaultNamespaceUri() {
    return defaultNamespaceUri;
  }

  /**
   * Finalizes the mapping: falls back to "" as the default namespace and
   * invents the first unused "nsN" prefix for each still-unassigned URI.
   */
  void assignPrefixes() {
    if (defaultNamespaceUri == null)
      defaultNamespaceUri = "";
    int n = 0;
    for (String ns : unassignedNamespaceUris) {
      for (; ;) {
        ++n;
        String prefix = "ns" + Integer.toString(n);
        if (!usedPrefixes.contains(prefix)) {
          namespaceUriMap.put(ns, prefix);
          break;
        }
      }
    }
  }

  /**
   * Records the namespace/prefix usage of a name class. When
   * {@code defaultable} is true, the name's namespace is a candidate for the
   * default (unprefixed) namespace.
   */
  void noteName(NameNameClass nc, boolean defaultable) {
    String ns = nc.getNamespaceUri();
    // NOTE(review): `ns == NameClass.INHERIT_NS` compares by reference;
    // INHERIT_NS appears to be an interned sentinel that must be detected by
    // identity rather than equals() — confirm before touching this line.
    if (ns.equals("") || ns == NameClass.INHERIT_NS) {
      if (defaultable)
        defaultNamespaceUri = "";
      return;
    }
    String assignedPrefix = namespaceUriMap.get(ns);
    if (assignedPrefix != null)
      return;
    String prefix = nc.getPrefix();
    if (prefix == null) {
      // No requested prefix: first such namespace may become the default;
      // either way it waits for a generated prefix.
      if (defaultNamespaceUri == null && defaultable)
        defaultNamespaceUri = ns;
      unassignedNamespaceUris.add(ns);
    }
    else {
      // Requested prefix honored only if still free; otherwise queue the
      // namespace for a generated prefix.
      if (usedPrefixes.contains(prefix))
        unassignedNamespaceUris.add(ns);
      else {
        usedPrefixes.add(prefix);
        namespaceUriMap.put(ns, prefix);
        unassignedNamespaceUris.remove(ns);
      }
    }
  }
}
|
mdsol/mauth-client-ruby | spec/support/shared_contexts/client.rb | <reponame>mdsol/mauth-client-ruby
# Shared RSpec context wiring up a MAuth::Client with a freshly generated RSA
# signing key, plus helper requests signed under the v1 and v2 protocols.
# Include with `include_context 'client'`; override any `let` (e.g.
# `v2_only_sign_requests`) inside the including example group.
shared_context 'client' do
  # UUID identifying the signing application (arbitrary test value).
  let(:app_uuid) { 'signer' }
  # The signable request every spec signs/authenticates.
  let(:request) { TestSignableRequest.new(verb: 'PUT', request_url: '/', body: 'himom') }
  # Client configuration toggles; all default off so specs opt in explicitly.
  let(:v2_only_sign_requests) { false }
  let(:v1_only_sign_requests) { false }
  let(:v2_only_authenticate) { false }
  let(:disable_fallback_to_v1_on_v2_failure) { false }
  # The test request signed with protocol v1 / v2 respectively.
  let(:v1_signed_req) { client.signed_v1(request) }
  let(:v2_signed_req) { client.signed_v2(request) }
  # Fresh 2048-bit RSA keypair per example.
  let(:signing_key) { OpenSSL::PKey::RSA.generate(2048) }
  let(:client) do
    MAuth::Client.new(
      private_key: signing_key,
      app_uuid: app_uuid,
      v2_only_sign_requests: v2_only_sign_requests,
      v2_only_authenticate: v2_only_authenticate,
      v1_only_sign_requests: v1_only_sign_requests,
      disable_fallback_to_v1_on_v2_failure: disable_fallback_to_v1_on_v2_failure
    )
  end
  require 'mauth/request_and_response'
  # Minimal MAuth::Request subclass that lets specs attach headers and read
  # back the authentication/time headers for both protocol versions.
  class TestSignableRequest < MAuth::Request
    include MAuth::Signed
    attr_accessor :headers
    # Returns a copy of this request with the given headers merged over any
    # existing ones; signing attributes are preserved.
    def merge_headers(headers)
      self.class.new(@attributes_for_signing).tap{ |r| r.headers = (@headers || {}).merge(headers) }
    end
    # v1 protocol headers
    def x_mws_time
      headers['X-MWS-Time']
    end
    def x_mws_authentication
      headers['X-MWS-Authentication']
    end
    # v2 protocol headers
    def mcc_authentication
      headers['MCC-Authentication']
    end
    def mcc_time
      headers['MCC-Time']
    end
  end
end
|
americanstone/image-crawler | src/main/java/edu/vanderbilt/imagecrawler/platform/JavaImage.java | <reponame>americanstone/image-crawler<gh_stars>0
package edu.vanderbilt.imagecrawler.platform;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import javax.imageio.ImageIO;
import edu.vanderbilt.imagecrawler.transforms.Transform;
import edu.vanderbilt.imagecrawler.utils.Filters;
import kotlin.Unit;
import static edu.vanderbilt.imagecrawler.platform.Cache.Operation.TRANSFORM;
/**
* Stores platform-specific meta-data about an Image and also provides
* methods for common image- and file-related tasks. This
* implementation is specific to the Java platform.
*/
/**
 * Stores platform-specific meta-data about an Image and also provides
 * methods for common image- and file-related tasks. This
 * implementation is specific to the Java platform.
 */
public class JavaImage implements PlatformImage {
    /**
     * Cache item used to report progress.
     */
    private Cache.Item mCacheItem;

    /**
     * The decoded pixel data our Image stores.
     */
    private BufferedImage mImage;

    /**
     * Size of image in bytes, as estimated from the originating stream.
     */
    private int mSize = 0;

    /**
     * Package only constructor only accessed by Platform.
     */
    JavaImage(InputStream inputStream, Cache.Item item) {
        setImage(inputStream, item);
    }

    /**
     * Private constructor only accessed internally by this class
     * (used to wrap the result of a transform).
     */
    private JavaImage(BufferedImage image) {
        mImage = image;
    }

    /**
     * Decodes an input stream into an Image that can be used in the rest
     * of the application.
     *
     * @param inputStream stream containing the encoded image
     * @param item        cache item used later for progress reporting
     * @throws RuntimeException wrapping any IOException raised while decoding
     */
    @Override
    public void setImage(InputStream inputStream, Cache.Item item) {
        try {
            // available() is only an estimate; it is recorded before
            // ImageIO consumes the stream.
            mSize = inputStream.available();
            mImage = ImageIO.read(inputStream);
            mCacheItem = item;
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Write the image to the given output stream in PNG format.
     * A null image is logged and silently skipped.
     *
     * @param outputStream destination stream (not closed by this method)
     */
    @Override
    public void writeImage(OutputStream outputStream) throws IOException {
        if (mImage == null) {
            System.out.println("null image");
        } else {
            ImageIO.write(mImage,
                          "png",
                          outputStream);
        }
    }

    /**
     * Applies the requested transform to a deep copy of this image and
     * returns the transformed copy; this image is left untouched. Unknown
     * transform types return {@code this} unchanged.
     */
    @Override
    public PlatformImage applyTransform(Transform.Type type, Cache.Item item) {
        // Forward to the platform-specific implementation of this transform.
        // Deep-copy the pixel data so the original image is preserved.
        BufferedImage originalImage = mImage;
        BufferedImage filteredImage =
            new BufferedImage
                (originalImage.getColorModel(),
                 originalImage.copyData(null),
                 originalImage.getColorModel().isAlphaPremultiplied(),
                 null);
        int[] pixels = filteredImage.getRGB(
            0, 0, filteredImage.getWidth(), filteredImage.getHeight(),
            null, 0, filteredImage.getWidth());
        // Single-element array so the progress lambdas below can mutate it.
        int[] lastProgress = new int[1];
        switch (type) {
            case GRAY_SCALE_TRANSFORM:
                Filters.grayScale(pixels, filteredImage.getColorModel().hasAlpha(), progress -> {
                    lastProgress[0] = updateProgress(mCacheItem, progress, lastProgress[0]);
                    return Unit.INSTANCE;
                });
                // BUGFIX: this break was missing, so a gray-scale request fell
                // through and additionally applied the sepia filter.
                break;
            case TINT_TRANSFORM:
                // NOTE(review): no tint filter is implemented here; TINT falls
                // through to sepia -- confirm this is the intended behavior.
            case SEPIA_TRANSFORM:
                Filters.sepia(pixels, filteredImage.getColorModel().hasAlpha(), progress -> {
                    lastProgress[0] = updateProgress(mCacheItem, progress, lastProgress[0]);
                    return Unit.INSTANCE;
                });
                break;
            default:
                return this;
        }
        filteredImage.setRGB(
            0, 0, filteredImage.getWidth(), filteredImage.getHeight(),
            pixels, 0, filteredImage.getWidth());
        // Signal that processing for this item is complete.
        mCacheItem.progress(Cache.Operation.CLOSE, 1f, 0);
        return new JavaImage(filteredImage);
    }

    /**
     * @return Size of image in bytes.
     */
    @Override
    public int size() {
        return mSize;
    }

    /**
     * Returns the associated cache item.
     */
    @Override
    public Cache.Item getCacheItem() {
        return mCacheItem;
    }

    /**
     * Forwards a TRANSFORM progress update to the cache item, but only when
     * the whole-number percentage has advanced since the last report (keeps
     * the notification volume down).
     *
     * @return the current whole-number percentage, to store as the new
     *         last-reported value
     */
    private int updateProgress(Cache.Item newItem, float progress, int lastProgress) {
        int percent = (int) (progress * 100);
        if (percent > lastProgress) {
            newItem.progress(TRANSFORM, progress, 0);
        }
        return percent;
    }
}
|
atveit/vespa | document/src/tests/testbytebuffer.h | <gh_stars>1-10
// Copyright 2017 Yahoo Holdings. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
/* $Id$*/
#pragma once
#include <cppunit/extensions/HelperMacros.h>
/**
CPPUnit test case for ByteBuffer class.
*/
/**
 * CppUnit test fixture for the ByteBuffer class: construction, copying,
 * assignment, slicing, put/get/flip, and integer encodings.
 */
class ByteBuffer_Test : public CppUnit::TestFixture {
    CPPUNIT_TEST_SUITE( ByteBuffer_Test);
    CPPUNIT_TEST(test_constructors);
    CPPUNIT_TEST(test_copy_constructor);
    CPPUNIT_TEST(test_assignment_operator);
    CPPUNIT_TEST(test_slice);
    CPPUNIT_TEST(test_slice2);
    CPPUNIT_TEST(test_putGetFlip);
    CPPUNIT_TEST(test_NumberEncodings);
    CPPUNIT_TEST(test_NumberLengths);
    CPPUNIT_TEST(test_SerializableArray);
    CPPUNIT_TEST_SUITE_END();
public:
    /** Per-test initialization. */
    void setUp() override;
protected:
    /** Tests construction and deletion. */
    void test_constructors();
    /** Tests ByteBuffer interaction with SerializableArray. */
    void test_SerializableArray();
    /** Tests the copy constructor. */
    void test_copy_constructor();
    /** Tests the assignment operator. */
    void test_assignment_operator();
    /** Tests the slice() method. */
    void test_slice();
    /** Tests the slice2() method. */
    void test_slice2();
    /** Tests put(), get() and flip() methods. */
    void test_putGetFlip();
    /** Tests writing integers with non-trivial encodings. */
    void test_NumberEncodings();
    /** Tests the lengths of those encodings. */
    void test_NumberLengths();
};
|
Queentaker/uzh | Informatik1/Actual Finals/Finals/1 Warmup/2_wp_1.py | <filename>Informatik1/Actual Finals/Finals/1 Warmup/2_wp_1.py
#works
def calc(expression):
    """Evaluate a prefix arithmetic expression of the form "<op> <a> <b>".

    Supported operators: +, -, *, /. Operands are parsed as floats;
    extra trailing tokens are ignored, matching the original behavior.

    Args:
        expression: Whitespace-separated string, e.g. "+ 1 2".

    Returns:
        float: The result of applying the operator to the two operands.

    Raises:
        ValueError: On division by zero, a non-numeric operand, or an
            unsupported operator.
        IndexError: If fewer than three tokens are supplied.
    """
    tokens = expression.split()
    op = tokens[0]
    # Parse both operands once instead of re-converting in every branch.
    x, y = float(tokens[1]), float(tokens[2])
    if op == "+":
        return x + y
    if op == "-":
        return x - y
    if op == "*":
        return x * y
    if op == "/":
        if y == 0:
            # Preserve the original contract: ValueError rather than
            # ZeroDivisionError on division by zero.
            raise ValueError("division by zero")
        return x / y
    # BUGFIX: an unknown operator previously fell off the end and silently
    # returned None; fail loudly instead.
    raise ValueError(f"unsupported operator: {op!r}")
# DO NOT SUBMIT THE LINES BELOW!
# Inline smoke tests: one per operator, plus negative and fractional operands.
assert calc("+ 1 2") == 3
assert calc("- 1 2") == -1
assert calc("* 1 2") == 2
assert calc("/ 1 2") == 0.5
assert calc("* 1 -2") == -2
assert calc("* 10.5 2") == 21
assert calc("* -10.5 -2") == 21
salaboy/activiti-cloud-runtime-bundle-service | activiti-cloud-services-runtime-bundle/activiti-cloud-services-rest-impl/src/test/java/org/activiti/cloud/services/rest/controllers/ProcessInstanceVariableControllerImplIT.java | /*
* Copyright 2017 Alfresco, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.cloud.services.rest.controllers;
import static org.activiti.alfresco.rest.docs.AlfrescoDocumentation.processInstanceIdParameter;
import static org.activiti.alfresco.rest.docs.HALDocumentation.unpagedVariableFields;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.BDDMockito.given;
import static org.mockito.Mockito.verify;
import static org.springframework.restdocs.mockmvc.MockMvcRestDocumentation.document;
import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.get;
import static org.springframework.restdocs.mockmvc.RestDocumentationRequestBuilders.post;
import static org.springframework.restdocs.request.RequestDocumentation.parameterWithName;
import static org.springframework.restdocs.request.RequestDocumentation.pathParameters;
import static org.springframework.test.web.servlet.result.MockMvcResultHandlers.print;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.activiti.api.process.model.builders.ProcessPayloadBuilder;
import org.activiti.api.process.runtime.ProcessRuntime;
import org.activiti.api.runtime.conf.impl.CommonModelAutoConfiguration;
import org.activiti.api.runtime.conf.impl.ProcessModelAutoConfiguration;
import org.activiti.api.runtime.model.impl.VariableInstanceImpl;
import org.activiti.cloud.services.events.ProcessEngineChannels;
import org.activiti.cloud.services.events.configuration.CloudEventsAutoConfiguration;
import org.activiti.cloud.services.events.configuration.RuntimeBundleProperties;
import org.activiti.cloud.services.events.listeners.CloudProcessDeployedProducer;
import org.activiti.cloud.services.rest.conf.ServicesRestAutoConfiguration;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.restdocs.AutoConfigureRestDocs;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.autoconfigure.web.servlet.WebMvcTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import org.springframework.boot.test.mock.mockito.SpyBean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Import;
import org.springframework.data.web.config.EnableSpringDataWebSupport;
import org.springframework.hateoas.MediaTypes;
import org.springframework.http.MediaType;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.servlet.MockMvc;
/**
 * Web-layer integration tests (MockMvc) for ProcessInstanceVariableControllerImpl,
 * which also generate Spring REST Docs snippets under target/snippets.
 * The ProcessRuntime is mocked; only the REST/serialization layer is exercised.
 */
@RunWith(SpringRunner.class)
@WebMvcTest(ProcessInstanceVariableControllerImpl.class)
@EnableSpringDataWebSupport
@AutoConfigureMockMvc(secure = false)
@AutoConfigureRestDocs(outputDir = "target/snippets")
@Import({CommonModelAutoConfiguration.class,
        ProcessModelAutoConfiguration.class,
        RuntimeBundleProperties.class,
        CloudEventsAutoConfiguration.class,
        ServicesRestAutoConfiguration.class})
@ComponentScan(basePackages = {"org.activiti.cloud.services.rest.assemblers", "org.activiti.cloud.alfresco"})
public class ProcessInstanceVariableControllerImplIT {
    // Prefix under which the REST Docs snippets of this class are written.
    private static final String DOCUMENTATION_IDENTIFIER = "process-instance-variables";
    // Process instance id shared by the stubbed variable instances below.
    private static final String PROCESS_INSTANCE_ID = UUID.randomUUID().toString();
    @Autowired
    private MockMvc mockMvc;
    @MockBean
    private ProcessRuntime processRuntime;
    @Autowired
    private ObjectMapper mapper;
    @SpyBean
    private ResourcesAssembler resourcesAssembler;
    @MockBean
    private ProcessEngineChannels processEngineChannels;
    @MockBean
    private CloudProcessDeployedProducer processDeployedProducer;
    @Before
    public void setUp() {
        //this assertion is not really necessary. It's only here to remove warning
        //telling that resourcesAssembler is never used. Even if we are not directly
        //using it in the test we need to to declare it as @SpyBean so it get inject
        //in the controller
        assertThat(resourcesAssembler).isNotNull();
        assertThat(processEngineChannels).isNotNull();
        assertThat(processDeployedProducer).isNotNull();
    }
    /**
     * GET /v1/process-instances/{id}/variables returns the variables reported
     * by the runtime and documents the HAL response fields.
     */
    @Test
    public void getVariables() throws Exception {
        VariableInstanceImpl<String> name = new VariableInstanceImpl<>("name",
                String.class.getName(),
                "Paul",
                PROCESS_INSTANCE_ID);
        VariableInstanceImpl<Integer> age = new VariableInstanceImpl<>("age",
                Integer.class.getName(),
                12,
                PROCESS_INSTANCE_ID);
        given(processRuntime.variables(any()))
                .willReturn(Arrays.asList(name,
                        age));
        // NOTE(review): the URL template has one placeholder but two values
        // (1, 1) are supplied; the second value appears unused -- confirm.
        this.mockMvc.perform(get("/v1/process-instances/{processInstanceId}/variables",
                1,
                1).accept(MediaTypes.HAL_JSON_VALUE))
                .andDo(print())
                .andExpect(status().isOk())
                .andDo(document(DOCUMENTATION_IDENTIFIER + "/list",
                        processInstanceIdParameter(),
                        unpagedVariableFields()
                ));
    }
    /**
     * POST .../variables accepts a SetProcessVariablesPayload, returns 200,
     * and forwards the payload to ProcessRuntime#setVariables.
     */
    @Test
    public void setVariables() throws Exception {
        Map<String, Object> variables = new HashMap<>();
        variables.put("var1",
                "varObj1");
        variables.put("var2",
                "varObj2");
        this.mockMvc.perform(post("/v1/process-instances/{processInstanceId}/variables",
                1).contentType(MediaType.APPLICATION_JSON).content(
                mapper.writeValueAsString(ProcessPayloadBuilder.setVariables().withProcessInstanceId("1").withVariables(variables).build())))
                .andExpect(status().isOk())
                .andDo(document(DOCUMENTATION_IDENTIFIER + "/upsert",
                        pathParameters(parameterWithName("processInstanceId").description("The process instance id"))));
        verify(processRuntime).setVariables(any());
    }
}
|
tshino/vscode-kb-macro | test/suite/test_util.js | 'use strict';
const assert = require('assert');
const vscode = require("vscode");
/**
 * Helpers shared by the extension's test suites: editor setup/reset,
 * selection <-> array conversion, and clipboard access.
 * Exposed as a singleton built by an IIFE.
 */
const TestUtil = (function() {
    // Promise-based sleep.
    const sleep = msec => new Promise(resolve => setTimeout(resolve, msec));
    // Language ids that have been activated at least once (see resetDocument).
    const usedLanguages = new Set();
    /**
     * Opens an untitled document with the given content and language, shows
     * it, forces LF line endings, and returns the active text editor.
     */
    const setupTextEditor = async function({ content, language = 'plaintext' }) {
        const doc = await vscode.workspace.openTextDocument({ content, language });
        await vscode.window.showTextDocument(doc);
        const textEditor = vscode.window.activeTextEditor;
        assert.ok( textEditor );
        const eol = vscode.EndOfLine.LF;
        await textEditor.edit((edit) => {
            edit.setEndOfLine(eol);
        });
        return textEditor;
    };
    /**
     * Replaces the editor's entire content and applies the requested EOL and
     * language id. Waits briefly the first time a language is used (see the
     * inline comment below).
     */
    const resetDocument = async function(textEditor, content, options = {}) {
        const {
            eol = vscode.EndOfLine.LF,
            languageId = 'plaintext'
        } = options;
        let lineCount = textEditor.document.lineCount;
        let entireDocument = new vscode.Range(0, 0, lineCount, 0);
        await textEditor.edit((edit) => {
            edit.replace(entireDocument, content);
            edit.setEndOfLine(eol);
        });
        await vscode.languages.setTextDocumentLanguage(
            textEditor.document,
            languageId
        );
        if (!usedLanguages.has(languageId)) {
            usedLanguages.add(languageId);
            // We need some sleep here since it seems a newly loaded language
            // triggers process of something asynchronous and some commands
            // related to the language would not work properly until the process ends.
            await sleep(500);
        }
    };
    /**
     * Converts Selection objects to a compact array form:
     * [line, character] for an empty (caret) selection,
     * [anchorLine, anchorCh, activeLine, activeCh] otherwise.
     */
    const selectionsToArray = function(selections) {
        return Array.from(selections).map(
            s => (
                s.anchor.isEqual(s.active) ?
                [
                    s.active.line,
                    s.active.character
                ] :
                [
                    s.anchor.line,
                    s.anchor.character,
                    s.active.line,
                    s.active.character
                ]
            )
        );
    };
    /** Inverse of selectionsToArray: builds Selection objects from arrays. */
    const arrayToSelections = function(array) {
        return array.map(
            a => (
                a.length === 2 ?
                new vscode.Selection(
                    a[0], a[1], a[0], a[1]
                ) :
                new vscode.Selection(
                    a[0], a[1], a[2], a[3]
                )
            )
        );
    };
    // Normalizes CRLF line endings to LF.
    const normalizeEOL = function(text) {
        return text.replace(/\r\n/g, '\n');
    };
    /** Reads the system clipboard with line endings normalized to LF. */
    const readClipboard = async function() {
        const text = await vscode.env.clipboard.readText();
        return normalizeEOL(text);
    };
    return {
        sleep,
        setupTextEditor,
        resetDocument,
        selectionsToArray,
        arrayToSelections,
        readClipboard
    };
})();
module.exports = { TestUtil };
|
YushchenkoAndrew/template | CDump/TestClass/Main.cpp | #include <iostream>
#include "Test.h"
// #include "Test.cpp"
// Minimal smoke test: default-constructs a Test object (declared in Test.h)
// and exits; any work happens in Test's constructor/destructor.
int main()
{
    Test t;
    return 0;
}
Suryadevelops/youtrack-mobile | src/components/agile-row/agile-row__column.js | <reponame>Suryadevelops/youtrack-mobile<filename>src/components/agile-row/agile-row__column.js<gh_stars>1-10
/* @flow */
import React from 'react';
import {FlatList, TouchableOpacity} from 'react-native';
import { DropZone } from '../draggable';
import {cardBottomMargin, getAgileCardHeight} from '../agile-card/agile-card';
import {IconAdd} from '../icon/icon';
import styles from './agile-row.styles';
import type {BoardCell} from '../../flow/Agile';
import type {IssueFull} from '../../flow/Issue';
import type {UITheme} from '../../flow/Theme';
// Props for a single cell (column) of an agile board row (Flow type).
type ColumnProps = {
    cell: BoardCell,
    onTapCreateIssue: Function,
    lastColumn: boolean,
    renderIssueCard: (issue: IssueFull) => any,
    uiTheme: UITheme,
    zoomedIn?: boolean
}
/**
 * Renders one board-cell column: a drag-and-drop target containing the
 * cell's issue cards plus a trailing "add card" button.
 */
export default function AgileRowColumn(props: ColumnProps) {
    const {cell, uiTheme, zoomedIn} = props;
    const issues: Array<IssueFull> = cell.issues || [];
    // FlatList renderItem delegate: defers to the parent-provided renderer.
    function renderCard({item}: IssueFull) {
        return props.renderIssueCard(item);
    }
    // keyExtractor: cards are keyed by issue id.
    function getId(issue: IssueFull) {
        return issue.id;
    }
    // Cards have a fixed height, so FlatList can skip measurement.
    function getItemLayout(items: ?Array<IssueFull>, index: number) {
        const height = getAgileCardHeight();
        const offset = (height + cardBottomMargin) * index;
        return {
            length: height,
            offset: offset,
            index
        };
    }
    return (
        <DropZone
            style={[styles.column, props.lastColumn && styles.columnWithoutBorder]}
            data={{
                columnId: cell.column.id,
                cellId: cell.id,
                issueIds: issues.map(issue => issue.id)
            }}
        >
            <FlatList
                scrollEnabled={false}
                data={issues}
                keyExtractor={getId}
                renderItem={renderCard}
                getItemLayout={getItemLayout}
                extraData={zoomedIn}
                ListFooterComponent={
                    <TouchableOpacity
                        onPress={() => props.onTapCreateIssue(cell.column.id, cell.id)}
                        style={styles.addCardButton}
                    >
                        <IconAdd color={uiTheme.colors.$link} size={18}/>
                    </TouchableOpacity>
                }
            />
        </DropZone>
    );
}
|
phpc0de/idea-android | android/src/org/jetbrains/android/AndroidClassMembersImplicitUsagesProvider.java | package org.jetbrains.android;
import com.intellij.codeInsight.daemon.ImplicitUsageProvider;
import com.intellij.psi.*;
import com.intellij.psi.impl.source.PsiClassReferenceType;
import com.intellij.psi.util.InheritanceUtil;
import com.intellij.psi.util.PsiTreeUtil;
import org.jetbrains.android.dom.converters.OnClickConverter;
import org.jetbrains.android.facet.AndroidFacet;
import org.jetbrains.android.util.AndroidUtils;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import static com.android.SdkConstants.*;
/**
* Finds implicit usages of fields, methods, parameters and constructors; resulting from Android conventions.
*/
/**
 * Finds implicit usages of fields, methods, parameters and constructors; resulting from Android conventions.
 */
public class AndroidClassMembersImplicitUsagesProvider implements ImplicitUsageProvider {
    /**
     * Routes the PSI element to the field/parameter/method/constructor
     * specific checks below.
     */
    @Override
    public boolean isImplicitUsage(@NotNull PsiElement element) {
        if (element instanceof PsiField) {
            return isImplicitFieldUsage((PsiField)element);
        }
        else if (element instanceof PsiParameter) {
            return isImplicitParameterUsage((PsiParameter)element);
        }
        else if (element instanceof PsiMethod) {
            PsiMethod method = (PsiMethod)element;
            if (method.isConstructor()) {
                return isImplicitConstructorUsage(method);
            } else {
                return isImplicitMethodUsage(method);
            }
        }
        return false;
    }
    /**
     * A parameter is implicitly used when its enclosing method matches an XML
     * onClick handler signature (layout or menu convention) and the enclosing
     * class is an Activity subclass, since the framework invokes such
     * handlers reflectively.
     */
    private static boolean isImplicitParameterUsage(@NotNull PsiParameter parameter) {
        if (AndroidFacet.getInstance(parameter) == null) {
            return false;
        }
        final PsiMethod method = PsiTreeUtil.getParentOfType(parameter, PsiMethod.class);
        if (method == null ||
            !OnClickConverter.CONVERTER_FOR_LAYOUT.checkSignature(method) &&
            !OnClickConverter.CONVERTER_FOR_MENU.checkSignature(method)) {
            return false;
        }
        final PsiClass aClass = PsiTreeUtil.getParentOfType(method, PsiClass.class);
        if (aClass == null) {
            return false;
        }
        final PsiClass activityBaseClass = JavaPsiFacade.getInstance(aClass.getProject()).
            findClass(AndroidUtils.ACTIVITY_BASE_CLASS_NAME, parameter.getResolveScope());
        if (activityBaseClass == null) {
            return false;
        }
        return aClass.isInheritor(activityBaseClass, true);
    }
    /**
     * The conventional {@code static CREATOR} field of a Parcelable is
     * accessed reflectively by the framework, so it counts as used.
     */
    private static boolean isImplicitFieldUsage(@NotNull PsiField field) {
        if (!"CREATOR".equals(field.getName())) {
            return false;
        }
        final PsiModifierList modifierList = field.getModifierList();
        if (modifierList == null || !modifierList.hasModifierProperty(PsiModifier.STATIC)) {
            return false;
        }
        final PsiClass aClass = field.getContainingClass();
        return aClass != null && InheritanceUtil.isInheritor(aClass, "android.os.Parcelable");
    }
    @Override
    public boolean isImplicitRead(@NotNull PsiElement element) {
        return false;
    }
    /**
     * A field is implicitly written when any of its annotations carries a
     * resource reference argument (an {@code R.<type>.<name>} expression),
     * e.g. view-binding style annotations.
     */
    @Override
    public boolean isImplicitWrite(@NotNull PsiElement element) {
        if (!(element instanceof PsiField)) {
            return false;
        }
        final AndroidFacet facet = AndroidFacet.getInstance(element);
        if (facet == null) {
            return false;
        }
        final PsiField field = (PsiField)element;
        final PsiModifierList modifierList = field.getModifierList();
        if (modifierList == null) {
            return false;
        }
        for (PsiAnnotation annotation : modifierList.getAnnotations()) {
            for (PsiNameValuePair pair : annotation.getParameterList().getAttributes()) {
                final PsiAnnotationMemberValue value = pair.getValue();
                if (isResourceReference(value)) {
                    return true;
                }
            }
        }
        return false;
    }
    /**
     * Returns true when the value is a three-part reference expression whose
     * outermost qualifier is {@code R}, i.e. {@code R.<type>.<name>}.
     */
    private static boolean isResourceReference(@Nullable PsiAnnotationMemberValue value) {
        if (!(value instanceof PsiReferenceExpression)) {
            return false;
        }
        PsiReferenceExpression exp = (PsiReferenceExpression)value;
        String refName = exp.getReferenceName();
        if (refName == null || refName.isEmpty()) {
            return false;
        }
        PsiExpression qExp = exp.getQualifierExpression();
        if (!(qExp instanceof PsiReferenceExpression)) {
            return false;
        }
        exp = (PsiReferenceExpression)qExp;
        refName = exp.getReferenceName();
        if (refName == null || refName.isEmpty()) {
            return false;
        }
        qExp = exp.getQualifierExpression();
        if (!(qExp instanceof PsiReferenceExpression)) {
            return false;
        }
        exp = (PsiReferenceExpression)qExp;
        return AndroidUtils.R_CLASS_NAME.equals(exp.getReferenceName());
    }
    /**
     * Methods annotated with arch lifecycle annotations are invoked
     * reflectively, hence not unused.
     */
    public boolean isImplicitMethodUsage(PsiMethod method) {
        // Methods annotated with lifecycle annotations are not unused
        for (PsiAnnotation annotation : method.getModifierList().getAnnotations()) {
            String qualifiedName = annotation.getQualifiedName();
            if ("android.arch.lifecycle.OnLifecycleEvent".equals(qualifiedName)) {
                return true;
            }
        }
        return false;
    }
    /**
     * Recognizes public constructors the framework invokes via reflection or
     * inflation: no-arg constructors of Fragments/BackupAgents, and the
     * (Context[, AttributeSet[, int]]) constructors of View subclasses
     * (plus ActionProvider(Context)).
     */
    public boolean isImplicitConstructorUsage(PsiMethod method) {
        if (!method.isConstructor()) {
            return false;
        }
        if (!method.hasModifierProperty(PsiModifier.PUBLIC)) {
            return false;
        }
        PsiParameterList parameterList = method.getParameterList();
        int parameterCount = parameterList.getParametersCount();
        if (parameterCount == 0) {
            // Some Android classes need default constructors, and are invoked by inflaters
            final PsiClass aClass = method.getContainingClass();
            if (aClass != null) {
                if (InheritanceUtil.isInheritor(aClass, CLASS_FRAGMENT)
                    || InheritanceUtil.isInheritor(aClass, CLASS_V4_FRAGMENT.oldName())
                    || InheritanceUtil.isInheritor(aClass, CLASS_V4_FRAGMENT.newName())
                    || InheritanceUtil.isInheritor(aClass, CLASS_BACKUP_AGENT)) {
                    // Activity, Service, ContentProvider and BroadcastReceiver should also be treated as having implicit usages,
                    // but for some reason that's already the case (they are not marked as unused constructors currently;
                    // perhaps due to the XML DOM bindings?
                    return true;
                }
            }
            return false;
        }
        // Look for View constructors; these are of one of these forms:
        //    View(android.content.Context context)
        //    View(android.content.Context context, android.util.AttributeSet attrs)
        //    View(android.content.Context context, android.util.AttributeSet attrs, int defStyle)
        // Also check for
        //    ActionProvider(android.content.Context context)
        if (parameterCount < 1 || parameterCount > 3) {
            return false;
        }
        PsiParameter[] parameters = parameterList.getParameters();
        // First parameter must be a Context.
        PsiType type = parameters[0].getType();
        if (!(type instanceof PsiClassReferenceType)) {
            return false;
        }
        PsiClassReferenceType classType = (PsiClassReferenceType)type;
        PsiClass resolvedParameter = classType.resolve();
        if (resolvedParameter == null || !CLASS_CONTEXT.equals(resolvedParameter.getQualifiedName())) {
            return false;
        }
        if (parameterCount > 1) {
            // Second parameter (if present) must be an AttributeSet.
            type = parameters[1].getType();
            if (!(type instanceof PsiClassReferenceType)) {
                return false;
            }
            classType = (PsiClassReferenceType)type;
            resolvedParameter = classType.resolve();
            if (resolvedParameter == null || !CLASS_ATTRIBUTE_SET.equals(resolvedParameter.getQualifiedName())) {
                return false;
            }
            if (parameterCount > 2) {
                // Third parameter (if present) must be the int defStyle.
                type = parameters[2].getType();
                if (!PsiType.INT.equals(type)) {
                    return false;
                }
            }
        }
        final PsiClass aClass = PsiTreeUtil.getParentOfType(method, PsiClass.class);
        if (aClass == null) {
            return false;
        }
        PsiClass viewBaseClass = JavaPsiFacade.getInstance(aClass.getProject()).findClass(CLASS_VIEW, method.getResolveScope());
        if (viewBaseClass == null) {
            return false;
        }
        return aClass.isInheritor(viewBaseClass, true) || parameterCount == 1 && InheritanceUtil.isInheritor(aClass, CLASS_ACTION_PROVIDER);
    }
}
|
andrewapperley/Blips-Client | Blips/Blips/AFBlip/Timeline/AFBlipTimelineCanvasLayout/AFBlipTimelineCanvasLayout.h | <reponame>andrewapperley/Blips-Client
//
// AFBlipTimelineCanvasLayout.h
// Video-A-Day
//
// Created by <NAME> on 12/11/2013.
// Copyright (c) 2013 AFApps. All rights reserved.
//
#import <UIKit/UIKit.h>
/// Collection-view flow layout for the Blip timeline canvas.
/// Currently declares no members of its own; it inherits all behavior
/// from UICollectionViewFlowLayout.
@interface AFBlipTimelineCanvasLayout : UICollectionViewFlowLayout
@end
|
andrew-t-james/personal-project | src/actions/auth.js | <gh_stars>0
// Redux action creators for the third-party authentication flows.
// Each login creator carries the authenticated user object along with
// its action type; sign-out carries only the type.

export function googleSignInAction(user) {
    return { type: 'GOOGLE_LOGIN', user };
}

export function googleSignOutAction() {
    return { type: 'GOOGLE_LOGOUT' };
}

export function githubLoginAction(user) {
    return { type: 'GITHUB_LOGIN', user };
}

export function twitterLoginAction(user) {
    return { type: 'TWITTER_LOGIN', user };
}

export function facebookLoginAction(user) {
    return { type: 'FACEBOOK_LOGIN', user };
}
r-souza/Rocket.Chat | app/livechat/server/methods/takeInquiry.js | import { Meteor } from 'meteor/meteor';
import { hasPermission } from '../../../authorization';
import { Users, LivechatInquiry } from '../../../models/server';
import { RoutingManager } from '../lib/RoutingManager';
import { userCanTakeInquiry } from '../lib/Helper';
Meteor.methods({
    /**
     * Claims a livechat inquiry for the calling agent.
     *
     * Guards, in order: the caller must be logged in and hold the
     * 'view-l-room' permission; the inquiry must exist and not already be
     * taken; and the caller must be eligible per userCanTakeInquiry.
     *
     * @param {string} inquiryId id of the LivechatInquiry document
     * @param {object} options forwarded to RoutingManager.takeInquiry
     * @returns the result of RoutingManager.takeInquiry
     * @throws {Meteor.Error} 'error-not-allowed' when any guard fails
     */
    'livechat:takeInquiry'(inquiryId, options) {
        if (!Meteor.userId() || !hasPermission(Meteor.userId(), 'view-l-room')) {
            throw new Meteor.Error('error-not-allowed', 'Not allowed', {
                method: 'livechat:takeInquiry',
            });
        }
        const inquiry = LivechatInquiry.findOneById(inquiryId);
        if (!inquiry || inquiry.status === 'taken') {
            throw new Meteor.Error('error-not-allowed', 'Inquiry already taken', {
                method: 'livechat:takeInquiry',
            });
        }
        // Fetch only the fields userCanTakeInquiry and the agent payload need.
        const user = Users.findOneById(Meteor.userId(), {
            fields: { _id: 1, username: 1, roles: 1, status: 1, statusLivechat: 1 },
        });
        if (!userCanTakeInquiry(user)) {
            throw new Meteor.Error('error-not-allowed', 'Not allowed', {
                method: 'livechat:takeInquiry',
            });
        }
        const agent = {
            agentId: user._id,
            username: user.username,
        };
        return RoutingManager.takeInquiry(inquiry, agent, options);
    },
});
ViewFaceCore/TenniS | include/kernels/cpu/winograd_algorithm.h | //
// Created by yang on 2019/10/21.
//
#ifndef TENSORSTACK_KERNELS_CPU_WINOGRAD_ALGORITHM_H
#define TENSORSTACK_KERNELS_CPU_WINOGRAD_ALGORITHM_H
#include "core/tensor.h"
namespace ts{
namespace cpu{
/**
 * CPU Winograd fast-convolution routines for 3x3 kernels.
 * F23 = the F(2x2, 3x3) variant, F63 = the F(6x6, 3x3) variant; each variant
 * provides kernel transform+pack, input transform+pack, output inverse
 * transform, and a full-convolution driver.
 * NOTE(review): declarations only -- the exact semantics of tile_count /
 * in_tile_size should be confirmed against the .cpp implementation.
 */
template <typename T>
class TS_DEBUG_API Conv2dWinogradAlgorithm{
public:
    // F(2x2,3x3): transform the weights into the Winograd domain and pack them.
    static void winograd_f23_transform_and_pack_kernel(const Tensor& kernel, int in_tile_size, Tensor &kernel_tm);
    // F(2x2,3x3): transform and pack the input feature-map tiles.
    static void winograd_f23_transform_and_pack_input(const Tensor& x, int tile_count, Tensor &x_tm);
    // F(2x2,3x3): inverse-transform the Winograd-domain output back to spatial.
    static void winograd_f23_transform_output(const Tensor& out_tm, int tile_count, Tensor& out);
    // F(2x2,3x3) driver. kernel_transformed presumably means `kernel` is
    // already in the Winograd domain -- confirm against the implementation.
    static void winograd_f23(const Tensor &x,
                             const Padding2D &padding,
                             float padding_value,
                             const Tensor &kernel,
                             Tensor &out,
                             bool kernel_transformed = true);
    // F(6x6,3x3) counterparts of the four routines above.
    static void winograd_f63_transform_and_pack_kernel(const Tensor& kernel, int in_tile_size, Tensor &kernel_tm);
    static void winograd_f63_transform_and_pack_input(const Tensor& x, int tile_count, Tensor &x_tm);
    static void winograd_f63_transform_output(const Tensor& out_tm, int tile_count, Tensor& out);
    static void winograd_f63(const Tensor &x,
                             const Padding2D &padding,
                             float padding_value,
                             const Tensor &kernel,
                             Tensor &out,
                             bool kernel_transformed = true);
};
}
}
#endif //TENSORSTACK_KERNELS_CPU_WINOGRAD_ALGORITHM_H
|
muthuArivoli/game-creator | doc/plan/usecases/chooseGoal.java | <gh_stars>0
import java.util.*;
/**
 * Specifies the procedural implementation of choosing a new Goal for the game.
 *
 * NOTE(review): this file lives under doc/plan/usecases and is illustrative
 * planning pseudo-code, not compiling Java: `Game`, `Parser`, `GoalName`
 * and the controller types are undeclared placeholders, the constructor
 * calls omit parentheses, and the class name (QueenPiece) does not match
 * the file name (chooseGoal.java). Treat as design documentation only.
 */
public class QueenPiece {
    /**
     * Default constructor: intended flow is to load the game definition file
     * via the parser, then register the chosen goal with the game controller.
     */
    public QueenPiece() {
        Game = new GameController;
        Parser = new ParserController;
        Parser.loadGameFile("GameFile");
        Game.addGoal(GoalName);
    }
}
Lejick/sport-portal-spring | src/main/java/org/portal/authentication/LoginService.java | <gh_stars>0
package org.portal.authentication;
import org.portal.back.model.Logins;
import org.portal.back.model.LoginsRepository;
import org.portal.back.model.PinaccRepository;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
@Component
public class LoginService {
@Autowired
LoginsRepository loginsRepository;
public String getPass(String userName) {
Date current = Calendar.getInstance().getTime();
List<Logins> log = loginsRepository.findByLogin(userName);
if (log.size() == 0) {
return null;
}
log.get(0).setLastLogin(current);
log.get(0).setIp(CurrentUser.getIp());
loginsRepository.save(log.get(0));
return log.get(0).getPass_md5();
}
public boolean create(String userName, String email, String passMd5, String promocode) {
List<Logins> logList = loginsRepository.findByLogin(userName);
if (logList.size() > 0) {
return false;
}
Date current = Calendar.getInstance().getTime();
Logins log = new Logins();
log.setLogin(userName);
log.setPass_md5(<PASSWORD>);
log.setEmail(email);
log.setIp(CurrentUser.getIp());
log.setCreateDate(current);
log.setPromocode(promocode);
loginsRepository.save(log);
return true;
}
}
|
brandonkim-1301/TI-RSLK-Maze-Follower | documentation/structbmi160__acc__no__motion__int__cfg.js | var structbmi160__acc__no__motion__int__cfg =
[
[ "no_motion_dur", "structbmi160__acc__no__motion__int__cfg.html#a17957e047316ade93de129cc1c9817b9", null ],
[ "no_motion_sel", "structbmi160__acc__no__motion__int__cfg.html#ae315deb080bbeccaa8cf648f8244e28a", null ],
[ "no_motion_src", "structbmi160__acc__no__motion__int__cfg.html#a112475be3e06473dc5a2a9607a29075f", null ],
[ "no_motion_thres", "structbmi160__acc__no__motion__int__cfg.html#a455846a4b01cab9d517fd2281f64e85e", null ],
[ "no_motion_x", "structbmi160__acc__no__motion__int__cfg.html#ada27c778a4a4c986fe693dd02075fcdb", null ],
[ "no_motion_y", "structbmi160__acc__no__motion__int__cfg.html#a39b79b5afdfa762c1ced55e61dfdb7bb", null ],
[ "no_motion_z", "structbmi160__acc__no__motion__int__cfg.html#afb089e1b2b0763a4097c9763262795ef", null ]
]; |
zhfeng/org.ops4j.pax.transx | pax-transx-jdbc/src/main/java/org/ops4j/pax/transx/jdbc/impl/ConnectionHandle.java | <gh_stars>0
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.ops4j.pax.transx.jdbc.impl;
import org.ops4j.pax.transx.connection.utils.AbstractConnectionHandle;
import org.ops4j.pax.transx.connection.utils.AbstractManagedConnection;
import org.ops4j.pax.transx.connection.utils.AbstractManagedConnectionFactory;
import javax.resource.ResourceException;
import javax.resource.spi.ConnectionRequestInfo;
import javax.resource.spi.LocalTransaction;
import java.sql.Array;
import java.sql.Blob;
import java.sql.CallableStatement;
import java.sql.Clob;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.NClob;
import java.sql.PreparedStatement;
import java.sql.SQLClientInfoException;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.SQLXML;
import java.sql.Savepoint;
import java.sql.Statement;
import java.sql.Struct;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.Executor;
/**
 * JDBC {@link Connection} handle that the pool hands out to application code.
 * <p>
 * All JDBC operations are delegated to the physical connection held by the
 * associated managed connection. The local-transaction operations
 * ({@code commit}, {@code rollback}, {@code setAutoCommit}) are routed through
 * the JCA {@link LocalTransaction} so the connector stays in sync, and are
 * rejected while the connection is enlisted in an XA transaction.
 * Statements and metadata objects are wrapped (see the private wrap* helpers)
 * so they keep referencing this handle as their parent connection.
 */
public class ConnectionHandle<
        MCF extends AbstractManagedConnectionFactory<MCF, MC, Connection, ConnectionHandle<MCF, MC>>,
        MC extends AbstractManagedConnection<MCF, MC, Connection, ConnectionHandle<MCF, MC>>>
        extends AbstractConnectionHandle<MCF, MC, Connection, ConnectionHandle<MCF, MC>> implements Connection {

    public ConnectionHandle(MCF mcf, ConnectionRequestInfo cri, MC mc) {
        super(mcf, cri, mc);
    }

    /**
     * Converts an arbitrary exception into the {@link SQLException} JDBC
     * callers expect. When no message is supplied, an existing
     * {@link SQLException} (possibly the cause) is passed through unwrapped.
     */
    @Override
    @SuppressWarnings("unchecked")
    protected <E extends Exception> E wrapException(String msg, Exception e) {
        if (msg == null && e instanceof SQLException) {
            return (E) e;
        }
        if (msg == null && e != null && e.getCause() instanceof SQLException) {
            return (E) e.getCause();
        }
        return (E) new SQLException(msg, e);
    }

    /**
     * Extracts the {@link SQLException} carried by a {@link ResourceException},
     * or wraps the resource exception when its cause is of another type.
     * Shared by the local-transaction methods below (this unwrap logic was
     * previously duplicated inline in commit/rollback/setAutoCommit).
     */
    private static SQLException toSQLException(ResourceException e) {
        if (e.getCause() instanceof SQLException) {
            return (SQLException) e.getCause();
        }
        return new SQLException(e);
    }

    /**
     * Commits the current local transaction and immediately begins a new one
     * (JDBC manual-commit mode keeps a transaction open at all times).
     * No-op while in auto-commit mode; rejected inside an XA transaction.
     */
    public void commit() throws SQLException {
        MC mc = getManagedConnection();
        if (mc.isInXaTransaction()) {
            throw new SQLException("Can not commit within an XA transaction");
        }
        Connection c = mc.getPhysicalConnection();
        if (c.getAutoCommit()) {
            // Auto-commit mode: there is no open transaction to commit.
            return;
        }
        try {
            LocalTransaction tx = mc.getClientLocalTransaction();
            tx.commit();
            tx.begin();
        } catch (ResourceException e) {
            throw toSQLException(e);
        }
    }

    /**
     * Rolls back the current local transaction and immediately begins a new
     * one. No-op while in auto-commit mode; rejected inside an XA transaction.
     */
    public void rollback() throws SQLException {
        MC mc = getManagedConnection();
        if (mc.isInXaTransaction()) {
            throw new SQLException("Can not rollback within an XA transaction");
        }
        Connection c = mc.getPhysicalConnection();
        if (c.getAutoCommit()) {
            return;
        }
        try {
            LocalTransaction tx = mc.getClientLocalTransaction();
            tx.rollback();
            tx.begin();
        } catch (ResourceException e) {
            throw toSQLException(e);
        }
    }

    /**
     * Switches auto-commit mode through the managed connection's local
     * transaction: re-enabling auto-commit commits the open transaction (as
     * required by the JDBC specification); disabling it opens a new one.
     * Rejected inside an XA transaction.
     */
    public void setAutoCommit(boolean autoCommit) throws SQLException {
        MC mc = getManagedConnection();
        if (mc.isInXaTransaction()) {
            throw new SQLException("Can not set autoCommit within an XA transaction");
        }
        Connection c = mc.getPhysicalConnection();
        if (autoCommit == c.getAutoCommit()) {
            // nothing to do
            return;
        }
        try {
            LocalTransaction tx = mc.getClientLocalTransaction();
            if (autoCommit) {
                // reenabling autoCommit - JDBC spec says current transaction is committed
                tx.commit();
            } else {
                // disabling autoCommit
                tx.begin();
            }
        } catch (ResourceException e) {
            throw toSQLException(e);
        }
    }

    public boolean getAutoCommit() throws SQLException {
        return call(Connection::getAutoCommit);
    }

    // --- Statement factories: results are wrapped so they report this handle
    // --- as their connection.

    public Statement createStatement() throws SQLException {
        return wrapStatement(call(Connection::createStatement));
    }

    public Statement createStatement(int resultSetType, int resultSetConcurrency) throws SQLException {
        return wrapStatement(call(c -> c.createStatement(resultSetType, resultSetConcurrency)));
    }

    public Statement createStatement(int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException {
        return wrapStatement(call(c -> c.createStatement(resultSetType, resultSetConcurrency, resultSetHoldability)));
    }

    public PreparedStatement prepareStatement(String sql) throws SQLException {
        return wrapPreparedStatement(call(c -> c.prepareStatement(sql)));
    }

    public PreparedStatement prepareStatement(String sql, int autoGeneratedKeys) throws SQLException {
        return wrapPreparedStatement(call(c -> c.prepareStatement(sql, autoGeneratedKeys)));
    }

    public PreparedStatement prepareStatement(String sql, int[] columnIndexes) throws SQLException {
        return wrapPreparedStatement(call(c -> c.prepareStatement(sql, columnIndexes)));
    }

    public PreparedStatement prepareStatement(String sql, String[] columnNames) throws SQLException {
        return wrapPreparedStatement(call(c -> c.prepareStatement(sql, columnNames)));
    }

    public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency) throws SQLException {
        return wrapPreparedStatement(call(c -> c.prepareStatement(sql, resultSetType, resultSetConcurrency)));
    }

    public PreparedStatement prepareStatement(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException {
        return wrapPreparedStatement(call(c -> c.prepareStatement(sql, resultSetType, resultSetConcurrency, resultSetHoldability)));
    }

    public CallableStatement prepareCall(String sql) throws SQLException {
        return wrapCallableStatement(call(c -> c.prepareCall(sql)));
    }

    public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency) throws SQLException {
        return wrapCallableStatement(call(c -> c.prepareCall(sql, resultSetType, resultSetConcurrency)));
    }

    public CallableStatement prepareCall(String sql, int resultSetType, int resultSetConcurrency, int resultSetHoldability) throws SQLException {
        return wrapCallableStatement(call(c -> c.prepareCall(sql, resultSetType, resultSetConcurrency, resultSetHoldability)));
    }

    // --- LOB / SQLXML / struct factories: direct delegation.

    @Override
    public Clob createClob() throws SQLException {
        return call(Connection::createClob);
    }

    @Override
    public Blob createBlob() throws SQLException {
        return call(Connection::createBlob);
    }

    @Override
    public NClob createNClob() throws SQLException {
        return call(Connection::createNClob);
    }

    @Override
    public SQLXML createSQLXML() throws SQLException {
        return call(Connection::createSQLXML);
    }

    @Override
    public Array createArrayOf(String s, Object[] objects) throws SQLException {
        return call(c -> c.createArrayOf(s, objects));
    }

    @Override
    public Struct createStruct(String s, Object[] objects) throws SQLException {
        return call(c -> c.createStruct(s, objects));
    }

    // --- Simple delegating accessors and mutators.

    @Override
    public boolean isValid(int i) throws SQLException {
        return call(c -> c.isValid(i));
    }

    @Override
    public void setClientInfo(String s, String s1) throws SQLClientInfoException {
        execute(c -> c.setClientInfo(s, s1));
    }

    @Override
    public void setClientInfo(Properties properties) throws SQLClientInfoException {
        execute(c -> c.setClientInfo(properties));
    }

    @Override
    public String getClientInfo(String s) throws SQLException {
        return call(c -> c.getClientInfo(s));
    }

    @Override
    public Properties getClientInfo() throws SQLException {
        return call(Connection::getClientInfo);
    }

    public DatabaseMetaData getMetaData() throws SQLException {
        return wrapMetaData(call(Connection::getMetaData));
    }

    public String getCatalog() throws SQLException {
        return call(Connection::getCatalog);
    }

    public void setCatalog(String catalog) throws SQLException {
        execute(c -> c.setCatalog(catalog));
    }

    public int getHoldability() throws SQLException {
        return call(Connection::getHoldability);
    }

    public void setHoldability(int holdability) throws SQLException {
        execute(c -> c.setHoldability(holdability));
    }

    @SuppressWarnings("all")
    public int getTransactionIsolation() throws SQLException {
        return call(Connection::getTransactionIsolation);
    }

    public void setTransactionIsolation(int level) throws SQLException {
        execute(c -> c.setTransactionIsolation(level));
    }

    public Map<String, Class<?>> getTypeMap() throws SQLException {
        return call(Connection::getTypeMap);
    }

    public void setTypeMap(Map<String, Class<?>> map) throws SQLException {
        execute(c -> c.setTypeMap(map));
    }

    public SQLWarning getWarnings() throws SQLException {
        return call(Connection::getWarnings);
    }

    public void clearWarnings() throws SQLException {
        execute(Connection::clearWarnings);
    }

    public boolean isReadOnly() throws SQLException {
        return call(Connection::isReadOnly);
    }

    public void setReadOnly(boolean readOnly) throws SQLException {
        execute(c -> c.setReadOnly(readOnly));
    }

    public Savepoint setSavepoint() throws SQLException {
        return call(Connection::setSavepoint);
    }

    public Savepoint setSavepoint(String name) throws SQLException {
        return call(c -> c.setSavepoint(name));
    }

    public void releaseSavepoint(Savepoint savepoint) throws SQLException {
        execute(c -> c.releaseSavepoint(savepoint));
    }

    public void rollback(Savepoint savepoint) throws SQLException {
        // rollback(Savepoint) simply delegates as this does not interact with the transaction
        execute(c -> c.rollback(savepoint));
    }

    public String nativeSQL(String sql) throws SQLException {
        return call(c -> c.nativeSQL(sql));
    }

    @Override
    public void setSchema(String schema) throws SQLException {
        execute(c -> c.setSchema(schema));
    }

    @Override
    public String getSchema() throws SQLException {
        return call(Connection::getSchema);
    }

    @Override
    public void abort(Executor executor) throws SQLException {
        execute(c -> c.abort(executor));
    }

    @Override
    public void setNetworkTimeout(Executor executor, int milliseconds) throws SQLException {
        execute(c -> c.setNetworkTimeout(executor, milliseconds));
    }

    @Override
    public int getNetworkTimeout() throws SQLException {
        return call(Connection::getNetworkTimeout);
    }

    @Override
    public <T> T unwrap(Class<T> tClass) throws SQLException {
        // The handle itself satisfies the request before the physical connection is consulted.
        if (tClass.isInstance(this)) {
            return tClass.cast(this);
        }
        return call(c -> c.unwrap(tClass));
    }

    @Override
    public boolean isWrapperFor(Class<?> aClass) throws SQLException {
        if (aClass.isInstance(this)) {
            return true;
        }
        return call(c -> c.isWrapperFor(aClass));
    }

    // --- Wrapping helpers: returned objects keep this handle as parent.

    private Statement wrapStatement(Statement s) {
        return Wrappers.wrap(Statement.class, this, s);
    }

    private PreparedStatement wrapPreparedStatement(PreparedStatement ps) {
        return Wrappers.wrap(PreparedStatement.class, this, ps);
    }

    private CallableStatement wrapCallableStatement(CallableStatement cs) {
        return Wrappers.wrap(CallableStatement.class, this, cs);
    }

    private DatabaseMetaData wrapMetaData(DatabaseMetaData dbmd) {
        return Wrappers.wrap(DatabaseMetaData.class, this, dbmd);
    }
}
|
stausholm/vue-pwa | client/src/components/poll/mock-api.js | const isDateInThePast = dateString => {
  // True when dateString parses to a moment strictly before "now";
  // false for null/empty input.
  // NOTE(review): an unparseable string yields Invalid Date, and every
  // comparison against it is false — callers should pass valid ISO-8601 dates.
  return dateString ? new Date() > new Date(dateString) : false;
};

// Module-wide counter used by Answer to hand out unique ids.
let answerIdIncrementer = 0;
// A poll: a question, its candidate answers, and an optional expiry date.
class Poll {
  constructor(id, question, answers, expires = null, showResults = false) {
    this.id = id;
    this.question = question;
    this.answers = answers;
    this.expires = expires;
    // Serverside check to know if results should be sent along
    this.showResults = showResults;
  }

  // Whether the poll's expiry date lies in the past (false when no expiry is set).
  hasExpired() {
    return isDateInThePast(this.expires);
  }

  // Sum of the vote counts across all answers.
  votesTotal() {
    let total = 0;
    for (const answer of this.answers) {
      total += answer.votes;
    }
    return total;
  }

  // Number of distinct voters; not tracked yet.
  votersTotal() {
    return null; // TODO
  }

  // Payload shape sent to the client for this poll.
  response() {
    const info = {
      question: this.question,
      expires: this.expires,
      hasExpired: this.hasExpired(),
      votesTotal: this.votesTotal(),
      votersTotal: this.votersTotal()
    };
    return {
      id: this.id,
      answers: this.answers,
      info
    };
  }
}
// One selectable answer. Voting starts at 1 (the creator's own vote) and ids
// are taken from the module-wide answerIdIncrementer counter.
class Answer {
  constructor(answer) {
    this.answer = answer;
    this.id = answerIdIncrementer++;
    this.votes = 1;
  }

  // Register one additional vote for this answer.
  incrementVote() {
    this.votes = this.votes + 1;
  }
}
// Shared mock answers.
// NOTE(review): the SAME three Answer instances are reused by all three polls,
// so a vote registered through one poll shows up in every poll — fine for mock
// data, but do not rely on per-poll counts.
const answer1 = new Answer("Yes, thank you i would like so.");
const answer2 = new Answer("No, why would you ever suggest that.");
const answer3 = new Answer("Idunno, maybe. But does it really matter?");

// Expired poll whose results may be shown.
// Bug fix: "2022-04-31" is not a real date (April has 30 days); JavaScript's
// Date parser turned it into an Invalid Date, so isDateInThePast() always
// returned false and poll1 never expired. Use the last valid day of April.
const poll1 = new Poll(
  "c7cbb406-69ce-494d-ba46-176ccb73af5d",
  "Is this the question you were looking for?",
  [answer1, answer2, answer3],
  "2022-04-30T09:58:19.036Z",
  true
);
// Expired poll whose results stay hidden.
const poll2 = new Poll(
  "eaa5c688-dd78-49bb-8d2c-a0e10776258d",
  "Another question with the same answers",
  [answer1, answer2, answer3],
  "2021-03-31T09:58:19.036Z",
  false
);
// Open poll with no expiry date.
const poll3 = new Poll(
  "c3a0d9df-2a43-4ace-9662-a3b803e9e41c",
  "A third and final question",
  [answer1, answer2, answer3]
);

export const polls = [poll1, poll2, poll3];
|
tjx666/leetcode-javascript | src/0101-Symmetric Tree/isSymmetric1.js | <reponame>tjx666/leetcode-javascript
/**
 * Determines whether a binary tree is a mirror of itself (symmetric around
 * its center).
 *
 * Recursive approach: a tree is symmetric iff its left and right subtrees are
 * mirror images of each other. Two trees are mirrors when their roots are
 * equal, the left child of one mirrors the right child of the other, and the
 * right child of one mirrors the left child of the other.
 *
 * @param {TreeNode} root
 * @return {boolean}
 */
function isSymmetric(root) {
  // An empty tree is trivially symmetric.
  if (root == null) return true;
  return isMirror(root.left, root.right);
}
/**
 * Checks whether two subtrees are mirror images of each other: equal root
 * values, and each one's left subtree mirrors the other's right subtree.
 *
 * @param {TreeNode} left
 * @param {TreeNode} right
 * @return {boolean}
 */
function isMirror(left, right) {
  // When at least one side is missing, they mirror only if both are missing.
  if (left == null || right == null) {
    return left == right;
  }
  if (left.val !== right.val) {
    return false;
  }
  // Cross-compare the children: outer pair and inner pair.
  return isMirror(left.left, right.right) && isMirror(left.right, right.left);
}
module.exports = isSymmetric;
|
Karun842002/discord-bot-GenuineGenie | cmds/economy/balance.js | <filename>cmds/economy/balance.js
const Discord = require('discord.js');
const Commando = require('discord.js-commando');
const getUser = require('@utils/getUser');
// Array of member IDs who have claimed their daily rewards in the last 24hrs.
// Cleared every 20 minutes by the self-rescheduling timer below.
// NOTE(review): the original comment said "every 10 mins" but the timeout is
// 20 * 60 * 1000 ms — confirm which interval was intended.
// NOTE(review): claimedCache is never read in this file; it may be dead state
// left over from a daily-reward command.
let claimedCache = [];
const clearCache = () => {
  claimedCache = [];
  // Re-arm the timer so the cache keeps being wiped for the process lifetime.
  setTimeout(clearCache, 20 * 60 * 1000);
};
clearCache();
// Commando command `bal` (alias `balance`): shows a member's coin wallet and
// vault balance in an embed.
// NOTE(review): the class is named `kickCommand` but implements the balance
// command — looks copy-pasted from a kick command; rename when convenient.
module.exports = class kickCommand extends (
  Commando.Command
) {
  constructor(client) {
    super(client, {
      name: 'bal',
      aliases: ['balance'],
      group: 'economy',
      memberName: 'bal',
      description: 'Displays the balance of a user',
      format: "<user's @>",
      // Per-user rate limit: at most 3 uses every 10 seconds.
      throttling: {
        usages: 3,
        duration: 10,
      },
      argsType: 'multiple',
    });
  }

  // Resolves the target (first mention, falling back to the author), fetches
  // their economy record and replies with an embed of coins and vault usage.
  async run(message) {
    const target = message.mentions.users.first() || message.author;
    const targetId = target.id;
    const name = target.username;
    const result = await getUser(name, targetId);
    let desc = `coins:\t **${result.coins}**`;
    desc += `\nvault:\t **${result.vault_coins}/${result.vault_size}**`;
    const embed = new Discord.MessageEmbed().setTitle(`${name}'s richness`).setDescription(desc);
    //message.channel.send(`<@${targetId}> has **${coins}** coins`);
    message.channel.send(embed);
  }
};
|
llama-0/java_in_examples | collections/src/com/github/vedenin/eng/collections/multiset/GsMutableBagTest.java | package com.github.vedenin.eng.collections.multiset;
import com.gs.collections.api.bag.MutableBag;
import com.gs.collections.impl.bag.mutable.HashBag;
import java.util.Arrays;
// Attention: version with russian comments in "rus" package
/**
 * Demonstrates GS Collections' {@code MutableBag} (multiset): word counting,
 * distinct elements, per-element occurrence counts and sizes.
 * (A version with Russian comments lives in the "rus" package.)
 */
public class GsMutableBagTest {
    public static void main(String[] args) {
        // Sentence under test, split into individual words.
        String INPUT_TEXT = "Hello World! Hello All! Hi World!";
        MutableBag<String> wordBag = HashBag.newBag(Arrays.asList(INPUT_TEXT.split(" ")));

        // All words including duplicates, e.g. [Hi, World!, World!, Hello, Hello, All!] (order undefined).
        System.out.println(wordBag);
        // Distinct words only, e.g. [Hi, Hello, World!, All!] (order undefined).
        System.out.println(wordBag.toSet());

        // Occurrence counts: 2, 2, 1, 1, and 0 for a word that never appears.
        String[] labels = { "Hello", "World", "All", "Hi", "Empty" };
        String[] keys = { "Hello", "World!", "All!", "Hi", "Empty" };
        for (int i = 0; i < labels.length; i++) {
            System.out.println(labels[i] + " = " + wordBag.occurrencesOf(keys[i]));
        }

        // Total word count (6) and distinct word count (4).
        System.out.println(wordBag.size());
        System.out.println(wordBag.toSet().size());
    }
}
|
Miven666/SpringBoot-learing | springboot-dds/springboot-dds-jpa/src/main/java/com/miven/springboot/dds/jpa/master/service/TenantService.java | <reponame>Miven666/SpringBoot-learing
package com.miven.springboot.dds.jpa.master.service;
import com.miven.springboot.dds.jpa.master.model.Tenant;
/**
 * Business-layer service for tenants.
 *
 * @author mingzhi.xie
 * @date 2019/12/17
 * @since 1.0
 */
public interface TenantService {

    /**
     * Looks up a tenant by its identity name.
     *
     * @param name the tenant's identity name
     * @return the matching tenant
     */
    Tenant findByName(String name);
} |
dgpgdev/junctions | src/map.js | import curry from './curry'
/**
 * Transforms each item of a list with the given function (curried, so it can
 * be partially applied with just the function).
 * @memberOf List
 * @function map
 * @param {function} fn - the mapping function applied to each item
 * @param {array} array - the list of items to transform
 * @instance
 * @returns {array} a new array of transformed items
 * @example {@lang javascript}
 * const x = item => item + 1
 * const y = [43, 45, 46]
 * console.log(map(x, y)) // returns [44, 46, 47]
 */
const map = curry((fn, array) => array.map(fn))
export default map
|
bozhnyukAlex/SPBU_hometasks | SEMESTER #3/FiltersJava/out/production/FiltersJava/generated/Test/generated/Measure_jmhType.java | package Test.generated;
// JMH-generated benchmark-state holder for Measure, produced by the JMH
// annotation processor (final link in the _B1/_B2/_B3 padding-class chain).
// Auto-generated — do not edit by hand.
public class Measure_jmhType extends Measure_jmhType_B3 {
}
|
ScalablyTyped/SlinkyTyped | r/react-native/src/main/scala/typingsSlinky/reactNative/mod/ThemeAttributeBackgroundPropType.scala | <filename>r/react-native/src/main/scala/typingsSlinky/reactNative/mod/ThemeAttributeBackgroundPropType.scala
package typingsSlinky.reactNative.mod
import typingsSlinky.reactNative.reactNativeStrings.ThemeAttrAndroid
import org.scalablytyped.runtime.StObject
import scala.scalajs.js
import scala.scalajs.js.`|`
import scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}
/** ScalablyTyped-generated Scala.js facade for React Native's Android
  * `ThemeAttributeBackgroundPropType`. Auto-generated — do not edit by hand.
  */
@js.native
trait ThemeAttributeBackgroundPropType
  extends BaseBackgroundPropType
  with BackgroundPropType {

  // Name of the Android theme attribute backing this background.
  var attribute: String = js.native

  // Discriminator field; renamed on the Scala side because `type` is a
  // keyword — it is still emitted as "type" in JavaScript via @JSName.
  @JSName("type")
  var type_ThemeAttributeBackgroundPropType: ThemeAttrAndroid = js.native
}

/** Companion with a literal-building `apply` and fluent field setters. */
object ThemeAttributeBackgroundPropType {

  @scala.inline
  def apply(attribute: String, `type`: ThemeAttrAndroid): ThemeAttributeBackgroundPropType = {
    val __obj = js.Dynamic.literal(attribute = attribute.asInstanceOf[js.Any])
    __obj.updateDynamic("type")(`type`.asInstanceOf[js.Any])
    __obj.asInstanceOf[ThemeAttributeBackgroundPropType]
  }

  @scala.inline
  implicit class ThemeAttributeBackgroundPropTypeMutableBuilder[Self <: ThemeAttributeBackgroundPropType] (val x: Self) extends AnyVal {

    @scala.inline
    def setAttribute(value: String): Self = StObject.set(x, "attribute", value.asInstanceOf[js.Any])

    @scala.inline
    def setType(value: ThemeAttrAndroid): Self = StObject.set(x, "type", value.asInstanceOf[js.Any])
  }
}
|
adammichaelwilliams/prisma | server/servers/api/src/test/scala/com/prisma/api/mutations/nonEmbedded/nestedMutations/NestedCreateMutationInsideUpdateSpec.scala | package com.prisma.api.mutations.nonEmbedded.nestedMutations
import com.prisma.api.ApiSpecBase
import com.prisma.shared.models.ConnectorCapability.JoinRelationLinksCapability
import com.prisma.shared.schema_dsl.SchemaDsl
import org.scalatest.{FlatSpec, Matchers}
/** Integration specs for nested `create` mutations executed inside an `update`
  * mutation, covering each parent/child relation cardinality from
  * [[SchemaBase]] (P1!/C1!, P1!/C1, P1/C1, PM/C1!, PM/C1, P1!/CM, P1/CM,
  * PM/CM) plus ad-hoc one-to-many / many-to-one Todo/Comment schemas.
  *
  * Each test sets up a schema, seeds data with a top-level create, runs an
  * update containing a nested create, and asserts the GraphQL response and
  * (for active connectors) the relation-table row count.
  */
class NestedCreateMutationInsideUpdateSpec extends FlatSpec with Matchers with ApiSpecBase with SchemaBase {
  // Relation-link specs only run on connectors that model relations as links.
  override def runOnlyForCapabilities = Set(JoinRelationLinksCapability)

  // Replacing the child of a required 1:1 relation must fail: the previous
  // child would be left without its required parent.
  "a P1! to C1! relation" should "error since old required parent relation would be broken" in {
    val project = SchemaDsl.fromString() { schemaP1reqToC1req }
    database.setup(project)

    val res = server
      .query(
        """mutation {
          | createParent(data: {
          | p: "p1"
          | childReq: {
          | create: {c: "c1"}
          | }
          | }){
          | id
          | childReq{
          | id
          | }
          | }
          |}""".stripMargin,
        project
      )

    val parentId = res.pathAsString("data.createParent.id")

    ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(1) }

    server.queryThatMustFail(
      s"""
         |mutation {
         | updateParent(
         | where: {id: "$parentId"}
         | data:{
         | p: "p2"
         | childReq: {create: {c: "SomeC"}}
         | }){
         | p
         | childReq {
         | c
         | }
         | }
         |}
      """.stripMargin,
      project,
      errorCode = 3042,
      errorContains = "The change you are trying to make would violate the required relation 'ChildToParent' between Child and Parent"
    )

    // Relation row count unchanged: the failed mutation had no effect.
    ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(1) }
  }

  // Required parent side, optional child side: the old child can be orphaned,
  // so the nested create replaces it.
  "a P1! to C1 relation" should "work" in {
    val project = SchemaDsl.fromString() { schemaP1reqToC1opt }
    database.setup(project)

    val res = server
      .query(
        """mutation {
          | createParent(data: {
          | p: "p1"
          | childReq: {
          | create: {c: "c1"}
          | }
          | }){
          | id
          | childReq{
          | id
          | }
          | }
          |}""".stripMargin,
        project
      )

    val parentId = res.pathAsString("data.createParent.id")

    ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(1) }

    val res2 = server.query(
      s"""
         |mutation {
         | updateParent(
         | where: {id: "$parentId"}
         | data:{
         | p: "p2"
         | childReq: {create: {c: "SomeC"}}
         | }){
         | childReq {
         | c
         | }
         | }
         |}
      """.stripMargin,
      project
    )

    res2.toString should be("""{"data":{"updateParent":{"childReq":{"c":"SomeC"}}}}""")

    ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(1) }
  }

  // Both sides optional: nested create simply swaps the linked child.
  "a P1 to C1 relation " should "work" in {
    val project = SchemaDsl.fromString() { schemaP1optToC1opt }
    database.setup(project)

    val res = server
      .query(
        """mutation {
          | createParent(data: {
          | p: "p1"
          | childOpt: {
          | create: {c: "c1"}
          | }
          | }){
          | id
          | childOpt{
          | id
          | }
          | }
          |}""".stripMargin,
        project
      )

    val parentId = res.pathAsString("data.createParent.id")

    ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(1) }

    val res2 = server.query(
      s"""
         |mutation {
         | updateParent(
         | where:{id: "$parentId"}
         | data:{
         | p: "p2"
         | childOpt: {create: {c: "SomeC"}}
         | }){
         | childOpt {
         | c
         | }
         | }
         |}
      """.stripMargin,
      project
    )

    res2.toString should be("""{"data":{"updateParent":{"childOpt":{"c":"SomeC"}}}}""")

    ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(1) }
  }

  // Optional 1:1 where the parent starts with no child at all.
  "a P1 to C1 relation with the parent without a relation" should "work" in {
    val project = SchemaDsl.fromString() { schemaP1optToC1opt }
    database.setup(project)

    val parent1Id = server
      .query(
        """mutation {
          | createParent(data: {p: "p1"})
          | {
          | id
          | }
          |}""".stripMargin,
        project
      )
      .pathAsString("data.createParent.id")

    ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(0) }

    val res = server.query(
      s"""
         |mutation {
         | updateParent(
         | where:{id: "$parent1Id"}
         | data:{
         | p: "p2"
         | childOpt: {create: {c: "SomeC"}}
         | }){
         | childOpt {
         | c
         | }
         | }
         |}
      """.stripMargin,
      project
    )

    res.toString should be("""{"data":{"updateParent":{"childOpt":{"c":"SomeC"}}}}""")

    ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(1) }
  }

  // Many-children side: a nested create appends a new child to the list.
  "a PM to C1! relation with a child already in a relation" should "work" in {
    val project = SchemaDsl.fromString() { schemaPMToC1req }
    database.setup(project)

    server.query(
      """mutation {
        | createParent(data: {
        | p: "p1"
        | childrenOpt: {
        | create: {c: "c1"}
        | }
        | }){
        | childrenOpt{
        | c
        | }
        | }
        |}""".stripMargin,
      project
    )

    ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(1) }

    val res = server.query(
      s"""
         |mutation {
         | updateParent(
         | where: {p: "p1"}
         | data:{
         | childrenOpt: {create: {c: "c2"}}
         | }){
         | childrenOpt {
         | c
         | }
         | }
         |}
      """.stripMargin,
      project
    )

    res.toString should be("""{"data":{"updateParent":{"childrenOpt":[{"c":"c1"},{"c":"c2"}]}}}""")

    ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(2) }
  }

  // Required child side again, addressed by the parent's unique field: the
  // swap would orphan c1, so it must fail.
  "a P1 to C1! relation with the parent and a child already in a relation" should "error in a nested mutation by unique" in {
    val project = SchemaDsl.fromString() { schemaP1optToC1req }
    database.setup(project)

    server.query(
      """mutation {
        | createParent(data: {
        | p: "p1"
        | childOpt: {
        | create: {c: "c1"}
        | }
        | }){
        | childOpt{
        | c
        | }
        | }
        |}""".stripMargin,
      project
    )

    ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(1) }

    server.queryThatMustFail(
      s"""
         |mutation {
         | updateParent(
         | where: {p: "p1"}
         | data:{
         | childOpt: {create: {c: "c2"}}
         | }){
         | childOpt {
         | c
         | }
         | }
         |}
      """.stripMargin,
      project,
      errorCode = 3042,
      errorContains = "The change you are trying to make would violate the required relation 'ChildToParent' between Child and Parent"
    )
  }

  // Same cardinality but the parent has no child yet, so the create succeeds.
  "a P1 to C1! relation with the parent not already in a relation" should "work in a nested mutation by unique" in {
    val project = SchemaDsl.fromString() { schemaP1optToC1req }
    database.setup(project)

    server.query(
      """mutation {
        | createParent(data: {
        | p: "p1"
        |
        | }){
        | p
        | }
        |}""".stripMargin,
      project
    )

    ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(0) }

    val res = server.query(
      s"""
         |mutation {
         | updateParent(
         | where: {p: "p1"}
         | data:{
         | childOpt: {create: {c: "c1"}}
         | }){
         | childOpt {
         | c
         | }
         | }
         |}
      """.stripMargin,
      project
    )

    res.toString should be("""{"data":{"updateParent":{"childOpt":{"c":"c1"}}}}""")

    ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(1) }
  }

  // Appending to an existing child list via nested create.
  "a PM to C1 relation with the parent already in a relation" should "work through a nested mutation by unique" in {
    val project = SchemaDsl.fromString() { schemaPMToC1opt }
    database.setup(project)

    server
      .query(
        """mutation {
          | createParent(data: {
          | p: "p1"
          | childrenOpt: {
          | create: [{c: "c1"}, {c: "c2"}]
          | }
          | }){
          | childrenOpt{
          | c
          | }
          | }
          |}""".stripMargin,
        project
      )

    ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(2) }

    val res = server.query(
      s"""
         |mutation {
         | updateParent(
         | where: { p: "p1"}
         | data:{
         | childrenOpt: {create: [{c: "c3"}]}
         | }){
         | childrenOpt {
         | c
         | }
         | }
         |}
      """.stripMargin,
      project
    )

    res.toString should be("""{"data":{"updateParent":{"childrenOpt":[{"c":"c1"},{"c":"c2"},{"c":"c3"}]}}}""")

    ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(3) }
  }

  // Required single child on the parent side, many parents on the child side:
  // the nested create replaces the linked child.
  "a P1! to CM relation with the parent already in a relation" should "work through a nested mutation by unique" in {
    val project = SchemaDsl.fromString() { schemaP1reqToCM }
    database.setup(project)

    server.query(
      """mutation {
        | createParent(data: {
        | p: "p1"
        | childReq: {
        | create: {c: "c1"}
        | }
        | }){
        | childReq{
        | c
        | }
        | }
        |}""".stripMargin,
      project
    )

    ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(1) }

    val res = server.query(
      s"""
         |mutation {
         | updateParent(
         | where: {p: "p1"}
         | data:{
         | childReq: {create: {c: "c2"}}
         | }){
         | childReq {
         | c
         | }
         | }
         |}
      """.stripMargin,
      project
    )

    res.toString should be("""{"data":{"updateParent":{"childReq":{"c":"c2"}}}}""")

    ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(1) }
  }

  // Optional single child, many parents: swapping leaves the old child
  // around but unlinked (verified by the follow-up children query).
  "a P1 to CM relation with the child already in a relation" should "work through a nested mutation by unique" in {
    val project = SchemaDsl.fromString() { schemaP1optToCM }
    database.setup(project)

    server.query(
      """mutation {
        | createParent(data: {
        | p: "p1"
        | childOpt: {
        | create: {c: "c1"}
        | }
        | }){
        | childOpt{
        | c
        | }
        | }
        |}""".stripMargin,
      project
    )

    ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(1) }

    val res = server.query(
      s"""
         |mutation {
         | updateParent(
         | where: {p: "p1"}
         | data:{
         | childOpt: {create: {c: "c2"}}
         | }){
         | childOpt{
         | c
         | }
         | }
         |}
      """.stripMargin,
      project
    )

    res.toString should be("""{"data":{"updateParent":{"childOpt":{"c":"c2"}}}}""")

    server.query(s"""query{children{c, parentsOpt{p}}}""", project).toString should be(
      """{"data":{"children":[{"c":"c1","parentsOpt":[]},{"c":"c2","parentsOpt":[{"p":"p1"}]}]}}""")

    ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(1) }
  }

  // NOTE(review): despite the "disconnectable" wording (likely copy-pasted
  // from a disconnect spec), this test exercises a nested CREATE on a PM/CM
  // relation and checks that all three children end up linked.
  "a PM to CM relation with the children already in a relation" should "be disconnectable through a nested mutation by unique" in {
    val project = SchemaDsl.fromString() { schemaPMToCM }
    database.setup(project)

    server.query(
      """mutation {
        | createParent(data: {
        | p: "p1"
        | childrenOpt: {
        | create: [{c: "c1"},{c: "c2"}]
        | }
        | }){
        | childrenOpt{
        | c
        | }
        | }
        |}""".stripMargin,
      project
    )

    ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(2) }

    val res = server.query(
      s"""
         |mutation {
         | updateParent(
         | where: { p: "p1"}
         | data:{
         | childrenOpt: {create: [{c: "c3"}]}
         | }){
         | childrenOpt{
         | c
         | }
         | }
         |}
      """.stripMargin,
      project
    )

    res.toString should be("""{"data":{"updateParent":{"childrenOpt":[{"c":"c1"},{"c":"c2"},{"c":"c3"}]}}}""")

    server.query(s"""query{children{c, parentsOpt{p}}}""", project).toString should be(
      """{"data":{"children":[{"c":"c1","parentsOpt":[{"p":"p1"}]},{"c":"c2","parentsOpt":[{"p":"p1"}]},{"c":"c3","parentsOpt":[{"p":"p1"}]}]}}""")

    ifConnectorIsActive { dataResolver(project).countByTable("_ChildToParent").await should be(3) }
  }

  // Ad-hoc Todo/Comment schema: nested create on the many side of a 1:n.
  "a one to many relation" should "be creatable through a nested mutation" in {
    val project = SchemaDsl.fromString() {
      """type Comment{
        | id: ID! @unique
        | text: String
        | todo: Todo
        |}
        |
        |type Todo{
        | id: ID! @unique
        | comments: [Comment]
        |}"""
    }
    database.setup(project)

    val createResult = server.query(
      """mutation {
        | createTodo(data:{}){
        | id
        | }
        |}
      """.stripMargin,
      project
    )
    val id = createResult.pathAsString("data.createTodo.id")

    val result = server.query(
      s"""mutation {
         | updateTodo(
         | where: {
         | id: "$id"
         | }
         | data:{
         | comments: {
         | create: [{text: "comment1"}, {text: "comment2"}]
         | }
         | }
         | ){
         | comments {
         | text
         | }
         | }
         |}
      """.stripMargin,
      project
    )

    mustBeEqual(result.pathAsJsValue("data.updateTodo.comments").toString, """[{"text":"comment1"},{"text":"comment2"}]""")
  }

  // Nested create on the one side of a n:1, addressed by id.
  "a many to one relation" should "be creatable through a nested mutation" in {
    val project = SchemaDsl.fromString() {
      """type Comment{
        | id: ID! @unique
        | text: String
        | todo: Todo
        |}
        |
        |type Todo{
        | id: ID! @unique
        | title: String
        | comments: [Comment]
        |}"""
    }
    database.setup(project)

    val createResult = server.query(
      """mutation {
        | createComment(data:{}){
        | id
        | }
        |}
      """.stripMargin,
      project
    )
    val id = createResult.pathAsString("data.createComment.id")

    val result = server.query(
      s"""
         |mutation {
         | updateComment(
         | where: {
         | id: "$id"
         | }
         | data: {
         | todo: {
         | create: {title: "todo1"}
         | }
         | }
         | ){
         | id
         | todo {
         | title
         | }
         | }
         |}
      """.stripMargin,
      project
    )
    mustBeEqual(result.pathAsString("data.updateComment.todo.title"), "todo1")
  }

  // Same as above, but the updated record is addressed via a non-id unique
  // field (`text`).
  "a many to one relation" should "be creatable through a nested mutation using non-id unique field" in {
    val project = SchemaDsl.fromString() {
      """type Comment{
        | id: ID! @unique
        | text: String! @unique
        | todo: Todo
        |}
        |
        |type Todo{
        | id: ID! @unique
        | title: String! @unique
        | comments: [Comment]
        |}"""
    }
    database.setup(project)

    server.query(
      """mutation {
        | createComment(data:{ text: "comment"}){
        | id
        | text
        | }
        |}
      """.stripMargin,
      project
    )

    val result = server.query(
      s"""
         |mutation {
         | updateComment(
         | where: {
         | text: "comment"
         | }
         | data: {
         | todo: {
         | create: {title: "todo1"}
         | }
         | }
         | ){
         | id
         | todo {
         | title
         | }
         | }
         |}
      """.stripMargin,
      project
    )
    mustBeEqual(result.pathAsString("data.updateComment.todo.title"), "todo1")
  }
}
|
brezillon/opensplice | src/api/cm/xml/code/cmx_participant.c | <filename>src/api/cm/xml/code/cmx_participant.c
/*
* Vortex OpenSplice
*
* This software and documentation are Copyright 2006 to TO_YEAR ADLINK
* Technology Limited, its affiliated companies and licensors. All rights
* reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#include "cmx__participant.h"
#include "cmx_participant.h"
#include "cmx__entity.h"
#include "cmx__factory.h"
#include "cmx_factory.h"
#include "u_serviceManager.h"
#include "u_participant.h"
#include "u_entity.h"
#include "u_observable.h"
#include "v_participant.h"
#include "v_serviceState.h"
#include "v_observable.h"
#include "v_observer.h"
#include "v__serviceManager.h"
#include "v_event.h"
#include "v_participantQos.h"
#include "sd_serializerXMLMetadata.h"
#include "vortex_os.h"
#include <stdio.h>
/* Creates, enables and registers a new participant for the Control &
 * Monitoring API.  'domainId' is parsed as a decimal number; an empty string
 * selects U_DOMAIN_ID_ANY.  A non-empty 'qos' is applied as XML qos before
 * the entity is enabled.  Returns the XML entity description of the
 * registered participant, or NULL on failure (errors are logged via
 * OS_REPORT and the partially constructed participant is freed).
 */
c_char*
cmx_participantNew(
    const c_char* uri,
    const c_char* domainId,
    c_long timeout,
    const c_char* name,
    const c_char* qos)
{
    u_participant p;
    u_result ur;
    u_domainId_t did;
    int pos;
    c_char* result;
    const c_char* context;

    if (*domainId == '\0') {
        /* An empty domainId means: attach to any domain. */
        did = U_DOMAIN_ID_ANY;
    } else if (sscanf (domainId,"%d%n", &did, &pos) != 1 || domainId[pos] != '\0') {
        /* The %n + trailing-character check rejects partial matches like "42abc". */
        OS_REPORT(OS_ERROR, CM_XML_CONTEXT, 0,
                  "cmx_participantNew failed (reason: illegal argument: domainId \"%s\").",
                  domainId);
        return NULL;
    }
    /* Negative timeouts are clamped to 0 (no waiting). */
    p = u_participantNew(uri, did, timeout > 0 ? (unsigned)timeout : 0, name, NULL, TRUE);
    if(p == NULL){
        /* Error reported by u_participantNew() */
        goto err_u_participantNew;
    }
    if(qos && *qos){
        if((ur = u_entitySetXMLQos(u_entity(p), qos)) != U_RESULT_OK) {
            context = "u_entitySetXMLQos";
            goto err_entity;
        }
    }
    if((ur = u_entityEnable(u_entity(p))) != U_RESULT_OK) {
        context = "u_entityEnable";
        goto err_entity;
    }
    if((ur = cmx_entityRegister(u_object(p), NULL, &result)) != U_RESULT_OK) {
        context = "cmx_entityRegister";
        goto err_entity;
    }
    return result;

/* Error handling */
err_entity:
    OS_REPORT(OS_ERROR, CM_XML_CONTEXT, 0,
              "cmx_participantNew failed (reason: %s returned %u).",
              context, ur);
    u_objectFree(u_object(p));
err_u_participantNew:
    return NULL;
}
/* Produce the static XML fragment identifying the participant entity kind.
 * The returned string is heap-allocated; ownership passes to the caller.
 * The entity argument is only type-checked, never dereferenced. */
c_char*
cmx_participantInit(
    v_participant entity)
{
    static const char kindXml[] = "<kind>PARTICIPANT</kind>";

    assert(C_TYPECHECK(entity, v_participant));
    OS_UNUSED_ARG(entity);

    return (c_char*)os_strdup(kindXml);
}
/* Return an XML list describing every participant in the kernel that the
 * given (XML-identified) participant belongs to, or NULL if the entity
 * cannot be claimed or the walk fails. Caller owns the returned string. */
c_char*
cmx_participantAllParticipants(
    const c_char* participant)
{
    cmx_walkEntityArg arg;
    u_result ur;
    c_char* result;
    cmx_entity ce;

    result = NULL;
    ce = cmx_entityClaim(participant);

    if(ce != NULL){
        arg = cmx_walkEntityArg(os_malloc(C_SIZEOF(cmx_walkEntityArg)));
        if (arg != NULL) {
            /* Walk state: accumulated XML fragments plus their total length. */
            arg->length = 0;
            arg->list = NULL;
            arg->entityArg.entity = ce;
            arg->entityArg.create = TRUE;
            arg->entityArg.result = NULL;
            /* The action runs inside the kernel's protected scope. */
            ur = u_observableAction(u_observable(ce->uentity),
                                    cmx_participantParticipantsAction,
                                    (c_voidp)arg);
            if (ur == U_RESULT_OK) {
                result = cmx_convertToXMLList(arg->list, arg->length);
            }
            os_free(arg);
        }
        /* Release must always balance the successful claim above. */
        cmx_entityRelease(ce);
    }
    return result;
}
/* Kernel-scope walk action: collect the XML representation of every
 * participant ("*" match) into the cmx_walkEntityArg passed via args.
 * Fragments are prepended to arg->list; arg->length tracks total size. */
void
cmx_participantParticipantsAction(
    v_public p,
    c_voidp args)
{
    cmx_walkEntityArg arg;
    c_iter participants;
    v_entity participant;
    c_bool proceed;
    c_char* xmlEntity;

    arg = cmx_walkEntityArg(args);
    participants = v_resolveParticipants(v_objectKernel(p), "*");
    participant = v_entity(c_iterTakeFirst(participants));

    while(participant != NULL){
        proceed = cmx_entityNewFromWalk(v_public(participant), &arg->entityArg);
        if(proceed == TRUE){
            xmlEntity = arg->entityArg.result;
            arg->list = c_iterInsert(arg->list, xmlEntity);
            arg->length += strlen(xmlEntity);
        }
        /* v_resolveParticipants keeps a ref on each element; drop it. */
        c_free(participant);
        participant = v_entity(c_iterTakeFirst(participants));
    }
    c_iterFree(participants);
}
/* Return an XML list describing every topic in the participant's kernel,
 * or NULL on claim/walk failure. Caller owns the returned string. */
c_char*
cmx_participantAllTopics(
    const c_char* participant)
{
    u_result ur;
    cmx_walkEntityArg arg;
    c_char* result;
    cmx_entity ce;

    result = NULL;
    ce = cmx_entityClaim(participant);

    if (ce != NULL) {
        arg = cmx_walkEntityArg(os_malloc(C_SIZEOF(cmx_walkEntityArg)));
        if (arg != NULL){
            /* Walk state: accumulated XML fragments plus their total length. */
            arg->length = 0;
            arg->list = NULL;
            arg->entityArg.entity = ce;
            arg->entityArg.create = TRUE;
            arg->entityArg.result = NULL;
            ur = u_observableAction(u_observable(ce->uentity),
                                    cmx_participantTopicsAction,
                                    (c_voidp)arg);
            if (ur == U_RESULT_OK) {
                result = cmx_convertToXMLList(arg->list, arg->length);
            }
            os_free(arg);
        }
        /* Release must always balance the successful claim above. */
        cmx_entityRelease(ce);
    }
    return result;
}
/* Kernel-scope walk action: collect the XML representation of every topic
 * ("*" match) into the cmx_walkEntityArg passed via args. */
void
cmx_participantTopicsAction(
    v_public p,
    c_voidp args)
{
    cmx_walkEntityArg arg;
    c_iter topics;
    v_entity topic;
    c_bool proceed;
    c_char* xmlEntity;

    arg = cmx_walkEntityArg(args);
    topics = v_resolveTopics(v_objectKernel(p), "*");
    topic = v_entity(c_iterTakeFirst(topics));

    while(topic != NULL){
        proceed = cmx_entityNewFromWalk(v_public(topic), &arg->entityArg);
        if(proceed == TRUE){
            xmlEntity = arg->entityArg.result;
            arg->list = c_iterInsert(arg->list, xmlEntity);
            arg->length += strlen(xmlEntity);
        }
        /* v_resolveTopics keeps a ref on each element; drop it. */
        c_free(topic);
        topic = v_entity(c_iterTakeFirst(topics));
    }
    c_iterFree(topics);
}
/* Return an XML list describing every partition ("domain" in legacy C&M
 * terminology) in the participant's kernel, or NULL on claim/walk failure.
 * Caller owns the returned string. */
c_char*
cmx_participantAllDomains(
    const c_char* participant)
{
    u_result ur;
    cmx_walkEntityArg arg;
    c_char* result;
    cmx_entity ce;

    result = NULL;
    ce = cmx_entityClaim(participant);

    if(ce != NULL){
        arg = cmx_walkEntityArg(os_malloc(C_SIZEOF(cmx_walkEntityArg)));
        if (arg != NULL) {
            /* Walk state: accumulated XML fragments plus their total length. */
            arg->length = 0;
            arg->list = NULL;
            arg->entityArg.entity = ce;
            arg->entityArg.create = TRUE;
            arg->entityArg.result = NULL;
            ur = u_observableAction(u_observable(ce->uentity),
                                    cmx_participantDomainsAction,
                                    (c_voidp)arg);
            if (ur == U_RESULT_OK) {
                result = cmx_convertToXMLList(arg->list, arg->length);
            }
            os_free(arg);
        }
        /* Release must always balance the successful claim above. */
        cmx_entityRelease(ce);
    }
    return result;
}
/* Kernel-scope walk action: collect the XML representation of every
 * partition ("*" match) into the cmx_walkEntityArg passed via args. */
void
cmx_participantDomainsAction(
    v_public p,
    c_voidp args)
{
    cmx_walkEntityArg arg;
    c_iter partitions;
    v_entity partition;
    c_bool proceed;
    c_char* xmlEntity;

    arg = cmx_walkEntityArg(args);
    partitions = v_resolvePartitions(v_objectKernel(p), "*");
    partition = v_entity(c_iterTakeFirst(partitions));

    while(partition != NULL){
        proceed = cmx_entityNewFromWalk(v_public(partition), &arg->entityArg);
        if(proceed == TRUE){
            xmlEntity = arg->entityArg.result;
            arg->list = c_iterInsert(arg->list, xmlEntity);
            arg->length += strlen(xmlEntity);
        }
        /* v_resolvePartitions keeps a ref on each element; drop it. */
        c_free(partition);
        partition = v_entity(c_iterTakeFirst(partitions));
    }
    c_iterFree(partitions);
}
/* Register a data type, described as XML meta-data in 'type', with the
 * kernel of the given participant. Deserializing the meta-data into the
 * kernel's c_base is what performs the registration.
 *
 * Returns CMX_RESULT_OK on success, CMX_RESULT_FAILED otherwise (details
 * are reported via OS_REPORT). */
const c_char*
cmx_participantRegisterType(
    const c_char* participant,
    const c_char* type)
{
    sd_serializer serializer;
    sd_serializedData meta_data;
    cmx_entityKernelArg kernelArg;
    c_type topicType;
    cmx_entity ce;
    const c_char* result;
    const c_char* msg;

    ce = cmx_entityClaim(participant);

    if(ce != NULL){
        /* NOTE(review): os_malloc result is not NULL-checked here —
         * presumably os_malloc aborts on failure; confirm. */
        kernelArg = cmx_entityKernelArg(os_malloc(C_SIZEOF(cmx_entityKernelArg)));
        if (u_observableAction(u_observable(ce->uentity),
                               cmx_entityKernelAction,
                               (c_voidp)kernelArg) == U_RESULT_OK)
        {
            /* Build an XML meta-data serializer on the kernel database. */
            serializer = sd_serializerXMLMetadataNew(c_getBase(c_object(kernelArg->kernel)));
            if(serializer != NULL){
                meta_data = sd_serializerFromString(serializer, type);
                if (meta_data != NULL) {
                    /* Deserialization registers the type; NULL means the
                     * supplied meta-data was invalid. */
                    topicType = c_type(sd_serializerDeserialize(serializer, meta_data));
                    if (topicType == NULL) {
                        msg = sd_serializerLastValidationMessage(serializer);
                        OS_REPORT(OS_ERROR,
                                  CM_XML_CONTEXT, 0,
                                  "Data type could not be registered, "
                                  "because it is not valid: %s",
                                  msg);
                        result = CMX_RESULT_FAILED;
                    } else {
                        result = CMX_RESULT_OK;
                    }
                    sd_serializedDataFree(meta_data);
                } else {
                    OS_REPORT(OS_ERROR, CM_XML_CONTEXT, 0, "Construction of serialized data failed.");
                    result = CMX_RESULT_FAILED;
                }
                sd_serializerFree(serializer);
            } else {
                OS_REPORT(OS_ERROR, CM_XML_CONTEXT, 0, "Serializer could not be initialized");
                result = CMX_RESULT_FAILED;
            }
        } else {
            OS_REPORT(OS_ERROR, CM_XML_CONTEXT, 0, "Kernel object could not be retrieved");
            result = CMX_RESULT_FAILED;
        }
        os_free(kernelArg);
        cmx_entityRelease(ce);
    } else {
        result = CMX_RESULT_FAILED;
    }
    return result;
}
/* Return an XML list of the topics whose name matches 'topicName' (the
 * kernel resolves it as an expression) within the participant's kernel,
 * or NULL on claim/walk failure. Caller owns the returned string. */
c_char*
cmx_participantFindTopic(
    const c_char* participant,
    const c_char* topicName)
{
    u_result ur;
    c_char* topics;
    cmx_walkEntityArg arg;
    cmx_entity ce;

    topics = NULL;
    ce = cmx_entityClaim(participant);

    if(ce != NULL){
        /* Deliberately allocates the *extended* walk struct: the
         * cmx_walkParticipantArg variant carries the topicName field the
         * action below reads. */
        arg = cmx_walkEntityArg(os_malloc(C_SIZEOF(cmx_walkParticipantArg)));
        if (arg != NULL){
            arg->length = 0;
            arg->list = NULL;
            arg->entityArg.entity = ce;
            arg->entityArg.create = TRUE;
            arg->entityArg.result = NULL;
            cmx_walkParticipantArg(arg)->topicName = topicName;
            ur = u_observableAction(u_observable(ce->uentity),
                                    cmx_participantFindTopicAction,
                                    (c_voidp)arg);
            if (ur == U_RESULT_OK) {
                topics = cmx_convertToXMLList(arg->list, arg->length);
            }
            os_free(arg);
        }
        /* Release must always balance the successful claim above. */
        cmx_entityRelease(ce);
    }
    return topics;
}
/* Kernel-scope walk action: like cmx_participantTopicsAction, but resolves
 * only topics matching the name stored in the cmx_walkParticipantArg. */
void
cmx_participantFindTopicAction(
    v_public p,
    c_voidp args)
{
    cmx_walkEntityArg arg;
    c_iter topics;
    v_entity topic;
    c_bool proceed;
    c_char* xmlEntity;

    arg = cmx_walkEntityArg(args);
    topics = v_resolveTopics(v_objectKernel(p), cmx_walkParticipantArg(arg)->topicName);
    topic = v_entity(c_iterTakeFirst(topics));

    while(topic != NULL){
        proceed = cmx_entityNewFromWalk(v_public(topic), &arg->entityArg);
        if(proceed == TRUE){
            xmlEntity = arg->entityArg.result;
            arg->list = c_iterInsert(arg->list, xmlEntity);
            arg->length += strlen(xmlEntity);
        }
        /* v_resolveTopics keeps a ref on each element; drop it. */
        c_free(topic);
        topic = v_entity(c_iterTakeFirst(topics));
    }
    c_iterFree(topics);
}
/* Kernel-scope action used by cmx_participantAutoDetach: subscribe the
 * participant as an observer of the spliced service's state so that
 * cmx_participantDetach gets invoked when spliced changes state. */
static void
cmx_participantInitDetach(
    v_public entity,
    c_voidp args)
{
    v_kernel k;
    v_serviceManager m;
    v_serviceState splicedState;

    OS_UNUSED_ARG(args);

    k = v_objectKernel(entity);
    m = v_getServiceManager(k);
    splicedState = v_serviceManagerGetServiceState(m, V_SPLICED_NAME);
    v_observableAddObserver(v_observable(splicedState), v_observer(entity), V_EVENTMASK_ALL, NULL);
}
/* Listener invoked on service-state events. When the spliced service leaves
 * the INITIALISING/OPERATIONAL states (i.e. is shutting down), detach the
 * C&M API and free the service manager proxy that was passed as usrData by
 * cmx_participantAutoDetach.
 *
 * Returns the (unmodified) event mask, as required by the listener contract. */
static c_ulong
cmx_participantDetach(
    u_observable o,
    c_ulong event,
    c_voidp usrData)
{
    v_serviceStateKind kind;
    u_serviceManager manager;

    /* Fix: only 'o' is genuinely unused. The previous OS_UNUSED_ARG
     * annotations on 'event' and 'usrData' were incorrect — both
     * parameters are read below. */
    OS_UNUSED_ARG(o);

    if ((event & V_EVENT_SERVICESTATE_CHANGED) == V_EVENT_SERVICESTATE_CHANGED) {
        if(cmx_isInitialized() == TRUE){
            manager = (u_serviceManager)usrData;
            if(manager != NULL){
                kind = u_serviceManagerGetServiceStateKind(manager, V_SPLICED_NAME);
                if ((kind != STATE_INITIALISING) && (kind != STATE_OPERATIONAL)) {
                    /* spliced is going down: detach and release the proxy. */
                    cmx_internalDetach();
                    u_objectFree(manager);
                }
            }
        }
    }
    return event;
}
/* Enable or disable automatic detaching of the C&M API when the spliced
 * service of the participant's domain terminates.
 *
 * enable == TRUE:  register cmx_participantDetach as a service-state
 *                  listener (with a fresh service-manager proxy as its
 *                  closure) after subscribing to spliced state events.
 * enable == FALSE: remove the listener again.
 *
 * Returns CMX_RESULT_OK on success, CMX_RESULT_FAILED otherwise. */
const c_char*
cmx_participantAutoDetach(
    const c_char* participant,
    c_bool enable)
{
    u_participant up;
    u_result result;
    cmx_entity ce;

    ce = cmx_entityClaim(participant);
    if (ce == NULL) {
        goto errorGetEntity;
    }
    up = u_participant(ce->uentity);

    if (enable == FALSE) {
        result = u_observableRemoveListener(u_observable(up), cmx_participantDetach);
        if (result != U_RESULT_OK) {
            goto errorRemoveListener;
        }
    } else {
        result = u_observableAction(u_observable(up), cmx_participantInitDetach, NULL);
        if (result != U_RESULT_OK) {
            goto errorEntityAction;
        }
        result = u_observableAddListener(u_observable(up),
                                         V_EVENT_SERVICESTATE_CHANGED,
                                         cmx_participantDetach,
                                         u_serviceManagerNew(up));
        if (result != U_RESULT_OK) {
            goto errorInsertListener;
        }
    }
    cmx_entityRelease(ce);
    return CMX_RESULT_OK;

/* Error handling */
errorInsertListener:
errorEntityAction:
errorRemoveListener:
    /* Fix: the entity was successfully claimed above, so it must be
     * released on these error paths too; previously the claim leaked. */
    cmx_entityRelease(ce);
errorGetEntity:
    return CMX_RESULT_FAILED;
}
/* Return the domain id of the given participant as a freshly allocated
 * decimal string; if the participant cannot be claimed the string encodes
 * U_DOMAIN_ID_INVALID. Caller owns the returned string. */
c_char*
cmx_participantDomainId(
    const c_char* participant)
{
    cmx_entity ce;
    u_participant up;
    c_char* result;
    u_domainId_t did;
    int written;

    ce = cmx_entityClaim(participant);

    if (ce == NULL) {
        did = U_DOMAIN_ID_INVALID;
    } else {
        up = u_participant(ce->uentity);
        did = u_participantGetDomainId(up);
        cmx_entityRelease(ce);
    }

    /* Fix: worst case is a negative 32-bit id — "-2147483648" is 11
     * characters plus the terminating NUL, i.e. 12 bytes. The previous
     * 11-byte buffer ("2147483647" + NUL) overflowed by one byte for
     * negative ids such as U_DOMAIN_ID_INVALID. */
    result = os_malloc(12);
    written = os_sprintf(result, "%d", did);
    assert(written > 0 && written < 12);
    OS_UNUSED_ARG(written);

    return result;
}
|
rio-31/android_frameworks_base-1 | packages/SystemUI/src/com/android/systemui/bubbles/BubbleIconFactory.java | <reponame>rio-31/android_frameworks_base-1<gh_stars>100-1000
/*
* Copyright (C) 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.systemui.bubbles;
import android.content.Context;
import com.android.launcher3.icons.BaseIconFactory;
import com.android.systemui.R;
/**
* Factory for creating normalized bubble icons.
* We are not using Launcher's IconFactory because bubbles only runs on the UI thread,
* so there is no need to manage a pool across multiple threads.
*/
public class BubbleIconFactory extends BaseIconFactory {

    /**
     * Creates a factory sized for bubbles: icons are normalized to the
     * {@code individual_bubble_size} dimension at the current density.
     */
    protected BubbleIconFactory(Context context) {
        super(context, context.getResources().getConfiguration().densityDpi,
                context.getResources().getDimensionPixelSize(R.dimen.individual_bubble_size));
    }

    /** Returns the pixel size of the work-profile badge drawn on bubble icons. */
    public int getBadgeSize() {
        return mContext.getResources().getDimensionPixelSize(
                com.android.launcher3.icons.R.dimen.profile_badge_size);
    }
}
|
hugorebelo/gitlabhq | spec/factories/merge_request_context_commit_diff_file.rb | # frozen_string_literal: true
# Factory for a merge-request context-commit diff file. Defaults model a
# newly added, non-binary regular file named "foo" with an empty diff.
FactoryBot.define do
  factory :merge_request_context_commit_diff_file do
    # Parent context commit this diff-file row belongs to.
    association :merge_request_context_commit

    # Random 40-char hex SHA so rows don't collide on uniqueness constraints.
    sha { Digest::SHA1.hexdigest(SecureRandom.hex) }
    relative_order { 0 }

    # Change-type flags: defaults describe a freshly added file.
    new_file { true }
    renamed_file { false }
    deleted_file { false }
    too_large { false }

    # Git file modes: 0 means no previous mode; 100644 is a regular file.
    a_mode { 0 }
    b_mode { 100644 }
    new_path { 'foo' }
    old_path { 'foo' }
    diff { '' }
    binary { false }
  end
end
|
RickySauce/NFl-Pickems | client/src/actions/seasons/weeks/matchups/lockMatchups.js | <reponame>RickySauce/NFl-Pickems<filename>client/src/actions/seasons/weeks/matchups/lockMatchups.js
/**
 * Thunk action creator: locks all matchups of a week as of `gameTime`.
 * The store is updated first (so the UI locks immediately), then the lock
 * time is persisted to the API as a fire-and-forget PATCH request.
 */
export function lockMatchups(gameTime, weekId) {
  return (dispatch) => {
    dispatch({ type: 'LOCK_MATCHUPS', gameTime });

    const body = JSON.stringify({ game_date_time: gameTime });

    fetch(`/api/matchups/update/${weekId}`, {
      headers: {
        'Accept': 'application/json',
        'Content-Type': 'application/json'
      },
      method: 'PATCH',
      body
    });
  };
}
|
KieranRoberts44/builtjs-theme-corporate-tailwind | components/templates/lists/list-5/list-5.js | <filename>components/templates/lists/list-5/list-5.js
import Link from "next/link";
import Image from "next/image";
import getConfig from "next/config";
// import { format } from "date-fns";
import { Tag } from "@/elements";
export default function List4({ content, router }) {
const { collections } = content;
const { publicRuntimeConfig } = getConfig();
if (!collections) {
throw new Error("No template collections");
}
let collectionName = Object.keys(collections)[0];
let collection = collections[collectionName];
let items;
if (collection) {
items = collection.items;
}
let tag = router && router.query ? router.query.tag : null;
return (
<section id="list-5" className="template">
<div className="max-w-screen-xl mx-auto">
<div className="grid grid-cols-1 gap-x-6 gap-y-16 lg:grid-cols-3">
{items &&
items.map((item) => {
return (
<div key={item.slug}>
<div>
<Link className="w-24" href={`/${collectionName}/${item.slug}`}>
<a>
<div className="relative mb-6 transition-opacity h-96 lg:h-56 hover:opacity-80">
<Image
className="bg-gray-100 rounded-lg"
src={`${publicRuntimeConfig.API_URL || ""}${item.image.url}`}
layout="fill"
objectFit="cover"
alt=""
/>
</div>
</a>
</Link>
</div>
<div>
{/* TODO: Implement Tag functionality */}
{item.tags && (
<div className="grid grid-flow-col gap-2 mb-4 auto-cols-max">
{item.tags.map((tag) => {
return <Tag key={tag.tag} item={tag}></Tag>;
})}
</div>
)}
<div className="flex items-center mb-2">
<p className="mb-0 text-sm capitalize preheading">
{/* TODO: Get article publish date */}
{/* {format(new Date(item.createdAt), "dd LLLL yyyy")} */}
01 January 2022
</p>
<span className="mx-3 text-gray-400">|</span>
{/* TODO: Implement Category functionality */}
{item.category && (
<Link href={`/`}>
<a className="no-underline hover:underline">
<p className="mb-0 text-sm capitalize">{item.category}</p>
</a>
</Link>
)}
</div>
<Link href={`/${collectionName}/${item.slug}`}>
<a className="no-underline">
<h3 className="mb-2 hover:text-gray-700 dark:hover:text-gray-200">{item.title}</h3>
</a>
</Link>
<p>{item.excerpt}</p>
<Link href={`/${collectionName}/${item.slug}`}>
<a>Read Article</a>
</Link>
</div>
</div>
);
})}
</div>
{!items.length && (
<div>
<p>No posts</p>
</div>
)}
</div>
</section>
);
}
|
ev0x/genesis | src/commands/Rooms/SetDefaultCategory.js | <filename>src/commands/Rooms/SetDefaultCategory.js<gh_stars>0
'use strict';
const Command = require('../../models/Command.js');
/**
 * Guild-settings command: sets (or clears) the Discord category that newly
 * created temp rooms are placed under. A valid channel id argument stores it
 * as `tempCategory`; anything else resets the setting to "none".
 */
class SetDefaultCategory extends Command {
  constructor(bot) {
    super(bot, 'settings.defaultcategory', 'set rooms category', 'Set whether or not to the bot should default rooms to being unlocked (public).');
    this.usages = [
      { description: 'Change the bot\'s default category for temp rooms. Defaults to none.', parameters: ['temp room default'] },
    ];
    // Optional numeric channel id after the command call.
    this.regex = new RegExp(`^${this.call}\\s?(\\d+)?$`, 'i');
    this.requiresAuth = true;
    this.allowDM = false;
  }

  /**
   * Run the command
   * @param {Message} message Message with a command to handle, reply to,
   * or perform an action based on parameters.
   * @returns {Promise<string>} success status
   */
  async run(message) {
    const [, category] = message.strippedContent.match(this.regex);
    const isKnownChannel = Boolean(category) && this.bot.client.channels.has(category.trim());

    if (isKnownChannel) {
      await this.settings.setGuildSetting(message.guild, 'tempCategory', category);
    } else {
      // No (valid) category supplied: reset to default.
      await this.settings.deleteGuildSetting(message.guild, 'tempCategory');
    }
    this.messageManager.notifySettingsChange(message, true, true);
    return this.messageManager.statuses.SUCCESS;
  }
}

module.exports = SetDefaultCategory;
|
VanessaMMH/ProgComp2021A | Examen2PC/P2_BrokenKeyboard/BrokenKeyboard.cpp | #include <iostream>
#include <string>
#include <unordered_set>
#include <unordered_map>
using namespace std;
//link: https://www.beecrowd.com.br/judge/en/problems/view/1642
// Solves the "Broken Keyboard" problem (beecrowd 1642): read m (the number
// of distinct characters allowed) until m == 0; for each case read one text
// line and print the length of the longest contiguous substring containing
// at most m distinct characters. Two-pointer sliding window, O(|s|) per case.
int main()
{
    int m;
    while (1)
    {
        cin >> m;
        if (!m)
            break;
        string s;
        // First getline consumes the newline left behind by `cin >> m`;
        // the second reads the actual text line.
        getline(cin, s);
        getline(cin, s);
        auto s_ptr = s.begin();            // left edge of the window
        auto e_ptr = s.begin();            // right edge (next char to admit)
        int maxSoFar = 0;                  // best window length seen so far
        int currentLength = 0;             // current window length
        unordered_set<char> unique;        // distinct chars in the window
        unordered_map<char, int> count;    // per-char occurrences in the window
        while (e_ptr != s.end())
        {
            if(unique.find(*e_ptr) != unique.end())
            {
                // Char already inside the window: just extend to the right.
                ++currentLength;
                count[*e_ptr]++;
                e_ptr++;
            }
            else
            {
                // NOTE(review): size() is size_t vs. int m — safe here since
                // m > 0 is guaranteed by the loop's termination condition.
                if(unique.size() < m)
                {
                    // Room for one more distinct char: admit it.
                    unique.insert(*e_ptr);
                    if(count.find(*e_ptr) != count.end())
                        count[*e_ptr]++;
                    else
                        count[*e_ptr] = 1;
                    currentLength++;
                    e_ptr++;
                }
                else
                {
                    // Window saturated: shrink from the left until one
                    // distinct character drops out entirely.
                    while (unique.size() == m)
                    {
                        count[*s_ptr]--;
                        if(count[*s_ptr] == 0)
                            unique.erase(*s_ptr);
                        currentLength--;
                        s_ptr++;
                    }
                }
            }
            if(currentLength > maxSoFar)
                maxSoFar = currentLength;
        }
        cout<<maxSoFar<<endl;
    }
    return 0;
}
veda-p/stew | pkg/utils/yaml_editor.go | <gh_stars>0
package utils
import (
"fmt"
commands "stew/pkg/commands"
)
// type Map map[string]interface{}
var ExtensionMap = map[string]string{"nodejs": ".js", "go": ".go", "packagejson": ".json"}
// func (m Map) M(s string) Map {
// return m[s].(map[string]interface{})
// }
// func (m Map) S(s string) string {
// return m[s].(string)
// }
// func readYml(filename string) (Map, error) {
// buf, err := ioutil.ReadFile(filename)
// if err != nil {
// return nil, err
// }
// var body Map
// err = yaml.Unmarshal(buf, &body)
// if err != nil {
// return nil, fmt.Errorf("in file %q: %v", filename, err)
// }
// return body, nil
// }
// func updateYamlContent(yamlMap Map, prop string, value string) Map {
// var updated map[string]interface{}
// // marshalled, err := yaml.Marshal(value)
// fmt.Println(value)
// yaml.Unmarshal([]byte(value), &updated)
// fmt.Println(updated)
// // if err != nil {
// // log.Fatal(err)
// // }
// yamlMap[prop] = updated
// return yamlMap
// }
// func saveUpdatedYaml(filename string, yamlData Map) {
// d, err := yaml.Marshal(&yamlData)
// if err != nil {
// log.Fatal(err)
// }
// err = ioutil.WriteFile(filename, d, 0644)
// if err != nil {
// log.Fatal(err)
// }
// }
// func UpdateYmlContents(filename string, property string, value string) {
// c, err := readYml(filename)
// if err != nil {
// log.Fatal(err)
// }
// updatedYaml := updateYamlContent(c, property, value)
// saveUpdatedYaml(filename, updatedYaml)
// fmt.Println("saved updated yaml")
// }
// UpdateYmlContents appends value to the node at the dotted property path in
// the given YAML file, e.g. property "a" and value "1" run `yq -i '.a +=1'`.
func UpdateYmlContents(filename string, property string, value string) {
	expr := fmt.Sprintf(".%s +=%s", property, value)
	UpdateYmlFile(expr, filename)
}
// UpdateYmlFromRoot applies an arbitrary yq expression built from a root-level
// property path, an operator (e.g. "=" or "+="), and a value, in-place on the
// given YAML file.
func UpdateYmlFromRoot(filename string, property string, value string, operator string) {
	expr := fmt.Sprintf("%s %s %s", property, operator, value)
	UpdateYmlFile(expr, filename)
}
// UpdateYmlArray appends value to the array at the dotted property path in
// the given YAML file via `yq -i '.<property> +=<value>'`.
func UpdateYmlArray(filename string, property string, value string) {
	expr := fmt.Sprintf(".%s +=%s", property, value)
	UpdateYmlFile(expr, filename)
}
// UpdateYmlFile runs `yq -i <setString> <filename>`, editing the YAML file in
// place. Errors from the external command are printed, not returned.
func UpdateYmlFile(setString string, filename string) {
	args := []string{"-i", setString, filename}
	if err := commands.ExecCommand("yq", args, true); err != nil {
		fmt.Println(err)
	}
}
|
PayForFish/CheesePro | src/main/java/cn/emitor/chesspro/hero/GuaFu.java | package cn.emitor.chesspro.hero;
import cn.emitor.chesspro.Hero;
import cn.emitor.chesspro.buff.CiKe;
import cn.emitor.chesspro.buff.Devil;
import cn.emitor.chesspro.enums.HeroEnum;
/**
* @author Emitor
* on 2019/8/31.
*/
public class GuaFu extends Hero {

    // Buffs carried by this hero; created and registered in setHeroBuff().
    private CiKe ciKe;
    private Devil devil;

    public GuaFu() {
        super();
    }

    /** Instantiates this hero's buffs and adds them to the inherited buff list. */
    @Override
    public void setHeroBuff() {
        this.ciKe = new CiKe();
        this.devil = new Devil();
        this.buffs.add(ciKe);
        this.buffs.add(devil);
    }

    /** Binds the enum constant the framework uses to identify this hero. */
    @Override
    public void setterHeroEnum() {
        this.heroEnum = HeroEnum.GUA_FU;
    }
}
|
pupper68k/arcusplatform | platform/arcus-subsystems/src/test/java/com/iris/common/subsystem/climate/ClimateSubsystemTestCase.java | /*
* Copyright 2019 Arcus Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
*/
package com.iris.common.subsystem.climate;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;
import com.google.common.collect.ImmutableSet;
import com.iris.common.subsystem.SubsystemTestCase;
import com.iris.common.subsystem.event.SubsystemLifecycleEvent;
import com.iris.messages.address.Address;
import com.iris.messages.capability.Capability;
import com.iris.messages.capability.ClimateSubsystemCapability;
import com.iris.messages.capability.PersonCapability;
import com.iris.messages.capability.PlaceCapability;
import com.iris.messages.capability.SubsystemCapability;
import com.iris.messages.event.Listener;
import com.iris.messages.event.ModelEvent;
import com.iris.messages.model.Model;
import com.iris.messages.model.SimpleModel;
import com.iris.messages.model.subs.ClimateSubsystemModel;
import com.iris.messages.model.test.ModelFixtures;
import com.iris.util.IrisCollections;
/**
*
*/
/**
 * Base test-case for ClimateSubsystem tests: wires a ClimateSubsystem against
 * the in-memory model store and provides helpers to mutate models and assert
 * the subsystem's device-address sets.
 */
public class ClimateSubsystemTestCase extends SubsystemTestCase<ClimateSubsystemModel> {

   // Whether start() has been invoked; gates event forwarding semantics.
   private boolean started = false;
   protected ClimateSubsystem subsystem = new ClimateSubsystem();

   /** Creates an empty ClimateSubsystem model with the proper capability namespaces. */
   @Override
   protected ClimateSubsystemModel createSubsystemModel() {
      Map<String, Object> attributes = ModelFixtures.createServiceAttributes(SubsystemCapability.NAMESPACE, ClimateSubsystemCapability.NAMESPACE);
      return new ClimateSubsystemModel(new SimpleModel(attributes));
   }

   // TODO move start / add model / remove model down to SubsystemTestCase

   /**
    * Calling start will send the subsystem an added and started event.
    * Additionally before start is called any addModel / updateModel / removeModel
    * calls will simply affect the store, afterwards these events will result
    * in ModelEvents sent to the subsystem as well.
    */
   protected void start() {
      addModel(ModelFixtures.buildServiceAttributes(context.getPlaceId(), PlaceCapability.NAMESPACE).create());
      subsystem.onEvent(SubsystemLifecycleEvent.added(context.model().getAddress()), context);
      subsystem.onEvent(SubsystemLifecycleEvent.started(context.model().getAddress()), context);
      // From now on, store mutations are forwarded to the subsystem as events.
      store.addListener(new Listener<ModelEvent>() {
         @Override
         public void onEvent(ModelEvent event) {
            subsystem.onEvent(event, context);
         }
      });
      started = true;
   }

   /** Whether start() has been called on this test case. */
   protected boolean isStarted() {
      return started;
   }

   /** Adds a model described by the given attributes to the store. */
   protected Model addModel(Map<String, Object> attributes) {
      return store.addModel(attributes);
   }

   /** Updates the model at the given address by re-adding its merged attributes. */
   protected void updateModel(Address address, Map<String, Object> attributes) {
      Map<String, Object> update = new HashMap<>(attributes);
      update.put(Capability.ATTR_ADDRESS, address.getRepresentation());
      addModel(update);
   }

   /** Removes the model with the given string address from the store. */
   protected void removeModel(String address) {
      store.removeModel(Address.fromString(address));
   }

   /** Removes the model with the given address from the store. */
   protected void removeModel(Address address) {
      store.removeModel(address);
   }

   /** Asserts that every tracked device set on the subsystem model is empty. */
   protected void assertAllEmpty() {
      assertControlEmpty();
      assertTemperatureEmpty();
      assertHumidityEmpty();
      assertThermostatsEmpty();
   }

   protected void assertControlEmpty() {
      assertEquals(ImmutableSet.of(), context.model().getControlDevices());
   }

   protected void assertTemperatureEmpty() {
      assertEquals(ImmutableSet.of(), context.model().getTemperatureDevices());
   }

   protected void assertHumidityEmpty() {
      assertEquals(ImmutableSet.of(), context.model().getHumidityDevices());
   }

   protected void assertThermostatsEmpty() {
      assertEquals(ImmutableSet.of(), context.model().getThermostats());
   }

   /** Asserts the control-device set equals exactly the given addresses. */
   protected void assertControlEquals(String... addresses) {
      assertEquals(IrisCollections.setOf(addresses), context.model().getControlDevices());
   }

   /** Asserts the thermostat set equals exactly the given addresses. */
   protected void assertThermostatsEquals(String... addresses) {
      assertEquals(IrisCollections.setOf(addresses), context.model().getThermostats());
   }

   /** Asserts the temperature-device set equals exactly the given addresses. */
   protected void assertTemperatureEquals(String... addresses) {
      assertEquals(IrisCollections.setOf(addresses), context.model().getTemperatureDevices());
   }

   /** Asserts the humidity-device set equals exactly the given addresses. */
   protected void assertHumidityEquals(String... addresses) {
      assertEquals(IrisCollections.setOf(addresses), context.model().getHumidityDevices());
   }
}
|
ckoutsiaris/ui5-webcomponents | packages/tools/icons-collection/nps.js | const path = require("path");
const LIB = path.join(__dirname, `../lib/`);
const getScripts = () => {
const scripts = {
clean: "rimraf dist",
copy: {
default: "nps copy.json-imports copy.icon-collections",
"json-imports": 'copy-and-watch "src/**/*.js" dist/',
"icon-collections": 'copy-and-watch "src/icon-collections/**/*.json" dist/assets/icon-collections/'
},
build: {
default: "nps clean copy build.i18n build.icons",
i18n: {
default: "nps build.i18n.defaultsjs build.i18n.json",
defaultsjs: `mkdirp dist/generated/i18n && node ${LIB}/i18n/defaults.js src/i18n dist/generated/i18n`,
json: `mkdirp dist/assets/i18n && node ${LIB}/i18n/toJSON.js src/i18n dist/assets/i18n`,
},
icons: `node ${LIB}/create-icons/index.js`,
}
};
return scripts;
};
module.exports = getScripts;
|
akbence007/flinkcep | flink-streaming-scala/src/test/scala/org/apache/flink/streaming/api/scala/StreamingScalaAPICompletenessTest.scala | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.streaming.api.scala
import java.lang.reflect.Method
import org.apache.flink.api.scala.completeness.ScalaAPICompletenessTestBase
import org.apache.flink.streaming.api.datastream.{DataStream => JavaStream}
import scala.language.existentials
import org.junit.Test
/**
* This checks whether the streaming Scala API is up to feature parity with the Java API.
* Implements the {@link ScalaAPICompletenessTest} for streaming.
*/
class StreamingScalaAPICompletenessTest extends ScalaAPICompletenessTestBase {

  /**
   * Decides whether a Java-API method may legitimately be missing from the
   * Scala API. A method is excluded when its fully-qualified name appears in
   * the explicit exclusion list, matches one of the exclusion regexes, or
   * contains '$' (compiler-synthesized members).
   */
  override def isExcludedByName(method: Method): Boolean = {
    val name = method.getDeclaringClass.getName + "." + method.getName
    val excludedNames = Seq(
      // These are only used internally. Should be internal API but Java doesn't have
      // private[flink].
      "org.apache.flink.streaming.api.datastream.DataStream.getType",
      "org.apache.flink.streaming.api.datastream.DataStream.copy",
      "org.apache.flink.streaming.api.datastream.DataStream.getTransformation",
      "org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator.copy",
      "org.apache.flink.streaming.api.datastream.ConnectedStreams.getExecutionEnvironment",
      // NOTE(review): duplicate of the previous entry — harmless, could be removed.
      "org.apache.flink.streaming.api.datastream.ConnectedStreams.getExecutionEnvironment",
      "org.apache.flink.streaming.api.datastream.ConnectedStreams.getFirstInput",
      "org.apache.flink.streaming.api.datastream.ConnectedStreams.getSecondInput",
      "org.apache.flink.streaming.api.datastream.ConnectedStreams.getType1",
      "org.apache.flink.streaming.api.datastream.ConnectedStreams.getType2",
      "org.apache.flink.streaming.api.datastream.ConnectedStreams.addGeneralWindowCombine",
      "org.apache.flink.streaming.api.datastream.WindowedDataStream.getType",
      "org.apache.flink.streaming.api.datastream.WindowedDataStream.getExecutionConfig",
      "org.apache.flink.streaming.api.datastream.WindowedStream.getExecutionEnvironment",
      "org.apache.flink.streaming.api.datastream.WindowedStream.getInputType",
      "org.apache.flink.streaming.api.datastream.AllWindowedStream.getExecutionEnvironment",
      "org.apache.flink.streaming.api.datastream.AllWindowedStream.getInputType",
      "org.apache.flink.streaming.api.datastream.KeyedStream.getKeySelector",
      "org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.isChainingEnabled",
      "org.apache.flink.streaming.api.environment.StreamExecutionEnvironment." +
        "getStateHandleProvider",
      "org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.getCheckpointInterval",
      "org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.addOperator",
      "org.apache.flink.streaming.api.environment.StreamExecutionEnvironment.getCheckpointingMode",
      "org.apache.flink.streaming.api.environment.StreamExecutionEnvironment." +
        "isForceCheckpointing",

      // TypeHints are only needed for Java API, Scala API doesn't need them
      "org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator.returns",

      // Deactivated until Scala API has new windowing API
      "org.apache.flink.streaming.api.datastream.DataStream.timeWindowAll",
      "org.apache.flink.streaming.api.datastream.DataStream.windowAll"
    )
    val excludedPatterns = Seq(
      // We don't have project on tuples in the Scala API
      """^org\.apache\.flink\.streaming.api.*project""",

      // Cleaning is easier in the Scala API
      """^org\.apache\.flink\.streaming.api.*clean""",

      // Object methods
      """^.*notify""",
      """^.*wait""",
      """^.*notifyAll""",
      """^.*equals""",
      """^.*toString""",
      """^.*getClass""",
      """^.*hashCode"""
    ).map(_.r)

    // Lazy: the regex scan only runs when the cheap checks don't decide.
    lazy val excludedByPattern =
      excludedPatterns.map(_.findFirstIn(name)).exists(_.isDefined)
    name.contains("$") || excludedNames.contains(name) || excludedByPattern
  }

  /** Checks each Java/Scala API class pair for method-level feature parity. */
  @Test
  override def testCompleteness(): Unit = {
    checkMethods("DataStream", "DataStream", classOf[JavaStream[_]], classOf[DataStream[_]])

    checkMethods(
      "StreamExecutionEnvironment", "StreamExecutionEnvironment",
      classOf[org.apache.flink.streaming.api.environment.StreamExecutionEnvironment],
      classOf[StreamExecutionEnvironment])

    checkMethods(
      "SingleOutputStreamOperator", "DataStream",
      classOf[org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator[_]],
      classOf[DataStream[_]])

    checkMethods(
      "ConnectedStreams", "ConnectedStreams",
      classOf[org.apache.flink.streaming.api.datastream.ConnectedStreams[_,_]],
      classOf[ConnectedStreams[_,_]])

    checkMethods(
      "SplitStream", "SplitStream",
      classOf[org.apache.flink.streaming.api.datastream.SplitStream[_]],
      classOf[SplitStream[_]])

    checkMethods(
      "WindowedStream", "WindowedStream",
      classOf[org.apache.flink.streaming.api.datastream.WindowedStream[_, _, _]],
      classOf[WindowedStream[_, _, _]])

    checkMethods(
      "AllWindowedStream", "AllWindowedStream",
      classOf[org.apache.flink.streaming.api.datastream.AllWindowedStream[_, _]],
      classOf[AllWindowedStream[_, _]])

    checkMethods(
      "KeyedStream", "KeyedStream",
      classOf[org.apache.flink.streaming.api.datastream.KeyedStream[_, _]],
      classOf[KeyedStream[_, _]])

    checkMethods(
      "JoinedStreams.WithWindow", "JoinedStreams.WithWindow",
      classOf[org.apache.flink.streaming.api.datastream.JoinedStreams.WithWindow[_,_,_,_]],
      classOf[JoinedStreams[_,_]#Where[_]#EqualTo#WithWindow[_]])

    checkMethods(
      "CoGroupedStreams.WithWindow", "CoGroupedStreams.WithWindow",
      classOf[org.apache.flink.streaming.api.datastream.CoGroupedStreams.WithWindow[_,_,_,_]],
      classOf[CoGroupedStreams[_, _]#Where[_]#EqualTo#WithWindow[_]])
  }
}
|
wiresong/synthizer | include/synthizer/generators/fast_sine_bank.hpp | <gh_stars>1-10
#pragma once
#include "synthizer.h"
#include "synthizer/fast_sine_bank.hpp"
#include "synthizer/generator.hpp"
#include "synthizer/property_internals.hpp"
#include <memory>
#include <optional>
namespace synthizer {
class Context;
/* Generator that produces audio from a bank of sine waves (additive synthesis),
 * configured from a syz_SineBankConfig at construction time. */
class FastSineBankGenerator : public Generator {
public:
  FastSineBankGenerator(const std::shared_ptr<Context> &context, const syz_SineBankConfig *cfg);

  int getObjectType() override;
  unsigned int getChannels() override;
  void generateBlock(float *output, FadeDriver *gain_driver) override;
  // Returns the linger timeout for this generator, if any.
  std::optional<double> startGeneratorLingering() override;

  // Property accessors are generated by property_impl.hpp from the macros below.
  // NOTE(review): PROPERTY_CLASS is SineBankGenerator while this class is named
  // FastSineBankGenerator — confirm the generated identifiers are intentional.
#define PROPERTY_CLASS SineBankGenerator
#define PROPERTY_BASE Generator
#define PROPERTY_LIST SINE_BANK_GENERATOR_PROPERTIES
#include "synthizer/property_impl.hpp"

private:
  // Underlying sine-bank DSP state driven by generateBlock.
  FastSineBank bank;
};
} // namespace synthizer
|
paulwellnerbou/chronicreplay | src/main/java/de/wellnerbou/chronic/logparser/LogLineParserProvider.java | <filename>src/main/java/de/wellnerbou/chronic/logparser/LogLineParserProvider.java
package de.wellnerbou.chronic.logparser;
import de.wellnerbou.chronic.plugins.ServiceLoaderImplementationProvider;
/**
 * Service-loader based provider for {@link LogLineParser} implementations.
 *
 * Resolves a parser implementation by its id; when the resolved implementation
 * is a {@link GrokLogFormatLogLineParser} it is additionally initialised with
 * the configured grok pattern before being returned.
 */
public class LogLineParserProvider extends ServiceLoaderImplementationProvider<LogLineParser> {

	/** Grok pattern handed to grok-based parsers; immutable after construction. */
	private final String parserPattern;

	/**
	 * @param grokPattern the grok pattern used to initialise grok-based parser
	 *                    implementations returned by {@link #getImplementation(String)}
	 */
	public LogLineParserProvider(String grokPattern) {
		this.parserPattern = grokPattern;
	}

	/**
	 * Returns the parser registered under the given id, initialising
	 * grok-based implementations with the configured pattern first.
	 */
	@Override
	public LogLineParser getImplementation(String id) {
		final LogLineParser implementation = super.getImplementation(id);
		if (implementation instanceof GrokLogFormatLogLineParser) {
			((GrokLogFormatLogLineParser) implementation).init(parserPattern, new GrokResultMapper());
		}
		return implementation;
	}

	/** Matches implementations by id, ignoring case. */
	@Override
	protected boolean matches(final String id, final LogLineParser implementation) {
		return implementation.getId().equalsIgnoreCase(id);
	}

	@Override
	protected Class<LogLineParser> getImplementationClass() {
		return LogLineParser.class;
	}
}
|
forwardalex/Ytool | log/blame.go | package log
import (
"bufio"
"bytes"
"context"
"fmt"
"io"
"os/exec"
"strconv"
"strings"
"sync"
"time"
)
// BlameLine is a structure for a blame result for a specific user
type BlameLine struct {
	AuthorName  string    // value of the "author" porcelain header line
	AuthorEmail string    // value of the "author-mail" porcelain header line
	AuthorDate  time.Time // "author-time" header, seconds since the Unix epoch
	CommitName  string    // value of the "committer" porcelain header line
	CommitEmail string    // value of the "committer-mail" porcelain header line
	CommitDate  time.Time // "committer-time" header, seconds since the Unix epoch
}

// Callback is called for each line
type Callback func(line BlameLine) error

// Keys of the `git blame --line-porcelain` header lines parsed by FindCommit.
var (
	authorPrefix        = "author"
	authorMailPrefix    = "author-mail"
	authorTimePrefix    = "author-time"
	committerPrefix     = "committer"
	committerMailPrefix = "committer-mail"
	committerTimePrefix = "committer-time"
)
// buffer pool to reduce GC
var bufferPool = sync.Pool{
	// New is called when a new instance is needed
	New: func() interface{} {
		// NOTE(review): make([]byte, MaxLineSize) gives the buffer an initial
		// *length* (not just capacity) of MaxLineSize; FindCommit uses
		// Bytes() as the scanner's scratch space, so this appears intended —
		// confirm before changing to make([]byte, 0, MaxLineSize).
		return bytes.NewBuffer(make([]byte, MaxLineSize))
	},
}

// getBuffer fetches a buffer from the pool
func getBuffer() *bytes.Buffer {
	return bufferPool.Get().(*bytes.Buffer)
}

// putBuffer returns a buffer to the pool
func putBuffer(buf *bytes.Buffer) {
	buf.Reset()
	bufferPool.Put(buf)
}

// MaxLineSize is the maximum of one line of output. testing with 1K which seems OK
var MaxLineSize = 1024
//FindCommit 查询代码提交人
// FindCommit runs `git blame --line-porcelain` on one line of the file fn and
// returns the author/committer identity for that line.
//
// line is 1-based. When w is non-nil a copy of the raw porcelain output is
// also written to it. When ctx is cancelled the scan loop returns early with
// whatever has been parsed so far.
//
// Fixes over the previous version:
//   - StdoutPipe/Start errors are returned instead of being logged and
//     ignored (continuing with a nil pipe would panic in the deferred Close);
//   - cmd.Wait is called so the child process is reaped (no zombies);
//   - header values are split with SplitN so multi-word values such as
//     author names containing spaces are no longer truncated, and lines
//     without a value no longer panic with index-out-of-range.
func FindCommit(ctx context.Context, fn string, line int, w io.Writer) (current BlameLine, err error) {
	// NOTE(review): the range is passed as a single "-L N,N" argument with an
	// embedded space, matching the original invocation — confirm git accepts it.
	cmd := exec.Command("git", "blame", "-e", "--root", "--line-porcelain", fn, fmt.Sprintf("-L %d,%d", line, line))
	r, err := cmd.StdoutPipe()
	if err != nil {
		Error(context.Background(), "ERR ", err.Error())
		return current, err
	}
	if err := cmd.Start(); err != nil {
		return current, err
	}
	defer cmd.Wait() // reap the child once reading is finished
	defer r.Close()

	buf := getBuffer()
	defer putBuffer(buf)

	lr := bufio.NewReaderSize(r, MaxLineSize)
	s := bufio.NewScanner(lr)
	s.Buffer(buf.Bytes(), MaxLineSize)

	var writer *bufio.Writer
	if w != nil {
		writer = bufio.NewWriter(w)
		defer writer.Flush()
	}

	for s.Scan() {
		// make sure our context isn't done
		select {
		case <-ctx.Done():
			return current, nil
		default:
		}

		text := s.Text()
		if writer != nil {
			if _, err := writer.WriteString(text); err != nil {
				return current, fmt.Errorf("error writing buffer to output. %v", err)
			}
			if _, err := writer.WriteString("\n"); err != nil {
				return current, fmt.Errorf("error writing buffer to output. %v", err)
			}
		}

		// Split "<key> <value>" exactly once so the value keeps embedded spaces.
		infos := strings.SplitN(text, " ", 2)
		if len(infos) < 2 {
			continue
		}
		value := infos[1]
		switch infos[0] {
		case authorPrefix:
			current.AuthorName = value
		case authorMailPrefix:
			current.AuthorEmail = value
		case authorTimePrefix:
			i, err := strconv.ParseInt(value, 10, 64)
			if err != nil {
				return current, err
			}
			current.AuthorDate = time.Unix(i, 0)
		case committerPrefix:
			current.CommitName = value
		case committerMailPrefix:
			current.CommitEmail = value
		case committerTimePrefix:
			i, err := strconv.ParseInt(value, 10, 64)
			if err != nil {
				return current, err
			}
			current.CommitDate = time.Unix(i, 0)
		}
	}

	if err = s.Err(); err != nil {
		// The pipe being closed underneath the scanner is expected on
		// cancellation/Wait; treat it as a clean end of output.
		if strings.Contains(err.Error(), "file already closed") {
			return current, nil
		}
		return current, err
	}
	return current, nil
}
|
iainx/grs | Mail Ping/Mail Ping/MyAccountsWindowController.h | <gh_stars>1-10
//
// MyAccountsWindowController.h
// Mail Ping
//
// Created by <NAME> on 2/15/13.
// Copyright (c) 2013 <NAME>. All rights reserved.
//
#import <Cocoa/Cocoa.h>
#import "MyAddAccountWindowController.h"
// Window controller for the accounts list window; serves as both data source
// and delegate of the accounts table view.
@interface MyAccountsWindowController : NSWindowController <NSTableViewDataSource, NSTableViewDelegate>

// Controller for the "add account" window shown from this window.
@property MyAddAccountWindowController *myAddAccountWindowController;
// Rows currently selected in the accounts table.
@property NSIndexSet* accountSelectionIndexes;
@property (weak) IBOutlet NSTableView* accountsTableView;
@property (weak) IBOutlet NSButton* orderMattersButton;
@end
|
bocke/ucc | test/cases/vla/vm_init.c | // RUN: %ocheck 0 %s
void abort(void) __attribute__((noreturn));
/* Syntax-only compiler check: declares a VLA and a pointer-to-VLA initialised
 * from its address. NOTE(review): the recursive call passes `p` (a pointer)
 * where an int parameter is declared — presumably deliberate, since this file
 * is a compiler test case; confirm before "fixing". */
syntax(int n)
{
	int ar[n];
	int (*p)[n] = &ar;
	syntax(p);
}
/* Local assertion helper for this test: aborts when the condition is false. */
assert(_Bool b)
{
	if(!b)
		abort();
}
/* Checks VLA-typed pointer initialisation from constant expressions and that
 * pointer arithmetic on short(*)[n] advances by n * sizeof(short). */
f(int n)
{
#define NULL (void*)0
	short (*p)[n] = NULL;
	assert(p == NULL);

	short (*q)[n] = (void *)3;
	assert(q == (void *)3);

	/* p + 1 from a null base: (int)(p + 1) should equal n * sizeof(short)
	 * with n == 3 here (b == 3 * sizeof(short)). */
	__auto_type a = (int)(p + 1);
	__auto_type b = 3 * sizeof(short);
	if(a != b)
		abort();
}
/* Entry point: runs the VLA pointer-arithmetic check with n = 3. */
main()
{
#include "../ocheck-init.c"
	f(3);
	return 0;
}
|
anhnt4288/SmartRealEstate | frontend/src/components/password/styles/password.js | import styled from "styled-components/macro";
export const Container = styled.div`
padding: 40px;
background-color: var(--bs-white);
box-shadow: var(--primary-box-shadow);
border-radius: 4px;
`;
|
IvanovE/js_learning | blog-app/src/components/create.component.js | import { Component } from '../core/component'
import { Form } from "../core/form"
import { Validators } from "../core/validators"
import { apiService } from "../services/api.service"
import {NotificationComponent} from "./notification.component";
/**
 * Component backing the "create post" form.
 *
 * On init it wires up the submit handler and builds a validated Form wrapper
 * around the component's element.
 *
 * Note: the previous explicit constructor only forwarded `id` to `super` and
 * was therefore redundant; the implicit constructor behaves identically.
 */
export class CreateComponent extends Component {
  /** Attaches the submit listener and configures field validation. */
  init() {
    this.$el.addEventListener('submit', submitHandler.bind(this))
    this.form = new Form(this.$el, {
      title: [Validators.required, Validators.maxLength(50)],
      fulltext: [Validators.required, Validators.minLength(5), Validators.maxLength(500)]
    })
  }
}
/**
 * Form submit handler; invoked with the component instance bound as `this`.
 *
 * Validates the form, persists the new post through the API service and
 * briefly displays a success notification.
 */
async function submitHandler(event) {
  event.preventDefault()

  if (!this.form.isValid()) {
    return
  }

  // Snapshot the form values before the inputs are cleared.
  const post = {
    type: this.$el.type.value,
    date: new Date().toLocaleDateString(),
    favourite: false,
    ...this.form.value()
  }

  this.form.clearInputs()
  await apiService.createPost(post)

  const notification = new NotificationComponent('notification', {
    message: 'Пост создан!'
  })
  notification.show()
  setTimeout(() => notification.hide(), 2000)
}
|
SMartQi/Leetcode | Code/1583-paint-house-iii.cc | <filename>Code/1583-paint-house-iii.cc
class Solution {
public:
    /*
     * LeetCode 1473 "Paint House III".
     *
     * houses[i] == 0 means house i is unpainted (may be painted at
     * cost[i][c]); otherwise its color houses[i] (1-based) is fixed.
     * Returns the minimum cost to end up with exactly `target` neighborhoods
     * (maximal runs of equal color), or -1 when impossible.
     *
     * Fixes over the original:
     *  - removed the unused `origin[100]` array;
     *  - dp moved from an ~800 KB stack array onto the heap;
     *  - transitions that would exceed `target` neighborhoods are skipped,
     *    which also removes the out-of-bounds write to dp[i][target+1] the
     *    fixed-size array performed when j == target == 100.
     */
    int minCost(std::vector<int>& houses, std::vector<std::vector<int>>& cost, int m, int n, int target) {
        // At least one neighborhood always exists; target < 1 is unreachable.
        if (target < 1) {
            return -1;
        }

        // Lower-bound prune: already-painted houses alone force `runs`
        // distinct color runs; more runs than target can never be fixed.
        int last = 0, runs = 0;
        for (int i = m - 1; i >= 0; i--) {
            if (houses[i] != 0 && houses[i] != last) {
                runs++;
                last = houses[i];
            }
        }
        if (runs > target) {
            return -1;
        }

        const int INF = INT_MAX;
        // dp[i][j][k]: min cost to paint houses 0..i forming exactly j
        // neighborhoods with house i in color k (0-based); INF = unreachable.
        std::vector<std::vector<std::vector<int>>> dp(
            m, std::vector<std::vector<int>>(target + 1, std::vector<int>(n, INF)));

        // Base case: the first house always starts neighborhood number 1.
        if (houses[0] != 0) {
            dp[0][1][houses[0] - 1] = 0;
        } else {
            for (int c = 0; c < n; c++) {
                dp[0][1][c] = cost[0][c];
            }
        }

        for (int i = 1; i < m; i++) {
            for (int j = 0; j <= target; j++) {
                for (int k = 0; k < n; k++) {
                    if (dp[i - 1][j][k] == INF) {
                        continue;
                    }
                    if (houses[i] != 0) {
                        // Pre-painted: color is fixed; same color keeps j,
                        // a different color opens a new neighborhood.
                        int c = houses[i] - 1;
                        if (c == k) {
                            dp[i][j][k] = std::min(dp[i][j][k], dp[i - 1][j][k]);
                        } else if (j < target) {
                            dp[i][j + 1][c] = std::min(dp[i][j + 1][c], dp[i - 1][j][k]);
                        }
                    } else {
                        // Unpainted: try every color at cost[i][c].
                        for (int c = 0; c < n; c++) {
                            int v = dp[i - 1][j][k] + cost[i][c];
                            if (c == k) {
                                dp[i][j][k] = std::min(dp[i][j][k], v);
                            } else if (j < target) {
                                dp[i][j + 1][c] = std::min(dp[i][j + 1][c], v);
                            }
                        }
                    }
                }
            }
        }

        int result = INF;
        for (int k = 0; k < n; k++) {
            result = std::min(result, dp[m - 1][target][k]);
        }
        return result == INF ? -1 : result;
    }
};
trevor-vaughan/rubygem-simp-cli | ext/gems/highline/examples/password.rb | #!/usr/bin/env ruby
require "rubygems"
require "highline/import"
# Prompt for a password with echoing disabled so keystrokes stay hidden.
pass = ask("Enter your password: ") { |q| q.echo = false }
# Echoes the captured password back; acceptable here only because this file
# is a HighLine usage example.
puts "Your password is #{pass}!"
|
PedroHenrique-git/c | champter2/ex9.c | <filename>champter2/ex9.c
#include<stdio.h>
/*
 * Reads nine integers from stdin and prints the two largest.
 *
 * Fixes over the original:
 *  - the result of scanf() is now checked, so invalid input no longer leaves
 *    `n` uninitialized (undefined behaviour when later compared);
 *  - the redundant `aux` copy of `n` was removed.
 *
 * NOTE(review): both maxima start at 0, so the program assumes non-negative
 * input, exactly as the original did — initialise from INT_MIN if negative
 * values must be supported.
 */
int main ( void ) {
    int n, maior1 = 0, maior2 = 0;

    for(int i = 0; i < 9; i++) {
        printf("Digite um numero: ");
        if (scanf("%d", &n) != 1) {
            /* invalid or missing input: stop instead of reading garbage */
            return 1;
        }

        if( n > maior1 ) {
            maior2 = maior1;
            maior1 = n;
        } else if( n > maior2 ) {
            maior2 = n;
        }
    }

    printf("Maiores numeros da sequencia: %d e %d", maior1, maior2);
    return 0;
}
|
QuiNovas/apache-pulsar | pulsar-discovery-service/src/test/java/org/apache/pulsar/discovery/service/DiscoveryServiceTest.java | <reponame>QuiNovas/apache-pulsar
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.discovery.service;
import static org.apache.pulsar.discovery.service.web.ZookeeperCacheLoader.LOADBALANCE_BROKERS_ROOT;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotEquals;
import static org.testng.Assert.fail;
import java.lang.reflect.Field;
import java.net.InetSocketAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.security.PrivateKey;
import java.security.cert.X509Certificate;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import org.apache.bookkeeper.util.ZkUtils;
import org.apache.pulsar.common.api.Commands;
import org.apache.pulsar.common.api.proto.PulsarApi.BaseCommand;
import org.apache.pulsar.common.naming.TopicName;
import org.apache.pulsar.common.partition.PartitionedTopicMetadata;
import org.apache.pulsar.common.util.ObjectMapperFactory;
import org.apache.pulsar.common.util.SecurityUtility;
import org.apache.pulsar.common.util.protobuf.ByteBufCodedInputStream;
import org.apache.pulsar.discovery.service.web.ZookeeperCacheLoader;
import org.apache.pulsar.policies.data.loadbalancer.LoadReport;
import org.apache.pulsar.zookeeper.ZooKeeperChildrenCache;
import org.apache.pulsar.zookeeper.ZookeeperClientFactoryImpl;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.KeeperException.Code;
import org.apache.zookeeper.KeeperException.SessionExpiredException;
import org.apache.zookeeper.ZooDefs;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import io.netty.bootstrap.Bootstrap;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.handler.ssl.SslContext;
import io.netty.handler.ssl.SslContextBuilder;
import io.netty.handler.ssl.util.InsecureTrustManagerFactory;
/**
 * Integration tests for the Pulsar discovery service: broker round-robin
 * selection, partition-metadata lookup, and plain/TLS client connections.
 */
public class DiscoveryServiceTest extends BaseDiscoveryTestSetup {

    private final static String TLS_CLIENT_CERT_FILE_PATH = "./src/test/resources/certificate/client.crt";
    private final static String TLS_CLIENT_KEY_FILE_PATH = "./src/test/resources/certificate/client.key";

    @BeforeMethod
    private void init() throws Exception {
        super.setup();
    }

    @AfterMethod
    private void clean() throws Exception {
        super.cleanup();
    }

    /**
     * Verifies: Discovery-service returns broker is round-robin manner
     *
     * @throws Exception
     */
    @Test
    public void testBrokerDiscoveryRoundRobin() throws Exception {
        addBrokerToZk(5);
        String prevUrl = null;
        // With 5 brokers, consecutive picks must never repeat the same URL.
        for (int i = 0; i < 10; i++) {
            String current = service.getDiscoveryProvider().nextBroker().getPulsarServiceUrl();
            assertNotEquals(prevUrl, current);
            prevUrl = current;
        }
    }

    /**
     * Verifies partition-metadata lookup succeeds for a non-partitioned topic
     * and surfaces a ZooKeeper session error as the failure cause.
     */
    @Test
    public void testGetPartitionsMetadata() throws Exception {
        TopicName topic1 = TopicName.get("persistent://test/local/ns/my-topic-1");

        PartitionedTopicMetadata m = service.getDiscoveryProvider().getPartitionedTopicMetadata(service, topic1, "role", null)
                .get();
        assertEquals(m.partitions, 0);

        // Simulate ZK error
        mockZookKeeper.failNow(Code.SESSIONEXPIRED);
        TopicName topic2 = TopicName.get("persistent://test/local/ns/my-topic-2");
        CompletableFuture<PartitionedTopicMetadata> future = service.getDiscoveryProvider()
                .getPartitionedTopicMetadata(service, topic2, "role", null);
        try {
            future.get();
            fail("Partition metadata lookup should have failed");
        } catch (ExecutionException e) {
            assertEquals(e.getCause().getClass(), SessionExpiredException.class);
        }
    }

    /**
     * It verifies: client connects to Discovery-service and receives discovery response successfully.
     *
     * @throws Exception
     */
    @Test
    public void testClientServerConnection() throws Exception {
        addBrokerToZk(2);
        final CompletableFuture<BaseCommand> promise = new CompletableFuture<>();
        NioEventLoopGroup workerGroup = connectToService(service.getServiceUrl(), promise, false);
        assertEquals(promise.get(10, TimeUnit.SECONDS).getType(), BaseCommand.Type.CONNECTED);
        workerGroup.shutdownGracefully();
    }

    /** Same as testClientServerConnection, but over the TLS endpoint. */
    @Test
    public void testClientServerConnectionTls() throws Exception {
        addBrokerToZk(2);
        final CompletableFuture<BaseCommand> promise = new CompletableFuture<>();
        NioEventLoopGroup workerGroup = connectToService(service.getServiceUrlTls(), promise, true);
        assertEquals(promise.get(10, TimeUnit.SECONDS).getType(), BaseCommand.Type.CONNECTED);
        workerGroup.shutdownGracefully();
    }

    /**
     * creates ClientHandler channel to connect and communicate with server
     *
     * @param serviceUrl
     * @param latch
     * @return
     * @throws URISyntaxException
     */
    public static NioEventLoopGroup connectToService(String serviceUrl,
                                                     CompletableFuture<BaseCommand> promise,
                                                     boolean tls)
            throws URISyntaxException {
        NioEventLoopGroup workerGroup = new NioEventLoopGroup();
        Bootstrap b = new Bootstrap();
        b.group(workerGroup);
        b.channel(NioSocketChannel.class);

        b.handler(new ChannelInitializer<SocketChannel>() {
            @Override
            public void initChannel(SocketChannel ch) throws Exception {
                if (tls) {
                    // Client trusts any server cert and presents the test client cert/key.
                    SslContextBuilder builder = SslContextBuilder.forClient();
                    builder.trustManager(InsecureTrustManagerFactory.INSTANCE);
                    X509Certificate[] certificates = SecurityUtility
                            .loadCertificatesFromPemFile(TLS_CLIENT_CERT_FILE_PATH);
                    PrivateKey privateKey = SecurityUtility.loadPrivateKeyFromPemFile(TLS_CLIENT_KEY_FILE_PATH);
                    builder.keyManager(privateKey, (X509Certificate[]) certificates);
                    SslContext sslCtx = builder.build();
                    ch.pipeline().addLast("tls", sslCtx.newHandler(ch.alloc()));
                }
                ch.pipeline().addLast(new ClientHandler(promise));
            }
        });

        URI uri = new URI(serviceUrl);
        InetSocketAddress serviceAddress = new InetSocketAddress(uri.getHost(), uri.getPort());
        b.connect(serviceAddress).addListener((ChannelFuture future) -> {
            if (!future.isSuccess()) {
                promise.completeExceptionally(future.cause());
            }
        });
        return workerGroup;
    }

    /**
     * Minimal Netty client handler: sends a Connect command on activation and
     * completes the promise with the first command decoded from the response.
     */
    static class ClientHandler extends ChannelInboundHandlerAdapter {

        final CompletableFuture<BaseCommand> promise;

        public ClientHandler(CompletableFuture<BaseCommand> promise) {
            this.promise = promise;
        }

        @Override
        public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
            try {
                // Wire format: [4-byte frame length][4-byte command size][command].
                ByteBuf buffer = (ByteBuf) msg;
                buffer.readUnsignedInt(); // discard frame length
                int cmdSize = (int) buffer.readUnsignedInt();
                buffer.writerIndex(buffer.readerIndex() + cmdSize);
                ByteBufCodedInputStream cmdInputStream = ByteBufCodedInputStream.get(buffer);
                BaseCommand.Builder cmdBuilder = BaseCommand.newBuilder();
                BaseCommand cmd = cmdBuilder.mergeFrom(cmdInputStream, null).build();
                cmdInputStream.recycle();
                cmdBuilder.recycle();
                buffer.release();
                promise.complete(cmd);
            } catch (Exception e) {
                promise.completeExceptionally(e);
            }
            ctx.close();
        }

        @Override
        public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
            promise.completeExceptionally(cause);
            ctx.close();
        }

        @Override
        public void channelActive(ChannelHandlerContext ctx) throws Exception {
            super.channelActive(ctx);
            ctx.writeAndFlush(Commands.newConnect("", "", null));
        }
    }

    /** Registers `number` fake brokers under the ZK load-balance root. */
    private void addBrokerToZk(int number) throws Exception {

        for (int i = 0; i < number; i++) {
            LoadReport report = new LoadReport(null, null, "pulsar://broker-:15000" + i, null);
            String reportData = ObjectMapperFactory.getThreadLocal().writeValueAsString(report);
            ZkUtils.createFullPathOptimistic(mockZookKeeper, LOADBALANCE_BROKERS_ROOT + "/" + "broker-" + i,
                    reportData.getBytes(ZookeeperClientFactoryImpl.ENCODING_SCHEME), ZooDefs.Ids.OPEN_ACL_UNSAFE,
                    CreateMode.PERSISTENT);
        }

        // sometimes test-environment takes longer time to trigger async mockZK-watch: so reload cache explicitly
        Field field = ZookeeperCacheLoader.class.getDeclaredField("availableBrokersCache");
        field.setAccessible(true);
        ZooKeeperChildrenCache availableBrokersCache = (ZooKeeperChildrenCache) field
                .get(service.getDiscoveryProvider().localZkCache);
        availableBrokersCache.reloadCache(LOADBALANCE_BROKERS_ROOT);
    }
}
|
shahor02/AliRoot | GPU/GPUTracking/Base/GPUOutputControl.h | <reponame>shahor02/AliRoot<gh_stars>0
//**************************************************************************\
//* This file is property of and copyright by the ALICE Project *\
//* ALICE Experiment at CERN, All rights reserved. *\
//* *\
//* Primary Authors: <NAME> <<EMAIL>> *\
//* for The ALICE HLT Project. *\
//* *\
//* Permission to use, copy, modify and distribute this software and its *\
//* documentation strictly for non-commercial purposes is hereby granted *\
//* without fee, provided that the above copyright notice appears in all *\
//* copies and that both the copyright notice and this permission notice *\
//* appear in the supporting documentation. The authors make no claims *\
//* about the suitability of this software for any purpose. It is *\
//* provided "as is" without express or implied warranty. *\
//**************************************************************************
/// \file GPUOutputControl.h
/// \author <NAME>
#ifndef GPUOUTPUTCONTROL_H
#define GPUOUTPUTCONTROL_H
#include "GPUCommonDef.h"
#ifndef GPUCA_GPUCODE
#include <cstddef>
#endif
namespace GPUCA_NAMESPACE
{
namespace gpu
{
// Describes where and how GPU processing output is written.
struct GPUOutputControl {
  // Strategy for obtaining the output buffer.
  enum OutputTypeStruct { AllocateInternal = 0,
                          UseExternalBuffer = 1,
                          ControlledExternal = 2 };
#ifndef GPUCA_GPUCODE_DEVICE
  GPUOutputControl() = default;
#endif
  char* OutputPtr = nullptr;                      // Pointer to Output Space
  volatile size_t Offset = 0;                     // Offset to write into output pointer
  size_t OutputMaxSize = 0;                       // Max Size of Output Data if Pointer to output space is given
  OutputTypeStruct OutputType = AllocateInternal; // How to perform the output
  char EndOfSpace = 0;                            // end of space flag
};
} // namespace gpu
} // namespace GPUCA_NAMESPACE
#endif
|
qicosmos/hana | include/boost/hana/bool.hpp | /*!
@file
Defines the `Logical` and `Comparable` models of `boost::hana::integral_constant`.
@copyright <NAME> 2015
Distributed under the Boost Software License, Version 1.0.
(See accompanying file LICENSE.md or copy at http://boost.org/LICENSE_1_0.txt)
*/
#ifndef BOOST_HANA_BOOL_HPP
#define BOOST_HANA_BOOL_HPP
#include <boost/hana/fwd/bool.hpp>
#include <boost/hana/concept/integral_constant.hpp>
#include <boost/hana/core/convert.hpp>
#include <boost/hana/core/when.hpp>
#include <boost/hana/detail/operators/arithmetic.hpp>
#include <boost/hana/detail/operators/comparable.hpp>
#include <boost/hana/detail/operators/logical.hpp>
#include <boost/hana/detail/operators/orderable.hpp>
#include <boost/hana/eval.hpp>
#include <boost/hana/fwd/core/tag_of.hpp>
#include <boost/hana/fwd/eval_if.hpp>
#include <boost/hana/fwd/if.hpp>
#include <boost/hana/fwd/value.hpp>
#include <cstddef>
#include <type_traits>
#include <utility>
namespace boost { namespace hana {
//////////////////////////////////////////////////////////////////////////
// integral_constant
//////////////////////////////////////////////////////////////////////////
    //! @cond
    namespace ic_detail {
        // go<T, N>: expands the integer sequence 0..N-1 so a functor can be
        // invoked N times, with or without the index passed as an argument.
        template <typename T, T N, typename = std::make_integer_sequence<T, N>>
        struct go;

        template <typename T, T N, T ...i>
        struct go<T, N, std::integer_sequence<T, i...>> {
            using swallow = T[];

            // The array-initializer trick evaluates f(...) once per pack
            // element in order, discarding the results.
            template <typename F>
            static constexpr void with_index(F&& f)
            { (void)swallow{T{}, ((void)f(integral_constant<T, i>{}), i)...}; }

            template <typename F>
            static constexpr void without_index(F&& f)
            { (void)swallow{T{}, ((void)f(), i)...}; }
        };

        template <typename T, T v>
        template <typename F>
        constexpr void with_index_t<T, v>::operator()(F&& f) const
        { go<T, ((void)sizeof(&f), v)>::with_index(static_cast<F&&>(f)); }

        template <typename T, T v>
        template <typename F>
        constexpr void times_t<T, v>::operator()(F&& f) const
        { go<T, ((void)sizeof(&f), v)>::without_index(static_cast<F&&>(f)); }

        // avoid link-time error
        template <typename T, T v>
        constexpr with_index_t<T, v> times_t<T, v>::with_index;
    }

    // avoid link-time error
    template <typename T, T v>
    constexpr ic_detail::times_t<T, v> integral_constant<T, v>::times;

    // Map every integral_constant<T, v> onto the tag integral_constant_tag<T>.
    template <typename T, T v>
    struct tag_of<integral_constant<T, v>> {
        using type = integral_constant_tag<T>;
    };
    //! @endcond
    //////////////////////////////////////////////////////////////////////////
    // Operators
    //////////////////////////////////////////////////////////////////////////
    namespace detail {
        // Opt integral_constant_tag into the generated comparison, ordering,
        // arithmetic and logical operator overloads.
        template <typename T>
        struct comparable_operators<integral_constant_tag<T>> {
            static constexpr bool value = true;
        };
        template <typename T>
        struct orderable_operators<integral_constant_tag<T>> {
            static constexpr bool value = true;
        };
        template <typename T>
        struct arithmetic_operators<integral_constant_tag<T>> {
            static constexpr bool value = true;
        };
        template <typename T>
        struct logical_operators<integral_constant_tag<T>> {
            static constexpr bool value = true;
        };
    }

    // Each generated operator folds the computation into the type: the result
    // is another integral_constant whose value is computed at compile time.
#define BOOST_HANA_INTEGRAL_CONSTANT_BINARY_OP(op)                          \
    template <typename U, U u, typename V, V v>                             \
    constexpr integral_constant<decltype(u op v), (u op v)>                 \
    operator op(integral_constant<U, u>, integral_constant<V, v>)           \
    { return {}; }                                                          \
    /**/

#define BOOST_HANA_INTEGRAL_CONSTANT_UNARY_OP(op)                           \
    template <typename U, U u>                                              \
    constexpr integral_constant<decltype(op u), (op u)>                     \
    operator op(integral_constant<U, u>)                                    \
    { return {}; }                                                          \
    /**/

    // Arithmetic
    BOOST_HANA_INTEGRAL_CONSTANT_UNARY_OP(+)

    // Bitwise
    BOOST_HANA_INTEGRAL_CONSTANT_UNARY_OP(~)
    BOOST_HANA_INTEGRAL_CONSTANT_BINARY_OP(&)
    BOOST_HANA_INTEGRAL_CONSTANT_BINARY_OP(|)
    BOOST_HANA_INTEGRAL_CONSTANT_BINARY_OP(^)
    BOOST_HANA_INTEGRAL_CONSTANT_BINARY_OP(<<)
    BOOST_HANA_INTEGRAL_CONSTANT_BINARY_OP(>>)

#undef BOOST_HANA_INTEGRAL_CONSTANT_UNARY_OP
#undef BOOST_HANA_INTEGRAL_CONSTANT_BINARY_OP
    //////////////////////////////////////////////////////////////////////////
    // User-defined literal
    //////////////////////////////////////////////////////////////////////////
    namespace ic_detail {

        // Convert an ASCII decimal digit character to its numeric value.
        constexpr int to_int(char c)
        { return static_cast<int>(c) - 48; }

        // Parse a sequence of decimal digit characters into a long long,
        // least-significant digit first from the back of the array.
        // NOTE(review): only plain decimal digits are handled here; digit
        // separators or non-decimal prefixes would be mis-parsed.
        template <std::size_t N>
        constexpr long long parse(const char (&arr)[N]) {
            long long number = 0, base = 1;
            for (std::size_t i = 0; i < N; ++i) {
                number += to_int(arr[N - 1 - i]) * base;
                base *= 10;
            }
            return number;
        }
    }

    namespace literals {
        // 1234_c  ->  llong_c<1234>
        template <char ...c>
        constexpr auto operator"" _c()
        { return llong_c<ic_detail::parse<sizeof...(c)>({c...})>; }
    }
    //////////////////////////////////////////////////////////////////////////
    // Model of Constant/IntegralConstant
    //////////////////////////////////////////////////////////////////////////
    template <typename T>
    struct IntegralConstant<integral_constant_tag<T>> {
        static constexpr bool value = true;
    };

    // Conversion from any IntegralConstant-modeling tag C into
    // integral_constant<T, v>; it is an embedding iff the underlying
    // value-type conversion is one.
    template <typename T, typename C>
    struct to_impl<integral_constant_tag<T>, C, when<IntegralConstant<C>::value>>
        : embedding<is_embedded<typename C::value_type, T>::value>
    {
        template <typename N>
        static constexpr auto apply(N const&)
        { return integral_constant<T, N::value>{}; }
    };

    //////////////////////////////////////////////////////////////////////////
    // Optimizations
    //////////////////////////////////////////////////////////////////////////
    // When the condition is a compile-time constant, dispatch to the selected
    // branch directly and evaluate only that branch.
    template <typename T>
    struct eval_if_impl<integral_constant_tag<T>> {
        template <typename Cond, typename Then, typename Else>
        static constexpr decltype(auto)
        apply(Cond const&, Then&& t, Else&& e) {
            return eval_if_impl::apply(hana::bool_c<static_cast<bool>(Cond::value)>,
                                       static_cast<Then&&>(t), static_cast<Else&&>(e));
        }

        template <typename Then, typename Else>
        static constexpr decltype(auto)
        apply(hana::true_ const&, Then&& t, Else&&)
        { return hana::eval(static_cast<Then&&>(t)); }

        template <typename Then, typename Else>
        static constexpr decltype(auto)
        apply(hana::false_ const&, Then&&, Else&& e)
        { return hana::eval(static_cast<Else&&>(e)); }
    };

    template <typename T>
    struct if_impl<integral_constant_tag<T>> {
        template <typename Cond, typename Then, typename Else>
        static constexpr decltype(auto)
        apply(Cond const&, Then&& t, Else&& e) {
            return if_impl::apply(hana::bool_c<static_cast<bool>(Cond::value)>,
                                  static_cast<Then&&>(t), static_cast<Else&&>(e));
        }

        //! @todo We could return `Then` instead of `auto` to sometimes save
        //! a copy, but that would break some code that would return a
        //! reference to a `type` object. I think the code that would be
        //! broken should be changed, but more thought needs to be given.
        template <typename Then, typename Else>
        static constexpr auto
        apply(hana::true_ const&, Then&& t, Else&&)
        { return static_cast<Then&&>(t); }

        template <typename Then, typename Else>
        static constexpr auto
        apply(hana::false_ const&, Then&&, Else&& e)
        { return static_cast<Else&&>(e); }
    };
}} // end namespace boost::hana
#endif // !BOOST_HANA_BOOL_HPP
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.