code stringlengths 5 1.03M | repo_name stringlengths 5 90 | path stringlengths 4 158 | license stringclasses 15 values | size int64 5 1.03M | n_ast_errors int64 0 53.9k | ast_max_depth int64 2 4.17k | n_whitespaces int64 0 365k | n_ast_nodes int64 3 317k | n_ast_terminals int64 1 171k | n_ast_nonterminals int64 1 146k | loc int64 -1 37.3k | cycloplexity int64 -1 1.31k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
module Queue where

-- | A FIFO queue backed by a plain list; the front of the queue is
-- the head of the list.
type Queue a = [a]

-- | Add an element at the back of the queue (O(n) for a list).
enqueue :: a -> Queue a -> Queue a
enqueue x q = foldr (:) [x] q

-- | Remove the front element, if any, returning it together with the
-- remaining queue.
dequeue :: Queue a -> (Maybe a, Queue a)
dequeue q = case q of
  []      -> (Nothing, empty)
  (h : t) -> (Just h, t)

-- | The queue with no elements.
empty :: Queue a
empty = []
| mikegehard/grokkingAlgorithms | chapter6/src/Queue.hs | mit | 245 | 0 | 7 | 57 | 129 | 70 | 59 | 9 | 1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_HADDOCK show-extensions #-}
{-|
Module : Yi.Interact
License : GPL-2
Maintainer : yi-devel@googlegroups.com
Stability : experimental
Portability : portable
This is a library of interactive processes combinators, usable to
define extensible keymaps.
(Inspired by the Parsec library, written by Koen Claessen)
The processes are:
* composable: in parallel using '<|>', in sequence using monadic bind.
* extensible: it is always possible to override a behaviour by combination of
'adjustPriority' and '<|>'. (See also '<||' for a convenient combination of the two.)
* monadic: sequencing is done via monadic bind. (leveraging the whole
battery of monadic tools that Haskell provides)
The processes can parse input, and write output that depends on it.
The semantics are quite obvious; only disjunction
deserve a bit more explanation:
in @p = (a '<|>' b)@, what happens if @a@ and @b@ recognize the same
input (prefix), but produce conflicting output?
* if the output is the same (as by the Eq class), then the processes (prefixes) are "merged"
* if a Write is more prioritized than the other, the one with low priority will be discarded
* otherwise, the output will be delayed until one of the branches can be discarded.
* if there is no way to disambiguate, then no output will be generated anymore.
This situation can be detected by using 'possibleActions' however.
-}
module Yi.Interact
(
I, P (Chain,End),
InteractState (..),
MonadInteract (..),
deprioritize,
important,
(<||),
(||>),
option,
oneOf,
processOneEvent,
computeState,
event,
events,
choice,
mkAutomaton, idAutomaton,
runWrite,
anyEvent,
eventBetween,
accepted
) where
import Control.Applicative
import Control.Arrow (first)
import Control.Lens
import Control.Monad.State hiding ( get, mapM )
import Data.Function (on)
import Data.List (groupBy)
import Data.Monoid
import qualified Data.Text as T
------------------------------------------------
-- Classes
-- | Abstraction of monadic interactive processes
class (Eq w, Monad m, Alternative m, Applicative m, MonadPlus m) => MonadInteract m w e | m -> w e where
    write :: w -> m ()
    -- ^ Outputs a result.
    eventBounds :: Ord e => Maybe e -> Maybe e -> m e
    -- ^ Consumes and returns the next character.
    -- Fails if there is no input left, or outside the given bounds.
    adjustPriority :: Int -> m ()
    -- ^ Shifts the priority of what follows by the given amount
    -- (larger numbers mean lower priority; see 'deprioritize').
-------------------------------------------------
-- State transformation
-- Needs -fallow-undecidable-instances
-- TODO: abstract over MonadTransformer
-- | Lift interaction through a 'StateT' layer by delegating every
-- method to the underlying monad.
instance MonadInteract m w e => MonadInteract (StateT s m) w e where
    write = lift . write
    eventBounds l h = lift (eventBounds l h)
    adjustPriority p = lift (adjustPriority p)
---------------------------------------------------------------------------
-- | Interactive process description
-- TODO: Replace 'Doc:' by ^ when haddock supports GADTs
data I ev w a where
Returns :: a -> I ev w a
Binds :: I ev w a -> (a -> I ev w b) -> I ev w b
Gets :: Ord ev => Maybe ev -> Maybe ev -> I ev w ev
-- Doc: Accept any character between given bounds. Bound is ignored if 'Nothing'.
Fails :: I ev w a
Writes :: w -> I ev w ()
Priority :: Int -> I ev w ()
Plus :: I ev w a -> I ev w a -> I ev w a
instance Functor (I event w) where
fmap f i = pure f <*> i
instance Applicative (I ev w) where
pure = return
a <*> b = do f <- a; x <- b; return (f x)
instance Alternative (I ev w) where
empty = Fails
(<|>) = Plus
instance Monad (I event w) where
return = Returns
fail _ = Fails
(>>=) = Binds
instance Eq w => MonadPlus (I event w) where
mzero = Fails
mplus = Plus
instance Eq w => MonadInteract (I event w) w event where
write = Writes
eventBounds = Gets
adjustPriority = Priority
infixl 3 <||

-- | Lower the priority of everything that follows by one step.
deprioritize :: (MonadInteract f w e) => f ()
deprioritize = adjustPriority 1

(<||), (||>) :: (MonadInteract f w e) => f a -> f a -> f a

-- | Left-biased disjunction: like '<|>', but the right-hand side runs
-- at a lower priority.
p <|| q = p <|> (deprioritize >> q)

-- | Right-biased variant of '<||'.
(||>) = flip (<||)

-- | Just like '(<||)' but in prefix form. It 'deprioritize's the
-- second argument.
important :: MonadInteract f w e => f a -> f a -> f a
important = (<||)
-- | Convert a process description to an "executable" process.
-- The second argument is the continuation receiving the value the
-- first process produces.
mkProcess :: Eq w => I ev w a -> (a -> P ev w) -> P ev w
mkProcess (Returns x) = \fut -> fut x
mkProcess Fails = const Fail
mkProcess (m `Binds` f) = \fut -> mkProcess m (\a -> mkProcess (f a) fut)
mkProcess (Gets l h) = Get l h
mkProcess (Writes w) = \fut -> Write w (fut ())
mkProcess (Priority p) = \fut -> Prior p (fut ())
mkProcess (Plus a b) = \fut -> Best (mkProcess a fut) (mkProcess b fut)
----------------------------------------------------------------------
-- Process type
-- | Operational representation of a process
data P event w
    = Ord event => Get (Maybe event) (Maybe event) (event -> P event w)
      | Fail
      | Write w (P event w)
      | Prior Int (P event w) -- low numbers indicate high priority
      | Best (P event w) (P event w)
      | End
      | forall mid. (Show mid, Eq mid) => Chain (P event mid) (P mid w)
-- | Textual renderings (up to the given depth) of the event sequences
-- the process can accept next; useful for displaying possible inputs.
accepted :: (Show ev) => Int -> P ev w -> [[T.Text]]
accepted 0 _ = [[]]
accepted d (Get (Just low) (Just high) k) = do
    t <- accepted (d - 1) (k low)
    let h = if low == high
            then showT low
            else showT low `T.append` ".." `T.append` showT high
    return (h : t)
accepted _ (Get Nothing Nothing _) = [["<any>"]]
accepted _ (Get Nothing (Just e) _) = [[".." `T.append` showT e]]
accepted _ (Get (Just e) Nothing _) = [[showT e `T.append` ".."]]
accepted _ Fail = []
accepted _ (Write _ _) = [[]] -- this should show what action we get...
accepted d (Prior _ p) = accepted d p
accepted d (Best p q) = accepted d p ++ accepted d q
accepted _ End = []
accepted _ (Chain _ _) = error "accepted: chain not supported"
-- Utility function
-- | Render any 'Show'able value as 'T.Text'.
showT :: Show a => a -> T.Text
showT = T.pack . show
-- ---------------------------------------------------------------------------
-- Operations over P
-- | Feed a whole input list to a process and collect every output it
-- commits to along the way.
runWrite :: Eq w => P event w -> [event] -> [w]
runWrite _ [] = []
runWrite p (c:cs) = let (ws, p') = processOneEvent p c in ws ++ runWrite p' cs
-- | Push one event, then pull every output that has become unambiguous.
processOneEvent :: Eq w => P event w -> event -> ([w], P event w)
processOneEvent p e = pullWrites $ pushEvent p e
-- | Push an event in the automaton
pushEvent :: P ev w -> ev -> P ev w
pushEvent (Best c d) e = Best (pushEvent c e) (pushEvent d e)
pushEvent (Write w c) e = Write w (pushEvent c e)
pushEvent (Prior p c) e = Prior p (pushEvent c e)
pushEvent (Get l h f) e = if test (e >=) l && test (e <=) h then f e else Fail
    where test = maybe True
pushEvent Fail _ = Fail
pushEvent End _ = End
pushEvent (Chain p q) e = Chain (pushEvent p e) q
-- | Abstraction of the automaton state.
data InteractState event w = Ambiguous [(Int,w,P event w)] | Waiting | Dead | Running w (P event w)
-- NOTE(review): pre-Semigroup 'Monoid' instance ('mappend' defined
-- directly); GHC >= 8.4 additionally requires a Semigroup instance.
instance Monoid (InteractState event w) where
    -- not used at the moment:
    mappend (Running w c) _ = Running w c
    mappend _ (Running w c) = Running w c
    -- don't die if that can be avoided
    mappend Dead p = p
    mappend p Dead = p
    -- If a branch is not determined, wait for it.
    mappend Waiting _ = Waiting
    mappend _ Waiting = Waiting
    -- ambiguity remains
    mappend (Ambiguous a) (Ambiguous b) = Ambiguous (a ++ b)
    mempty = Ambiguous []
-- | find all the writes that are accessible.
-- The 'Int' argument is the priority accumulated so far.
findWrites :: Int -> P event w -> InteractState event w
findWrites p (Best c d) = findWrites p c `mappend` findWrites p d
findWrites p (Write w c) = Ambiguous [(p,w,c)]
findWrites p (Prior dp c) = findWrites (p+dp) c
findWrites _ Fail = Dead
findWrites _ End = Dead
findWrites _ (Get{}) = Waiting
findWrites p (Chain a b) = case computeState a of
    Dead -> Dead
    Ambiguous _ -> Dead -- If ambiguity, don't try to do anything clever for now; die.
    Running w c -> findWrites p (Chain c (pushEvent b w)) -- pull as much as possible from the left automaton
    Waiting -> case findWrites p b of
        Ambiguous choices -> Ambiguous [(p',w',Chain a c') | (p',w',c') <- choices]
        Running w' c' -> Running w' (Chain a c') -- when it has nothing more, pull from the right.
        Dead -> Dead
        Waiting -> Waiting
-- | Summarize a process: keep only the writes at the best (lowest)
-- priority, merge equal outputs, and promote a unique winner to
-- 'Running'.
computeState :: Eq w => P event w -> InteractState event w
computeState a = case findWrites 0 a of
    Ambiguous actions ->
        let prior = minimum $ map (view _1) actions
            bests = groupBy ((==) `on` view _2) $
                        filter ((prior ==) . view _1) actions
        in case bests of
            [(_,w,c):_] -> Running w c
            _ -> Ambiguous $ map head bests
    s -> s
-- | Drain every output that is already unambiguous from the front of
-- the process.
pullWrites :: Eq w => P event w -> ([w], P event w)
pullWrites a = case computeState a of
    Running w c -> first (w:) (pullWrites c)
    _ -> ([], a)
-- | Compact debugging rendering of a process tree.
instance (Show w, Show ev) => Show (P ev w) where
    show (Get Nothing Nothing _) = "?"
    show (Get (Just l) (Just h) _p) | l == h = show l -- ++ " " ++ show (p l)
    show (Get l h _) = maybe "" show l ++ ".." ++ maybe "" show h
    show (Prior p c) = ":" ++ show p ++ show c
    show (Write w c) = "!" ++ show w ++ "->" ++ show c
    show (End) = "."
    show (Fail) = "*"
    show (Best p q) = "{" ++ show p ++ "|" ++ show q ++ "}"
    show (Chain a b) = show a ++ ">>>" ++ show b
-- ---------------------------------------------------------------------------
-- Derived operations
-- | Parses any one of the given events.
oneOf :: (Ord event, MonadInteract m w event) => [event] -> m event
oneOf s = choice $ map event s
-- | Parses any single event.
anyEvent :: (Ord event, MonadInteract m w event) => m event
anyEvent = eventBounds Nothing Nothing
-- | Parses any event within the inclusive range.
eventBetween :: (Ord e, MonadInteract m w e) => e -> e -> m e
eventBetween l h = eventBounds (Just l) (Just h)
event :: (Ord event, MonadInteract m w event) => event -> m event
-- ^ Parses and returns the specified character.
event e = eventBetween e e
events :: (Ord event, MonadInteract m w event) => [event] -> m [event]
-- ^ Parses and returns the specified list of events (lazily).
events = mapM event
choice :: (MonadInteract m w e) => [m a] -> m a
-- ^ Combines all parsers in the specified list.
choice [] = fail "No choice succeeds"
choice [p] = p
choice (p:ps) = p `mplus` choice ps
option :: (MonadInteract m w e) => a -> m a -> m a
-- ^ @option x p@ will either parse @p@ or return @x@ without consuming
-- any input.
option x p = p `mplus` return x
-- | Compile a process description into an automaton that simply ends.
mkAutomaton :: Eq w => I ev w a -> P ev w
mkAutomaton i = mkProcess i (const End)
-- An automaton that produces its input
idAutomaton :: (Ord a, Eq a) => P a a
idAutomaton = Get Nothing Nothing $ \e -> Write e idAutomaton
-- It would be much nicer to write:
-- mkAutomaton (forever 0 (anyEvent >>= write))
-- however this creates a memory leak. Unfortunately I don't understand why.
-- To witness:
-- dist/build/yi/yi +RTS -hyI -hd
-- Then type some characters. (Binds grows linearly)
| atsukotakahashi/wi | src/library/Yi/Interact.hs | gpl-2.0 | 11,183 | 0 | 16 | 2,668 | 3,725 | 1,937 | 1,788 | -1 | -1 |
-- -*- mode: haskell -*-
{-# LANGUAGE TemplateHaskell, DeriveDataTypeable #-}
module Control.Gruppe.Typ where
import Control.Types
import Autolib.Reader
import Autolib.ToDoc
import Data.Typeable
-- | This should be exactly what is stored in the corresponding DB
-- table. (Original German comment: "das sollte exactly das sein, was
-- auch in DB-tabelle steht".)
data Gruppe =
    Gruppe { gnr :: GNr
           , vnr :: VNr
           , name :: Name
           , maxStudents :: Integer
           , referent :: Name
           }
    deriving ( Typeable )
-- Template Haskell: derive Reader and ToDoc instances for 'Gruppe'.
$(derives [makeReader, makeToDoc] [''Gruppe])
| Erdwolf/autotool-bonn | src/Control/Gruppe/Typ.hs | gpl-2.0 | 493 | 2 | 9 | 125 | 101 | 62 | 39 | 14 | 0 |
-----------------------------------------------------------------------------
-- |
-- Module      :  DSP.Source.Basic
-- Copyright   :  (c) Matthew Donadio 2003
-- License     :  GPL
--
-- Maintainer  :  m.p.donadio@ieee.org
-- Stability   :  experimental
-- Portability :  portable
--
-- Basic signals
--
-----------------------------------------------------------------------------
module DSP.Source.Basic where

-- | The all-zero signal.
zeros :: (Num a) => [a]
zeros = 0 : zeros

-- | A unit impulse: one at time zero, zero afterwards.
impulse :: (Num a) => [a]
impulse = 1 : zeros

-- | The unit step: one at every sample.
step :: (Num a) => [a]
step = 1 : step

-- | The unit ramp: 0, 1, 2, ...
ramp :: (Num a) => [a]
ramp = 0 : map (1+) ramp
| tolysz/dsp | DSP/Source/Basic.hs | gpl-2.0 | 654 | 0 | 6 | 126 | 131 | 83 | 48 | 9 | 1 |
-- Exercise 07: implement a function collatzLen x that returns the length
-- of the list produced by repeatedly applying collatz to x until the
-- value reaches 1.
collatz :: Int -> Int
collatz n
  | even n    = n `div` 2
  | otherwise = 3 * n + 1

-- | The Collatz trajectory of a number, ending at 1.
collatzSeq :: Int -> [Int]
collatzSeq 1 = [1]
collatzSeq n = n : collatzSeq (collatz n)

-- | Length of the Collatz trajectory.
collatzLen :: Int -> Int
collatzLen = length . collatzSeq

main = do
  print (collatzLen 10)
| danielgoncalvesti/BIGDATA2017 | Atividade01/Haskell/Activity1/Exercises3/Ex7.hs | gpl-3.0 | 509 | 0 | 9 | 134 | 155 | 80 | 75 | 12 | 1 |
module STH.Lib.Monad.LinePrinter (
Geom(..), LinePrinter(), runLPJob, lpPrintLns, defaultGeom, lpPrintCCLns
) where
import Data.List (unfoldr)
import STH.Lib.Text.Format.PostScript (unicodeToPS)
import STH.Lib.Text.Format.ASACarriageControl (CCLine(), fromCCLine)
{-----------------}
{- Page Geometry -}
{-----------------}
--PageGeom.S
-- | Page geometry, all dimensions in PostScript points (1/72 inch).
data Geom = Geom
  { fontSize :: Int
  -- ^ Height of the type in points.
  , lineSkip :: Int
  -- ^ Extra vertical space between lines.
  , vMargin :: Int
  -- ^ Top and bottom margin.
  , hMargin :: Int
  -- ^ Left margin.
  , pageHeight :: Int
  , pageWidth :: Int
  } deriving (Show)
-- letter size, 12 pt type
defaultGeom :: Geom
defaultGeom = Geom
  { fontSize = 12
  , lineSkip = 2
  , vMargin = 28
  , hMargin = 32
  , pageHeight = 792
  , pageWidth = 612
  }
--PageGeom.E
--PageGeomComp.S
-- | How many text lines fit between the vertical margins.  The
-- division happens in a defaulted 'Fractional' type, then rounds down.
numLinesPerPage :: Geom -> Int
numLinesPerPage geom = floor ((pH - (2*vM)) / (fS + lS))
  where
    pH = fromIntegral $ pageHeight geom
    vM = fromIntegral $ vMargin geom
    fS = fromIntegral $ fontSize geom
    lS = fromIntegral $ lineSkip geom
-- lower left corner of line number k
-- (PostScript's origin is the bottom-left of the page, hence the
-- subtraction from the page height.)
lineStartPos :: Geom -> Int -> (Int,Int)
lineStartPos geom k = (hM, pH - vM - k*(fS + lS))
  where
    hM = hMargin geom
    vM = vMargin geom
    pH = pageHeight geom
    fS = fontSize geom
    lS = lineSkip geom
--PageGeomComp.E
{----------------------}
{- Line Printer State -}
{----------------------}
--LPState.S
-- | Job state threaded through a print run: the geometry, the current
-- line/page position, and whether a page has been opened on output.
data LPState = LPState
  { pageSettings :: Geom
  , currentLine :: Int
  , currentPage :: Int
  , pageInProcess :: Bool
  }
-- | Initial state: line 1 of page 1, no page emitted yet.
makeLPState :: Geom -> LPState
makeLPState geom = LPState
  { pageSettings = geom
  , currentLine = 1
  , currentPage = 1
  , pageInProcess = False
  }
--LPState.E
{----------------------}
{- Line Printer Monad -}
{----------------------}
--LinePrinter.S
-- | A line-printer job: a state transformer over IO that writes
-- PostScript to stdout.
newtype LinePrinter t = LP
  { runLP :: LPState -> IO (t, LPState) }
-- | Run a complete job with the given geometry, discarding the final
-- printer state.
runLPJob :: Geom -> LinePrinter t -> IO t
runLPJob geom pr = do
  (x,_) <- runLP pr (makeLPState geom)
  return x
-- NOTE(review): pre-AMP code — no Functor/Applicative instances are
-- defined, so as written this only compiles with GHC < 7.10.
instance Monad LinePrinter where
  return x = LP (\st -> return (x, st))
  x >>= f = LP foo
    where
      foo st1 = do
        (y,st2) <- runLP x st1
        runLP (f y) st2
--LinePrinter.E
{--------------}
{- Primitives -}
{--------------}
--LPCommand.S
-- | Emit the PostScript prolog: the @%!PS@ header followed by font
-- selection at the job's configured size.
lpInitialize :: LinePrinter ()
lpInitialize = LP prolog
  where
    prolog st = do
      putStrLn "%!PS"
      putStrLn "/FreeMono findfont"
      putStrLn (show (fontSize (pageSettings st)) ++ " scalefont")
      putStrLn "setfont\n"
      return ((), st)
-- | Ensure a page is open: emit the DSC @%%Page:@ comment for the
-- current page number the first time, and mark the page as started.
-- Idempotent within a page.
lpStartPage :: LinePrinter ()
lpStartPage = LP sp
  where
    sp st = do
      -- Announce the page only once; later calls on the same page are
      -- no-ops.  (Previously written as "pageInProcess st == True".)
      if pageInProcess st
        then return ()
        else putStrLn $ "%%Page: " ++ show (currentPage st)
      return ((), st { pageInProcess = True })
-- | Close the current page with @showpage@ (if one is open) and mark
-- no page as being in progress.
lpShutDown :: LinePrinter ()
lpShutDown = LP sd
  where
    sd st = do
      -- (Previously written as "pageInProcess st == False".)
      if pageInProcess st
        then putStrLn "showpage"
        else return ()
      return ((), st { pageInProcess = False })
--LPCommand.E
--lpPutStr.S
-- | Write a string at the current line position.  The empty string is
-- ignored; otherwise a page is opened if needed, the cursor is moved
-- to the start of the current line, and the text is emitted through
-- 'unicodeToPS'.  Does not advance the line counter.
lpPutStr :: String -> LinePrinter ()
lpPutStr "" = return ()
lpPutStr str = lpStartPage >> LP write
  where
    write st = do
      let (x,y) = lineStartPos (pageSettings st) (currentLine st)
      putStrLn $ show x ++ " " ++ show y ++ " moveto"
      putStr $ unicodeToPS str
      return ((), st)
--lpPutStr.E
--lpLineFeed.S
-- | Advance to the next line; when the page is full, close it with
-- @showpage@ and continue at line 1 of a fresh page.
lpLineFeed :: LinePrinter ()
lpLineFeed = lpStartPage >> LP lf
  where
    lf st = do
      let
        (kOld,mOld) = (currentLine st, currentPage st)
        lpp = numLinesPerPage (pageSettings st)
      if kOld + 1 > lpp
        then do
          putStrLn "showpage\n"
          return ((), st {currentLine = 1, currentPage = mOld+1, pageInProcess = False})
        else do
          return ((), st {currentLine = kOld+1, currentPage = mOld})
-- | Print a string and advance one line.
lpPutStrLn :: String -> LinePrinter ()
lpPutStrLn str = do
  lpPutStr str
  lpLineFeed
--lpLineFeed.E
--lpPrintLns.S
-- | Print a list of lines as one complete job: prolog, one printed
-- line per element, then the closing page.
lpPrintLns :: [String] -> LinePrinter ()
lpPrintLns lns = lpInitialize >> mapM_ lpPutStrLn lns >> lpShutDown
--lpPrintLns.E
-- | Print one ASA carriage-control line: emit each rendered fragment
-- at the current position, then advance one line.
lpPutCCStrLn :: CCLine -> LinePrinter ()
lpPutCCStrLn ccl = mapM_ lpPutStr (fromCCLine ccl) >> lpLineFeed

-- | Print a list of carriage-control lines as one complete job.
lpPrintCCLns :: [CCLine] -> LinePrinter ()
lpPrintCCLns lns = lpInitialize >> mapM_ lpPutCCStrLn lns >> lpShutDown
| nbloomf/st-haskell | src/STH/Lib/Monad/LinePrinter.hs | gpl-3.0 | 4,192 | 0 | 16 | 1,057 | 1,362 | 728 | 634 | 120 | 2 |
{-# LANGUAGE FlexibleInstances, MultiParamTypeClasses, UndecidableInstances #-}
module Language.Subleq.Model.Architecture.Fix2Machine (Word
    , Fix2Machine16, Fix2SubleqState16
    , Fix2Machine32, Fix2SubleqState32
    , Fix2Machine64, Fix2SubleqState64
    ) where
import Language.Subleq.Model.Prim
import Data.Map (Map)
import Data.Word
-- | Fixed-width Subleq machines: addresses and instruction words share
-- one fixed-size integer type, and memory is a map from address to a
-- value.  NOTE(review): memory values are native 'Word's for every
-- width variant (not 'Word16'/'Word32'/'Word64') — confirm intended.
type Fix2Machine16 = Machine Word16 Word16 (Map Word16 Word)
type Fix2SubleqState16 = (Word16, Map Word16 Word)
type Fix2Machine32 = Machine Word32 Word32 (Map Word32 Word)
type Fix2SubleqState32 = (Word32, Map Word32 Word)
type Fix2Machine64 = Machine Word64 Word64 (Map Word64 Word)
type Fix2SubleqState64 = (Word64, Map Word64 Word)
| Hara-Laboratory/subleq-toolchain | Language/Subleq/Model/Architecture/Fix2Machine.hs | gpl-3.0 | 882 | 0 | 7 | 295 | 169 | 100 | 69 | 14 | 0 |
module InputModels (
InputCode,
InputWord,
InputCombination(..),
ArrowCombination(..),
PunchCombination(..),
KickCombination(..),
FrameInput(..),
arrowsToCode,
punchesToCode,
kicksToCode,
frameInputFromWord,
frameInputToString,
emptyFrameInput,
arrowsFromWord,
punchesFromWord,
kicksFromWord,
plusArrows,
plusPunches,
plusKicks,
minusArrows,
minusPunches,
minusKicks,
)
where
import Data.List
-- | Numeric wire code for one input component (see 'arrowsToCode',
-- 'punchesToCode' and 'kicksToCode').
type InputCode = Int
-- | Raw textual token describing a frame's input.
type InputWord = String
-- | Operations for combining simultaneously-held inputs.
class InputCombination a where
    plusC :: a -> a -> a --Not commutative, more like append 2nd argument to first
    minusC :: a -> a -> a
    -- ^ Release the inputs of the second operand from the first.
-- | Every direction state a stick can be in, including diagonals.
data ArrowCombination = NO_ARROWS | LEFT | RIGHT | UP | DOWN |
  DOWN_LEFT | DOWN_RIGHT | UP_LEFT | UP_RIGHT deriving (Eq)
-- | For 'plusC', a newly pressed direction overrides an opposing one
-- and merges with an orthogonal one into the matching diagonal; for
-- 'minusC', releasing part of a diagonal leaves the other direction.
instance InputCombination ArrowCombination where
    NO_ARROWS `plusC` c = c
    c `plusC` NO_ARROWS = c
    LEFT `plusC` RIGHT = RIGHT
    LEFT `plusC` DOWN = DOWN_LEFT
    LEFT `plusC` UP = UP_LEFT
    LEFT `plusC` c = c
    RIGHT `plusC` LEFT = LEFT
    RIGHT `plusC` DOWN = DOWN_RIGHT
    RIGHT `plusC` UP = UP_RIGHT
    RIGHT `plusC` c = c
    DOWN `plusC` UP = UP
    DOWN `plusC` LEFT = DOWN_LEFT
    DOWN `plusC` RIGHT = DOWN_RIGHT
    DOWN `plusC` c = c
    UP `plusC` DOWN = DOWN
    UP `plusC` LEFT = UP_LEFT
    UP `plusC` RIGHT = UP_RIGHT
    UP `plusC` c = c
    DOWN_LEFT `plusC` DOWN = DOWN_LEFT
    DOWN_LEFT `plusC` LEFT = DOWN_LEFT
    DOWN_LEFT `plusC` c = c
    DOWN_RIGHT `plusC` DOWN = DOWN_RIGHT
    DOWN_RIGHT `plusC` RIGHT = DOWN_RIGHT
    DOWN_RIGHT `plusC` c = c
    UP_LEFT `plusC` UP = UP_LEFT
    UP_LEFT `plusC` LEFT = UP_LEFT
    UP_LEFT `plusC` c = c
    UP_RIGHT `plusC` UP = UP_RIGHT
    UP_RIGHT `plusC` RIGHT = UP_RIGHT
    UP_RIGHT `plusC` c = c
    NO_ARROWS `minusC` _ = NO_ARROWS
    c `minusC` NO_ARROWS = c
    LEFT `minusC` LEFT = NO_ARROWS
    LEFT `minusC` _ = LEFT
    RIGHT `minusC` RIGHT = NO_ARROWS
    RIGHT `minusC` _ = RIGHT
    DOWN `minusC` DOWN = NO_ARROWS
    DOWN `minusC` _ = DOWN
    UP `minusC` UP = NO_ARROWS
    UP `minusC` _ = UP
    DOWN_LEFT `minusC` DOWN_LEFT = NO_ARROWS
    DOWN_LEFT `minusC` DOWN = LEFT
    DOWN_LEFT `minusC` LEFT = DOWN
    DOWN_LEFT `minusC` c = DOWN_LEFT
    UP_LEFT `minusC` UP_LEFT = NO_ARROWS
    UP_LEFT `minusC` UP = LEFT
    UP_LEFT `minusC` LEFT = UP
    UP_LEFT `minusC` c = UP_LEFT
    DOWN_RIGHT `minusC` DOWN_RIGHT = NO_ARROWS
    DOWN_RIGHT `minusC` DOWN = RIGHT
    DOWN_RIGHT `minusC` RIGHT = DOWN
    DOWN_RIGHT `minusC` c = DOWN_RIGHT
    UP_RIGHT `minusC` UP_RIGHT = NO_ARROWS
    UP_RIGHT `minusC` UP = RIGHT
    UP_RIGHT `minusC` RIGHT = UP
    UP_RIGHT `minusC` c = UP_RIGHT
-- | Numeric wire code for an arrow combination (0..8).
arrowsToCode :: ArrowCombination -> InputCode
arrowsToCode combo = case combo of
    NO_ARROWS  -> 0
    DOWN       -> 1
    RIGHT      -> 2
    LEFT       -> 3
    UP         -> 4
    DOWN_LEFT  -> 5
    DOWN_RIGHT -> 6
    UP_LEFT    -> 7
    UP_RIGHT   -> 8
-- | Lower-case names used as tokens inside an 'InputWord'.
instance Show ArrowCombination where
    show NO_ARROWS = "none"
    show DOWN = "down"
    show LEFT = "left"
    show RIGHT = "right"
    show UP = "up"
    show DOWN_LEFT = "downleft"
    show DOWN_RIGHT = "downright"
    show UP_RIGHT = "upright"
    show UP_LEFT = "upleft"
-- | Decode the arrow component of an input word by substring search on
-- the 'Show' names.  Diagonal guards come first so that e.g.
-- \"downleft\" is matched before plain \"down\" or \"left\".
arrowsFromWord :: InputWord -> ArrowCombination
arrowsFromWord word
  | isDown && isLeft = DOWN_LEFT
  | isDown && isRight = DOWN_RIGHT
  | isUp && isLeft = UP_LEFT
  | isUp && isRight = UP_RIGHT
  | isDown = DOWN
  | isLeft = LEFT
  | isRight = RIGHT
  | isUp = UP
  | otherwise = NO_ARROWS
  where isLeft = show LEFT `isInfixOf` word
        isDown = show DOWN `isInfixOf` word
        isRight = show RIGHT `isInfixOf` word
        isUp = show UP `isInfixOf` word
-- | Every combination of punch buttons that can be held simultaneously.
data PunchCombination = NO_PUNCHES | LP | MP | HP | LPMP | LPHP | MPHP | LPMPHP
    deriving (Eq)
-- | 'plusC' is the union of pressed punch buttons (newly pressed
-- buttons are added to those already held); 'minusC' releases the
-- buttons of the second operand from the first.
instance InputCombination PunchCombination where
    NO_PUNCHES `plusC` c = c
    c `plusC` NO_PUNCHES = c
    LP `plusC` MP = LPMP
    LP `plusC` HP = LPHP
    LP `plusC` MPHP = LPMPHP
    LP `plusC` c = c
    MP `plusC` LP = LPMP
    MP `plusC` HP = MPHP
    MP `plusC` LPHP = LPMPHP
    MP `plusC` c = c
    HP `plusC` LP = LPHP
    HP `plusC` MP = MPHP
    HP `plusC` LPMP = LPMPHP
    HP `plusC` c = c
    -- The two-button left operands below had no clauses at all, so e.g.
    -- @LPMP `plusC` HP@ crashed with a runtime pattern-match failure.
    -- The result is the union of both operands.
    LPMP `plusC` HP = LPMPHP
    LPMP `plusC` LPHP = LPMPHP
    LPMP `plusC` MPHP = LPMPHP
    LPMP `plusC` LPMPHP = LPMPHP
    LPMP `plusC` _ = LPMP
    LPHP `plusC` MP = LPMPHP
    LPHP `plusC` LPMP = LPMPHP
    LPHP `plusC` MPHP = LPMPHP
    LPHP `plusC` LPMPHP = LPMPHP
    LPHP `plusC` _ = LPHP
    MPHP `plusC` LP = LPMPHP
    MPHP `plusC` LPMP = LPMPHP
    MPHP `plusC` LPHP = LPMPHP
    MPHP `plusC` LPMPHP = LPMPHP
    MPHP `plusC` _ = MPHP
    LPMPHP `plusC` _ = LPMPHP
    NO_PUNCHES `minusC` _ = NO_PUNCHES
    c `minusC` NO_PUNCHES = c
    LP `minusC` LP = NO_PUNCHES
    LP `minusC` _ = LP
    MP `minusC` MP = NO_PUNCHES
    MP `minusC` _ = MP
    HP `minusC` HP = NO_PUNCHES
    HP `minusC` _ = HP
    LPMP `minusC` LPMP = NO_PUNCHES
    LPMP `minusC` LP = MP
    LPMP `minusC` MP = LP
    LPMP `minusC` _ = LPMP
    MPHP `minusC` MPHP = NO_PUNCHES
    MPHP `minusC` MP = HP
    -- Fixed: releasing HP from MP+HP leaves MP (this previously
    -- returned LP, a button that was never held).
    MPHP `minusC` HP = MP
    MPHP `minusC` _ = MPHP
    LPHP `minusC` LPHP = NO_PUNCHES
    LPHP `minusC` LP = HP
    LPHP `minusC` HP = LP
    LPHP `minusC` _ = LPHP
    LPMPHP `minusC` LPMPHP = NO_PUNCHES
    LPMPHP `minusC` LPMP = HP
    LPMPHP `minusC` LPHP = MP
    LPMPHP `minusC` MPHP = LP
    LPMPHP `minusC` LP = MPHP
    LPMPHP `minusC` MP = LPHP
    LPMPHP `minusC` HP = LPMP
-- | Numeric wire code for a punch combination (9..16).
punchesToCode :: PunchCombination -> InputCode
punchesToCode combo = case combo of
    NO_PUNCHES -> 9
    LP         -> 10
    MP         -> 11
    HP         -> 12
    LPMP       -> 13
    MPHP       -> 14
    LPHP       -> 15
    LPMPHP     -> 16
-- | Lower-case names used as tokens inside an 'InputWord'.
instance Show PunchCombination where
    show NO_PUNCHES = "none"
    show LP = "lp"
    show MP = "mp"
    show HP = "hp"
    show LPMP = "lpmp"
    show MPHP = "mphp"
    show LPHP = "lphp"
    show LPMPHP = "lpmphp"
-- | Decode the punch component of an input word by substring search on
-- the 'Show' names; multi-button guards come before single-button ones.
punchesFromWord :: InputWord -> PunchCombination
punchesFromWord word
  | isLP && isMP && isHP = LPMPHP
  | isLP && isMP = LPMP
  | isLP && isHP = LPHP
  | isMP && isHP = MPHP
  | isLP = LP
  | isMP = MP
  | isHP = HP
  | otherwise = NO_PUNCHES
  where isLP = show LP `isInfixOf` word
        isMP = show MP `isInfixOf` word
        isHP = show HP `isInfixOf` word
-- | Every combination of kick buttons that can be held simultaneously.
data KickCombination = NO_KICKS | LK | MK | HK | LKMK | LKHK | MKHK | LKMKHK
    deriving (Eq)
-- | 'plusC' is the union of pressed kick buttons; 'minusC' releases
-- the buttons of the second operand from the first.
instance InputCombination KickCombination where
    NO_KICKS `plusC` c = c
    c `plusC` NO_KICKS = c
    LK `plusC` MK = LKMK
    LK `plusC` HK = LKHK
    LK `plusC` MKHK = LKMKHK
    LK `plusC` c = c
    MK `plusC` LK = LKMK
    MK `plusC` HK = MKHK
    MK `plusC` LKHK = LKMKHK
    MK `plusC` c = c
    HK `plusC` LK = LKHK
    HK `plusC` MK = MKHK
    HK `plusC` LKMK = LKMKHK
    HK `plusC` c = c
    -- The two-button left operands below had no clauses at all, so e.g.
    -- @LKMK `plusC` HK@ crashed with a runtime pattern-match failure.
    -- The result is the union of both operands.
    LKMK `plusC` HK = LKMKHK
    LKMK `plusC` LKHK = LKMKHK
    LKMK `plusC` MKHK = LKMKHK
    LKMK `plusC` LKMKHK = LKMKHK
    LKMK `plusC` _ = LKMK
    LKHK `plusC` MK = LKMKHK
    LKHK `plusC` LKMK = LKMKHK
    LKHK `plusC` MKHK = LKMKHK
    LKHK `plusC` LKMKHK = LKMKHK
    LKHK `plusC` _ = LKHK
    MKHK `plusC` LK = LKMKHK
    MKHK `plusC` LKMK = LKMKHK
    MKHK `plusC` LKHK = LKMKHK
    MKHK `plusC` LKMKHK = LKMKHK
    MKHK `plusC` _ = MKHK
    LKMKHK `plusC` _ = LKMKHK
    NO_KICKS `minusC` _ = NO_KICKS
    c `minusC` NO_KICKS = c
    LK `minusC` LK = NO_KICKS
    LK `minusC` _ = LK
    MK `minusC` MK = NO_KICKS
    MK `minusC` _ = MK
    HK `minusC` HK = NO_KICKS
    HK `minusC` _ = HK
    LKMK `minusC` LKMK = NO_KICKS
    LKMK `minusC` LK = MK
    LKMK `minusC` MK = LK
    LKMK `minusC` _ = LKMK
    MKHK `minusC` MKHK = NO_KICKS
    MKHK `minusC` MK = HK
    -- Fixed: releasing HK from MK+HK leaves MK (this previously
    -- returned LK, a button that was never held).
    MKHK `minusC` HK = MK
    MKHK `minusC` _ = MKHK
    LKHK `minusC` LKHK = NO_KICKS
    LKHK `minusC` LK = HK
    LKHK `minusC` HK = LK
    LKHK `minusC` _ = LKHK
    LKMKHK `minusC` LKMKHK = NO_KICKS
    LKMKHK `minusC` LKMK = HK
    LKMKHK `minusC` LKHK = MK
    LKMKHK `minusC` MKHK = LK
    LKMKHK `minusC` LK = MKHK
    LKMKHK `minusC` MK = LKHK
    LKMKHK `minusC` HK = LKMK
-- | Numeric wire code for a kick combination (17..24).
kicksToCode :: KickCombination -> InputCode
kicksToCode combo = case combo of
    NO_KICKS -> 17
    LK       -> 18
    MK       -> 19
    HK       -> 20
    LKMK     -> 21
    MKHK     -> 22
    LKHK     -> 23
    LKMKHK   -> 24
-- | Lower-case names used as tokens inside an 'InputWord'.
instance Show KickCombination where
    show NO_KICKS = "none"
    show LK = "lk"
    show MK = "mk"
    show HK = "hk"
    show LKMK = "lkmk"
    show MKHK = "mkhk"
    show LKHK = "lkhk"
    show LKMKHK = "lkmkhk"
-- | Decode the kick component of an input word by substring search on
-- the 'Show' names; multi-button guards come before single-button ones.
kicksFromWord :: InputWord -> KickCombination
kicksFromWord word
  | isLK && isMK && isHK = LKMKHK
  | isLK && isMK = LKMK
  | isLK && isHK = LKHK
  | isMK && isHK = MKHK
  | isLK = LK
  | isMK = MK
  | isHK = HK
  | otherwise = NO_KICKS
  where isLK = show LK `isInfixOf` word
        isMK = show MK `isInfixOf` word
        isHK = show HK `isInfixOf` word
-- | The complete input state for one frame: stick direction plus the
-- held punch and kick buttons.
data FrameInput = FrameInput {
  arrows :: ArrowCombination,
  punches :: PunchCombination,
  kicks :: KickCombination
} deriving (Eq, Show)
-- | Combine frames componentwise: arrows with arrows, punches with
-- punches, kicks with kicks.
instance InputCombination FrameInput where
  left `plusC` right = FrameInput (arrows left `plusC` arrows right)
    (punches left `plusC` punches right) (kicks left `plusC` kicks right)
  left `minusC` right = FrameInput (arrows left `minusC` arrows right)
    (punches left `minusC` punches right) (kicks left `minusC` kicks right)
-- | Build a 'FrameInput' by scanning one input word for arrow, punch
-- and kick tokens.
frameInputFromWord :: InputWord -> FrameInput
frameInputFromWord w =
  FrameInput (arrowsFromWord w) (punchesFromWord w) (kicksFromWord w)

-- | Serialize a frame as @arrowCode:punchCode:kickCode@.
frameInputToString :: FrameInput -> String
frameInputToString input =
  intercalate ":" [show arrowCode, show punchCode, show kickCode]
  where arrowCode = arrowsToCode (arrows input)
        punchCode = punchesToCode (punches input)
        kickCode = kicksToCode (kicks input)

-- | A frame with nothing pressed.
emptyFrameInput = FrameInput NO_ARROWS NO_PUNCHES NO_KICKS
-- | Press additional arrows in a frame, leaving buttons unchanged.
plusArrows :: FrameInput -> ArrowCombination -> FrameInput
frame `plusArrows` pressed = frame { arrows = arrows frame `plusC` pressed }

-- | Press additional punch buttons in a frame.
plusPunches :: FrameInput -> PunchCombination -> FrameInput
frame `plusPunches` pressed = frame { punches = punches frame `plusC` pressed }

-- | Press additional kick buttons in a frame.
plusKicks :: FrameInput -> KickCombination -> FrameInput
frame `plusKicks` pressed = frame { kicks = kicks frame `plusC` pressed }

-- | Release arrows from a frame, leaving buttons unchanged.
minusArrows :: FrameInput -> ArrowCombination -> FrameInput
frame `minusArrows` released = frame { arrows = arrows frame `minusC` released }

-- | Release punch buttons from a frame.
minusPunches :: FrameInput -> PunchCombination -> FrameInput
frame `minusPunches` released = frame { punches = punches frame `minusC` released }

-- | Release kick buttons from a frame.
minusKicks :: FrameInput -> KickCombination -> FrameInput
frame `minusKicks` released = frame { kicks = kicks frame `minusC` released }
| PinkFLoyd92/PiAssist | input-script-parser/src/InputModels.hs | gpl-3.0 | 10,483 | 0 | 9 | 3,034 | 3,683 | 1,963 | 1,720 | 313 | 1 |
module Subspace.DHT.Network
( SockHandle
, openPort
, closePort
, recvPacket
, sendPacket ) where
import Network.Socket hiding (recvFrom)
import Network.Socket.ByteString (recvFrom, sendAllTo)
import Data.ByteString as BS
import Data.ByteString.Lazy as LBS
-- | A UDP socket together with the local port it is bound to.
data SockHandle = SockHandle { pSocket :: Socket
                             , pNumber :: PortNumber }
instance Show SockHandle where
  show handle = "(SockHandle " ++ show (pNumber handle) ++ ")"
-- | open a socket on the specified port
-- Broadcast is enabled on the socket; it is bound to all interfaces.
-- NOTE(review): 'bindSocket'/'sClose' (below) are the deprecated names
-- from older versions of the network package.
openPort :: PortNumber -> IO SockHandle
openPort port = do
  -- 17 is the IP protocol number for UDP, matching the Datagram type.
  sock <- socket AF_INET Datagram 17
  setSocketOption sock Broadcast 1
  bindSocket sock (SockAddrInet port iNADDR_ANY)
  return $ SockHandle sock port
-- | close a socket
closePort :: SockHandle -> IO ()
closePort handle = sClose $ pSocket handle
-- | receive a packet with a specified maximum length from a socket
-- Returns the payload (as a lazy ByteString) and the sender's address.
recvPacket :: SockHandle -> Int -> IO (LBS.ByteString, SockAddr)
recvPacket handle len = do
  (packet, addr) <- recvFrom (pSocket handle) len
  return (strictToLazyBS packet, addr)
-- | send a package to to an address via a socket
sendPacket :: SockHandle -> LBS.ByteString -> SockAddr -> IO ()
sendPacket handle str = sendAllTo (pSocket handle) (lazyToStrictBS str)
-- | convert a lazy ByteString to a strict ByteString
-- | convert a lazy ByteString to a strict ByteString
lazyToStrictBS :: LBS.ByteString -> BS.ByteString
lazyToStrictBS = BS.concat . LBS.toChunks
-- | convert a strict ByteString to a lazy ByteString
strictToLazyBS :: BS.ByteString -> LBS.ByteString
strictToLazyBS chunk = LBS.fromChunks [chunk]
| 7h0ma5/subspace | src/Subspace/DHT/Network.hs | gpl-3.0 | 1,575 | 0 | 10 | 313 | 410 | 217 | 193 | 32 | 1 |
-- Copyright 2017 Allan Psiocbyte psicobyte@gmail.com
--
-- This program is free software: you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation, either version 3 of the License, or
-- (at your option) any later version.
--
-- This program is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
-- You should have received a copy of the GNU General Public License
-- along with this program. If not, see <http://www.gnu.org/licenses/>
import System.Environment
import Data.Char
import Data.List
-- | Top-level dispatcher: with no arguments return the usage text,
-- otherwise hand the argument list to 'modificadores'.
-- NOTE(review): not referenced by 'main' below; apparently kept for
-- interactive use.
haycadena :: [String] -> String
haycadena [] = ayuda
haycadena (x:xs) = modificadores (x:xs)
-- | Does the string start with a '-', i.e. look like an option flag?
-- Total: the empty string is not a flag.  (The previous definition had
-- no clause for \"\" and crashed with a pattern-match failure, and
-- used the @if c then True else False@ anti-pattern.)
empiezaguion :: String -> Bool
empiezaguion ('-':_) = True
empiezaguion _ = False
-- | If the first argument looks like an option flag, dispatch to
-- 'procesa'; otherwise default to uppercasing the space-joined words.
-- NOTE(review): partial — there is no clause for the empty list.
modificadores :: [String] -> String
modificadores (x:xs) =
  if empiezaguion x
    then procesa (x:xs)
    else map toUpper $ intercalate " " (x:xs)
-- | Apply the option given as the first list element to the remaining
-- elements joined with spaces: -u upper, -l lower, -c capitalize,
-- -h help; any other flag yields an error message.
-- NOTE(review): partial — there is no clause for the empty list.
procesa :: [String] -> String
procesa (x:xs)
  | x == "-u" = map toUpper $ intercalate " " (xs)
  | x == "-l" = map toLower $ intercalate " " (xs)
  | x == "-c" = tail $ scanl (capitaliza) ' ' $ intercalate " " (xs)
  | x == "-h" = ayuda
  | otherwise = "Invalid option \"" ++ x ++ "\". Try -u, -l, -c or -h"
-- | Fold step for title-casing: a character following a space or a
-- newline is uppercased, every other character is lowercased.
capitaliza :: Char -> Char -> Char
capitaliza prev c
  | prev == ' ' || prev == '\n' = toUpper c
  | otherwise                   = toLower c
-- | Usage/help text shown for -h and for an empty invocation.
ayuda = "Change case of a string.\n\
\\n\
\use: case [-u|-l|-c|-h] string\n\
\-u: Uppercase.\n\
\-l: Lowercase.\n\
\-c: Capitalize.\n\
\-h: This message."
-- | Entry point.  Behaviour preserved from the original:
--   * two or more arguments: pass them to 'procesa' directly (so the
--     first must be a flag, otherwise 'procesa' reports it invalid);
--   * one flag argument: apply that flag to stdin;
--   * one non-flag argument: uppercase it;
--   * no arguments: uppercase stdin.
-- The previous version used repeated O(n) 'length' comparisons and a
-- partial 'head'; a total pattern match replaces both.
-- NOTE(review): multi-word non-flag input is not routed through
-- 'modificadores' — confirm whether that is intended.
main :: IO ()
main = do
  args <- getArgs
  case args of
    [] -> do
      entrada <- getContents
      putStrLn $ procesa ["-u", entrada]
    [arg]
      | empiezaguion arg -> do
          entrada <- getContents
          putStrLn $ procesa [arg, entrada]
      | otherwise -> putStrLn $ procesa ("-u" : args)
    _ -> putStrLn $ procesa args
| psicobyte/ccase | ccase.hs | gpl-3.0 | 2,219 | 0 | 18 | 599 | 531 | 273 | 258 | 43 | 4 |
module ConvexHull where
import Data.List
import Data.Ord
import Text.Printf
-- | A point in the plane as an (x, y) pair.
type Point a = (a,a)
-- | Double composition: @(f .: g) x y == f (g x y)@.
-- Not used in this code any more; kept for reference.
(.:) = (.) . (.)
-- | Tuples == numbers, Nice!
instance (Num a, Num b) => Num (a,b) where
(+) (x1,y1) (x2,y2) = (x1 + x2, y1+y2)
(*) (x1,y1) (x2,y2) = (x1 * x2, y1*y2)
abs (a,b) = (abs a, abs b)
negate (a,b) = (negate a, negate b)
signum (a,b) = (signum a, signum b)
fromInteger = undefined
-- | Euclidean distance between two points.
distance :: Floating a => Point a -> Point a -> a
distance (ax, ay) (bx, by) = sqrt (dx ** 2 + dy ** 2)
  where
    dx = bx - ax
    dy = by - ay
-- | Take the first two elements of a list as an (x, y) point.
-- Fails with a descriptive 'error' when fewer than two coordinates are
-- supplied (previously: an opaque pattern-match failure).
toPoint :: [a] -> (a,a)
toPoint (x:y:_) = (x,y)
toPoint _ = error "toPoint: expected at least two coordinates"
-- * Imported from IO
-- | Read whitespace-separated numbers from stdin, one inner list per
-- input line.  NOTE(review): uses lazy I/O ('getContents') and the
-- partial 'read'; parse errors only surface when elements are forced.
readMultiNums :: (Num a, Read a) => IO [[a]]
readMultiNums = getContents >>= return . map (map read . words) . lines
-- | 2D cross product: the z component of the cross product of the two
-- points taken as vectors from the origin.
cross :: Num a => Point a -> Point a -> a
cross (ax, ay) (bx, by) = ax * by - bx * ay

-- | Cross product of two points relative to an origin point.
-- If the answer is: > 0 it is a counter-clockwise turn,
--                  == 0 the three points are colinear,
--                   < 0 it is a clockwise turn.
turn :: Num a => Point a -> Point a -> Point a -> a
turn origin a b = cross (a - origin) (b - origin)

-- | True when the turn origin -> a -> b is clockwise (or colinear).
clockwise :: (Num a, Ord a) => Point a -> Point a -> Point a -> Bool
clockwise origin a b = not (turn origin a b > 0)
-- | One half of Andrew's monotone-chain hull construction.
-- Keeps popping the most recent point while the new point makes a
-- clockwise (or colinear) turn, then pushes the new point; stops when
-- the input is exhausted.  The accumulator is kept in reverse order,
-- and the final 'tail' drops its last pushed point so the two
-- half-hulls concatenate without duplicating endpoints.
chain :: (Num a, Ord a) => [Point a] -> [Point a] -> [Point a]
chain (a:b:path) (x:xs) = if clockwise b a x
                          -- Clockwise, remove most recent turn.
                          then chain (b:path) (x:xs)
                          -- Counter-clockwise, append to chain.
                          else chain (x:a:b:path) (xs)
chain path (x:xs) = chain (x:path) xs
chain path [] = tail path
-- | Convex hull via Andrew's monotone-chain algorithm: sort the
-- points, build the lower chain over the sorted order and the upper
-- chain over the reversed order, and concatenate.
convexHull :: (Ord a, Num a) => [Point a] -> [Point a]
convexHull [] = []
convexHull [p] = [p]
convexHull points = lower ++ upper
  where
    sorted = sort points
    lower = chain [] sorted
    upper = chain [] (reverse sorted)
-- | Perimeter of the closed polygon through the given points: the sum
-- of consecutive edge lengths plus the closing edge from the last
-- point back to the first.  Fewer than two points have perimeter 0
-- (previously this crashed: no clause for @[]@ and @'last' []@ for a
-- singleton).
perimeter :: Floating a => [(a, a)] -> a
perimeter [] = 0
perimeter [_] = 0
perimeter points@(p : _) =
    distance p (last points) + sum (map (uncurry distance) (pairs points))

-- | Consecutive pairs: @pairs [a,b,c] == [(a,b),(b,c)]@.
-- Total: lists with fewer than two elements yield @[]@ (previously
-- @pairs [x]@ had no matching clause).
pairs :: [a] -> [(a,a)]
pairs xs = zip xs (drop 1 xs)
{-# LANGUAGE RankNTypes, StandaloneDeriving #-}
module Nirum.TypeInstance.BoundModule
( BoundModule (boundPackage, modulePath)
, TypeLookup (..)
, boundTypes
, findInBoundModule
, lookupType
, resolveBoundModule
) where
import Nirum.Constructs.Declaration
import Nirum.Constructs.DeclarationSet as DS
import Nirum.Constructs.Identifier
import Nirum.Constructs.Module
import Nirum.Constructs.ModulePath
import Nirum.Constructs.TypeDeclaration as TypeDeclaration hiding (modulePath)
import Nirum.Package
import Nirum.Package.Metadata
import qualified Nirum.Package.ModuleSet as ModuleSet
-- | A module path paired with the package containing it, so lookups can be
-- resolved against the whole package.
data BoundModule t = BoundModule
    { boundPackage :: Target t => Package t -- ^ the owning package
    , modulePath :: ModulePath -- ^ path of this module within the package
    }
deriving instance (Eq t, Target t) => Eq (BoundModule t)
deriving instance (Ord t, Target t) => Ord (BoundModule t)
deriving instance (Show t, Target t) => Show (BoundModule t)
-- | Bind a module path against a package, succeeding only when the path
-- resolves to an actual module in the package.
resolveBoundModule :: ModulePath -> Package t -> Maybe (BoundModule t)
resolveBoundModule path' package =
    BoundModule package path' <$ resolveModule path' package
-- | Apply a function to the bound module's underlying 'Module' when the
-- path resolves; otherwise return the fallback value.
findInBoundModule :: Target t => (Module -> a) -> a -> BoundModule t -> a
findInBoundModule valueWhenExist valueWhenNotExist bm =
    case bm of
        BoundModule { boundPackage = Package { modules = ms }
                    , modulePath = path'
                    } ->
            maybe valueWhenNotExist valueWhenExist (ModuleSet.lookup path' ms)
-- | The type declarations defined directly in the bound module
-- (empty when the module path does not resolve).
boundTypes :: Target t => BoundModule t -> DeclarationSet TypeDeclaration
boundTypes = findInBoundModule types DS.empty
-- | The result of resolving a type name inside a bound module.
data TypeLookup = Missing
                  -- ^ the name resolves to nothing usable
                | Local Type
                  -- ^ declared directly in the module being searched
                | Imported ModulePath Identifier Type
                  -- ^ found through an import: source path, source name, type
                deriving (Eq, Ord, Show)
-- | Resolve a type name inside the bound module: first the module's own
-- declarations, then (on a miss) the implicit core module, following one
-- level of import indirection for imported names.
lookupType :: Target t => Identifier -> BoundModule t -> TypeLookup
lookupType identifier boundModule =
    case DS.lookup identifier (boundTypes boundModule) of
        Nothing ->
            -- Not declared here: fall back to the core module.
            toType
                coreModulePath
                identifier
                (DS.lookup identifier $ types coreModule)
        Just TypeDeclaration { type' = t } -> Local t
        Just (Import path' _ s _) ->
            -- Imported name: chase it into the source module.
            case resolveModule path' (boundPackage boundModule) of
                Nothing -> Missing
                Just (Module decls _) ->
                    toType path' s (DS.lookup s decls)
        Just ServiceDeclaration {} -> Missing
  where
    -- Wrap a declaration found in another module as an 'Imported' result.
    toType :: ModulePath -> Identifier -> Maybe TypeDeclaration -> TypeLookup
    toType mp i (Just TypeDeclaration { type' = t }) = Imported mp i t
    toType _ _ _ = Missing
-- | Documentation of a bound module delegates to the underlying 'Module'
-- (Nothing when the module path does not resolve).
instance Target t => Documented (BoundModule t) where
    docs = findInBoundModule Nirum.Constructs.Module.docs Nothing
| spoqa/nirum | src/Nirum/TypeInstance/BoundModule.hs | gpl-3.0 | 2,761 | 0 | 14 | 718 | 751 | 395 | 356 | 65 | 6 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Dataflow.Projects.CatalogTemplates.Commit
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Creates a new TemplateVersion (Important: not new Template) entry in the
-- spanner table. Requires project_id and display_name (template).
--
-- /See:/ <https://cloud.google.com/dataflow Dataflow API Reference> for @dataflow.projects.catalogTemplates.commit@.
module Network.Google.Resource.Dataflow.Projects.CatalogTemplates.Commit
(
-- * REST Resource
ProjectsCatalogTemplatesCommitResource
-- * Creating a Request
, projectsCatalogTemplatesCommit
, ProjectsCatalogTemplatesCommit
-- * Request Lenses
, pctcXgafv
, pctcUploadProtocol
, pctcAccessToken
, pctcUploadType
, pctcPayload
, pctcName
, pctcCallback
) where
import Network.Google.Dataflow.Types
import Network.Google.Prelude
-- | A resource alias for @dataflow.projects.catalogTemplates.commit@ method which the
-- 'ProjectsCatalogTemplatesCommit' request conforms to.
-- NOTE(review): generated servant route; 'requestClient' below supplies
-- arguments in exactly this order — keep the two in sync.
type ProjectsCatalogTemplatesCommitResource =
     "v1b3" :>
       CaptureMode "name" "commit" Text :>
         QueryParam "$.xgafv" Xgafv :>
           QueryParam "upload_protocol" Text :>
             QueryParam "access_token" Text :>
               QueryParam "uploadType" Text :>
                 QueryParam "callback" Text :>
                   QueryParam "alt" AltJSON :>
                     ReqBody '[JSON] CommitTemplateVersionRequest :>
                       Post '[JSON] TemplateVersion
-- | Creates a new TemplateVersion (Important: not new Template) entry in the
-- spanner table. Requires project_id and display_name (template).
--
-- /See:/ 'projectsCatalogTemplatesCommit' smart constructor.
data ProjectsCatalogTemplatesCommit =
  ProjectsCatalogTemplatesCommit'
    { _pctcXgafv :: !(Maybe Xgafv) -- ^ V1 error format
    , _pctcUploadProtocol :: !(Maybe Text) -- ^ upload protocol for media
    , _pctcAccessToken :: !(Maybe Text) -- ^ OAuth access token
    , _pctcUploadType :: !(Maybe Text) -- ^ legacy upload protocol for media
    , _pctcPayload :: !CommitTemplateVersionRequest -- ^ request body
    , _pctcName :: !Text -- ^ template location (project_id + display_name)
    , _pctcCallback :: !(Maybe Text) -- ^ JSONP callback
    }
  deriving (Eq, Show, Data, Typeable, Generic)
-- | Creates a value of 'ProjectsCatalogTemplatesCommit' with the minimum
-- fields required to make a request.
--
-- All optional query parameters start out unset; adjust them (and the two
-- required fields) with the lenses 'pctcXgafv', 'pctcUploadProtocol',
-- 'pctcAccessToken', 'pctcUploadType', 'pctcPayload', 'pctcName' and
-- 'pctcCallback'.
projectsCatalogTemplatesCommit
    :: CommitTemplateVersionRequest -- ^ 'pctcPayload'
    -> Text -- ^ 'pctcName'
    -> ProjectsCatalogTemplatesCommit
projectsCatalogTemplatesCommit payload name =
  ProjectsCatalogTemplatesCommit'
    { _pctcPayload = payload
    , _pctcName = name
    , _pctcXgafv = Nothing
    , _pctcUploadProtocol = Nothing
    , _pctcAccessToken = Nothing
    , _pctcUploadType = Nothing
    , _pctcCallback = Nothing
    }
-- | V1 error format.
pctcXgafv :: Lens' ProjectsCatalogTemplatesCommit (Maybe Xgafv)
pctcXgafv = lens _pctcXgafv (\req v -> req {_pctcXgafv = v})

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pctcUploadProtocol :: Lens' ProjectsCatalogTemplatesCommit (Maybe Text)
pctcUploadProtocol = lens _pctcUploadProtocol (\req v -> req {_pctcUploadProtocol = v})

-- | OAuth access token.
pctcAccessToken :: Lens' ProjectsCatalogTemplatesCommit (Maybe Text)
pctcAccessToken = lens _pctcAccessToken (\req v -> req {_pctcAccessToken = v})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pctcUploadType :: Lens' ProjectsCatalogTemplatesCommit (Maybe Text)
pctcUploadType = lens _pctcUploadType (\req v -> req {_pctcUploadType = v})

-- | Multipart request metadata.
pctcPayload :: Lens' ProjectsCatalogTemplatesCommit CommitTemplateVersionRequest
pctcPayload = lens _pctcPayload (\req v -> req {_pctcPayload = v})

-- | The location of the template, name includes project_id and display_name.
-- Commit using project_id(pid1) and display_name(tid1). Format:
-- projects\/{pid1}\/catalogTemplates\/{tid1}
pctcName :: Lens' ProjectsCatalogTemplatesCommit Text
pctcName = lens _pctcName (\req v -> req {_pctcName = v})

-- | JSONP
pctcCallback :: Lens' ProjectsCatalogTemplatesCommit (Maybe Text)
pctcCallback = lens _pctcCallback (\req v -> req {_pctcCallback = v})
-- NOTE(review): generated servant-client wiring. The argument order of
-- 'go' must mirror 'ProjectsCatalogTemplatesCommitResource' exactly
-- (name capture, query params, body, service) — do not reorder.
instance GoogleRequest ProjectsCatalogTemplatesCommit
         where
        type Rs ProjectsCatalogTemplatesCommit =
             TemplateVersion
        type Scopes ProjectsCatalogTemplatesCommit =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/userinfo.email"]
        requestClient ProjectsCatalogTemplatesCommit'{..}
          = go _pctcName _pctcXgafv _pctcUploadProtocol
              _pctcAccessToken
              _pctcUploadType
              _pctcCallback
              (Just AltJSON)
              _pctcPayload
              dataflowService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy ProjectsCatalogTemplatesCommitResource)
                      mempty
| brendanhay/gogol | gogol-dataflow/gen/Network/Google/Resource/Dataflow/Projects/CatalogTemplates/Commit.hs | mpl-2.0 | 5,813 | 0 | 16 | 1,251 | 784 | 459 | 325 | 116 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Monitoring.Projects.Categories.MetricAssociations.Delete
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Delete a MetricAssociation.
--
-- /See:/ <https://cloud.google.com/monitoring/api/ Stackdriver Monitoring API Reference> for @monitoring.projects.categories.metricAssociations.delete@.
module Network.Google.Resource.Monitoring.Projects.Categories.MetricAssociations.Delete
(
-- * REST Resource
ProjectsCategoriesMetricAssociationsDeleteResource
-- * Creating a Request
, projectsCategoriesMetricAssociationsDelete
, ProjectsCategoriesMetricAssociationsDelete
-- * Request Lenses
, pcmadXgafv
, pcmadUploadProtocol
, pcmadPp
, pcmadAccessToken
, pcmadUploadType
, pcmadBearerToken
, pcmadName
, pcmadCallback
) where
import Network.Google.Monitoring.Types
import Network.Google.Prelude
-- | A resource alias for @monitoring.projects.categories.metricAssociations.delete@ method which the
-- 'ProjectsCategoriesMetricAssociationsDelete' request conforms to.
-- NOTE(review): generated servant route; 'requestClient' below supplies
-- arguments in exactly this order — keep the two in sync.
type ProjectsCategoriesMetricAssociationsDeleteResource
     =
     "v3" :>
       Capture "name" Text :>
         QueryParam "$.xgafv" Xgafv :>
           QueryParam "upload_protocol" Text :>
             QueryParam "pp" Bool :>
               QueryParam "access_token" Text :>
                 QueryParam "uploadType" Text :>
                   QueryParam "bearer_token" Text :>
                     QueryParam "callback" Text :>
                       QueryParam "alt" AltJSON :> Delete '[JSON] Empty
-- | Delete a MetricAssociation.
--
-- /See:/ 'projectsCategoriesMetricAssociationsDelete' smart constructor.
data ProjectsCategoriesMetricAssociationsDelete = ProjectsCategoriesMetricAssociationsDelete'
    { _pcmadXgafv :: !(Maybe Xgafv) -- ^ V1 error format
    , _pcmadUploadProtocol :: !(Maybe Text) -- ^ upload protocol for media
    , _pcmadPp :: !Bool -- ^ pretty-print response (defaults to True)
    , _pcmadAccessToken :: !(Maybe Text) -- ^ OAuth access token
    , _pcmadUploadType :: !(Maybe Text) -- ^ legacy upload protocol for media
    , _pcmadBearerToken :: !(Maybe Text) -- ^ OAuth bearer token
    , _pcmadName :: !Text -- ^ resource name of the association to delete
    , _pcmadCallback :: !(Maybe Text) -- ^ JSONP callback
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'ProjectsCategoriesMetricAssociationsDelete' with
-- the minimum fields required to make a request.
--
-- Pretty-printing ('pcmadPp') defaults to 'True'; all other optional query
-- parameters start out unset. Adjust them with the lenses 'pcmadXgafv',
-- 'pcmadUploadProtocol', 'pcmadPp', 'pcmadAccessToken', 'pcmadUploadType',
-- 'pcmadBearerToken', 'pcmadName' and 'pcmadCallback'.
projectsCategoriesMetricAssociationsDelete
    :: Text -- ^ 'pcmadName'
    -> ProjectsCategoriesMetricAssociationsDelete
projectsCategoriesMetricAssociationsDelete name =
  ProjectsCategoriesMetricAssociationsDelete'
    { _pcmadName = name
    , _pcmadPp = True
    , _pcmadXgafv = Nothing
    , _pcmadUploadProtocol = Nothing
    , _pcmadAccessToken = Nothing
    , _pcmadUploadType = Nothing
    , _pcmadBearerToken = Nothing
    , _pcmadCallback = Nothing
    }
-- | V1 error format.
pcmadXgafv :: Lens' ProjectsCategoriesMetricAssociationsDelete (Maybe Xgafv)
pcmadXgafv = lens _pcmadXgafv (\req v -> req {_pcmadXgafv = v})

-- | Upload protocol for media (e.g. \"raw\", \"multipart\").
pcmadUploadProtocol :: Lens' ProjectsCategoriesMetricAssociationsDelete (Maybe Text)
pcmadUploadProtocol = lens _pcmadUploadProtocol (\req v -> req {_pcmadUploadProtocol = v})

-- | Pretty-print response.
pcmadPp :: Lens' ProjectsCategoriesMetricAssociationsDelete Bool
pcmadPp = lens _pcmadPp (\req v -> req {_pcmadPp = v})

-- | OAuth access token.
pcmadAccessToken :: Lens' ProjectsCategoriesMetricAssociationsDelete (Maybe Text)
pcmadAccessToken = lens _pcmadAccessToken (\req v -> req {_pcmadAccessToken = v})

-- | Legacy upload protocol for media (e.g. \"media\", \"multipart\").
pcmadUploadType :: Lens' ProjectsCategoriesMetricAssociationsDelete (Maybe Text)
pcmadUploadType = lens _pcmadUploadType (\req v -> req {_pcmadUploadType = v})

-- | OAuth bearer token.
pcmadBearerToken :: Lens' ProjectsCategoriesMetricAssociationsDelete (Maybe Text)
pcmadBearerToken = lens _pcmadBearerToken (\req v -> req {_pcmadBearerToken = v})

-- | Resource name of metric association to delete. Resource name form is
-- projects\/{project_id_or_number}\/
-- categories\/{short_name}\/metricAssociations\/{metric_name}.
pcmadName :: Lens' ProjectsCategoriesMetricAssociationsDelete Text
pcmadName = lens _pcmadName (\req v -> req {_pcmadName = v})

-- | JSONP
pcmadCallback :: Lens' ProjectsCategoriesMetricAssociationsDelete (Maybe Text)
pcmadCallback = lens _pcmadCallback (\req v -> req {_pcmadCallback = v})
-- NOTE(review): generated servant-client wiring. The argument order of
-- 'go' must mirror 'ProjectsCategoriesMetricAssociationsDeleteResource'
-- exactly (name capture, query params, service) — do not reorder.
instance GoogleRequest
         ProjectsCategoriesMetricAssociationsDelete where
        type Rs ProjectsCategoriesMetricAssociationsDelete =
             Empty
        type Scopes
               ProjectsCategoriesMetricAssociationsDelete
             =
             '["https://www.googleapis.com/auth/cloud-platform",
               "https://www.googleapis.com/auth/monitoring"]
        requestClient
          ProjectsCategoriesMetricAssociationsDelete'{..}
          = go _pcmadName _pcmadXgafv _pcmadUploadProtocol
              (Just _pcmadPp)
              _pcmadAccessToken
              _pcmadUploadType
              _pcmadBearerToken
              _pcmadCallback
              (Just AltJSON)
              monitoringService
          where go
                  = buildClient
                      (Proxy ::
                         Proxy
                           ProjectsCategoriesMetricAssociationsDeleteResource)
                      mempty
| rueshyna/gogol | gogol-monitoring/gen/Network/Google/Resource/Monitoring/Projects/Categories/MetricAssociations/Delete.hs | mpl-2.0 | 6,317 | 0 | 17 | 1,417 | 855 | 498 | 357 | 129 | 1 |
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -fno-warn-duplicate-exports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
-- |
-- Module : Network.Google.Resource.Blogger.Comments.List
-- Copyright : (c) 2015-2016 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Retrieves the comments for a post, possibly filtered.
--
-- /See:/ <https://developers.google.com/blogger/docs/3.0/getting_started Blogger API Reference> for @blogger.comments.list@.
module Network.Google.Resource.Blogger.Comments.List
(
-- * REST Resource
CommentsListResource
-- * Creating a Request
, commentsList
, CommentsList
-- * Request Lenses
, clStatus
, clEndDate
, clBlogId
, clStartDate
, clFetchBodies
, clView
, clPostId
, clPageToken
, clMaxResults
) where
import Network.Google.Blogger.Types
import Network.Google.Prelude
-- | A resource alias for @blogger.comments.list@ method which the
-- 'CommentsList' request conforms to.
-- NOTE(review): generated servant route; 'requestClient' below supplies
-- arguments in exactly this order — keep the two in sync.
type CommentsListResource =
     "blogger" :>
       "v3" :>
         "blogs" :>
           Capture "blogId" Text :>
             "posts" :>
               Capture "postId" Text :>
                 "comments" :>
                   QueryParams "status" CommentsListStatus :>
                     QueryParam "endDate" DateTime' :>
                       QueryParam "startDate" DateTime' :>
                         QueryParam "fetchBodies" Bool :>
                           QueryParam "view" CommentsListView :>
                             QueryParam "pageToken" Text :>
                               QueryParam "maxResults" (Textual Word32) :>
                                 QueryParam "alt" AltJSON :>
                                   Get '[JSON] CommentList
-- | Retrieves the comments for a post, possibly filtered.
--
-- /See:/ 'commentsList' smart constructor.
data CommentsList = CommentsList'
    { _clStatus :: !(Maybe [CommentsListStatus]) -- ^ status filter
    , _clEndDate :: !(Maybe DateTime') -- ^ latest comment date to fetch
    , _clBlogId :: !Text -- ^ ID of the blog to fetch comments from
    , _clStartDate :: !(Maybe DateTime') -- ^ earliest comment date to fetch
    , _clFetchBodies :: !(Maybe Bool) -- ^ whether comment bodies are included
    , _clView :: !(Maybe CommentsListView) -- ^ access level for the result
    , _clPostId :: !Text -- ^ ID of the post to fetch comments from
    , _clPageToken :: !(Maybe Text) -- ^ continuation token when paging
    , _clMaxResults :: !(Maybe (Textual Word32)) -- ^ max comments to return
    } deriving (Eq,Show,Data,Typeable,Generic)
-- | Creates a value of 'CommentsList' with the minimum fields required to
-- make a request.
--
-- All optional query parameters start out unset; adjust them with the
-- lenses 'clStatus', 'clEndDate', 'clBlogId', 'clStartDate',
-- 'clFetchBodies', 'clView', 'clPostId', 'clPageToken' and 'clMaxResults'.
commentsList
    :: Text -- ^ 'clBlogId'
    -> Text -- ^ 'clPostId'
    -> CommentsList
commentsList blogId postId =
  CommentsList'
    { _clBlogId = blogId
    , _clPostId = postId
    , _clStatus = Nothing
    , _clEndDate = Nothing
    , _clStartDate = Nothing
    , _clFetchBodies = Nothing
    , _clView = Nothing
    , _clPageToken = Nothing
    , _clMaxResults = Nothing
    }
clStatus :: Lens' CommentsList [CommentsListStatus]
clStatus = lens _clStatus (\cl v -> cl {_clStatus = v}) . _Default . _Coerce

-- | Latest date of comment to fetch, a date-time with RFC 3339 formatting.
clEndDate :: Lens' CommentsList (Maybe UTCTime)
clEndDate = lens _clEndDate (\cl v -> cl {_clEndDate = v}) . mapping _DateTime

-- | ID of the blog to fetch comments from.
clBlogId :: Lens' CommentsList Text
clBlogId = lens _clBlogId (\cl v -> cl {_clBlogId = v})

-- | Earliest date of comment to fetch, a date-time with RFC 3339 formatting.
clStartDate :: Lens' CommentsList (Maybe UTCTime)
clStartDate = lens _clStartDate (\cl v -> cl {_clStartDate = v}) . mapping _DateTime

-- | Whether the body content of the comments is included.
clFetchBodies :: Lens' CommentsList (Maybe Bool)
clFetchBodies = lens _clFetchBodies (\cl v -> cl {_clFetchBodies = v})

-- | Access level with which to view the returned result. Note that some
-- fields require elevated access.
clView :: Lens' CommentsList (Maybe CommentsListView)
clView = lens _clView (\cl v -> cl {_clView = v})

-- | ID of the post to fetch posts from.
clPostId :: Lens' CommentsList Text
clPostId = lens _clPostId (\cl v -> cl {_clPostId = v})

-- | Continuation token if request is paged.
clPageToken :: Lens' CommentsList (Maybe Text)
clPageToken = lens _clPageToken (\cl v -> cl {_clPageToken = v})

-- | Maximum number of comments to include in the result.
clMaxResults :: Lens' CommentsList (Maybe Word32)
clMaxResults = lens _clMaxResults (\cl v -> cl {_clMaxResults = v}) . mapping _Coerce
-- NOTE(review): generated servant-client wiring. The argument order of
-- 'go' must mirror 'CommentsListResource' exactly (path captures, query
-- params, service) — do not reorder.
instance GoogleRequest CommentsList where
        type Rs CommentsList = CommentList
        type Scopes CommentsList =
             '["https://www.googleapis.com/auth/blogger",
               "https://www.googleapis.com/auth/blogger.readonly"]
        requestClient CommentsList'{..}
          = go _clBlogId _clPostId (_clStatus ^. _Default)
              _clEndDate
              _clStartDate
              _clFetchBodies
              _clView
              _clPageToken
              _clMaxResults
              (Just AltJSON)
              bloggerService
          where go
                  = buildClient (Proxy :: Proxy CommentsListResource)
                      mempty
| rueshyna/gogol | gogol-blogger/gen/Network/Google/Resource/Blogger/Comments/List.hs | mpl-2.0 | 5,867 | 0 | 22 | 1,591 | 996 | 572 | 424 | 137 | 1 |
{-# LANGUAGE GADTs,ExistentialQuantification #-}
module ProjectM36.Relation where
import qualified Data.Set as S
import qualified Data.HashSet as HS
import Control.Monad
import qualified Data.Vector as V
import ProjectM36.Base
import ProjectM36.Tuple
import qualified ProjectM36.Attribute as A
import ProjectM36.TupleSet
import ProjectM36.Error
--import qualified Control.Parallel.Strategies as P
import qualified ProjectM36.TypeConstructorDef as TCD
import qualified ProjectM36.DataConstructorDef as DCD
import qualified Data.Text as T
import Data.Either (isRight)
import System.Random.Shuffle
import Control.Monad.Random
-- | The heading (attribute set) of a relation.
attributes :: Relation -> Attributes
attributes (Relation attrs _ ) = attrs
-- | The set of attribute names in the relation's heading.
attributeNames :: Relation -> S.Set AttributeName
attributeNames (Relation attrs _) = A.attributeNameSet attrs
-- | Look up a single attribute by name in the relation's heading.
attributeForName :: AttributeName -> Relation -> Either RelationalError Attribute
attributeForName attrName (Relation attrs _) = A.attributeForName attrName attrs
-- | Restrict the heading to the named attributes.
attributesForNames :: S.Set AttributeName -> Relation -> Attributes
attributesForNames attrNameSet (Relation attrs _) = A.attributesForNames attrNameSet attrs
-- | The atom type stored under the named attribute.
atomTypeForName :: AttributeName -> Relation -> Either RelationalError AtomType
atomTypeForName attrName (Relation attrs _) = A.atomTypeForAttributeName attrName attrs
-- | Build a relation from a heading and a row-major matrix of atoms.
-- (The original wrapped a single applicative expression in a redundant
-- 'do' block.)
mkRelationFromList :: Attributes -> [[Atom]] -> Either RelationalError Relation
mkRelationFromList attrs atomMatrix =
  Relation attrs <$> mkTupleSetFromList attrs atomMatrix
-- | A relation with the given heading and no tuples.
emptyRelationWithAttrs :: Attributes -> Relation
emptyRelationWithAttrs attrs = Relation attrs emptyTupleSet
-- | Build a relation after verifying the tuple set against the heading:
-- all tuples must share the same keys, numbered 1..N where N is the
-- attribute count ('verifyTupleSet' enforces this).
-- (The original pattern-matched Left/Right by hand, which is just 'fmap'.)
mkRelation :: Attributes -> RelationTupleSet -> Either RelationalError Relation
mkRelation attrs tupleSet = Relation attrs <$> verifyTupleSet attrs tupleSet
-- | Less safe variant of 'mkRelation' that skips 'verifyTupleSet',
-- useful for infinite or thunked tuple sets. Instead of returning a
-- 'Left' 'RelationalError', any tuple that does not match the heading is
-- silently dropped. Duplicate tuples are NOT filtered here.
mkRelationDeferVerify :: Attributes -> RelationTupleSet -> Either RelationalError Relation
mkRelationDeferVerify attrs tupleSet =
  Right (Relation attrs (RelationTupleSet keptTuples))
  where
    keptTuples = [tup | tup <- asList tupleSet, isRight (verifyTuple attrs tup)]
-- | Return a relation of the same type (same heading) but without any
-- tuples.
relationWithEmptyTupleSet :: Relation -> Relation
relationWithEmptyTupleSet (Relation attrs _) = emptyRelationWithAttrs attrs
-- | Build and verify a relation from a list of tuples.
mkRelationFromTuples :: Attributes -> [RelationTuple] -> Either RelationalError Relation
mkRelationFromTuples attrs tuples = mkTupleSet attrs tuples >>= mkRelation attrs
-- | The zero-attribute relation containing a single empty tuple
-- (relational "true").
relationTrue :: Relation
relationTrue = Relation A.emptyAttributes singletonTupleSet
-- | The zero-attribute relation containing no tuples (relational "false").
relationFalse :: Relation
relationFalse = Relation A.emptyAttributes emptyTupleSet
-- | If the relation contains exactly one tuple, return it; otherwise
-- Nothing. The original computed the full cardinality (O(n) 'length')
-- and then used the partial 'head'; a total pattern match does both
-- checks at once and inspects at most two cons cells.
singletonTuple :: Relation -> Maybe RelationTuple
singletonTuple (Relation _ tupleSet) =
  case asList tupleSet of
    [tup] -> Just tup
    _ -> Nothing
-- | Set union of two relations with identical headings; mismatched
-- attribute sets are reported as an error.
-- (Still unnecessarily expensive for @bigx `union` bigx@: each tuple is
-- hashed and compared for equality even though the headings are already
-- known to match — the major expense is attributesEqual.)
union :: Relation -> Relation -> Either RelationalError Relation
union (Relation attrs1 tupSet1) (Relation attrs2 tupSet2)
  | A.attributesEqual attrs1 attrs2 =
      Right (Relation attrs1 (tupleSetUnion attrs1 tupSet1 tupSet2))
  | otherwise =
      Left (AttributeNamesMismatchError
              (A.attributeNameSet (A.attributesDifference attrs1 attrs2)))
-- | Project the relation onto the named attributes, deduplicating the
-- resulting tuples via a hash set.
project :: S.Set AttributeName -> Relation -> Either RelationalError Relation
project projNames rel@(Relation _ tupSet) = do
  projAttrs <- A.projectionAttributesForNames projNames (attributes rel)
  projTuples <- traverse (tupleProject projAttrs) (asList tupSet)
  let uniques = HS.toList (HS.fromList projTuples)
  pure (Relation projAttrs (RelationTupleSet uniques))
-- | Rename an attribute throughout the relation. Fails when the old name
-- is absent from the heading or the new name is already in use.
rename :: AttributeName -> AttributeName -> Relation -> Either RelationalError Relation
rename oldAttrName newAttrName rel@(Relation oldAttrs oldTupSet)
  | not attributeValid = Left $ AttributeNamesMismatchError (S.singleton oldAttrName)
  | newAttributeInUse = Left $ AttributeNameInUseError newAttrName
  | otherwise = mkRelation newAttrs newTupSet
  where
    newAttributeInUse = A.attributeNamesContained (S.singleton newAttrName) (attributeNames rel)
    attributeValid = A.attributeNamesContained (S.singleton oldAttrName) (attributeNames rel)
    newAttrs = A.renameAttributes oldAttrName newAttrName oldAttrs
    -- every tuple is rewritten with the renamed attribute
    newTupSet = RelationTupleSet $ map tupsetmapper (asList oldTupSet)
    tupsetmapper = tupleRenameAttribute oldAttrName newAttrName
--the algebra should return a relation of one attribute and one row with the arity
-- | Number of attributes in the relation's heading.
arity :: Relation -> Int
arity (Relation attrs _) = A.arity attrs
-- | Synonym for 'arity'.
degree :: Relation -> Int
degree = arity
-- | Number of tuples. Forces the whole tuple list ('length').
cardinality :: Relation -> RelationCardinality --we need to detect infinite tuple sets- perhaps with a flag
cardinality (Relation _ tupSet) = Finite (length (asList tupSet))
--find tuples where the atoms in the relation which are NOT in the AttributeNameSet are equal
-- create a relation for each tuple where the attributes NOT in the AttributeNameSet are equal
--the attrname set attrs end up in the nested relation
--algorithm:
-- map projection of non-grouped attributes to restriction of matching grouped attribute tuples and then project on grouped attributes to construct the sub-relation
{-
group :: S.Set AttributeName -> AttributeName -> Relation -> Either RelationalError Relation
group groupAttrNames newAttrName rel@(Relation oldAttrs tupleSet) = do
nonGroupProjection <- project nonGroupAttrNames rel
relFold folder (Right (Relation newAttrs emptyTupleSet)) nonGroupProjection
where
newAttrs = M.union (attributesForNames nonGroupAttrNames rel) groupAttr
groupAttr = Attribute newAttrName RelationAtomType (invertedAttributeNames groupAttrNames (attributes rel))
nonGroupAttrNames = invertAttributeNames (attributes rel) groupAttrNames
--map the projection to add the additional new attribute
--create the new attribute (a new relation) by filtering and projecting the tupleSet
folder tupleFromProjection acc = case acc of
Left err -> Left err
Right acc -> union acc (Relation newAttrs (HS.singleton (tupleExtend tupleFromProjection (matchingRelTuple tupleFromProjection))))
-}
--algorithm: self-join with image relation
-- | Relational GROUP: collapse the named attributes of each tuple into a
-- single relation-valued attribute @newAttrName@, keyed by the remaining
-- attributes (each key's sub-relation is its image relation).
-- NOTE(review): 'matchingRelTuple' hits 'undefined' if 'imageRelationFor'
-- ever fails; the preceding projection should make that unreachable, but
-- it is a latent partial point.
group :: S.Set AttributeName -> AttributeName -> Relation -> Either RelationalError Relation
group groupAttrNames newAttrName rel = do
  let nonGroupAttrNames = A.nonMatchingAttributeNameSet groupAttrNames (S.fromList (V.toList (A.attributeNames (attributes rel))))
  nonGroupProjectionAttributes <- A.projectionAttributesForNames nonGroupAttrNames (attributes rel)
  groupProjectionAttributes <- A.projectionAttributesForNames groupAttrNames (attributes rel)
  let groupAttr = Attribute newAttrName (RelationAtomType groupProjectionAttributes)
      -- wrap the image relation of a key tuple as a single-attribute tuple
      matchingRelTuple tupIn = case imageRelationFor tupIn rel of
        Right rel2 -> RelationTuple (A.singleton groupAttr) (V.singleton (RelationAtom rel2))
        Left _ -> undefined
      mogrifier tupIn = pure (tupleExtend tupIn (matchingRelTuple tupIn))
      newAttrs = A.addAttribute groupAttr nonGroupProjectionAttributes
  nonGroupProjection <- project nonGroupAttrNames rel
  relMogrify mogrifier newAttrs nonGroupProjection
-- | Restriction helper: keep only the tuples whose attributes agree with
-- every attribute of the probe tuple (the probe is a sub-tuple of the
-- kept tuples). (The original wrapped the predicate in a redundant 'do'.)
restrictEq :: RelationTuple -> Relation -> Either RelationalError Relation
restrictEq tuple = restrict rfilter
  where
    rfilter :: RelationTuple -> Either RelationalError Bool
    rfilter tupleIn = pure (tupleIntersection tuple tupleIn == tuple)
-- | Unwrap a relation-valued attribute: each inner tuple is joined onto
-- its outer tuple's remaining attributes, and the results are unioned.
-- Returns an error when the named attribute is not relation-valued
-- (via 'attributesForRelval').
ungroup :: AttributeName -> Relation -> Either RelationalError Relation
ungroup relvalAttrName rel = case attributesForRelval relvalAttrName rel of
  Left err -> Left err
  Right relvalAttrs -> relFold relFolder (Right $ Relation newAttrs emptyTupleSet) rel
   where
    -- new heading: inner relation's attributes plus the outer non-group ones
    newAttrs = A.addAttributes relvalAttrs nonGroupAttrs
    nonGroupAttrs = A.deleteAttributeName relvalAttrName (attributes rel)
    relFolder :: RelationTuple -> Either RelationalError Relation -> Either RelationalError Relation
    relFolder tupleIn acc = case acc of
      Left err -> Left err
      Right accRel -> do
                ungrouped <- tupleUngroup relvalAttrName newAttrs tupleIn
                accRel `union` ungrouped
-- | Take a relation-valued attribute name and a tuple and ungroup the
-- relval: each tuple of the inner relation is extended with the outer
-- tuple's non-group attributes, and the results are unioned under the
-- supplied heading.
tupleUngroup :: AttributeName -> Attributes -> RelationTuple -> Either RelationalError Relation
tupleUngroup relvalAttrName newAttrs tuple = do
  relvalRelation <- relationForAttributeName relvalAttrName tuple
  let nonGroupAttrs = A.intersection newAttrs (tupleAttributes tuple)
  nonGroupTupleProjection <- tupleProject nonGroupAttrs tuple
  -- fold over the inner relation, unioning one-tuple relations together
  let folder tupleIn acc = case acc of
        Left err -> Left err
        Right accRel ->
          union accRel $ Relation newAttrs (RelationTupleSet [tupleExtend nonGroupTupleProjection tupleIn])
  relFold folder (Right $ Relation newAttrs emptyTupleSet) relvalRelation
-- | The heading of a relation-valued attribute, or an error when the
-- named attribute is not relation-valued.
attributesForRelval :: AttributeName -> Relation -> Either RelationalError Attributes
attributesForRelval relvalAttrName (Relation attrs _) = do
  aType <- A.atomTypeForAttributeName relvalAttrName attrs
  case aType of
    RelationAtomType relAttrs -> pure relAttrs
    _ -> Left (AttributeIsNotRelationValuedError relvalAttrName)
type RestrictionFilter = RelationTuple -> Either RelationalError Bool
-- | Keep only the tuples for which the (possibly failing) predicate
-- succeeds with True.
restrict :: RestrictionFilter -> Relation -> Either RelationalError Relation
restrict rfilter (Relation attrs tupset) =
  Relation attrs . RelationTupleSet <$> filterM rfilter (asList tupset)
--joins on columns with the same name- use rename to avoid this- base case: cartesian product
--after changing from string atoms, there needs to be a type-checking step!
--this is a "nested loop" scan as described by the postgresql documentation
-- | Natural join: 'A.joinAttributes' computes (and validates) the
-- combined heading, then every tuple of the first relation is joined
-- against the entire tuple set of the second.
join :: Relation -> Relation -> Either RelationalError Relation
join (Relation attrs1 tupSet1) (Relation attrs2 tupSet2) = do
  newAttrs <- A.joinAttributes attrs1 attrs2
  let tupleSetJoiner accumulator tuple1 = do
        joinedTupSet <- singleTupleSetJoin newAttrs tuple1 tupSet2
        return $ joinedTupSet ++ accumulator
  newTupSetList <- foldM tupleSetJoiner [] (asList tupSet1)
  Relation newAttrs <$> mkTupleSet newAttrs newTupSetList
-- | Difference takes two relations of the same type and returns a new relation which contains only tuples which appear in the first relation but not the second.
-- NOTE(review): the membership check folds over all of relB for every
-- tuple of relA, so this is O(|A| * |B|).
difference :: Relation -> Relation -> Either RelationalError Relation
difference relA relB =
  if not (A.attributesEqual (attributes relA) (attributes relB))
  then
    Left $ AttributeNamesMismatchError (A.attributeNameSet (A.attributesDifference attrsA attrsB))
  else
    restrict rfilter relA
  where
    attrsA = attributes relA
    attrsB = attributes relB
    -- True iff tupInA matches no tuple of relB; once any match is seen the
    -- accumulator stays Right False for the rest of the fold.
    rfilter tupInA = relFold (\tupInB acc -> if acc == Right False then pure False else pure (tupInB /= tupInA)) (Right True) relB
--a map should NOT change the structure of a relation, so attributes should be constant
-- | Map a (possibly failing) function over every tuple, rejecting any
-- mapped tuple whose attributes differ from the input tuple's — the
-- heading must stay constant.
relMap :: (RelationTuple -> Either RelationalError RelationTuple) -> Relation -> Either RelationalError Relation
relMap mapper (Relation attrs tupleSet) =
  case forM (asList tupleSet) typeMapCheck of
    Right remappedTupleSet -> mkRelation attrs (RelationTupleSet remappedTupleSet)
    Left err -> Left err
  where
    -- apply the mapper and verify the tuple's heading was preserved
    typeMapCheck tupleIn = do
      remappedTuple <- mapper tupleIn
      if tupleAttributes remappedTuple == tupleAttributes tupleIn
        then Right remappedTuple
        else Left (TupleAttributeTypeMismatchError (A.attributesDifference (tupleAttributes tupleIn) attrs))
-- | Rewrite every tuple with the mapper and rebuild the relation under
-- the given (possibly different) heading; each mapped tuple is reordered
-- to match the new heading.
relMogrify :: (RelationTuple -> Either RelationalError RelationTuple) -> Attributes -> Relation -> Either RelationalError Relation
relMogrify mapper newAttributes (Relation _ tupSet) =
  mkRelationFromTuples newAttributes =<<
    traverse (fmap (reorderTuple newAttributes) . mapper) (asList tupSet)
-- | Right fold over the tuples of a relation.
relFold :: (RelationTuple -> a -> a) -> a -> Relation -> a
relFold step z (Relation _ tupleSet) = foldr step z (asList tupleSet)
-- | Generate a randomly-ordered list of tuples from the relation.
-- The order is deliberately randomized (fresh 'StdGen' per call) so that
-- callers cannot come to depend on any particular tuple ordering.
toList :: Relation -> IO [RelationTuple]
toList rel = do
  gen <- newStdGen
  let rel' = evalRand (randomizeTupleOrder rel) gen
  pure (relFold (:) [] rel')
--image relation as defined by CJ Date
-- | Restrict @rel@ to the tuples matching @matchTuple@, then project away
-- the attributes the match tuple already fixes, leaving only the
-- attributes of @rel@ that are absent from the match tuple.
imageRelationFor :: RelationTuple -> Relation -> Either RelationalError Relation
imageRelationFor matchTuple rel = do
  restricted <- restrictEq matchTuple rel
  let remainingNames = A.nonMatchingAttributeNameSet (attributeNames rel) (tupleAttributeNameSet matchTuple)
  project remainingNames restricted
--returns a relation-valued attribute image relation for each tuple in rel1
--algorithm:
{-
imageRelationJoin :: Relation -> Relation -> Either RelationalError Relation
imageRelationJoin rel1@(Relation attrNameSet1 tupSet1) rel2@(Relation attrNameSet2 tupSet2) = do
Right $ Relation undefined
where
matchingAttrs = matchingAttributeNameSet attrNameSet1 attrNameSet2
newAttrs = nonMatchingAttributeNameSet matchingAttrs $ S.union attrNameSet1 attrNameSet2
tupleSetJoiner tup1 acc = undefined
-}
-- | Return a Relation describing the types in the mapping.
typesAsRelation :: TypeConstructorMapping -> Either RelationalError Relation
typesAsRelation types = mkRelationFromTuples attrs tuples
  where
    -- Two attributes: the constructor's name and a relation-valued attribute
    -- listing its data constructors.
    attrs = A.attributesFromList [Attribute "TypeConstructor" TextAtomType,
                                  Attribute "DataConstructors" dConsType]
    subAttrs = A.attributesFromList [Attribute "DataConstructor" TextAtomType]
    dConsType = RelationAtomType subAttrs
    tuples = map mkTypeConsDescription types
    mkTypeConsDescription (tCons, dConsList) =
      RelationTuple attrs (V.fromList [TextAtom (TCD.name tCons), mkDataConsRelation dConsList])
    -- Each data constructor is rendered as "Name field1 field2 ..." in a
    -- one-attribute sub-relation.
    -- NOTE(review): partial — calls 'error' on a Left even though the
    -- top-level result type is Either; consider propagating the failure.
    mkDataConsRelation dConsList = case mkRelationFromTuples subAttrs $ map (\dCons -> RelationTuple subAttrs (V.singleton $ TextAtom $ T.intercalate " " (DCD.name dCons:map (T.pack . show) (DCD.fields dCons)))) dConsList of
      Left err -> error ("mkRelationFromTuples pooped " ++ show err)
      Right rel -> RelationAtom rel
-- | Randomly resort the tuples. This is useful for emphasizing that two relations are equal even when they are printed to the console in different orders.
randomizeTupleOrder :: MonadRandom m => Relation -> m Relation
randomizeTupleOrder (Relation attrs tupSet) =
  -- Shuffle only the underlying tuple list; the attributes are untouched.
  Relation attrs . RelationTupleSet <$> shuffleM (asList tupSet)
-- | An arbitrary tuple from the relation, or 'Nothing' when it is empty.
-- Useful for priming folds over the tuples.
oneTuple :: Relation -> Maybe RelationTuple
oneTuple (Relation _ (RelationTupleSet tuples)) =
  case tuples of
    []      -> Nothing
    (t : _) -> Just t
-- | All tuples of the relation, in the order the underlying tuple set
-- stores them.
tuplesList :: Relation -> [RelationTuple]
tuplesList (Relation _ tupleSet) = asList tupleSet
| agentm/project-m36 | src/lib/ProjectM36/Relation.hs | unlicense | 16,026 | 0 | 21 | 2,677 | 3,299 | 1,632 | 1,667 | 194 | 3 |
--------------------------------------------------------------------------
-- --
-- ImplementNfa.hs --
-- --
-- Implementing an NFA. --
-- --
-- Regular expressions are defined in regexp, and the type of --
-- NFAs in NfaTypes. The implementation of Sets used is in the --
-- sets module. The module NfaLib contains functions used by both --
-- this module and the module NfaToDfa which converts to a --
-- deterministic machine. --
-- --
-- (c) Simon Thompson, 1995, 2000 --
-- --
--------------------------------------------------------------------------
module ImplementNfa where
import RegExp
import Sets
import NfaTypes
import NfaLib
--------------------------------------------------------------------------
-- --
-- Trans runs down a string, applying onetrans repeatedly for the --
-- characters in the string, starting from the start state. --
-- The result is therefore the set of all states accessible from --
-- the start state by accepting the items in the string; the --
-- result can be the empty set, of course. --
-- --
--------------------------------------------------------------------------
-- | Run the NFA over the input string: starting from the epsilon-closure of
-- the start state, apply 'onetrans' for each character in turn.  The result
-- is the set of states reachable by consuming the whole string (possibly
-- empty).
trans :: Ord a => Nfa a -> String -> Set a
trans mach = foldl advance initial
  where
    advance states ch = onetrans mach ch states
    initial = closure mach (sing (startstate mach))
--------------------------------------------------------------------------
-- Thanks are due to Sven Martin for pointing out the omission --
-- of the closure in the definition of startset. --
--------------------------------------------------------------------------
--------------------------------------------------------------------------
-- Turn the result of trans into printable form. --
--------------------------------------------------------------------------
-- | Run the NFA on the input and render the resulting state set as a string.
print_trans :: Nfa Int -> String -> [Char]
print_trans mach str = show (flatten (trans mach str))
| SonomaStatist/CS454_NFA | haskellDFA/RegExp/ImplementNfa.hs | unlicense | 1,967 | 0 | 11 | 359 | 177 | 105 | 72 | 11 | 1 |
{-
Created : 2012 Jan 11 (Wed) 20:25:14 by carr.
Last Modified : 2012 Jan 20 (Fri) 18:16:38 by carr.
-}
-- ----------------------------------------------------------------------
-- Chapter 1
-- A number d divides n if there is a natural number a with a*d=n.
-- i.e., d divides n if there is a natural number a with n/d=a
-- i.e., division of n by d leaves no remainder.
-- | @divides d n@ holds when @d@ divides @n@ evenly (zero remainder).
divides :: Integral a => a -> a -> Bool
divides d n = n `rem` d == 0
-- LD(n) is the least natural number greater than 1 that divides n.
-- Note: LD(n) exists for every natural number n > 1,
-- since the natural number d = n is greater than 1 and divides n.
-- Therefore, the set of divisors of n that are greater than 1 is non-empty.
-- Thus, the set will have a least element.
-- | LD(n): the least divisor of n that is greater than 1, by trial division
-- starting at 2.
ld :: Integral a => a -> a
ld n = ldf 2 n
-- Proposition 1.2
-- 1. If n > 1 then LD(n) is a prime number.
-- Proof (by contradiction)
-- Suppose, c = LD(n) is not a prime.
-- Then there are natural numbers a and b with c=a*b, and 1<a<c.
-- But then a divides n, a contradiction
-- Thus, LD(n) must be a prime number.
-- 2. If n > 1 and n is not a prime number, then LD(n)^2 <= n.
-- Proof
-- Suppose n > 1, n is not a prime and that p=LD(n).
-- Then there is a natural number a>1 with n=p*a.
-- Thus, a divides n.
-- Since p is the smallest divisor of n with p > 1,
-- we have that p<=a, and therefore p^2<=p*a=n, i.e., LD(n)^2 <= n.
-- Therefore ldf looks for a prime divisor of n by checking k|n for all k, 2<=k<=sqrt(n)
-- | Least divisor of @n@ that is at least @k@, by trial division.  Once the
-- candidate passes sqrt n without a hit, @n@ has no divisor in that range and
-- is returned unchanged.
ldf :: Integral a => a -> a -> a
ldf k n
  | n `rem` k == 0 = k
  | k * k > n      = n
  | otherwise      = ldf (k + 1) n
-- Exercise 1.4
-- if ldf has k^2>=n instead, does it matter?
-- no, because k divides n so ">=" would never be reached
-- | Naive primality test: n is prime iff its least divisor greater than 1 is
-- n itself.  Partial: calls 'error' on non-positive input.
prime0 :: Integral a => a -> Bool
prime0 n | n < 1 = error "not a positive integer"
         | n == 1 = False
         | otherwise = ld n == n
-- Example 1.8/Exercise 1.9
-- | Minimum of a non-empty list, via the generic 'mnmx' reducer.
min' :: Ord t => [t] -> t
min' l = mnmx (min) l
-- | Maximum of a non-empty list, via the generic 'mnmx' reducer.
max' :: Ord t => [t] -> t
max' l = mnmx (max) l
-- | Reduce a non-empty list with a binary operator; errors on the empty list.
mnmx :: (t -> t -> t) -> [t] -> t
mnmx _ [] = error "empty list"
mnmx f xs = foldr1 f xs
-- Exercise 1.10
-- | Drop the first occurrence (if any) of a value from a list; the rest of
-- the list is left untouched.
removeFst :: Eq a => a -> [a] -> [a]
removeFst m ys =
  case break (== m) ys of
    (before, _ : after) -> before ++ after
    (before, [])        -> before
-- removeFst 4 [1,2,3,4,5,6,4,8]
-- Example 1.11
-- Sort a list of integers in order of increasing size
-- If a list is non-empty,
-- then put its minimum in front
-- of the result of sorting the list that results from removing its minimum.
-- | Selection sort: repeatedly move the minimum to the front.  O(n^2).
srt :: Ord a => [a] -> [a]
srt [] = []
srt xs = m : srt (removeFst m xs) where m = min' xs
-- srt [4,8,1,8,3,5,7,4,0,1,3,4,5,6,8,1]
-- Example 1.12
-- | Arithmetic mean of an integral list at any fractional result type.
-- Partial: calls 'error' on the empty list.
average :: (Integral a1, Fractional a) => [a1] -> a
average [] = error "empty list"
average xs = fromIntegral (sum' xs) / fromIntegral (length xs)
-- average [0,1,2,3,4,5,6,7,8,9]
-- | Sum of a list; the foldr form is equivalent to the naive recursion.
sum' xs = foldr (+) 0 xs
-- | Length of a list, at any numeric result type.
length' xs = foldr (\_ n -> 1 + n) 0 xs
-- | Left fold: thread an accumulator through the list front-to-back.
fld :: (t1 -> t -> t1) -> t1 -> [t] -> t1
fld _ acc []       = acc
fld f acc (x : xs) = fld f (f acc x) xs
-- fld (\acc x -> acc + x) 0 [2,3,4,5]
-- fld (\acc x -> acc + 1) 0 [2,3,4,5]
-- foldl (\acc x -> acc + x) 0 [2,3,4,5]
-- foldl (\acc x -> acc + 1) 0 [2,3,4,5]
-- Exercise 1.13
-- | Count the occurrences of a value in a list.
countChar :: (Num a, Eq a1) => a1 -> [a1] -> a
countChar target = foldl tally 0
  where
    tally n x
      | x == target = n + 1
      | otherwise   = n
-- countChar 'r' "harold carr"
-- Exercise 1.14
-- repeat letters by the number of the position (counting from 1)
-- | Replicate each character by its 1-based position:
-- "bang!" becomes "baannngggg!!!!!".
--
-- Fixed: the original folded with repeated (++) on a growing accumulator,
-- which is quadratic; zipping each character with its position and
-- concatenating once is linear and clearer.
blowup :: String -> String
blowup s = concat (zipWith replicate [1 ..] s)
-- blowup "bang!"
-- Exercise 1.15
-- already done, just use srt from Example 1.11
-- srt ["harold", "carr"]
-- Example 1.16
-- PRO: stops search on first fail (assuming left-to-right eval order of &&)
-- CON: this one is not tail-recursive
-- | Is the first list a prefix of the second?  Stops at the first mismatch
-- (relying on && short-circuiting), so it works on infinite second arguments.
prefix :: Eq a => [a] -> [a] -> Bool
prefix [] _ = True
prefix _ [] = False
prefix (x : xs) (y : ys) = x == y && prefix xs ys
-- map (\(l,r) -> prefix l r) [("foo","fo"), ("foo","foo"), ("foo","foobar"), ("bar","foobar")]
-- PRO: tail recursive
-- CON: will go through the entire shortest list (a or b)
-- | Tail-recursive prefix test.  Trades short-circuiting for tail recursion:
-- even after a mismatch it keeps walking until one list is exhausted.
prefix' a b = prefix'' True a b
  where prefix'' acc [] ys = acc
        prefix'' acc (x:xs) [] = False
        prefix'' acc (x:xs) (y:ys) = prefix'' ((x==y) && acc) xs ys
-- map (\(l,r) -> prefix' l r) [("foo","fo"), ("foo","foo"), ("foo","foobar"), ("bar","foobar")]
-- Exercise 1.17
-- | Does the first list occur as a contiguous sublist of the second?
--
-- Fixed: the original returned False for @substring [] []@ even though the
-- empty list is a prefix (hence a substring) of every list, and it used the
-- @if p then True else q@ anti-pattern instead of @p || q@.
substring :: Eq a => [a] -> [a] -> Bool
substring needle haystack = startsWith needle haystack || rest haystack
  where
    -- Does the first argument occur at the head of the second?
    startsWith [] _ = True
    startsWith _ [] = False
    startsWith (x : xs) (y : ys) = x == y && startsWith xs ys
    -- Try again one position further along, if possible.
    rest [] = False
    rest (_ : ys) = substring needle ys
-- substring "aro" "harold"
-- substring "ld" "harold"
-- substring "el" "harold"
-- 1.7 Prime Factorization
{-
n0=84
n0/=1 p1=2 n1=84/2=42
n1/=1 p2=2 n2=42/2=21
n2/=1 p3=3 n3=21/3= 7
n3/=1 p4=7 n4= 7/7= 1
n4 =1
-}
-- | Prime factorization, in non-decreasing order, by repeatedly dividing out
-- the least divisor.  Partial: calls 'error' on non-positive input.
factors n | n < 1 = error "argument not positive"
          | n == 1 = []
          | otherwise = p : factors (div n p) where p = ld n
-- factors 84
-- factors 557940830126698960967415390
-- 1.9 map/filter
-- | Apply a function to every element of a list (hand-rolled 'map').
map' :: (t -> a) -> [t] -> [a]
map' f = foldr (\x acc -> f x : acc) []
-- Exercise 1.20-21
-- sum' $ map' length [[1],[1,2],[1,2,3]]
-- | Keep the elements satisfying the predicate (hand-rolled 'filter').
filter' :: (a -> Bool) -> [a] -> [a]
filter' p = foldr keep []
  where
    keep x acc
      | p x       = x : acc
      | otherwise = acc
-- filter (>3) [1..10]
-- Example 1.22
primes0 = filter prime0 [2..]
-- Example 1.23
-- more efficient Least Divisor
-- Previous ldf looks for a prime divisor of n by checking k|n for all k, 2<=k<=sqrt(n)
-- But better to only check p|n for primes p with 2<=p<=sqrt(n)
-- since other numbers are "composites" of primes already checked
ldp n = ldpf primes1 n
-- | Least divisor of @n@ drawn from a (conceptually infinite) candidate
-- list; returns @n@ itself once the candidates pass sqrt n.
ldpf (p : ps) n
  | n `rem` p == 0 = p
  | p * p > n      = n
  | otherwise      = ldpf ps n
-- To define primes1 above we need a test for primality.
-- but the test is defined in terms of the function LD,
-- which refers to primes1 (i.e. a cycle).
-- Break cycle by avoiding primality test for 2.
-- Make it given that 2 is prime.
-- Then we can use the primality of 2 in LD to check that 3 is prime, etc.
-- | The primes.  2 is taken as given to break the ldp/primes1 cycle; the
-- remaining candidates are filtered with 'prime'.
primes1 = 2 : filter prime [3..]
-- | Primality via least prime divisor.  Partial: 'error' on n < 1.
prime n | n < 1 = error "not a positive integer"
        | n == 1 = False
        | otherwise = ldp n == n
-- Exercise 1.24 - point free style
ldp'' = ldpf primes1
-- ----------------------------------------------------------------------
-- Chapter 2
-- Implication (if/then)
{-
Truth table of => is surprising.
To motivate:
for every natural number n, 5<n => 3<n.
Therefore, implication must hold (be true) for n = 2, 4 and 6.
- both antecedent and consequent are false (n = 2) (f f => t)
- antecedent false, consequent true (n = 4) (f t => t)
- both antecedent and consequent true (n = 6) (t t => t)
False only
- when antecedent true and consequent false (t f => f)
(which never happens above).
But it means anything with false antecedent is trivially true.
- introduce infix ==>
- number 1 declaration indicates binding power (0 lowest, 9 highest)
- declaration of an infix operator with it binding power is called a fixity declaration
-}
infix 1 ==>
-- | Material implication: False exactly when the antecedent holds and the
-- consequent fails.
(==>) :: Bool -> Bool -> Bool
p ==> q = not p || q
-- Necessary and Sufficient Conditions
{-
P is called a sufficient condition for Q and
Q a necessary condition for P
if the implication P => Q holds
expressed various ways:
- if P,then Q
- Q if P
- P only if Q
- Q whenever P
- P is sufficient for Q
- Q is necessary for P
-}
-- Equivalence (iff)
infix 1 <=>
-- | Logical equivalence: the truth values agree.
(<=>) :: Bool -> Bool -> Bool
x <=> y = x == y
-- Example 2.3 proving IFF
{-
To prove something of the form P iff Q
- split into two parts
- "only if" part: P==>Q
- "if" part: Q==>P
-}
-- Exercise 2.4 - exclusive OR
infixr 2 <+>
-- | Exclusive or: exactly one operand is True.
(<+>) :: Bool -> Bool -> Bool
x <+> y = x /= y
-- exclusive OR is NOT equivalent to "not (P<=>Q)"
-- Tabulates (p <=> q, p <+> q, whether the two coincide) over all four
-- Boolean pairs (Exercise 2.4).
e24 = map (\(p,q) -> let x = (p <=> q)
                         y = (p <+> q)
                     in (x,y,x==y))
          [(False,False), (False,True), (True,False), (True,True)]
-- 2.2 Logical Validity ...
-- logically valid: true for any values of terms
-- | A one-place Boolean formula is valid iff it holds for both truth values.
valid1 :: (Bool -> Bool) -> Bool
valid1 f = (f True) && (f False)
-- | The law of the excluded middle: p or not p.
excluded_middle :: Bool -> Bool
excluded_middle p = p || not p
-- valid1 excluded_middle
-- | A two-place Boolean formula is valid iff it holds for all four
-- combinations of truth values (checked in the same TT, TF, FT, FF order
-- as the explicit conjunction).
valid2 :: (Bool -> Bool -> Bool) -> Bool
valid2 f = and [ f p q | p <- [True,False],
                         q <- [True,False] ]
-- using list comprehensions
-- | Validity of a three-place Boolean formula: true under all eight
-- assignments.
valid3 :: (Bool -> Bool -> Bool -> Bool) -> Bool
valid3 f = and [ f p q r | p <- [True,False],
                           q <- [True,False],
                           r <- [True,False]]
-- | Validity of a four-place Boolean formula: true under all sixteen
-- assignments.
valid4 :: (Bool -> Bool -> Bool -> Bool -> Bool) -> Bool
valid4 f = and [ f p q r s | p <- [True,False],
                             q <- [True,False],
                             r <- [True,False],
                             s <- [True,False]]
-- Logically Equivalent
-- Two formulas are logically equivalent if, for all truth values of
-- P, Q, . . . , the truth values obtained for them are the same.
-- | Two one-place Boolean formulas are logically equivalent when they agree
-- on every truth value.
logEquiv1 :: (Bool -> Bool) -> (Bool -> Bool) -> Bool
logEquiv1 f1 f2 = and [ f1 p == f2 p | p <- [True,False] ]
-- | Logical equivalence of two-place formulas: agreement on all four
-- assignments.
logEquiv2 :: (Bool -> Bool -> Bool) -> (Bool -> Bool -> Bool) -> Bool
logEquiv2 f1 f2 = and [(f1 p q) <=> (f2 p q) | p <- [True,False],
                                               q <- [True,False]]
-- | Logical equivalence of three-place formulas: agreement on all eight
-- assignments.
logEquiv3 :: (Bool -> Bool -> Bool -> Bool) -> (Bool -> Bool -> Bool -> Bool) -> Bool
logEquiv3 f1 f2 = and [(f1 p q r) <=> (f2 p q r) | p <- [True,False],
                                                   q <- [True,False],
                                                   r <- [True,False]]
-- TODO p 44
-- Exercise 2.9
| haroldcarr/learn-haskell-coq-ml-etc | math-and-functional-programming/2004-Haskell_Road_to_Logic_Maths_and_Programming/The_Haskell_Road_to_Logic_Maths_and_Programming.hs | unlicense | 9,893 | 4 | 15 | 2,775 | 2,494 | 1,330 | 1,164 | 112 | 3 |
{-# LANGUAGE UndecidableInstances #-}
module Type.Error where
import GHC.TypeLits (ErrorMessage(..), TypeError)
-- === Assertions === --
-- | Type-level assertion: resolves silently for 'True and raises the given
-- custom 'TypeError' for 'False.
class Assert (ok :: Bool) (err :: ErrorMessage)
instance Assert 'True err
-- Delegating to TypeError here is what requires UndecidableInstances.
instance TypeError err => Assert 'False err
-- === Formatters === --
-- | Terminate an error-message fragment with a period.
type Sentence a = a :<>: 'Text "."
-- | Wrap a fragment in backticks.
type Ticked a = 'Text "`" :<>: a :<>: 'Text "`"
-- | Join two fragments with a single space.
type a :</>: b = a :<>: 'Text " " :<>: b
| wdanilo/typelevel | src/Type/Error.hs | apache-2.0 | 452 | 0 | 8 | 119 | 139 | 77 | 62 | -1 | -1 |
module Subsets.A271410 (a271410) where
import Helpers.Subsets (oneIndexed)
-- | A271410(n): fold the list produced by 'oneIndexed' with lcm (identity 1).
-- The semantics of 'oneIndexed' live in Helpers.Subsets.
a271410 :: Integer -> Integer
a271410 = foldr lcm 1 . oneIndexed
| peterokagey/haskellOEIS | src/Subsets/A271410.hs | apache-2.0 | 141 | 0 | 6 | 20 | 44 | 25 | 19 | 4 | 1 |
module Extension where
import Functor
import Base
-- The functor for a language extension
-- | The signature functor of the extension: a single binary Plus node.
data Lang2X x = Plus x x
-- | The extended language's functor: the base language's functor summed with
-- the extension.
type Lang2F = SumF LangF Lang2X
instance Functor Lang2X
 where
  fmap f (Plus x x') = Plus (f x) (f x')
-- Fixed point of the language
type Lang2 = Fix Lang2F
-- The fold algebra for an interpreter function
evalLang2X :: Lang2X Int -> Int
evalLang2X (Plus x x') = x + x'
-- The interpreter function: fold the combined algebra (base cases handled by
-- 'evalLangF', Plus by 'evalLang2X') over the fixed point.
evalLang2 :: Lang2 -> Int
evalLang2 = fold (bimap evalLangF evalLang2X)
| egaburov/funstuff | Haskell/tytag/xproblem_src/samples/expressions/Haskell/FunctorialStyle/Extension.hs | apache-2.0 | 505 | 0 | 8 | 112 | 146 | 79 | 67 | 12 | 1 |
{-|
Module : Pulsar.Decode
Description : Decode data into DCPU16 assembly.
Copyright : (c) Jesse Haber-Kucharsky, 2014
License : Apache-2.0
-}
{-# LANGUAGE OverloadedStrings #-}
module Pulsar.Decode
(file
,fileRaw
,program
,DecoderError (..))
where
import Pulsar.Ast hiding (ByteString)
import Pulsar.Output.Friendly
import Pulsar.Spec (SystemSpec)
import qualified Pulsar.Spec as Spec
import Pulsar.SymbolTable (SymbolTable)
import qualified Pulsar.SymbolTable as SymbolTable
import Control.Monad (forM)
import Data.Binary.Get
import Data.Bits
import qualified Data.ByteString as S
import qualified Data.ByteString.Char8 as S.Char8
import Data.ByteString.Lazy (ByteString)
import qualified Data.ByteString.Lazy as LBS
import Data.Foldable (toList)
import Data.List (groupBy, intersect, nub, sort)
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Maybe (fromMaybe, mapMaybe)
import Data.Monoid ((<>))
import qualified Data.Text as Text
import Data.Vector.Unboxed (Vector)
import qualified Data.Vector.Unboxed as Vec
-- | Error raised when a file cannot be decoded into an AST.
data DecoderError = DecoderError FilePath deriving (Eq, Show)
instance Friendly DecoderError where
  friendly (DecoderError path) =
    "Unable to decode \"" <> Text.pack path <> "\"."
-- | Decode a whole file on disk.
file :: SystemSpec -> FilePath -> IO (Maybe (Ast Word16))
file spec path = LBS.readFile path >>= program spec
-- | Read a file as its raw 16-bit words, without interpreting them.
fileRaw :: FilePath -> IO (Maybe (Vector Word16))
fileRaw path =
  do bs <- LBS.readFile path
     case runGetOrFail (unfoldGet getNext) bs of
       Left _ -> return Nothing
       Right (_, _, result) -> return . Just . Vec.fromList $ result
-- | Run the supplied decoder repeatedly, collecting each result, until it
-- signals exhaustion with 'Nothing'.
unfoldGet :: Get (Maybe a) -> Get [a]
unfoldGet g =
  do step <- g
     case step of
       Nothing -> return []
       Just v -> fmap (v :) (unfoldGet g)
-- | Decode a full program image: parse the trailing symbol table header (if
-- present), decode the instruction stream, then insert label definitions and
-- rewrite jump targets to use the label names.
program :: SystemSpec
        -> ByteString
        -> IO (Maybe (Ast Word16))
program spec bs =
  -- Start out by parsing the symbol table.
  let (bs', headerSizeWords, symbols, symbolsStartLoc) =
        case runGetOrFail (symbolTable spec) bs of
          Left _ -> failCase
          Right (remainingBs, numBytes, symTabResult) ->
            case symTabResult of
              Nothing -> failCase
              Just (symTab, symTabLoc) -> (remainingBs, fromIntegral numBytes `div` 2, symTab, symTabLoc)
        where
          -- No symbol table: decode everything, with an empty table.
          failCase = (bs, 0, SymbolTable.empty, 0)
      programBytes = LBS.take
                     (fromIntegral $ locToByteOffsetFromHeader symbolsStartLoc)
                     bs'
  in
   do result <- bareProgram spec programBytes
      case result of
        Nothing -> return Nothing
        Just ast ->
          -- Correct the offsets in the AST to account for the size of the
          -- symbol table header.
          let ast' = map (fmap (+ headerSizeWords)) ast
              statementLocs = concatMap toList ast'
              -- The only valid locations for a label are those where a
              -- statement begins.
              validLabelLocs = labelLocs ast' `intersect` statementLocs
              -- The default labels, without any symbol-table knowledge.
              rawLabelLocsTab = Map.fromList $ zip validLabelLocs
                                                   labelNames
              -- Symbol-table names override the generated ones.
              labelLocsTab = insertMany (map swap . SymbolTable.toList $ symbols)
                                        rawLabelLocsTab
          in return $ Just
                    . optimize
                    . replaceJumps labelLocsTab
                    . insertLabels labelLocsTab $ ast'
-- | Flip the components of a pair.
swap :: (a, b) -> (b, a)
swap (x, y) = (y, x)
-- | Insert every key/value pair into the map; on duplicate keys within the
-- pair list, the leftmost pair wins (the foldr inserts it last).
insertMany :: Ord k => [(k, a)] -> Map k a -> Map k a
insertMany pairs initial = foldr (uncurry Map.insert) initial pairs
-- | Condense statements that can be combined.
optimize :: Ast Word16 -> Ast Word16
optimize = concatMap combine . groupBy isMatchingDat
  where
    -- | Multiple .dat directives of the same value can be replaced with a .fill.
    -- Only runs of three or more identical words are condensed.
    combine xs@(Statement loc (Dat [expr]) : _)
      | length xs >= 3 =
        [Statement loc (Fill (Expression loc (Word (fromIntegral $ length xs))) expr)]
    combine xs = xs
    isMatchingDat (Statement _ (Dat [Expression _ (Word x)]))
                  (Statement _ (Dat [Expression _ (Word y)])) = x == y
    isMatchingDat _ _ = False
-- | An infinite supply of generated label names: label1, label2, ...
labelNames :: [S.ByteString]
labelNames = [S.Char8.pack $ "label" ++ show index | index <- [1 :: Integer ..]]
-- | Insert label definitions based on where labels are referenced in the
-- decoded program by key instructions.  A statement whose location appears
-- in the table gets a Label statement placed immediately before it.
insertLabels :: Map Word16 S.ByteString
             -> Ast Word16
             -> Ast Word16
insertLabels labelLocsTab = concatMap insert
  where
    insert :: Statement Word16 -> Ast Word16
    insert s@(Statement loc _) =
      case Map.lookup loc labelLocsTab of
        Nothing -> [s]
        Just labelName -> [Statement loc (Label labelName), s]
-- | Replace a jump to an address by a jump to a named label.
-- Only @SET PC, <literal>@ and @JSR <literal>@ forms are rewritten; every
-- other statement passes through unchanged.
replaceJumps :: Map Word16 S.ByteString -> Ast Word16 -> Ast Word16
replaceJumps tab = map replace
  where
    replace (Statement pos s) = Statement pos $
      case strip s of
        Ins (Op () "SET")
            (Address () Pc)
            (Address () (Dir (Expression () (Word loc)))) ->
          tag pos $ Ins (Op () "SET")
                        (Address () Pc)
                        (Address () (Dir (find loc)))
        SpecialIns (SpecialOp () "JSR")
                   (Address () (Dir (Expression () (Word loc)))) ->
          tag pos $ SpecialIns (SpecialOp () "JSR")
                               (Address () (Dir (find loc)))
        _ -> s
    -- Fall back to the literal address when no label is known for it.
    find :: Word16 -> Expression ()
    find loc = Expression () $
      case Map.lookup loc tab of
        Just name -> Ident name
        Nothing -> Word loc
-- | Offsets into the data image where labels could be located: the literal
-- targets of @SET PC@ and @JSR@ instructions, de-duplicated and sorted.
labelLocs :: Ast Word16 -> [Word16]
labelLocs = sort . nub . mapMaybe (get . strip)
  where
    get (Statement _ (Ins (Op _ "SET")
                          (Address () Pc)
                          (Address () (Dir (Expression () (Word loc)))))) = Just loc
    get (Statement _ (SpecialIns (SpecialOp _ "JSR")
                                 (Address () (Dir (Expression () (Word loc)))))) = Just loc
    get _ = Nothing
-- | The encoded program, without any symbol table.
-- Decoding stops silently at the first malformed statement; an empty result
-- is reported as Nothing.
bareProgram :: SystemSpec -> ByteString -> IO (Maybe (Ast Word16))
bareProgram spec initial =
  do result <- loop 0 initial
     case result of
       [] -> return Nothing
       ast -> return . Just $ ast
  where
    -- Decode statements one by one, tagging each with its word offset.
    loop :: Word16
         -> ByteString
         -> IO (Ast Word16)
    loop loc bs =
      if LBS.null bs
      then return []
      else
        case runGetOrFail (getStatement spec) bs of
          Left _ -> return []
          Right (bs', bytes, st) ->
            let numWordsRead = fromIntegral $ bytes `div` 2
                loc' = loc + numWordsRead
            in
             do next <- loop loc' bs'
                return $ tag loc st : next
-- | If the SymbolTable is successfully decoded, then the second element of the
-- tuple is the location in the data image (in DCPU16 words) where the table
-- resides.
symbolTable :: SystemSpec -> Get (Maybe (SymbolTable, Word16))
symbolTable spec =
  do (Statement () first) <- getStatement spec
     case first of
       -- The header begins with "SET PC, 3", followed by the signature word
       -- and the table's location.
       Ins (Op () "SET")
           (Address () Pc)
           (Address () (Dir (Expression () (Word 3)))) ->
         do hasSig <- getSignature
            if not hasSig
            then return Nothing
            else
              do tableLoc <- getWord
                 lookAhead $
                   do skip (locToByteOffsetFromHeader tableLoc)
                      numEntries <- getWord
                      entries <- forM [1 .. numEntries] (const getEntry)
                      return $ Just (SymbolTable.fromList entries, tableLoc)
       _ -> return Nothing
  where
    -- Is the next word the symbol-table signature?
    getSignature :: Get Bool
    getSignature =
      do isEmpty_ <- isEmpty
         if isEmpty_
         then return False
         else
           do w <- getWord
              return (w == SymbolTable.signature)
    -- An entry is: name length, name bytes (padded to an even byte count),
    -- then the symbol's location word.
    getEntry :: Get (S.ByteString, Word16)
    getEntry =
      do len <- fmap fromIntegral getWord
         name <- getByteString len
         _ <- getByteString (if odd len then 1 else 0)
         loc <- getWord
         return (name, loc)
-- | This function converts the index of the 16 bit word indicating the location
-- of the symbol table in the image to a byte offset from the end of the symbol
-- table header.
locToByteOffsetFromHeader :: Word16 -> Int
locToByteOffsetFromHeader n = fromIntegral (2 * widened - 6)
  where
    -- Widen to Word32 before doubling so 2 * n cannot overflow Word16.
    widened = fromIntegral n :: Word32
-- | Decode a single statement; any word that is not a recognizable basic or
-- special instruction falls back to a .dat of the raw word.
getStatement :: SystemSpec -> Get (Statement ())
getStatement spec =
  do w <- getWord
     insM <- choice [getIns spec w
                    ,getSpecialIns spec w]
     return $ fromMaybe (Statement () (Dat [Expression () (Word w)])) insM
-- | Decode a basic (two-operand) instruction from its packed word.
-- Bit layout per the masks: low 5 bits op-code, bits 5-9 operand b,
-- bits 10-15 operand a.
getIns :: SystemSpec -> Word16 -> Get (Maybe (Statement ()))
getIns spec w =
  let opM = opCode spec $ w .&. 0x1f
  in
   do addrAM <- getAddress spec ContextA $ (w .&. 0xfc00) `shiftR` 10
      addrBM <- getAddress spec ContextB $ (w .&. 0x3e0) `shiftR` 5
      return $
        do op <- opM
           addrA <- addrAM
           addrB <- addrBM
           return $ Statement () (Ins op addrB addrA)
-- | Look up a basic op-code in the system spec.
opCode :: SystemSpec -> Word16 -> Maybe (Op ())
opCode = Spec.decode . Spec.opCodes
-- | Decode a special (one-operand) instruction from its packed word.
-- Bits 5-9 hold the special op-code, bits 10-15 the operand; the decoder
-- requires bit 4 (mask 0x10) of the low field to be clear.
getSpecialIns :: SystemSpec
              -> Word16
              -> Get (Maybe (Statement ()))
getSpecialIns spec w =
  let prefixM = zeroPrefix $ w .&. 0x10
      opM = specialOpCode spec $ (w .&. 0x3e0) `shiftR` 5
  in
   do addrM <- getAddress spec ContextA $ (w .&. 0xfc00) `shiftR` 10
      return $
        do _ <- prefixM
           op <- opM
           addr <- addrM
           return $ Statement () (SpecialIns op addr)
  where
    -- Fixed: the original clauses were 'return $ Just ()' and
    -- 'return Nothing', which in the Maybe monad evaluate to
    -- 'Just (Just ())' and 'Just Nothing' — both succeed when bound with
    -- '_ <- prefixM', so a word with bit 4 set still decoded as a special
    -- instruction.  Returning plain 'Just ()' / 'Nothing' makes a non-zero
    -- prefix abort the decode as intended.
    zeroPrefix :: Word16 -> Maybe ()
    zeroPrefix 0 = Just ()
    zeroPrefix _ = Nothing
-- | Look up a special op-code in the system spec.
specialOpCode :: SystemSpec -> Word16 -> Maybe (SpecialOp ())
specialOpCode = Spec.decode . Spec.specialOpCodes
-- | Decode a 6-bit operand field into an addressing mode, trying each
-- decoder in turn.  The context distinguishes the two meanings of 0x18
-- (Pop in operand-a position, Push in operand-b position).
getAddress :: SystemSpec
           -> AddressContext
           -> Word16
           -> Get (Maybe (Address ()))
getAddress spec context w =
  choice [getRegDir spec w
         ,getRegInd spec w
         ,getRegIndOff spec w
         ,getFastDir w
         ,getDir w
         ,getInd w
         ,getPeek w
         ,getPick w
         ,getPushOrPop context w
         ,getSp w
         ,getPc w
         ,getEx w]
-- | 0x18 decodes to Pop or Push depending on which operand slot it sits in.
getPushOrPop :: AddressContext -> Word16 -> Get (Maybe (Address ()))
getPushOrPop context 0x18 =
  return . fmap (Address ()) $ case context of
    ContextA -> Just Pop
    ContextB -> Just Push
getPushOrPop _ _ = return Nothing
-- | Inline literals: 0x20 decodes to the literal 0xffff (-1); 0x21..0x3f
-- decode to the literals 0..0x1e.
getFastDir :: Word16 -> Get (Maybe (Address ()))
getFastDir 0x20 = return . Just . Address () . Dir . Expression () . Word $ 0xffff
getFastDir w
  | w >= 0x21 && w <= 0x3f =
    return . Just . Address () . Dir . Expression () . Word $ w - 0x21
  | otherwise = return Nothing
-- | 0x1f: direct literal taken from the next word.
getDir :: Word16 -> Get (Maybe (Address ()))
getDir 0x1f = fmap (Address () . Dir) <$> getNextExpr
getDir _ = return Nothing
-- | 0x19: Peek.
getPeek :: Word16 -> Get (Maybe (Address ()))
getPeek 0x19 = return . Just $ Address () Peek
getPeek _ = return Nothing
-- | 0x1a: Pick, with its offset read from the next word.
getPick :: Word16 -> Get (Maybe (Address ()))
getPick 0x1a = fmap (Address () . Pick) <$> getNextExpr
getPick _ = return Nothing
-- | 0x1b: the SP register.
getSp :: Word16 -> Get (Maybe (Address ()))
getSp 0x1b = return . Just $ Address () Sp
getSp _ = return Nothing
-- | 0x1c: the PC register.
getPc :: Word16 -> Get (Maybe (Address ()))
getPc 0x1c = return . Just $ Address () Pc
getPc _ = return Nothing
-- | 0x1d: the EX register.
getEx :: Word16 -> Get (Maybe (Address ()))
getEx 0x1d = return . Just $ Address () Ex
getEx _ = return Nothing
-- | 0x1e: indirect through the address in the next word.
getInd :: Word16 -> Get (Maybe (Address ()))
getInd 0x1e = fmap (Address () . Ind) <$> getNextExpr
getInd _ = return Nothing
-- | Register direct: the operand value itself is looked up as a register
-- index.
getRegDir :: SystemSpec -> Word16 -> Get (Maybe (Address ()))
getRegDir spec = return . fmap (Address () . RegDir) . register spec
-- | Register indirect: the operand value minus 8 is the register index.
getRegInd :: SystemSpec -> Word16 -> Get (Maybe (Address ()))
getRegInd spec =
  return . fmap (Address () . RegInd) . register spec . subtract 8
-- | Register indirect with offset: value minus 0x10 is the register index,
-- and the offset is read from the next word.
getRegIndOff :: SystemSpec -> Word16 -> Get (Maybe (Address ()))
getRegIndOff spec w =
  case register spec (w - 0x10) of
    Nothing -> return Nothing
    Just r -> fmap (Address () . RegIndOff r) <$> getNextExpr
-- | Look up a register index in the system spec.
register :: SystemSpec -> Word16 -> Maybe (Register ())
register = Spec.decode . Spec.registers
-- | Decode one of many possibilities, with backtracking.
-- Each alternative runs under 'lookAheadM', which rewinds the input when the
-- alternative yields Nothing, so failed attempts consume nothing.
choice :: [Get (Maybe a)] -> Get (Maybe a)
choice [] = return Nothing
choice (x : xs) =
  do result <- lookAheadM x
     case result of
       Just _ -> return result
       Nothing -> choice xs
-- | A literal expression read from the next word, when one remains.
getNextExpr :: Get (Maybe (Expression ()))
getNextExpr = fmap (Expression () . Word) <$> getNext
-- | The next word of input, or Nothing at end of input.
getNext :: Get (Maybe Word16)
getNext =
  do isEmpty_ <- isEmpty
     if isEmpty_
     then return Nothing
     else Just <$> getWord
-- | Words in the image are stored big-endian.
getWord :: Get Word16
getWord = getWord16be
| hakuch/Pulsar | src/Pulsar/Decode.hs | apache-2.0 | 13,389 | 0 | 22 | 4,238 | 4,435 | 2,229 | 2,206 | 297 | 5 |
----
-- Copyright (c) 2013 Andrea Bernardini.
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.
----
module Handler.Session where
import Import
import Data.Aeson
import qualified System.IO.Streams as S
import Network.Http.Client
import GHC.Generics
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import Data.Text.Encoding
import Data.Text (pack, unpack)
import Data.Time
-- | Session payload returned by the backend: expiry time plus auth token.
data JSONSession = JSONSession {
      expires :: UTCTime
    , token :: Text
    } deriving (Show, Generic)
-- | Credentials posted to the backend to open a session.
data JSONAuth = JSONAuth {
      username :: Text
    , pwd :: B.ByteString
    } deriving (Show, Generic)
-- | A saved link belonging to a user.
data Link = Link
    { linkTitle :: Text
    , linkUrl :: Text
    , linkSite :: Text
    , linkUser :: Text
    , linkSaved :: UTCTime
    } deriving (Show, Generic)
-- | Wrapper matching the backend's {"links": [...]} response shape.
data LinkArray = LinkArray
    { links :: [Link]
    } deriving (Show, Generic)
-- | A feed entry read by a user.
data Feed = Feed
    { feedTitle :: Text
    , feedUrl :: Text
    , feedSite :: Text
    , feedRead :: UTCTime
    , feedUser :: Text
    } deriving (Show, Generic)
-- | Wrapper matching the backend's {"feeds": [...]} response shape.
data FeedArray = FeedArray
    { feeds :: [Feed]
    } deriving (Show, Generic)
-- JSON (de)serialization via the GHC.Generics defaults.
instance FromJSON Link
instance FromJSON LinkArray
instance FromJSON Feed
instance FromJSON FeedArray
instance FromJSON JSONSession
instance ToJSON JSONAuth
-- | Render the session page: validate the stored session (expiry, token,
-- user); on any missing or expired piece, clear the session and redirect
-- home, otherwise fetch the user's links and feeds from the backend.
-- NOTE(review): the backend host/port (127.0.0.1:3000) is hard-coded here
-- and in the other handlers.
getSessionR :: Handler Html
getSessionR = do user <- lookupSession "user"
                 token <- lookupSession "token"
                 e <- lookupSession "expires"
                 now <- lift getNow
                 case e of
                   Nothing -> do { clearSession; redirect HomeR }
                   Just exp
                     | (textToTime exp) < now -> do { clearSession; redirect HomeR }
                     | otherwise -> do
                       let tk = case token of
                                  Nothing -> ("" :: Text)
                                  Just t -> t
                       case user of
                         Nothing -> do { clearSession; redirect HomeR }
                         Just user -> do links <- lift $ withConnection (openConnection "127.0.0.1" 3000)
                                                                        (getLinkArray tk)
                                         feeds <- lift $ withConnection (openConnection "127.0.0.1" 3000)
                                                                        (getFeedArray tk)
                                         showPage links feeds
-- | Fetch the saved links for the given session token from the backend.
-- Returns [] when the response body is missing or fails to decode.
getLinkArray :: Text -> Connection -> IO [Link]
getLinkArray token c =
    do q <- buildRequest $ do
         http GET $ B.append "/links/" $ encodeUtf8 token
         setContentType "application/json"
         setAccept "application/json"
       sendRequest c q emptyBody
       receiveResponse c (\p i -> do
         stream <- S.read i
         case stream of
           Just bytes -> do dby <- return $ decode $ fromStrict bytes
                            case dby of
                              Nothing -> return []
                              Just (LinkArray links) -> return links
           Nothing -> return [])
-- | Fetch the read feeds for the given session token from the backend.
-- Returns [] when the response body is missing or fails to decode.
-- NOTE(review): duplicates 'getLinkArray' except for the path and result
-- type; the two could share a generic helper.
getFeedArray :: Text -> Connection -> IO [Feed]
getFeedArray token c =
    do q <- buildRequest $ do
         http GET $ B.append "/feeds/" $ encodeUtf8 token
         setContentType "application/json"
         setAccept "application/json"
       sendRequest c q emptyBody
       receiveResponse c (\p i -> do
         stream <- S.read i
         case stream of
           Just bytes -> do dby <- return $ decode $ fromStrict bytes
                            case dby of
                              Nothing -> return []
                              Just (FeedArray feeds) -> return feeds
           Nothing -> return [])
-- | Render the main UI template; the 'links' and 'feeds' bindings are in
-- scope for the Template Haskell splice of the "UI" widget file.
showPage :: [Link] -> [Feed] -> Handler Html
showPage links feeds = defaultLayout $(widgetFile "UI")
-- | Log in: exchange the posted credentials for a session token from the
-- backend and store token, expiry and user name in the Yesod session.
-- Failure to obtain a token redirects back to the home page.
postSessionR :: Handler ()
postSessionR = do user <- runInputPost $ ireq textField "user"
                  pwd <- runInputPost $ ireq textField "pwd"
                  mtk <- lift $ withConnection (openConnection "127.0.0.1" 3000)
                                               (getSessionToken user pwd)
                  case mtk of
                    Just (tk, exp) -> do setSession "token" tk
                                         setSession "expires" (timeToText exp)
                                         setSession "user" user
                                         redirectUltDest SessionR
                    Nothing -> redirect HomeR
-- | POST the credentials as JSON to /session and decode the returned token
-- and expiry; Nothing on a missing or undecodable response.
-- NOTE(review): the response lambda's parameter @p@ shadows the password
-- parameter @p@ of the enclosing function.
getSessionToken :: Text -> Text -> Connection -> IO (Maybe (Text, UTCTime))
getSessionToken u p c =
    do q <- buildRequest $ do
         http POST "/session"
         setContentType "application/json"
         setAccept "application/json"
       jaBS <- return (encode (JSONAuth {username = u, pwd = (encodeUtf8 p)}))
       is <- S.fromLazyByteString jaBS
       sendRequest c q (inputStreamBody is)
       receiveResponse c (\p i -> do
         stream <- S.read i
         case stream of
           Just bytes -> do dby <- return $ decode $ fromStrict bytes
                            case dby of
                              Nothing -> do return Nothing
                              Just (JSONSession exp tk) -> do return $ Just (tk, exp)
           Nothing -> do return Nothing)
-- | Promote a strict ByteString to a lazy one as a single chunk.
fromStrict :: B.ByteString -> BL.ByteString
fromStrict = BL.fromChunks . (: [])
-- | The current wall-clock time (a thin alias for 'getCurrentTime').
getNow :: IO UTCTime
getNow = getCurrentTime
-- | Parse a session-stored timestamp back into a UTCTime via Read.
-- Partial: 'read' errors on malformed input.
textToTime :: Text -> UTCTime
textToTime = read . unpack
-- | Render a UTCTime for storage in the session (inverse of 'textToTime').
timeToText :: UTCTime -> Text
timeToText = pack . show
module Life.Display.ICanvas where
import Graphics.Blank
import Data.Text (pack)
import Numeric
import Control.Concurrent
import Life.Types
-- This still needs work, it does not render correctly due to the new variable-size boards
-- | Draw the board grid: one vertical and one horizontal black line every
-- 10 pixels, sized from the board dimensions.
drawGrid :: Size -> Canvas ()
drawGrid (w,h) = do
  sequence_ [ lineX x height | x <- [10, 20..width] ]
  sequence_ [ lineY y width | y <- [10, 20..height] ]
  where
    width = fromIntegral $ 10 * w + 10
    height = fromIntegral $ 10 * h + 10
    -- Vertical line at x, running the full height.
    lineX x y = do
      beginPath()
      moveTo(x,0)
      lineTo(x,y)
      lineWidth 1
      strokeStyle $ pack "black"
      closePath()
      stroke()
    -- Horizontal line at y, running the full width.
    lineY y x = do
      beginPath()
      moveTo(0,y)
      lineTo(x,y)
      lineWidth 1
      strokeStyle $ pack "black"
      closePath()
      stroke()
-- | Fill the 9x9 interior of the 10px grid cell at board position (x,y).
renderSquare :: Pos -> Canvas ()
renderSquare (x,y) = do
  beginPath()
  fillStyle $ pack $ "red"
  {-Need to fix this, some cells are appearing white
    '#' : (concat [showHex (200 - ((x' + 50) `mod` 200)) "",
                   showHex ((x' + y') `mod` 200) "",
                   showHex ((y' + 50) `mod` 200) ""])
  -}
  rect ((fromIntegral x'), (fromIntegral y'), 9, 9)
  closePath()
  fill()
  where
    -- Board coordinates scaled to pixel coordinates (10px cells).
    x' = 10 * x
    y' = 10 * y
-- | Render every cell in the list.
renderSquares :: [Pos] -> Canvas ()
renderSquares xs = mapM_ renderSquare xs
-- | Blank the 9x9 interiors of the given cells (used for cells that died).
clearDeadCells :: [Pos] -> Canvas ()
clearDeadCells b = sequence_ [ clearRect (10 * fromIntegral x, 10 * fromIntegral y, 9, 9) | (x,y) <- b ]
-- | Run the simulation forever: redraw the grid, clear cells that changed
-- (via 'diff'), render the next generation, pause 25ms (50 * 500 us), and
-- recurse on the next board.
lifeLoop :: Life board => DeviceContext -> board -> IO ()
lifeLoop dc b = do
  send dc $ do
    drawGrid $ fst $ config b
    clearDeadCells $ alive $ diff b b'
    renderSquares $ alive b'
  threadDelay (50 * 500)
  lifeLoop dc b'
  where b' = next b
-- | Draw the 30x20 red "start" button with its top-left corner at (x,y).
drawButton :: (Float, Float) -> Canvas ()
drawButton (x,y) = do
  fillStyle $ pack "red"
  fillRect (x, y, 30, 20)
  fillStyle $ pack "white"
  fillText (pack "start", x + 2.5, y + 12.5)
-- | Interactive setup phase: draw the grid and start button, then handle
-- clicks — the button starts 'lifeLoop', a click inside the board renders
-- the clicked cell and applies 'inv' to the board, and anything else just
-- redraws and waits again.
lifeICanvas :: Life board => DeviceContext -> board -> IO ()
lifeICanvas dc b = do
  send dc $ do
    let size = fst $ config b
    drawGrid size
    let bcoords = (5 * (fromIntegral (fst size)) - 15 , 10 * fromIntegral (snd size) + 20 )
    drawButton bcoords
  event <- wait dc
  case ePageXY event of
    -- if no mouse location, ignore, and redraw
    Nothing -> lifeICanvas dc b
    Just (x',y') ->
      let (bx,by) = fst $ config b
          -- Recompute the button rectangle to hit-test the click.
          (butx,buty) = (5 * (fromIntegral bx) - 15 , 10 * (fromIntegral by) + 20 )
      in
        if floor x' >= butx && floor x' <= (butx + 30) && floor y' >= buty && floor y' <= (buty + 20)
          then lifeLoop dc b
          else if (floor x') `div` 10 > bx || (floor y') `div` 10 > by
            then lifeICanvas dc b
            else do
              send dc $ renderSquare ((floor x') `div` 10, (floor y') `div` 10)
              lifeICanvas dc $ inv ((floor x') `div` 10, (floor y') `div` 10) b
{-# Language FlexibleInstances, MultiParamTypeClasses, RankNTypes, StandaloneDeriving,
TypeFamilies, UndecidableInstances #-}
-- | The RepMin example - replicate a binary tree with all leaves replaced by the minimal leaf value.
module RepMin where
import Data.Functor.Identity
import qualified Rank2
import Transformation (Transformation(..))
import Transformation.AG (Inherited(..), Synthesized(..))
import qualified Transformation
import qualified Transformation.AG as AG
import qualified Transformation.Deep as Deep
import qualified Transformation.Full as Full
-- | tree data type
data Tree a (f' :: * -> *) (f :: * -> *) = Fork{left :: f (Tree a f' f'),
right:: f (Tree a f' f')}
| Leaf{leafValue :: f a}
-- | tree root
data Root a f' f = Root{root :: f (Tree a f' f')}
deriving instance (Show (f (Tree a f' f')), Show (f a)) => Show (Tree a f' f)
deriving instance (Show (f (Tree a f' f'))) => Show (Root a f' f)
instance Rank2.Functor (Tree a f') where
f <$> Fork l r = Fork (f l) (f r)
f <$> Leaf x = Leaf (f x)
instance Rank2.Functor (Root a f') where
f <$> Root x = Root (f x)
instance Rank2.Apply (Tree a f') where
Fork fl fr <*> ~(Fork l r) = Fork (Rank2.apply fl l) (Rank2.apply fr r)
Leaf f <*> ~(Leaf x) = Leaf (Rank2.apply f x)
instance Rank2.Applicative (Tree a f') where
pure x = Leaf x
instance Rank2.Apply (Root a f') where
Root f <*> ~(Root x) = Root (Rank2.apply f x)
instance (Transformation t, Transformation.At t a, Full.Functor t (Tree a)) => Deep.Functor t (Tree a) where
t <$> Fork l r = Fork (t Full.<$> l) (t Full.<$> r)
t <$> Leaf x = Leaf (t Transformation.$ x)
instance (Transformation t, Full.Functor t (Tree a)) => Deep.Functor t (Root a) where
t <$> Root x = Root (t Full.<$> x)
-- | The transformation type
data RepMin = RepMin
type Sem = AG.Semantics RepMin
instance Transformation RepMin where
type Domain RepMin = Identity
type Codomain RepMin = Sem
-- | Inherited attributes' type
data InhRepMin = InhRepMin{global :: Int}
deriving Show
-- | Synthesized attributes' type
data SynRepMin = SynRepMin{local :: Int,
tree :: Tree Int Identity Identity}
deriving Show
type instance AG.Atts (Inherited RepMin) (Tree Int f' f) = InhRepMin
type instance AG.Atts (Synthesized RepMin) (Tree Int f' f) = SynRepMin
type instance AG.Atts (Inherited RepMin) (Root Int f' f) = ()
type instance AG.Atts (Synthesized RepMin) (Root Int f' f) = SynRepMin
type instance AG.Atts (Inherited a) Int = ()
type instance AG.Atts (Synthesized a) Int = Int
instance Transformation.At RepMin (Tree Int Sem Sem) where
($) = AG.applyDefault runIdentity
instance Transformation.At RepMin (Root Int Sem Sem) where
($) = AG.applyDefault runIdentity
instance Full.Functor RepMin (Tree Int) where
(<$>) = Full.mapUpDefault
instance Full.Functor RepMin (Root Int) where
(<$>) = Full.mapUpDefault
-- | The semantics of the primitive 'Int' type must be defined manually.
instance Transformation.At RepMin Int where
RepMin $ Identity n = Rank2.Arrow (const $ Synthesized n)
instance AG.Attribution RepMin (Root Int) Sem Identity where
attribution RepMin self (inherited, Root root) = (Synthesized SynRepMin{local= local (syn root),
tree= tree (syn root)},
Root{root= Inherited InhRepMin{global= local (syn root)}})
instance AG.Attribution RepMin (Tree Int) Sem Identity where
attribution _ _ (inherited, Fork left right) = (Synthesized SynRepMin{local= local (syn left)
`min` local (syn right),
tree= tree (syn left) `fork` tree (syn right)},
Fork{left= Inherited InhRepMin{global= global $ inh inherited},
right= Inherited InhRepMin{global= global $ inh inherited}})
attribution _ _ (inherited, Leaf value) = (Synthesized SynRepMin{local= syn value,
tree= Leaf{leafValue= Identity $ global
$ inh inherited}},
Leaf{leafValue= Inherited ()})
-- * Helper functions

-- | Build a 'Fork' from two plain subtrees, wrapping each in 'Identity'.
fork l r = Fork (Identity l) (Identity r)

-- | Build a 'Leaf' from a plain value, wrapping it in 'Identity'.
leaf = Leaf . Identity

-- | The example tree
exampleTree :: Root Int Identity Identity
exampleTree = Root (Identity $ leaf 7 `fork` (leaf 4 `fork` leaf 1) `fork` leaf 3)
-- |
-- >>> Rank2.apply (Full.fmap RepMin $ Identity exampleTree) (Inherited ())
-- Synthesized {syn = SynRepMin {local = 1, tree = Fork {left = Identity (Fork {left = Identity (Leaf {leafValue = Identity 1}), right = Identity (Fork {left = Identity (Leaf {leafValue = Identity 1}), right = Identity (Leaf {leafValue = Identity 1})})}), right = Identity (Leaf {leafValue = Identity 1})}}}
| blamario/grampa | deep-transformations/test/RepMin.hs | bsd-2-clause | 5,198 | 0 | 14 | 1,535 | 1,659 | 878 | 781 | -1 | -1 |
-- 142913828922
import Euler (primeSieve)

-- Upper bound: Project Euler #10 asks for the sum of all primes
-- below two million (expected answer 142913828922).
nn = 2000000

-- | Sum of every prime produced by the sieve up to @n@.
primeSum n = sum (primeSieve n)

-- `print` is the idiomatic form of `putStrLn . show`.
main = print (primeSum nn)
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE UndecidableInstances #-}
{-
- Early parser for TAGs. Fourth preliminary version :-).
-}
module NLP.LTAG.Early4 where
import Control.Applicative ((<$>), (*>))
import Control.Monad (guard, void)
import qualified Control.Monad.RWS.Strict as RWS
import qualified Control.Monad.State.Strict as ST
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.Maybe (MaybeT (..))
import Data.List (intercalate)
import qualified Data.Map.Strict as M
import Data.Maybe ( isJust, isNothing
, listToMaybe, maybeToList)
import qualified Data.Set as S
import qualified Data.PSQueue as Q
import Data.PSQueue (Binding(..))
import qualified Pipes as P
import qualified NLP.LTAG.Tree as G
--------------------------------------------------
-- CUSTOM SHOW
--------------------------------------------------
class Show a => View a where
view :: a -> String
instance View String where
view x = x
instance View Int where
view = show
--------------------------------------------------
-- VIEW + ORD
--------------------------------------------------
class (View a, Ord a) => VOrd a where
instance (View a, Ord a) => VOrd a where
--------------------------------------------------
-- CORE TYPES
--------------------------------------------------
-- | Position in the sentence.
type Pos = Int
----------------------
-- Initial Trees
----------------------
-- Each initial tree is factorized into a collection of flat CF
-- rules. In order to make sure that this collection of rules
-- can be only used to recognize this particular tree, each
-- non-terminal is paired with an additional identifier.
--
-- Within the context of substitution, both the non-terminal and
-- the identifier have to agree. In case of adjunction, only the
-- non-terminals have to be equal.
-- | Additional identifier.
type ID = Int
-- | Symbol: a (non-terminal, maybe identifier) pair.
type Sym n = (n, Maybe ID)
-- | Render a symbol as @(nonterminal, id)@, printing an underscore
-- when the identifier is unspecified.
viewSym :: View n => Sym n -> String
viewSym (x, mi) = "(" ++ view x ++ ", " ++ maybe "_" show mi ++ ")"
-- | Label: a symbol, a terminal or a generalized foot node.
-- Generalized in the sense that it can represent not only a foot
-- note of an auxiliary tree, but also a non-terminal on the path
-- from the root to the real foot note of an auxiliary tree.
data Lab n t
= NonT (Sym n)
| Term t
| Foot (Sym n)
deriving (Show, Eq, Ord)
-- | Render a label: non-terminals as @N(...)@, terminals as @T(...)@
-- and (generalized) foot nodes as @F(...)@.
viewLab :: (View n, View t) => Lab n t -> String
viewLab lab = case lab of
  NonT s -> 'N' : viewSym s
  Term t -> "T(" ++ view t ++ ")"
  Foot s -> 'F' : viewSym s
-- | A rule for initial tree.
data Rule n t = Rule {
-- | The head of the rule
headI :: Sym n
-- | The body of the rule
, body :: [Lab n t]
} deriving (Show, Eq, Ord)
--------------------------
-- Rule generation monad
--------------------------
-- | Identifier generation monad.
type RM n t a = RWS.RWS () [Rule n t] Int a
-- | Pull the next identifier.
nextID :: RM n t ID
nextID = RWS.state $ \i -> (i, i + 1)
-- | Save the rule in the writer component of the monad.
keepRule :: Rule n t -> RM n t ()
keepRule = RWS.tell . (:[])
-- | Evaluate the RM monad.
runRM :: RM n t a -> (a, [Rule n t])
runRM rm = RWS.evalRWS rm () 0
-----------------------------------------
-- Tree Factorization
-----------------------------------------
-- | Take an initial tree and factorize it into a list of rules.
treeRules
:: Bool -- ^ Is it a top level tree? `True' for
-- an entire initial tree, `False' otherwise.
-> G.Tree n t -- ^ The tree itself
-> RM n t (Lab n t)
treeRules isTop G.INode{..} = case subTrees of
[] -> do
let x = (labelI, Nothing)
-- keepRule $ Rule x []
return $ NonT x
_ -> do
x <- if isTop
then return (labelI, Nothing)
else (labelI,) . Just <$> nextID
xs <- mapM (treeRules False) subTrees
keepRule $ Rule x xs
return $ NonT x
treeRules _ G.FNode{..} = return $ Term labelF
-----------------------------------------
-- Auxiliary Tree Factorization
-----------------------------------------
-- | Convert an auxiliary tree to a lower-level auxiliary
-- representation and a list of corresponding rules which
-- represent the "substitution" trees on the left and on the
-- right of the spine.
auxRules :: Bool -> G.AuxTree n t -> RM n t (Lab n t)
-- auxRules :: Bool -> G.AuxTree n t -> RM n t (Maybe (Sym n))
auxRules b G.AuxTree{..} =
doit b auxTree auxFoot
where
-- doit _ G.INode{..} [] = return Nothing
doit _ G.INode{..} [] = return $ Foot (labelI, Nothing)
doit isTop G.INode{..} (k:ks) = do
let (ls, bt, rs) = split k subTrees
x <- if isTop
then return (labelI, Nothing)
else (labelI,) . Just <$> nextID
ls' <- mapM (treeRules False) ls
bt' <- doit False bt ks
rs' <- mapM (treeRules False) rs
-- keepAux $ Aux x ls' bt' rs'
-- return $ Just x
keepRule $ Rule x $ ls' ++ (bt' : rs')
return $ Foot x
doit _ _ _ = error "auxRules: incorrect path"
split =
doit []
where
doit acc 0 (x:xs) = (reverse acc, x, xs)
doit acc k (x:xs) = doit (x:acc) (k-1) xs
doit acc _ [] = error "auxRules.split: index to high"
--------------------------------------------------
-- CHART STATE ...
--
-- ... and chart extending operations
--------------------------------------------------
-- | Parsing state: processed initial rule elements and the elements
-- yet to process.
data State n t = State {
-- | The head of the rule represented by the state.
root :: Sym n
-- | The list of processed elements of the rule, stored in an
-- inverse order.
, left :: [Lab n t]
-- | The list of elements yet to process.
, right :: [Lab n t]
-- | The starting position.
, beg :: Pos
-- | The ending position (or rather the position of the dot).
, end :: Pos
-- | Coordinates of the gap (if applies)
, gap :: Maybe (Pos, Pos)
} deriving (Show, Eq, Ord)
-- | Has the whole right-hand side of the state been consumed?
completed :: State n t -> Bool
completed st = null (right st)

-- | Does the state represent a regular rule, i.e. one without a gap?
regular :: State n t -> Bool
regular st = isNothing (gap st)

-- | Does the state represent an auxiliary rule, i.e. one with a gap?
auxiliary :: State n t -> Bool
auxiliary st = isJust (gap st)

-- | Is the state top-level?  All top-level states (regular or
-- auxiliary) have an underspecified ID in the root symbol.
topLevel :: State n t -> Bool
topLevel st = isNothing (snd (root st))

-- | Is the state subsidiary (i.e. not top) level, carrying an
-- identifier in its root symbol?
subLevel :: State n t -> Bool
subLevel st = isJust (snd (root st))
-- | Deconstruct the right part of the state (i.e. labels yet to
-- process) within the MaybeT monad.
expects :: Monad m => State n t -> MaybeT m (Lab n t, [Lab n t])
expects = maybeT . expects'
-- | Deconstruct the right part of the state (i.e. labels yet to
-- process) within the MaybeT monad.
expects' :: State n t -> Maybe (Lab n t, [Lab n t])
expects' = decoList . right
-- | Print the state.
printState :: (View n, View t) => State n t -> IO ()
printState State{..} = do
putStr $ viewSym root
putStr " -> "
putStr $ intercalate " " $
map viewLab (reverse left) ++ ["*"] ++ map viewLab right
putStr " <"
putStr $ show beg
putStr ", "
case gap of
Nothing -> return ()
Just (p, q) -> do
putStr $ show p
putStr ", "
putStr $ show q
putStr ", "
putStr $ show end
putStrLn ">"
-- | Priority type.
type Prio = Int

-- | Priority of a state.  Crucial for the algorithm -- states must
-- leave the queue ordered by their end position.
prio :: State n t -> Prio
prio = end
--------------------------------------------------
-- Earley monad
--------------------------------------------------
-- | The state of the earley monad.
data EarSt n t = EarSt {
-- | Rules which expect a specific label and which end on a
-- specific position.
doneExpEnd :: M.Map (Lab n t, Pos) (S.Set (State n t))
-- | Rules providing a specific non-terminal in the root
-- and spanning over a given range.
, doneProSpan :: M.Map (n, Pos, Pos) (S.Set (State n t))
-- | The set of states waiting on the queue to be processed.
-- Invariant: the intersection of `done' and `waiting' states
-- is empty.
-- , waiting :: S.Set (State n t) }
, waiting :: Q.PSQ (State n t) Prio }
deriving (Show)
-- | Make an initial `EarSt` from a set of states.
mkEarSt :: (Ord n, Ord t) => S.Set (State n t) -> (EarSt n t)
mkEarSt s = EarSt
{ doneExpEnd = M.empty
, doneProSpan = M.empty
, waiting = Q.fromList
[ p :-> prio p
| p <- S.toList s ] }
-- | Earley parser monad. Contains the input sentence (reader)
-- and the state of the computation `EarSt'.
type Earley n t = RWS.RWST [t] () (EarSt n t) IO
-- | Read word from the given position of the input.
readInput :: Pos -> MaybeT (Earley n t) t
readInput i = do
-- ask for the input
xs <- RWS.ask
-- just a safe way to retrieve the i-th element
maybeT $ listToMaybe $ drop i xs
-- | Retrieve the set of "done" states.
-- :: Earley n t (S.Set (State n t))
-- = done <$> RWS.get
-- | Add the state to the waiting queue. Check first if it is
-- not already in the set of processed (`done') states.
pushState :: (Ord t, Ord n) => State n t -> Earley n t ()
pushState p = RWS.state $ \s ->
let waiting' = if isProcessed p s
then waiting s
else Q.insert p (prio p) (waiting s)
in ((), s {waiting = waiting'})
-- | Remove a state from the queue. In future, the queue
-- will be probably replaced by a priority queue which will allow
-- to order the computations in some smarter way.
popState :: (Ord t, Ord n) => Earley n t (Maybe (State n t))
popState = RWS.state $ \st -> case Q.minView (waiting st) of
Nothing -> (Nothing, st)
Just (x :-> _, s) -> (Just x, st {waiting = s})
-- | Add the state to the set of processed (`done') states.
saveState :: (Ord t, Ord n) => State n t -> Earley n t ()
saveState p = RWS.state $ \s -> ((), doit s)
where
doit st@EarSt{..} = st
{ doneExpEnd = case expects' p of
Just (x, _) -> M.insertWith S.union (x, end p)
(S.singleton p) doneExpEnd
Nothing -> doneExpEnd
, doneProSpan = if completed p
then M.insertWith S.union (fst $ root p, beg p, end p)
(S.singleton p) doneProSpan
else doneProSpan }
-- | Check if the state is not already processed (i.e. in one of the
-- done-related maps).
isProcessed :: (Ord n, Ord t) => State n t -> EarSt n t -> Bool
isProcessed p EarSt{..} = S.member p $ case expects' p of
Just (x, _) -> M.findWithDefault S.empty
(x, end p) doneExpEnd
Nothing -> M.findWithDefault S.empty
(fst $ root p, beg p, end p) doneProSpan
-- | Return all completed states which:
-- * expect a given label,
-- * end on the given position.
expectEnd
:: (Ord n, Ord t) => Lab n t -> Pos
-> P.ListT (Earley n t) (State n t)
expectEnd x i = do
EarSt{..} <- lift RWS.get
listValues (x, i) doneExpEnd
-- | Return all completed states with:
-- * the given root non-terminal value
-- * the given span
rootSpan
:: Ord n => n -> (Pos, Pos)
-> P.ListT (Earley n t) (State n t)
rootSpan x (i, j) = do
EarSt{..} <- lift RWS.get
listValues (x, i, j) doneProSpan
-- | A utility function.
listValues :: (Monad m, Ord a) => a -> M.Map a (S.Set b) -> P.ListT m b
listValues x m = each $ case M.lookup x m of
Nothing -> []
Just s -> S.toList s
-- | Perform the earley-style computation given the grammar and
-- the input sentence.
earley
:: (VOrd t, VOrd n)
=> S.Set (Rule n t) -- ^ The grammar (set of rules)
-> [t] -- ^ Input sentence
-- -> IO (S.Set (State n t))
-> IO ()
earley gram xs =
-- done . fst <$> RWS.execRWST loop xs st0
void $ RWS.execRWST loop xs st0
where
-- we put in the initial state all the states with the dot on
-- the left of the body of the rule (-> left = []) on all
-- positions of the input sentence.
st0 = mkEarSt $ S.fromList
[ State
{ root = headI
, left = []
, right = body
, beg = i, end = i
, gap = Nothing }
| Rule{..} <- S.toList gram
, i <- [0 .. length xs - 1] ]
-- the computetion is performed as long as the waiting queue
-- is non-empty.
loop = popState >>= \mp -> case mp of
Nothing -> return ()
Just p -> step p >> loop
-- | Step of the algorithm loop. `p' is the state popped up from
-- the queue.
step :: (VOrd t, VOrd n) => State n t -> Earley n t ()
step p = do
sequence $ map ($p)
[ tryScan, trySubst
, tryAdjoinInit
, tryAdjoinCont
, tryAdjoinTerm ]
saveState p
-- | Try to perform SCAN on the given state.
tryScan :: (VOrd t, VOrd n) => State n t -> Earley n t ()
tryScan p = void $ runMaybeT $ do
-- check that the state expects a terminal on the right
(Term t, right') <- expects p
-- read the word immediately following the ending position of
-- the state
c <- readInput $ end p
-- make sure that what the rule expects is consistent with
-- the input
guard $ c == t
-- construct the resultant state
let p' = p
{ end = end p + 1
, left = Term t : left p
, right = right' }
-- print logging information
lift . lift $ do
putStr "[S] " >> printState p
putStr " : " >> printState p'
-- push the resulting state into the waiting queue
lift $ pushState p'
-- | Try to use the state (only if fully parsed) to complement
-- (=> substitution) other rules.
trySubst
:: (VOrd t, VOrd n)
=> State n t
-> Earley n t ()
trySubst p = void $ P.runListT $ do
-- make sure that `p' is a fully-parsed regular rule
guard $ completed p && regular p
-- find rules which end where `p' begins and which
-- expect the non-terminal provided by `p' (ID included)
q <- expectEnd (NonT $ root p) (beg p)
-- construct the resultant state
let q' = q
{ end = end p
, left = NonT (root p) : left q
, right = tail (right q) }
-- print logging information
lift . lift $ do
putStr "[U] " >> printState p
putStr " + " >> printState q
putStr " : " >> printState q'
-- push the resulting state into the waiting queue
lift $ pushState q'
-- | `tryAdjoinInit p q':
-- * `p' is a completed state (regular or auxiliary)
-- * `q' not completed and expects a *real* foot
tryAdjoinInit
:: (VOrd n, VOrd t)
=> State n t
-> Earley n t ()
tryAdjoinInit p = void $ P.runListT $ do
-- make sure that `p' is fully-parsed
guard $ completed p
-- find all rules which expect a real foot (with ID == Nothing)
-- and which end where `p' begins.
let u = fst (root p)
q <- expectEnd (Foot (u, Nothing)) (beg p)
-- construct the resultant state
let q' = q
{ gap = Just (beg p, end p)
, end = end p
, left = Foot (u, Nothing) : left q
, right = tail (right q) }
-- print logging information
lift . lift $ do
putStr "[A] " >> printState p
putStr " + " >> printState q
putStr " : " >> printState q'
-- push the resulting state into the waiting queue
lift $ pushState q'
-- | `tryAdjoinCont p q':
-- * `p' is a completed, auxiliary state
-- * `q' not completed and expects a *dummy* foot
tryAdjoinCont
:: (VOrd n, VOrd t)
=> State n t
-> Earley n t ()
tryAdjoinCont p = void $ P.runListT $ do
-- make sure that `p' is a completed, sub-level auxiliary rule
guard $ completed p && subLevel p && auxiliary p
-- find all rules which expect a foot provided by `p'
-- and which end where `p' begins.
q <- expectEnd (Foot $ root p) (beg p)
-- construct the resulting state; the span of the gap of the
-- inner state `p' is copied to the outer state based on `q'
let q' = q
{ gap = gap p, end = end p
, left = Foot (root p) : left q
, right = tail (right q) }
-- logging info
lift . lift $ do
putStr "[B] " >> printState p
putStr " + " >> printState q
putStr " : " >> printState q'
-- push the resulting state into the waiting queue
lift $ pushState q'
-- | Adjoin a fully-parsed auxiliary state to a partially parsed
-- tree represented by a fully parsed rule/state.
tryAdjoinTerm
:: (VOrd t, VOrd n)
=> State n t
-> Earley n t ()
tryAdjoinTerm p = void $ P.runListT $ do
-- make sure that `p' is a completed, top-level state ...
guard $ completed p && topLevel p
-- ... and that it is an auxiliary state
(gapBeg, gapEnd) <- each $ maybeToList $ gap p
-- take all completed rules with a given span
-- and a given root non-terminal (IDs irrelevant)
q <- rootSpan (fst $ root p) (gapBeg, gapEnd)
-- make sure that `q' is completed as well and that it is
-- either a regular rule or an intermediate auxiliary rule
-- ((<=) used as an implication here!)
guard $ completed q && auxiliary q <= subLevel q
let q' = q
{ beg = beg p
, end = end p }
lift . lift $ do
putStr "[C] " >> printState p
putStr " + " >> printState q
putStr " : " >> printState q'
lift $ pushState q'
--------------------------------------------------
-- UTILS
--------------------------------------------------
-- | Deconstruct a list into its head and tail, or 'Nothing' for the
-- empty list.  Utility function.
decoList :: [a] -> Maybe (a, [a])
decoList xs = case xs of
  []     -> Nothing
  y : ys -> Just (y, ys)
-- | Lift a pure 'Maybe' value into the 'MaybeT' transformer.
maybeT :: Monad m => Maybe a -> MaybeT m a
maybeT = MaybeT . return
-- | A 'P.ListT' computation that yields each element of the list in order.
each :: Monad m => [a] -> P.ListT m a
each = P.Select . P.each
| kawu/ltag | src/NLP/LTAG/Early4.hs | bsd-2-clause | 18,495 | 0 | 16 | 5,251 | 4,817 | 2,536 | 2,281 | -1 | -1 |
module CalculatorKata.Day8 (calculate) where
-- | Evaluate a flat arithmetic expression over 'Double's.  Operators
-- are applied right-associatively with no precedence (the tail of the
-- expression is evaluated before the pending operator is applied),
-- matching the original day-8 kata behaviour.
calculate :: String -> Double
calculate = go ""
  where
    -- @go acc rest@: @acc@ holds the digits of the number currently
    -- being read, @rest@ is the unconsumed input.
    go :: String -> String -> Double
    go acc "" = read acc
    go acc (ch : rest) =
      case ch of
        '+' -> read acc + go "" rest
        '-' -> read acc - go "" rest
        '*' -> read acc * go "" rest
        '/' -> read acc / go "" rest
        _   -> go (acc ++ [ch]) rest
{-# LANGUAGE CPP #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
module Stack.Upgrade
( upgrade
, UpgradeOpts
, upgradeOpts
) where
import Control.Exception.Safe (catchAny)
import Control.Monad (unless, when)
import Control.Monad.IO.Class
import Control.Monad.Logger
import Data.Foldable (forM_)
import qualified Data.Map as Map
import Data.Maybe (isNothing)
import Data.Monoid.Extra
import qualified Data.Text as T
import Lens.Micro (set)
import Options.Applicative
import Path
import Path.IO
import qualified Paths_stack as Paths
import Stack.Build
import Stack.Config
import Stack.Fetch
import Stack.PackageIndex
import Stack.Setup
import Stack.Types.PackageIdentifier
import Stack.Types.PackageName
import Stack.Types.Version
import Stack.Types.Config
import Stack.Types.Resolver
import Stack.Types.StackT
import Stack.Types.StringError
import System.Exit (ExitCode (ExitSuccess))
import System.Process (rawSystem, readProcess)
import System.Process.Run
upgradeOpts :: Parser UpgradeOpts
upgradeOpts = UpgradeOpts
<$> (sourceOnly <|> optional binaryOpts)
<*> (binaryOnly <|> optional sourceOpts)
where
binaryOnly = flag' Nothing (long "binary-only" <> help "Do not use a source upgrade path")
sourceOnly = flag' Nothing (long "source-only" <> help "Do not use a binary upgrade path")
binaryOpts = BinaryOpts
<$> optional (strOption
( long "binary-platform"
<> help "Platform type for archive to download"
<> showDefault))
<*> switch
(long "force-download" <>
help "Download a stack executable, even if the version number is older than what we have")
<*> optional (strOption
(long "binary-version" <>
help "Download a specific version, even if it's out of date"))
<*> optional (strOption
(long "github-org" <>
help "Github organization name"))
<*> optional (strOption
(long "github-repo" <>
help "Github repository name"))
sourceOpts = SourceOpts
<$> ((\fromGit repo -> if fromGit then Just repo else Nothing)
<$> switch
( long "git"
<> help "Clone from Git instead of downloading from Hackage (more dangerous)" )
<*> strOption
( long "git-repo"
<> help "Clone from specified git repository"
<> value "https://github.com/commercialhaskell/stack"
<> showDefault ))
data BinaryOpts = BinaryOpts
{ _boPlatform :: !(Maybe String)
, _boForce :: !Bool
-- ^ force a download, even if the downloaded version is older
-- than what we are
, _boVersion :: !(Maybe String)
-- ^ specific version to download
, _boGithubOrg :: !(Maybe String)
, _boGithubRepo :: !(Maybe String)
}
deriving Show
newtype SourceOpts = SourceOpts
{ _soRepo :: Maybe String
}
deriving Show
data UpgradeOpts = UpgradeOpts
{ _uoBinary :: !(Maybe BinaryOpts)
, _uoSource :: !(Maybe SourceOpts)
}
deriving Show
-- | Attempt to upgrade the running stack executable.
--
-- Dispatches on which upgrade paths the caller allowed: binary
-- download, source build, or both (binary first, falling back to a
-- source build if the binary path throws).  Specifying @--git@ or
-- @--git-repo@ forces a source upgrade (see issue #2977).
upgrade :: (StackM env m, HasConfig env)
        => ConfigMonoid
        -> Maybe AbstractResolver
        -> Maybe String -- ^ git hash at time of building, if known
        -> UpgradeOpts
        -> m ()
upgrade gConfigMonoid mresolver builtHash (UpgradeOpts mbo mso) =
    case (mbo, mso) of
        -- FIXME It would be far nicer to capture this case in the
        -- options parser itself so we get better error messages, but
        -- I can't think of a way to make it happen.
        (Nothing, Nothing) -> throwString "You must allow either binary or source upgrade paths"
        (Just bo, Nothing) -> binary bo
        (Nothing, Just so) -> source so
        -- See #2977 - if --git or --git-repo is specified, do source upgrade.
        (_, Just so@(SourceOpts (Just _))) -> source so
        (Just bo, Just so) -> binary bo `catchAny` \e -> do
            -- typo fix: "occured" -> "occurred"
            $logWarn "Exception occurred when trying to perform binary upgrade:"
            $logWarn $ T.pack $ show e
            $logWarn "Falling back to source upgrade"
            source so
  where
    binary bo = binaryUpgrade bo
    source so = sourceUpgrade gConfigMonoid mresolver builtHash so
binaryUpgrade
:: (StackM env m, HasConfig env)
=> BinaryOpts
-> m ()
binaryUpgrade (BinaryOpts mplatform force' mver morg mrepo) = do
platforms0 <-
case mplatform of
Nothing -> preferredPlatforms
Just p -> return [("windows" `T.isInfixOf` T.pack p, p)]
archiveInfo <- downloadStackReleaseInfo morg mrepo mver
let mdownloadVersion = getDownloadVersion archiveInfo
force =
case mver of
Nothing -> force'
Just _ -> True -- specifying a version implies we're forcing things
isNewer <-
case mdownloadVersion of
Nothing -> do
$logError "Unable to determine upstream version from Github metadata"
unless force $
$logError "Rerun with --force-download to force an upgrade"
return False
Just downloadVersion -> do
$logInfo $ T.concat
[ "Current Stack version: "
, versionText stackVersion
, ", available download version: "
, versionText downloadVersion
]
return $ downloadVersion > stackVersion
toUpgrade <- case (force, isNewer) of
(False, False) -> do
$logInfo "Skipping binary upgrade, you are already running the most recent version"
return False
(True, False) -> do
$logInfo "Forcing binary upgrade"
return True
(_, True) -> do
$logInfo "Newer version detected, downloading"
return True
when toUpgrade $ do
config <- view configL
downloadStackExe platforms0 archiveInfo (configLocalBin config) $ \tmpFile -> do
-- Sanity check!
ec <- rawSystem (toFilePath tmpFile) ["--version"]
unless (ec == ExitSuccess)
$ throwString "Non-success exit code from running newly downloaded executable"
sourceUpgrade
:: (StackM env m, HasConfig env)
=> ConfigMonoid
-> Maybe AbstractResolver
-> Maybe String
-> SourceOpts
-> m ()
sourceUpgrade gConfigMonoid mresolver builtHash (SourceOpts gitRepo) =
withSystemTempDir "stack-upgrade" $ \tmp -> do
menv <- getMinimalEnvOverride
mdir <- case gitRepo of
Just repo -> do
remote <- liftIO $ readProcess "git" ["ls-remote", repo, "master"] []
let latestCommit = head . words $ remote
when (isNothing builtHash) $
$logWarn $ "Information about the commit this version of stack was "
<> "built from is not available due to how it was built. "
<> "Will continue by assuming an upgrade is needed "
<> "because we have no information to the contrary."
if builtHash == Just latestCommit
then do
$logInfo "Already up-to-date, no upgrade required"
return Nothing
else do
$logInfo "Cloning stack"
-- NOTE: "--recursive" was added after v1.0.0 (and before the
-- next release). This means that we can't use submodules in
-- the stack repo until we're comfortable with "stack upgrade
-- --git" not working for earlier versions.
let args = [ "clone", repo , "stack", "--depth", "1", "--recursive"]
runCmd (Cmd (Just tmp) "git" menv args) Nothing
return $ Just $ tmp </> $(mkRelDir "stack")
Nothing -> do
updateAllIndices
(caches, _gitShaCaches) <- getPackageCaches
let latest = Map.fromListWith max
$ map toTuple
$ Map.keys
-- Mistaken upload to Hackage, just ignore it
$ Map.delete (PackageIdentifier
$(mkPackageName "stack")
$(mkVersion "9.9.9"))
caches
case Map.lookup $(mkPackageName "stack") latest of
Nothing -> throwString "No stack found in package indices"
Just version | version <= fromCabalVersion Paths.version -> do
$logInfo "Already at latest version, no upgrade required"
return Nothing
Just version -> do
let ident = PackageIdentifier $(mkPackageName "stack") version
paths <- unpackPackageIdents tmp Nothing
-- accept latest cabal revision by not supplying a Git SHA
$ Map.singleton ident Nothing
case Map.lookup ident paths of
Nothing -> error "Stack.Upgrade.upgrade: invariant violated, unpacked directory not found"
Just path -> return $ Just path
forM_ mdir $ \dir -> do
lc <- loadConfig
gConfigMonoid
mresolver
(SYLOverride $ dir </> $(mkRelFile "stack.yaml"))
bconfig <- lcLoadBuildConfig lc Nothing
envConfig1 <- runInnerStackT bconfig $ setupEnv $ Just $
"Try rerunning with --install-ghc to install the correct GHC into " <>
T.pack (toFilePath (configLocalPrograms (view configL bconfig)))
runInnerStackT (set (buildOptsL.buildOptsInstallExesL) True envConfig1) $
build (const $ return ()) Nothing defaultBuildOptsCLI
{ boptsCLITargets = ["stack"]
}
| mrkkrp/stack | src/Stack/Upgrade.hs | bsd-3-clause | 10,277 | 0 | 26 | 3,508 | 2,079 | 1,042 | 1,037 | 227 | 6 |
-- | Duplicate every element of a list (problem 14 of the 99 Haskell
-- problems): @dupli [1,2] == [1,1,2,2]@.  'concatMap' replaces the
-- original manual recursion.
dupli :: [a] -> [a]
dupli = concatMap (\x -> [x, x])
| bradb/99problems | src/p14.hs | bsd-3-clause | 62 | 0 | 7 | 14 | 54 | 28 | 26 | 3 | 1 |
-- #hide
--------------------------------------------------------------------------------
-- |
-- Module : Sound.OpenAL.AL.ALboolean
-- Copyright : (c) Sven Panne 2003-2005
-- License : BSD-style (see the file libraries/OpenAL/LICENSE)
--
-- Maintainer : sven.panne@aedion.de
-- Stability : provisional
-- Portability : portable
--
-- This is a purely internal module for (un-)marshaling ALboolean.
--
--------------------------------------------------------------------------------
module Sound.OpenAL.AL.ALboolean (
marshalALboolean, unmarshalALboolean
) where
import Sound.OpenAL.AL.BasicTypes ( ALboolean )
import Sound.OpenAL.Constants ( al_FALSE, al_TRUE )
--------------------------------------------------------------------------------
-- | Convert a Haskell 'Bool' to the corresponding AL constant.
marshalALboolean :: Bool -> ALboolean
marshalALboolean b = if b then al_TRUE else al_FALSE

-- | Convert an AL boolean back to a Haskell 'Bool'; any value other
-- than 'al_FALSE' counts as true.
unmarshalALboolean :: ALboolean -> Bool
unmarshalALboolean b = b /= al_FALSE
| FranklinChen/hugs98-plus-Sep2006 | packages/OpenAL/Sound/OpenAL/AL/ALboolean.hs | bsd-3-clause | 953 | 0 | 5 | 121 | 100 | 67 | 33 | 9 | 1 |
{-# LANGUAGE NoMonomorphismRestriction #-}
module Main where
import Diagrams.Prelude
import Diagrams.Backend.Canvas.CmdLine
-- Gradient stop lists: (colour, fraction along the gradient, opacity).
stops = mkStops [(orange, 0, 1), (white, 0.5, 1), (blue, 1, 1)]
-- NOTE(review): 'g' is defined but not referenced by any diagram below.
g = defaultRG & _RG . rGradStops .~ stops
stops' = mkStops [(lightskyblue, 0, 1), (darkgreen, 1, 0.5)]
-- Linear gradients built from the stop lists; GradPad clamps colours
-- beyond the endpoints.
h = mkLinearGradient stops ((-10) ^& (0)) (10 ^& (0)) GradPad
h' = mkLinearGradient stops' ((-50) ^& 0) (50 ^& 0) GradPad
-- Background gradient running vertically across the whole canvas.
linear = mkLinearGradient (mkStops [(black,0,1), (white,1,1)]) (0 ^& (-300)) (0 ^& 300) GradPad
-- Repeating radial gradient between radii 50 and 80, both centred at origin.
radial = mkRadialGradient (mkStops [(orange, 0.0, 0.4)
                                  , (orange, 0.05, 1)
                                  , (gray, 0.35, 0.25)
                                  , (teal, 0.50, 1)])
                          (0 ^& 0) 50
                          (0 ^& 0) 80 GradRepeat
-- Two sample squares: linear-gradient fill vs radial-gradient fill.
s = square 100 # fillTexture h # lineTexture h'  # lw ultraThick # scaleX 1.5
s' = square 100 # fillTexture radial # lineTexture h'  # lw ultraThick # scaleX 1.5
-- Three columns of rotated squares, arranged in a 3x3 grid overall.
e1 = vcat' (with & sep .~ 35) [s', s # rotateBy (1/16), s # rotateBy (1/8)]
e2 = vcat' (with & sep .~ 35) [s # rotateBy (3/16), s' # rotateBy (1/4), s # rotateBy (5/16)]
e3 = vcat' (with & sep .~ 35) [s # rotateBy (3/8), s # rotateBy (7/16), s' # rotateBy (1/2)]
example = hcat' (with & sep .~ 25) [e1, e2, e3]
-- Render the grid over the gradient background via the Canvas backend.
main = defaultMain $ (example # centerXY # pad 1.1) <> (square 600 # fillTexture linear)
| ku-fpg/diagrams-canvas | examples/Gradients.hs | bsd-3-clause | 1,425 | 0 | 10 | 425 | 675 | 374 | 301 | -1 | -1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE BangPatterns #-}
module Counter
( startCounter,
getCount,
incCount,
resetCount,
wait,
waitTimeout
) where
import Control.Distributed.Process hiding (call)
import Control.Distributed.Process.Async
import Control.Distributed.Process.Extras
import Control.Distributed.Process.Extras.Time
import Control.Distributed.Process.ManagedProcess
import Data.Binary
import Data.Typeable (Typeable)
import GHC.Generics
--------------------------------------------------------------------------------
-- Types --
--------------------------------------------------------------------------------
-- Call and Cast request types. Response types are unnecessary as the GenProcess
-- API uses the Async API, which in turn guarantees that an async handle can
-- /only/ give back a reply for that *specific* request through the use of an
-- anonymous middle-man (as the sender and receiver in our case).
-- | Call message: bump the counter and return the new value.
data Increment = Increment
  deriving (Typeable, Generic, Eq, Show)
instance Binary Increment where
-- | Call message: read the current counter value.
data Fetch = Fetch
  deriving (Typeable, Generic, Eq, Show)
instance Binary Fetch where
-- | Cast message: reset the counter to zero (no reply).
data Reset = Reset
  deriving (Typeable, Generic, Eq, Show)
instance Binary Reset where
-- | Server state is simply the current count.
type State = Int
--------------------------------------------------------------------------------
-- API --
--------------------------------------------------------------------------------
-- | Increment the counter held by the given server process and
-- return the incremented value.
incCount :: ProcessId -> Process Int
incCount sid = call sid Increment
-- | Get the current count without modifying it.
getCount :: ProcessId -> Process Int
getCount sid = call sid Fetch
-- | Reset the current count to zero (fire-and-forget cast).
resetCount :: ProcessId -> Process ()
resetCount sid = cast sid Reset
-- | Spawn a counter server initialised with @startCount@ and return
-- its 'ProcessId'. The server never times out ('Infinity').
startCounter :: Int -> Process ProcessId
startCounter startCount =
  let server = serverDefinition
  in spawnLocal $ serve startCount init' server
  where init' :: InitHandler Int Int
        -- Initialisation simply adopts the caller-supplied count as state.
        init' count = return $ InitOk count Infinity
--------------------------------------------------------------------------------
-- Implementation --
--------------------------------------------------------------------------------
-- | Handler table for the counter server. Handlers are tried in order,
-- so the guarded halt handler shadows the normal increment handler
-- once the count reaches 10.
serverDefinition :: ProcessDefinition State
serverDefinition = defaultProcess {
     apiHandlers = [
          handleCallIf (condition (\count Increment -> count >= 10))-- invariant
                       (\_ (_ :: Increment) -> haltMaxCount)
        , handleCall handleIncrement
        , handleCall (\count Fetch -> reply count count)
        , handleCast (\_ Reset -> continue 0)
        ]
    } :: ProcessDefinition State
-- | Stop the server (without replying) when the count invariant is violated.
haltMaxCount :: Reply Int State
haltMaxCount = haltNoReply_ (ExitOther "Count > 10")
-- | Increment the state and reply with the new value.
-- NOTE(review): 'continue next >>= replyWith next' follows the
-- ManagedProcess reply combinators -- confirm against the library docs.
handleIncrement :: CallHandler State Increment Int
handleIncrement count Increment =
  let next = count + 1 in continue next >>= replyWith next
| haskell-distributed/distributed-process-client-server | tests/Counter.hs | bsd-3-clause | 3,202 | 0 | 13 | 710 | 553 | 307 | 246 | 56 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module InitialData where
import Models
import Password
import Database.Persist.Sql
import Control.Monad.IO.Class (liftIO)
import Control.Monad (void, when)
import Data.Maybe (isNothing)
-- | Idempotently seed the database: create the \"Admin\" user (with an
-- empty password run through 'setPassword') if it does not exist, and
-- attach a fixed API key/secret to it if missing.
--
-- NOTE(review): the API key and secret are hard-coded below; this is
-- only safe as a development fixture, never in production.
insertInitialData :: SqlPersistT IO ()
insertInitialData = do
    mu <- getBy $ UniqueUser admin
    -- Reuse the existing admin's id, or insert a fresh admin user.
    uId <- case mu of
        Just (Entity uId _) -> return uId
        Nothing -> do
            u <- liftIO $ setPassword admin (User {
                    userName = admin,
                    userPassword = ""
                })
            insert u
    ma <- getBy $ UniqueApiKey apiKey
    -- Only insert the API key row when it is not already present.
    when (isNothing ma) $ void $ insert $ ApiKey uId apiKey apiSecret
    where
        apiKey = "1234"
        apiSecret = "4321"
        admin = "Admin"
| tlaitinen/servant-cookie-hmac-auth-example | app/InitialData.hs | bsd-3-clause | 790 | 0 | 18 | 262 | 221 | 116 | 105 | 23 | 2 |
{-
%
% (c) The University of Glasgow 2006
% (c) The GRASP/AQUA Project, Glasgow University, 1992-1998
%
\section[FastStringEnv]{@FastStringEnv@: FastString environments}
-}
module FastStringEnv (
-- * FastString environments (maps)
FastStringEnv,
-- ** Manipulating these environments
mkFsEnv,
emptyFsEnv, unitFsEnv, fsEnvElts,
extendFsEnv_C, extendFsEnv_Acc, extendFsEnv,
extendFsEnvList, extendFsEnvList_C,
filterFsEnv,
plusFsEnv, plusFsEnv_C, alterFsEnv,
lookupFsEnv, lookupFsEnv_NF, delFromFsEnv, delListFromFsEnv,
elemFsEnv, mapFsEnv,
) where
import UniqFM
import Maybes
import FastString
-- | Environment keyed by 'FastString', backed by a 'UniqFM'.
type FastStringEnv a = UniqFM a        -- Domain is FastString
emptyFsEnv         :: FastStringEnv a
mkFsEnv            :: [(FastString,a)] -> FastStringEnv a
fsEnvElts          :: FastStringEnv a -> [a]
alterFsEnv         :: (Maybe a-> Maybe a) -> FastStringEnv a -> FastString -> FastStringEnv a
extendFsEnv_C      :: (a->a->a) -> FastStringEnv a -> FastString -> a -> FastStringEnv a
extendFsEnv_Acc    :: (a->b->b) -> (a->b) -> FastStringEnv b -> FastString -> a -> FastStringEnv b
extendFsEnv        :: FastStringEnv a -> FastString -> a -> FastStringEnv a
plusFsEnv          :: FastStringEnv a -> FastStringEnv a -> FastStringEnv a
plusFsEnv_C        :: (a->a->a) -> FastStringEnv a -> FastStringEnv a -> FastStringEnv a
extendFsEnvList    :: FastStringEnv a -> [(FastString,a)] -> FastStringEnv a
extendFsEnvList_C  :: (a->a->a) -> FastStringEnv a -> [(FastString,a)] -> FastStringEnv a
delFromFsEnv       :: FastStringEnv a -> FastString -> FastStringEnv a
delListFromFsEnv   :: FastStringEnv a -> [FastString] -> FastStringEnv a
elemFsEnv          :: FastString -> FastStringEnv a -> Bool
unitFsEnv          :: FastString -> a -> FastStringEnv a
lookupFsEnv        :: FastStringEnv a -> FastString -> Maybe a
lookupFsEnv_NF     :: FastStringEnv a -> FastString -> a
filterFsEnv        :: (elt -> Bool) -> FastStringEnv elt -> FastStringEnv elt
mapFsEnv           :: (elt1 -> elt2) -> FastStringEnv elt1 -> FastStringEnv elt2

-- All operations are thin renamings of the corresponding UniqFM
-- primitives, written point-free.
fsEnvElts         = eltsUFM
emptyFsEnv        = emptyUFM
unitFsEnv         = unitUFM
extendFsEnv       = addToUFM
extendFsEnvList   = addListToUFM
lookupFsEnv       = lookupUFM
alterFsEnv        = alterUFM
mkFsEnv           = listToUFM
elemFsEnv         = elemUFM
plusFsEnv         = plusUFM
plusFsEnv_C       = plusUFM_C
extendFsEnv_C     = addToUFM_C
mapFsEnv          = mapUFM
extendFsEnv_Acc   = addToUFM_Acc
extendFsEnvList_C = addListToUFM_C
delFromFsEnv      = delFromUFM
delListFromFsEnv  = delListFromUFM
filterFsEnv       = filterUFM
-- Partial lookup: panics with the given tag when the key is absent.
lookupFsEnv_NF env n = expectJust "lookupFsEnv_NF" (lookupFsEnv env n)
| vikraman/ghc | compiler/utils/FastStringEnv.hs | bsd-3-clause | 2,957 | 0 | 10 | 793 | 867 | 443 | 424 | 52 | 1 |
{-# LANGUAGE RankNTypes, TypeOperators, DefaultSignatures #-}
-- | Compare to comonad.Control.Comonad (Copointed)
module MHask.Copointed where
import MHask.Arrow
import qualified MHask.Join as MHask
import qualified MHask.Impl.Identity as I
import qualified MHask.Impl.State as S
import qualified MHask.Impl.Reader as R
import qualified MHask.Impl.Writer as W
-- | Dual of "MHask.Pointed".
-- | Monad transformers whose inner monad can be extracted.
class (MHask.Join t) => Copointed t where
  -- | Instances must obey the following laws:
  --
  -- > join ≡ extract :: t m <~ t (t m)
  -- > extract ~<~ fmap f ≡ f ~<~ extract
  extract :: (Monad m)
    => m <~ t m
-- Each instance defers to the implementation shipped with the transformer.
instance Copointed I.IdentityT where
  extract = I.extract
instance (S.Monoid s) => Copointed (S.StateT s) where
  extract = S.extract
instance (R.Monoid r) => Copointed (R.ReaderT r) where
  extract = R.extract
instance (W.Monoid w) => Copointed (W.WriterT w) where
  extract = W.extract
{-# LANGUAGE RecordWildCards #-}
-- | Data pagination.
module Data.Pagination where
import Data.Default
import Data.Maybe
-- | A pagination object, holds information about the name, total, per
-- page, current page, etc.
data Pagination = Pagination
  { pnTotal       :: Integer -- ^ total number of items being paginated
  , pnPerPage     :: Integer -- ^ how many items fit on one page
  , pnName        :: String  -- ^ display name of the paginated collection
  , pnCurrentPage :: Integer -- ^ 1-based index of the current page
  , pnShowDesc    :: Bool    -- ^ whether to show a description line
  } deriving (Show)
-- | Sensible defaults: empty collection, 5 items per page, page 1.
instance Default Pagination where
  def = Pagination
    { pnTotal = 0
    , pnPerPage = 5
    , pnName = ""
    , pnCurrentPage = 1
    , pnShowDesc = True
    }
-- | Get the page count of the pagination results.
pnPageCount :: Pagination -> Integer
pnPageCount Pagination{..} = max 1 $
if total/perpage > fromIntegral (round (total/perpage))
then round (total/perpage) + 1
else round (total/perpage)
where total = fromIntegral pnTotal
perpage = fromIntegral pnPerPage
| chrisdone/haskelldb-demo | src/Data/Pagination.hs | bsd-3-clause | 954 | 0 | 12 | 262 | 216 | 125 | 91 | 25 | 2 |
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module Tinfoil.KDF(
defaultScrypt
, newScrypt
, hash
, kdfFor
, needsRehash
, verify
, verifyNoCredential
) where
import P
import System.IO (IO)
import Tinfoil.Data
import qualified Tinfoil.KDF.Scrypt as Scrypt
-- | The scrypt KDF with the library's default parameters.
defaultScrypt :: KDF
defaultScrypt =
  newScrypt Scrypt.defaultParams
-- | Build a scrypt-backed 'KDF' from explicit parameters, wiring up
-- hashing, verification and parameter-freshness checks.
newScrypt :: Scrypt.ScryptParams -> KDF
newScrypt params =
  KDF
    (Scrypt.hashCredential params)
    Scrypt.verifyCredential
    (Scrypt.verifyNoCredential params)
    Scrypt0
    (Scrypt.paramsUpToDate params)
-- | Resolve an MCF prefix to its KDF implementation. 'Scrypt0' is the
-- only prefix handled here.
kdfFor :: MCFPrefix -> KDF
kdfFor Scrypt0 = defaultScrypt
-- | Hash a credential with the KDF selected by the prefix and pack the
-- result into MCF format.
hash :: MCFPrefix -> Credential -> IO MCFHash
hash mp c = do
  fmap (packMCFHash mp) $ (kdfGenHash kdf) c
  where
    kdf = kdfFor mp
-- | Verify a credential against a stored MCF hash. Returns
-- 'VerificationError' when the MCF string cannot be unpacked.
verify :: MCFHash -> Credential -> IO Verified
verify mh c =
  maybe' (pure VerificationError) (uncurry verify') $ unpackMCFHash mh
  where
    -- Dispatch to the KDF named by the unpacked prefix.
    verify' mcf ch =
      let kdf = kdfFor mcf in
      (kdfVerifyCredential kdf) ch c
-- | Burn comparable work to a real verification without a stored hash
-- (used to keep timing uniform when no credential exists).
verifyNoCredential :: MCFPrefix -> Credential -> IO Verified
verifyNoCredential mp c =
  (kdfVerifyNoCredential kdf) $ c
  where
    kdf = kdfFor mp
-- | Decide whether a stored hash was produced with out-of-date KDF
-- parameters; 'Nothing'' when the MCF string cannot be unpacked.
needsRehash :: MCFHash -> Maybe' NeedsRehash
needsRehash mh = do
  (p, h) <- unpackMCFHash mh
  (kdfUpToDate (kdfFor p)) h
| ambiata/tinfoil | src/Tinfoil/KDF.hs | bsd-3-clause | 1,313 | 0 | 11 | 290 | 394 | 203 | 191 | 45 | 1 |
module Main where
import Lib
import Baby
-- | Entry point: delegates to 'someFunc' from "Lib".
main :: IO ()
main = someFunc
| dueyfinster/haskell | app/Main.hs | bsd-3-clause | 73 | 0 | 6 | 16 | 25 | 15 | 10 | 5 | 1 |
{-# LANGUAGE Rank2Types #-}
module Validations.Internal.Lens
( getter
, setter
, lens
, Lens
) where
import Control.Monad.Identity(Identity(Identity), runIdentity)
import Control.Applicative(Const(Const), getConst, (<$>))
-- | A van Laarhoven lens focusing a component of type @a@ inside @s@.
type Lens a s = (Functor f) => (a -> f a) -> s -> f s

-- | Read the focused component out of a structure, via 'Const'.
getter :: (forall f. (Functor f) => (a -> f a) -> s -> f s) -> s -> a
getter l s = getConst (l Const s)

-- | Replace the focused component of a structure, via 'Identity'.
setter :: (forall f. (Functor f) => (a -> f a) -> s -> f s) -> s -> a -> s
setter l s v = runIdentity (l (const (Identity v)) s)

-- | Build a lens from a pure getter/setter pair.
lens :: (Functor f) => (s -> a) -> (s -> a -> s) -> (a -> f a) -> s -> f s
lens get set wrap s = set s <$> wrap (get s)
| mavenraven/validations | src/Validations/Internal/Lens.hs | bsd-3-clause | 676 | 0 | 12 | 161 | 360 | 197 | 163 | 15 | 1 |
{-# LANGUAGE TypeFamilies #-}
module EFA.Equation.Pair (
T(Cons, first, second),
) where
import qualified EFA.Equation.Arithmetic as Arith
import EFA.Equation.Arithmetic
(Sum, (~+), (~-),
Product, (~*), (~/),
ZeroTestable, allZeros, coincidingZeros,
Constant, zero,
Integrate, Scalar, integrate)
import qualified EFA.Report.Format as Format
import EFA.Report.FormatValue (FormatValue, formatValue)
import Data.Function.HT (compose2)
import Data.Eq.HT (equating)
import Data.Ord.HT (comparing)
{- |
Data type that allows to perform the same computation
in different representations parallelly.
Don't think of it as a vector type.
Rather than this,
we use it for simultaneous symbolic and numeric computations.
This is reflected in the 'Eq', 'Ord' and 'ZeroTestable' instances
that only watch the 'second' component.
-}
-- | A pair carrying the same computation in two representations.
-- Comparisons deliberately look only at the 'second' component.
data T a b = Cons {first :: a, second :: b}
   deriving (Show)
instance (Eq b) => Eq (T a b) where
   (==)  =  equating second
instance (Ord b) => Ord (T a b) where
   compare  =  comparing second
instance (FormatValue a, FormatValue b) => FormatValue (T a b) where
   formatValue (Cons a b) = Format.pair (formatValue a) (formatValue b)
-- | Apply one function per component of a pair.
liftP1 ::
   (a -> a) ->
   (b -> b) ->
   T a b -> T a b
liftP1 h k (Cons x y) =
   Cons (h x) (k y)

-- | Combine two pairs componentwise, one binary function per side.
liftP2 ::
   (a -> a -> a) ->
   (b -> b -> b) ->
   T a b -> T a b -> T a b
liftP2 h k (Cons x0 y0) (Cons x1 y1) =
   Cons (h x0 x1) (k y0 y1)
-- Arithmetic classes act componentwise; literals are duplicated into
-- both components.
instance (Num a, Num b) => Num (T a b) where
   fromInteger n = Cons (fromInteger n) (fromInteger n)
   (+) = liftP2 (+) (+)
   (-) = liftP2 (-) (-)
   (*) = liftP2 (*) (*)
   negate = liftP1 negate negate
   abs = liftP1 abs abs
   signum = liftP1 signum signum
instance (Fractional a, Fractional b) => Fractional (T a b) where
   fromRational x = Cons (fromRational x) (fromRational x)
   (/) = liftP2 (/) (/)
   recip = liftP1 recip recip
-- The EFA arithmetic classes mirror Num/Fractional, again componentwise.
instance (Sum a, Sum b) => Sum (T a b) where
   (~+) = liftP2 (~+) (~+)
   (~-) = liftP2 (~-) (~-)
   negate = liftP1 Arith.negate Arith.negate
instance (Product a, Product b) => Product (T a b) where
   (~*) = liftP2 (~*) (~*)
   (~/) = liftP2 (~/) (~/)
   recip = liftP1 Arith.recip Arith.recip
   constOne = liftP1 Arith.constOne Arith.constOne
instance (Constant a, Constant b) => Constant (T a b) where
   zero = Cons Arith.zero Arith.zero
   fromInteger n = Cons (Arith.fromInteger n) (Arith.fromInteger n)
   fromRational x = Cons (Arith.fromRational x) (Arith.fromRational x)
-- Zero tests, like Eq/Ord above, consult only the 'second' component.
instance (ZeroTestable b) => ZeroTestable (T a b) where
   allZeros = allZeros . second
   coincidingZeros = compose2 coincidingZeros second
instance (Integrate a, Integrate b) => Integrate (T a b) where
   type Scalar (T a b) = T (Scalar a) (Scalar b)
   integrate (Cons a b) = Cons (integrate a) (integrate b)
| energyflowanalysis/efa-2.1 | src/EFA/Equation/Pair.hs | bsd-3-clause | 2,815 | 0 | 9 | 640 | 1,126 | 624 | 502 | 69 | 1 |
-- | Duplicate each element of the list in place:
-- @dupli [1,2,3] == [1,1,2,2,3,3]@.
dupli :: [b] -> [b]
dupli xs = [d | x <- xs, d <- [x, x]]
| m00nlight/99-problems | haskell/p-14.hs | bsd-3-clause | 55 | 0 | 8 | 14 | 38 | 22 | 16 | 2 | 1 |
-- | Compatibility shim: re-exports "System.Mem.StableName" under the
-- legacy module name @StableName@.
module StableName (module System.Mem.StableName) where
import System.Mem.StableName
| OS2World/DEV-UTIL-HUGS | oldlib/StableName.hs | bsd-3-clause | 84 | 0 | 5 | 7 | 19 | 13 | 6 | 2 | 0 |
module Tests.Goblin.Workshop.Graph where
import Data.Map
import Test.Tasty
import Test.Tasty.HUnit
import Goblin.Workshop.Graph
-- | Tasty group collecting the Graph unit tests below.
tests :: TestTree
tests = testGroup "Workshop.Graph"
  [ testCase "Test initGraph" testInitGraph
  , testCase "Test entryPoints" testEntryPoints
  , testCase "Test removeEntryPoints" testRemoveEntryPoints
  ]
-- | Shared fixture: five vertices 000..444 with edges forming
-- 000 -> 111 -> {222 -> 444, 333}.
sampleData :: Graph Int String
sampleData = let kvps = [ (000, "Alpha")
                        , (111, "Beta")
                        , (222, "Gamma")
                        , (333, "Delta")
                        , (444, "Epsilon")
                        ]
                 conns = [ (000, 111)
                         , (111, 222)
                         , (111, 333)
                         , (222, 444)
                         ]
             in initGraph kvps conns
-- | 'initGraph' must populate the vertex map and both edge-direction maps.
testInitGraph :: Assertion
testInitGraph = do
  vertices sampleData ! 222 @?= "Gamma"
  outEdgesMap sampleData ! 111 @?= [222, 333]
  inEdgesMap sampleData ! 111 @?= [000]
  outEdgesMap sampleData ! 000 @?= [111]
  inEdgesMap sampleData ! 000 @?= []
  outEdgesMap sampleData ! 444 @?= []
  inEdgesMap sampleData ! 444 @?= [222]
-- | Only vertices with no incoming edges are entry points.
testEntryPoints :: Assertion
testEntryPoints = do
  entryPoints sampleData @?= [ (000, "Alpha")
                             ]
-- | Removing a non-entry vertex is a no-op; removing real entry points
-- drops them and their outgoing edges.
testRemoveEntryPoints :: Assertion
testRemoveEntryPoints = do
  let g = removeEntryPoints [444] sampleData
  g @?= sampleData
  let g' = removeEntryPoints [000, 222] sampleData
  g' @?= initGraph
    [ (111, "Beta")
    , (222, "Gamma")
    , (333, "Delta")
    , (444, "Epsilon")
    ]
    [ (111, 222)
    , (111, 333)
    , (222, 444)
    ]
  let empty = removeEntryPoint 444 . removeEntryPoints [222, 333] . removeEntryPoint 111 . removeEntryPoint 000 $ sampleData
  empty @?= initGraph [] []
| y-usuzumi/goblin-workshop | test/Tests/Goblin/Workshop/Graph.hs | bsd-3-clause | 1,775 | 0 | 15 | 569 | 530 | 289 | 241 | 48 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Web.GCS(
list,
upload,
download,
downloadURL,
getSignedURL
) where
import Web.GCS.Types
import Control.Lens (view, each, to, (^?), (^..))
import Control.Monad.Except (MonadError)
import Control.Monad.Reader (MonadReader)
import Control.Monad.Trans (MonadIO, liftIO)
import Crypto.Hash.Algorithms (SHA256(..))
import Crypto.PubKey.RSA.PKCS15 (sign)
import Data.Aeson hiding (Array)
import Data.Aeson.Lens (key, _Array, _String)
import Data.DList (DList)
import Data.Monoid
import Data.Time.Clock.POSIX (getPOSIXTime)
import Network.HTTP.Nano
import Network.URI (escapeURIString)
import qualified Data.ByteString.Base64 as B64R
import qualified Data.ByteString.Base64.URL as B64
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lazy.Char8 as BL
import qualified Data.DList as D
import qualified Data.Text as T
-- | Get the names of all objects in the configured bucket, following
-- the JSON API's @nextPageToken@ pagination until exhausted.
-- Results are accumulated in a 'DList' to keep appends O(1).
list
  :: forall m e r. ( MonadIO m
                   , MonadError e m
                   , AsHttpError e
                   , MonadReader r m
                   , HasGcsCfg r
                   , HasHttpCfg r )
  => m [FilePath]
list = do
    bucket <- view gcsCfgBucket
    -- Pull every "items[].name" out of one page of the JSON response.
    let getNames :: Value -> [FilePath]
        getNames v = v ^.. key "items" . _Array . each . key "name" . _String . to T.unpack
        -- Fetch one page (optionally keyed by a page token) and recurse
        -- while the response advertises a nextPageToken.
        go :: Maybe String -> m (DList FilePath)
        go ptoken = do
          let url = "https://www.googleapis.com/storage/v1/b/" <> bucket <> "/o"
                 <> maybe "" ("?pageToken=" ++) ptoken
          r <- (httpJSON =<< buildGCSReq GET url NoRequestData) :: m Value
          case r ^? (key "nextPageToken" . _String . to T.unpack) of
            Nothing -> (return . D.fromList . getNames) r
            Just ptoken' -> (D.fromList (getNames r) <>) <$> go (pure ptoken')
    D.toList <$> go Nothing
-- |Upload an object using simple (uploadType=media) upload, with the
-- given MIME type and object name.
upload :: (MonadIO m, MonadError e m, AsHttpError e, MonadReader r m, HasGcsCfg r, HasHttpCfg r) => String -> String -> BL.ByteString -> m ()
upload mime name dta = do
    bucket <- view gcsCfgBucket
    let url = "https://www.googleapis.com/upload/storage/v1/b/"<>
              bucket<>"/o?uploadType=media&name="<> escapeName name
    -- NOTE(review): the header is spelled "Content-length"; HTTP header
    -- names are case-insensitive so this should be accepted -- confirm.
    req <- addHeaders [("Content-Type", mime),("Content-length", show $ BL.length dta)] <$> buildGCSReq POST url (RawRequestData dta)
    http' req
-- |Download an object from the configured bucket (alt=media returns
-- the raw object bytes rather than its JSON metadata).
download :: (MonadIO m, MonadError e m, AsHttpError e, MonadReader r m, HasGcsCfg r, HasHttpCfg r) => String -> m BL.ByteString
download name = do
    bucket <- view gcsCfgBucket
    downloadURL $ "https://www.googleapis.com/storage/v1/b/"<>bucket<>"/o/"<>
                  escapeName name <>"?alt=media"
-- |Download the raw body of an authenticated GET to the given URL.
downloadURL :: (MonadIO m, MonadError e m, AsHttpError e, MonadReader r m, HasGcsCfg r, HasHttpCfg r) => String -> m BL.ByteString
downloadURL url = do
    req <- buildGCSReq GET url NoRequestData
    http req
-- |Get a signed URL for an object, valid until the given Unix expiry
-- time, using the legacy GCS signed-URL scheme: the string
-- \"GET\\n\\n\\n<expiry>\\n<path>\" is RSA-SHA256 signed with the service
-- account's private key and base64 encoded.
getSignedURL :: (MonadError e m, MonadReader r m, HasGcsCfg r) => String -> Int -> m String
getSignedURL name expUTC = do
    (GcsCfg bucket email pkey) <- view gcsCfg
    let path = "/"<>bucket<>"/"<>escapeName name
    let str = B.pack $ mconcat ["GET\n", "\n", "\n", show expUTC<>"\n", path]
    -- NOTE(review): a signing failure is silently mapped to an empty
    -- signature rather than an error -- the resulting URL will be invalid.
    let sig = either (const "") B64R.encode $ sign Nothing (Just SHA256) pkey str
    -- '+' and '/' must be percent-escaped to survive in a query string.
    let esig = T.unpack . T.replace "+" "%2B" . T.replace "/" "%2F" . T.pack $ B.unpack sig
    return $
        mconcat [
            "http://storage.googleapis.com",
            path,
            "?GoogleAccessId=",
            email,
            "&Expires=",
            show expUTC,
            "&Signature=",
            esig
        ]
--
-- Utility
--
-- | Build an HTTP request carrying a freshly obtained OAuth2 bearer token.
buildGCSReq :: (MonadIO m, MonadError e m, AsHttpError e, MonadReader r m, HasGcsCfg r, HasHttpCfg r) => HttpMethod -> String -> RequestData -> m Request
buildGCSReq mthd url dta = do
    tok <- getGCSAccessToken
    addHeaders [("Authorization", "Bearer "++tok)] <$> buildReq mthd url dta
-- | Exchange a service-account JWT for an OAuth2 access token.
-- NOTE(review): a new token is fetched for every request; no caching.
getGCSAccessToken :: (MonadIO m, MonadError e m, AsHttpError e, MonadReader r m, HasGcsCfg r, HasHttpCfg r) => m String
getGCSAccessToken = do
    jwt <- getGCSJWT
    req <- buildReq POST "https://www.googleapis.com/oauth2/v4/token" (UrlEncodedRequestData [("grant_type", "urn:ietf:params:oauth:grant-type:jwt-bearer"),("assertion", jwt)])
    (AuthResult tok) <- httpJSON req
    return tok
-- | Construct and RSA-SHA256 sign an OAuth2 JWT assertion scoped to
-- devstorage.read_write, valid just under one hour (exp = iat + 3590s).
getGCSJWT :: (MonadIO m, MonadError e m, AsHttpError e, MonadReader r m, HasGcsCfg r, HasHttpCfg r) => m String
getGCSJWT = do
    tme <- liftIO $ (round <$> getPOSIXTime :: IO Int)
    (GcsCfg _ email pkey) <- view gcsCfg
    let obj = object ["iss" .= email, "scope" .= ("https://www.googleapis.com/auth/devstorage.read_write" :: String), "aud" .= ("https://www.googleapis.com/oauth2/v4/token" :: String), "exp" .= (tme + 3590), "iat" .= tme]
    -- Fixed header is the base64url encoding of {"alg":"RS256","typ":"JWT"}.
    let header = "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9"
    let body = B64.encode . BL.toStrict $ encode obj
    -- NOTE(review): signing failures degrade to an empty signature here too.
    let sig = either (const "") B64.encode $ sign Nothing (Just SHA256) pkey (header<>"."<>body)
    return . B.unpack $ header<>"."<>body<>"."<>sig
-- | Percent-encode an object name for use in a GCS URL, leaving ASCII
-- letters, digits and the URI unreserved/sub-delimiter characters intact.
escapeName :: String -> String
escapeName = escapeURIString isKept
  where
    isKept c = c `elem` kept
    kept = ['A'..'Z'] ++ ['a'..'z'] ++ ['0'..'9'] ++ "-._~!$&\'()*+,;=:@"
| collegevine/gcs | src/Web/GCS.hs | bsd-3-clause | 5,357 | 0 | 19 | 1,186 | 1,826 | 958 | 868 | -1 | -1 |
-- © 2001, 2002 Peter Thiemann
-- |Types of outputable data.
module WASH.CGI.CGITypes where
import WASH.CGI.HTMLWrapper (WithHTML)
-- | A URL carried as its raw string rendering.
newtype URL = URL { unURL :: String }
-- | An uploaded file, identified by where it lives locally plus the
-- metadata sent by the client.
data FileReference =
  FileReference { fileReferenceName :: FilePath
                  -- ^ valid local filename where this file can be accessed
                , fileReferenceContentType :: String
                  -- ^ MIME content type reported for the file
                , fileReferenceExternalName :: String
                  -- ^ file name as supplied by the uploader
                }
  deriving (Show, Read)
-- |Assumes that file contains correctly formatted HTTP Response starting with
-- Content-Type. Used internally to implement frames.
data ResponseFileReference =
  ResponseFileReference FilePath
-- | An HTTP status line plus optional explanatory HTML body.
data Status = Status { statusCode :: Int -- ^ status code
                     , statusReason :: String -- ^ reason phrase
                     , statusContent :: Maybe (WithHTML () IO ()) -- ^ more explanation
                     }
newtype Location = Location URL -- ^ redirection
-- | A raw response with caller-chosen content type, bypassing HTML generation.
data FreeForm =
  FreeForm { ffName :: String -- ^ internal name
           , ffContentType :: String -- ^ MIME type
           , ffRawContents :: String -- ^ contents as octet stream
           }
-- | Per-script configuration flags for URL generation and session handling.
data CGIOption
  = NoPort -- ^ do not include port number in generated URLs
  | AutoPort -- ^ include automatically generated port number in generated URLs (default)
  | Port Int -- ^ use this port number in generated URLs
  | NoHttps -- ^ do not attempt to detect Https
  | AutoHttps -- ^ autodetect Https by checking for port number 443 and env var HTTPS (default)
  | FullURL -- ^ generate full URL including scheme, host, and port
  | PartialURL -- ^ generate absolute path URL, only (default)
  | SessionMode { unSessionMode :: SessionMode }
  deriving (Eq, Show)
type CGIOptions = [CGIOption]
-- | How session continuations are persisted between requests.
data SessionMode
  = LogOnly     -- ^ generate log in hidden field, full server replay (default)
  | StateIDOnly -- ^ generate state id, server threads without replay (only with WSP)
  | LogAndState -- ^ log and state id, server threads with replay as fallback (only with WSP)
  deriving (Eq, Show, Read, Bounded, Enum)

-- | Does this mode carry an interaction log? Every mode except
-- 'StateIDOnly' does.
sessionNeedsLog :: SessionMode -> Bool
sessionNeedsLog mode = mode /= StateIDOnly

-- | Does this mode keep server-side state by id? Every mode except
-- 'LogOnly' does.
sessionNeedsState :: SessionMode -> Bool
sessionNeedsState mode = mode /= LogOnly
| nh2/WashNGo | WASH/CGI/CGITypes.hs | bsd-3-clause | 2,310 | 46 | 8 | 503 | 350 | 219 | 131 | 42 | 1 |
{-# OPTIONS -fno-warn-tabs #-}
-- The above warning supression flag is a temporary kludge.
-- While working on this module you are encouraged to remove it and
-- detab the module (please do the detabbing in a separate patch). See
-- http://hackage.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#TabsvsSpaces
-- for details
-- | One ounce of sanity checking is worth 10000000000000000 ounces
-- of staring blindly at assembly code trying to find the problem..
--
module SPARC.CodeGen.Sanity (
checkBlock
)
where
import SPARC.Instr
import SPARC.Ppr ()
import Instruction
import Cmm
import Outputable
-- | Enforce intra-block invariants: returns the block unchanged when it
-- satisfies 'checkBlockInstrs', otherwise panics, dumping both the Cmm
-- and the generated native code for debugging.
--
checkBlock :: CmmBlock
           -> NatBasicBlock Instr
           -> NatBasicBlock Instr
checkBlock cmm block@(BasicBlock _ instrs)
	| checkBlockInstrs instrs
	= block
	| otherwise
	= pprPanic
		("SPARC.CodeGen: bad block\n")
		( vcat	[ text " -- cmm -----------------\n"
			, ppr cmm
			, text " -- native code ---------\n"
			, ppr block ])
checkBlockInstrs :: [Instr] -> Bool
checkBlockInstrs ii
	-- An unconditional jump ends the block.
	-- There must be an unconditional jump in the block, otherwise
	-- the register liveness determinator will get the liveness
	-- information wrong.
	--
	-- If the block ends with a cmm call that never returns
	-- then there can be unreachable instructions after the jump,
	-- but we don't mind here.
	--
	| instr : NOP : _	<- ii
	, isUnconditionalJump instr
	= True
	-- All jumps must have a NOP in their branch delay slot.
	-- The liveness determinator and register allocators aren't smart
	-- enough to handle branch delay slots.
	--
	| instr : NOP : is	<- ii
	, isJumpishInstr instr
	= checkBlockInstrs is
	-- keep checking
	| _:i2:is		<- ii
	= checkBlockInstrs (i2:is)
	-- this block is no good
	| otherwise
	= False
| ekmett/ghc | compiler/nativeGen/SPARC/CodeGen/Sanity.hs | bsd-3-clause | 1,834 | 31 | 10 | 378 | 303 | 165 | 138 | 33 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE CPP #-}
#if MIN_VERSION_base(4,9,0)
{-# LANGUAGE TypeApplications #-}
#endif
module Main ( main ) where
#if MIN_VERSION_base(4,8,0)
#else
import Control.Applicative ( (<$>) )
#endif
import Control.Monad
import Text.Haiji
import Text.Haiji.Runtime
import Data.Aeson
import Data.Default
#if MIN_VERSION_base(4,11,0)
#else
import Data.Monoid
#endif
import qualified Data.Text as T
import qualified Data.Text.Lazy as LT
import qualified Data.Text.Lazy.IO as LT
import System.Exit
import System.Process.Text.Lazy
import Test.Tasty.TH
import Test.Tasty.HUnit
-- | Run every @case_*@ test discovered by tasty-th in this module.
main :: IO ()
main = $(defaultMainGenerator)
-- | Render @template@ with the reference Jinja2 implementation by piping
-- a small Python 3 script to @python3@; the context is serialised via
-- 'show' of @dict@ and decoded as JSON on the Python side (assumes the
-- dict's 'Show' output is valid JSON -- TODO confirm).
jinja2 :: Show a => FilePath -> a -> IO LT.Text
jinja2 template dict = do
  (code, out, err) <- readProcessWithExitCode "python3" [] script
  -- Echo Python's stderr on failure; stdout (possibly empty) is returned
  -- either way.
  unless (code == ExitSuccess) $ LT.putStrLn err
  return out where
    script = LT.unlines
             [ "import json"
             , "from jinja2 import Environment, PackageLoader"
             , "env = Environment(loader=PackageLoader('example', '.'),autoescape=True)"
             , "template = env.get_template('" <> LT.pack template <> "')"
             , "object = json.loads(" <> LT.pack (show $ show dict) <> ")"
             , "print(template.render(object),end='')"
             , "exit()"
             ]
-- | The compile-time haiji template and the runtime-loaded template must
-- both match the reference Jinja2 rendering of example.tmpl.
case_example :: Assertion
case_example = do
  expected <- jinja2 "example.tmpl" dict
  expected @=? render $(haijiFile def "example.tmpl") dict
  tmpl <- readTemplateFile def "example.tmpl"
  expected @=? render tmpl (toJSON dict)
    where
#if MIN_VERSION_base(4,9,0)
      -- GHC >= 8.0: dictionary keys via visible type application.
      dict = toDict @"a_variable" ("Hello,World!" :: T.Text) `merge`
             toDict @"navigation" [ toDict @"caption" ("A" :: T.Text) `merge`
                                    toDict @"href" ("content/a.html" :: T.Text)
                                  , toDict @"caption" "B" `merge`
                                    toDict @"href" "content/b.html"
                                  ] `merge`
             toDict @"foo" (1 :: Integer) `merge`
             toDict @"bar" ("" :: T.Text)
#else
      -- Older GHC: dictionary keys via the [key|...|] quasi-quoter.
      dict = [key|a_variable|] ("Hello,World!" :: T.Text) `merge`
             [key|navigation|] [ [key|caption|] ("A" :: T.Text) `merge`
                                 [key|href|] ("content/a.html" :: T.Text)
                               , [key|caption|] "B" `merge`
                                 [key|href|] "content/b.html"
                               ] `merge`
             [key|foo|] (1 :: Integer) `merge`
             [key|bar|] ("" :: T.Text)
#endif
-- | An empty template renders identically to Jinja2.
case_empty :: Assertion
case_empty = do
  expected <- jinja2 "test/empty.tmpl" empty
  tmpl <- readTemplateFile def "test/empty.tmpl"
  expected @=? render tmpl (toJSON empty)
  expected @=? render $(haijiFile def "test/empty.tmpl") empty
-- | Line-feed handling matches Jinja2 (variant 1).
case_lf1 :: Assertion
case_lf1 = do
  expected <- jinja2 "test/lf1.tmpl" empty
  tmpl <- readTemplateFile def "test/lf1.tmpl"
  expected @=? render tmpl (toJSON empty)
  expected @=? render $(haijiFile def "test/lf1.tmpl") empty
-- | Line-feed handling matches Jinja2 (variant 2).
case_lf2 :: Assertion
case_lf2 = do
  expected <- jinja2 "test/lf2.tmpl" empty
  tmpl <- readTemplateFile def "test/lf2.tmpl"
  expected @=? render tmpl (toJSON empty)
  expected @=? render $(haijiFile def "test/lf2.tmpl") empty
-- | A template whose last line lacks a trailing newline matches Jinja2.
case_line_without_newline :: Assertion
case_line_without_newline = do
  expected <- jinja2 "test/line_without_newline.tmpl" empty
  tmpl <- readTemplateFile def "test/line_without_newline.tmpl"
  expected @=? render tmpl (toJSON empty)
  expected @=? render $(haijiFile def "test/line_without_newline.tmpl") empty
-- | A template ending in a newline matches Jinja2.
case_line_with_newline :: Assertion
case_line_with_newline = do
  expected <- jinja2 "test/line_with_newline.tmpl" empty
  tmpl <- readTemplateFile def "test/line_with_newline.tmpl"
  expected @=? render tmpl (toJSON empty)
  expected @=? render $(haijiFile def "test/line_with_newline.tmpl") empty
-- | Identifier lexing: underscores, leading case and digits in names.
case_variables :: Assertion
case_variables = do
  expected <- jinja2 "test/variables.tmpl" dict
  tmpl <- readTemplateFile def "test/variables.tmpl"
  expected @=? render tmpl (toJSON dict)
  expected @=? render $(haijiFile def "test/variables.tmpl") dict
    where
      dict = [key|foo|] ("normal" :: T.Text) `merge`
             [key|_foo|] ("start '_'" :: T.Text) `merge`
             [key|Foo|] ("start upper case" :: T.Text) `merge`
             [key|F__o_o__|] ("include '_'" :: T.Text) `merge`
             [key|F1a2b3c|] ("include num" :: T.Text)
-- | String literals inside templates.
case_string :: Assertion
case_string = do
  expected <- jinja2 "test/string.tmpl" dict
  tmpl <- readTemplateFile def "test/string.tmpl"
  expected @=? render tmpl (toJSON dict)
  expected @=? render $(haijiFile def "test/string.tmpl") dict
    where
      dict = [key|test|] ("test" :: T.Text)
-- | The range() builtin.
case_range :: Assertion
case_range = do
  expected <- jinja2 "test/range.tmpl" dict
  tmpl <- readTemplateFile def "test/range.tmpl"
  expected @=? render tmpl (toJSON dict)
  expected @=? render $(haijiFile def "test/range.tmpl") dict
    where
      dict = [key|value|] (5 :: Integer) `merge`
             [key|array|] ([1,2,3] :: [Integer])
-- | Arithmetic expressions, including negative operands.
case_arith :: Assertion
case_arith = do
  expected <- jinja2 "test/arith.tmpl" dict
  tmpl <- readTemplateFile def "test/arith.tmpl"
  expected @=? render tmpl (toJSON dict)
  expected @=? render $(haijiFile def "test/arith.tmpl") dict
    where
      dict = [key|value|] ((-1) :: Integer) `merge`
             [key|array|] ([1,2,3] :: [Integer])
-- | Comparison operators.
case_comparison :: Assertion
case_comparison = do
  expected <- jinja2 "test/comparison.tmpl" dict
  tmpl <- readTemplateFile def "test/comparison.tmpl"
  expected @=? render tmpl (toJSON dict)
  expected @=? render $(haijiFile def "test/comparison.tmpl") dict
    where
      dict = [key|value|] ((1) :: Integer) `merge` -- There exists jinja2 bug (https://github.com/pallets/jinja/issues/755)
             [key|array|] ([1,2,3] :: [Integer]) `merge`
             [key|text|] ("text" :: T.Text)
-- | Boolean operators.
case_logic :: Assertion
case_logic = do
  expected <- jinja2 "test/logic.tmpl" dict
  tmpl <- readTemplateFile def "test/logic.tmpl"
  expected @=? render tmpl (toJSON dict)
  expected @=? render $(haijiFile def "test/logic.tmpl") dict
    where
      dict = [key|value|] ((1) :: Integer) `merge`
             [key|array|] ([1,2,3] :: [Integer])
-- | Autoescaping over the whole printable ASCII range.
case_HTML_escape :: Assertion
case_HTML_escape = do
  expected <- jinja2 "test/HTML_escape.tmpl" dict
  tmpl <- readTemplateFile def "test/HTML_escape.tmpl"
  expected @=? render tmpl (toJSON dict)
  expected @=? render $(haijiFile def "test/HTML_escape.tmpl") dict
    where
      dict = [key|foo|] (T.pack [' '..'\126'])
-- | if/elif/else over every combination of three boolean flags.
case_condition :: Assertion
case_condition = forM_ (replicateM 3 [True, False]) $ \[foo, bar, baz] -> do
  let dict = [key|foo|] foo `merge`
             [key|bar|] bar `merge`
             [key|baz|] baz
  expected <- jinja2 "test/condition.tmpl" dict
  tmpl <- readTemplateFile def "test/condition.tmpl"
  expected @=? render tmpl (toJSON dict)
  expected @=? render $(haijiFile def "test/condition.tmpl") dict
-- | Basic for-loop rendering.
case_foreach :: Assertion
case_foreach = do
  expected <- jinja2 "test/foreach.tmpl" dict
  tmpl <- readTemplateFile def "test/foreach.tmpl"
  expected @=? render tmpl (toJSON dict)
  expected @=? render $(haijiFile def "test/foreach.tmpl") dict
    where
      dict = [key|foo|] ([0,2..10] :: [Integer])
-- | The loop variable shadows an outer binding of the same name; the
-- outer value must never leak into the output.
case_foreach_shadowing :: Assertion
case_foreach_shadowing = do
  expected <- jinja2 "test/foreach.tmpl" dict
  tmpl <- readTemplateFile def "test/foreach.tmpl"
  expected @=? render tmpl (toJSON dict)
  expected @=? render $(haijiFile def "test/foreach.tmpl") dict
  False @=? ("bar" `LT.isInfixOf` expected)
    where
      dict = [key|foo|] ([0,2..10] :: [Integer]) `merge`
             [key|bar|] ("bar" :: T.Text)
-- | A for-loop's else block fires when the collection is empty.
case_foreach_else_block :: Assertion
case_foreach_else_block = do
  expected <- jinja2 "test/foreach_else_block.tmpl" dict
  tmpl <- readTemplateFile def "test/foreach_else_block.tmpl"
  expected @=? render tmpl (toJSON dict)
  expected @=? render $(haijiFile def "test/foreach_else_block.tmpl") dict
    where
      dict = [key|foo|] ([] :: [Integer])
-- | The include statement, exercised with numeric and textual lists.
case_include :: Assertion
case_include = do
  testInclude ([0..10] :: [Integer])
  testInclude (["","\n","\n\n"] :: [T.Text]) where
    testInclude xs = do
      expected <- jinja2 "test/include.tmpl" dict
      tmpl <- readTemplateFile def "test/include.tmpl"
      expected @=? render tmpl (toJSON dict)
      expected @=? render $(haijiFile def "test/include.tmpl") dict
        where
          dict = [key|foo|] xs
-- | {% raw %} blocks are emitted verbatim, not interpreted.
case_raw :: Assertion
case_raw = do
  expected <- jinja2 "test/raw.tmpl" dict
  tmpl <- readTemplateFile def "test/raw.tmpl"
  expected @=? render tmpl (toJSON dict)
  expected @=? render $(haijiFile def "test/raw.tmpl") dict
    where
      dict = [key|foo|] ([0,2..10] :: [Integer]) `merge`
             [key|bar|] ("bar" :: T.Text)
-- Special loop.* variables (index, first, last, ...) inside {% for %}.
case_loop_variables :: Assertion
case_loop_variables = do
  expected <- jinja2 "test/loop_variables.tmpl" dict
  tmpl <- readTemplateFile def "test/loop_variables.tmpl"
  expected @=? render tmpl (toJSON dict)
  expected @=? render $(haijiFile def "test/loop_variables.tmpl") dict
  where
    dict = [key|foo|] ([0,2..10] :: [Integer])

-- Whitespace-control markers ({%- ... -%}) must strip exactly like jinja2.
case_whitespace_control :: Assertion
case_whitespace_control = do
  expected <- jinja2 "test/whitespace_control.tmpl" dict
  tmpl <- readTemplateFile def "test/whitespace_control.tmpl"
  expected @=? render tmpl (toJSON dict)
  expected @=? render $(haijiFile def "test/whitespace_control.tmpl") dict
  where
    dict = [key|seq|] ([0,2..10] :: [Integer])

-- {# ... #} comments produce no output.
case_comment :: Assertion
case_comment = do
  expected <- jinja2 "test/comment.tmpl" dict
  tmpl <- readTemplateFile def "test/comment.tmpl"
  expected @=? render tmpl (toJSON dict)
  expected @=? render $(haijiFile def "test/comment.tmpl") dict
  where
    dict = [key|seq|] ([0,2..10] :: [Integer])

-- {% set %} assignments, with the dictionary supplying the iterated lists.
case_set :: Assertion
case_set = do
  expected <- jinja2 "test/set.tmpl" dict
  tmpl <- readTemplateFile def "test/set.tmpl"
  expected @=? render tmpl (toJSON dict)
  expected @=? render $(haijiFile def "test/set.tmpl") dict
  where
    dict = [key|ys|] ([0..2] :: [Integer]) `merge`
           [key|xs|] ([0..3] :: [Integer])

-- Template inheritance: child.tmpl {% extends %} a parent and overrides blocks.
case_extends :: Assertion
case_extends = do
  expected <- jinja2 "test/child.tmpl" dict
  tmpl <- readTemplateFile def "test/child.tmpl"
  expected @=? render tmpl (toJSON dict)
  expected @=? render $(haijiFile def "test/child.tmpl") dict
  where
    dict = [key|foo|] ("foo" :: T.Text) `merge`
           [key|bar|] ("bar" :: T.Text) `merge`
           [key|baz|] ("baz" :: T.Text)

-- Stress test: a dictionary with many keys (a..q), mainly exercising the
-- Template Haskell key-record machinery at compile time.
case_many_variables :: Assertion
case_many_variables = do
  expected <- jinja2 "test/many_variables.tmpl" dict
  tmpl <- readTemplateFile def "test/many_variables.tmpl"
  expected @=? render tmpl (toJSON dict)
  expected @=? render $(haijiFile def "test/many_variables.tmpl") dict
  where
    dict = [key|a|] ("b" :: T.Text) `merge`
           [key|b|] ("b" :: T.Text) `merge`
           [key|c|] ("b" :: T.Text) `merge`
           [key|d|] ("b" :: T.Text) `merge`
           [key|e|] ("b" :: T.Text) `merge`
           [key|f|] ("b" :: T.Text) `merge`
           [key|g|] ("b" :: T.Text) `merge`
           [key|h|] ("b" :: T.Text) `merge`
           [key|i|] ("b" :: T.Text) `merge`
           [key|j|] ("b" :: T.Text) `merge`
           [key|k|] ("b" :: T.Text) `merge`
           [key|l|] ("b" :: T.Text) `merge`
           [key|m|] ("b" :: T.Text) `merge`
           [key|n|] ("b" :: T.Text) `merge`
           [key|o|] ("b" :: T.Text) `merge`
           [key|p|] ("b" :: T.Text) `merge`
           [key|q|] ("b" :: T.Text)
| notogawa/haiji | test/tests.hs | bsd-3-clause | 11,722 | 0 | 25 | 2,636 | 3,529 | 1,902 | 1,627 | 248 | 1 |
module Test where
import qualified CallByName
import qualified CallByValue
import Duality
import Substitution
import Syntax
import Control.Monad
import Data.Unique.Id
import Test.QuickCheck
import Text.Show.Functions ()
-- | QuickCheck-1 style instance: uniform choice between the two sum
-- injection tags.  'coarbitrary' perturbs the generator per constructor so
-- that randomly generated functions can depend on an 'InlInr' argument.
instance Arbitrary InlInr where
    arbitrary = elements [Inl, Inr]
    coarbitrary Inl = variant 0
    coarbitrary Inr = variant 1
-- | Same scheme as the 'InlInr' instance, for the two product projections.
instance Arbitrary FstSnd where
    arbitrary = elements [Fst, Snd]
    coarbitrary Fst = variant 0
    coarbitrary Snd = variant 1
-- | Statements are generated with exactly one free variable ("input") and
-- one free covariable ("halt").  'coarbitrary' is deliberately unsupported:
-- no property generates functions over 'Stmt'.
instance Arbitrary Stmt where
    arbitrary = arbitraryStmt ["input"] ["halt"]
    coarbitrary = error "Stmt: coarbitrary"
-- | Generate a cut <term | coterm> whose free variables and covariables are
-- drawn from the supplied environments.
arbitraryStmt :: [Var] -> [CoVar] -> Gen Stmt
arbitraryStmt vs covs = liftM2 Cut (arbitraryTerm vs covs) (arbitraryCoTerm vs covs)
-- | Size-bounded term generator.  At size 0 only a variable from @vs@ is
-- produced; otherwise one of the four term formers is picked with a reduced
-- size budget ('Tup' halves it, the unary formers decrement it).  'Bind'
-- brings a fresh covariable @"aN"@ (N = current depth of the covariable
-- environment, guaranteeing freshness) into scope for its body.
arbitraryTerm :: [Var] -> [CoVar] -> Gen Term
arbitraryTerm vs covs = sized $ \n ->
    if n <= 0
      then liftM Var (elements vs)
      else oneof [
        resize (n - 1) $ liftM2 Data (arbitraryTerm vs covs) arbitrary,
        resize (n `div` 2) $ liftM2 Tup (arbitraryTerm vs covs) (arbitraryTerm vs covs),
        resize (n - 1) $ liftM Not (arbitraryCoTerm vs covs),
        resize (n - 1) $ let a = "a" ++ show (length covs) in liftM (flip Bind a) (arbitraryStmt vs (a : covs))
      ]
-- | Dual of 'arbitraryTerm': size-bounded coterm generator.  'CoBind'
-- introduces a fresh variable @"xN"@ into scope for its body, mirroring how
-- 'Bind' introduces a covariable on the term side.
arbitraryCoTerm :: [Var] -> [CoVar] -> Gen CoTerm
arbitraryCoTerm vs covs = sized $ \n ->
    if n <= 0
      then liftM CoVar (elements covs)
      else oneof [
        resize (n `div` 2) $ liftM2 CoData (arbitraryCoTerm vs covs) (arbitraryCoTerm vs covs),
        resize (n - 1) $ liftM2 CoTup arbitrary (arbitraryCoTerm vs covs),
        resize (n - 1) $ liftM CoNot (arbitraryTerm vs covs),
        resize (n - 1) $ let x = "x" ++ show (length vs) in liftM (CoBind x) (arbitraryStmt (x : vs) covs)
      ]
-- | QuickCheck the propositions of Wadler's "Call-by-Value is Dual to
-- Call-by-Name"; the 'IdSupply' feeds fresh names to the CPS translations.
main :: IdSupply -> IO ()
main ids = do
  -- CBV Is Dual To CBN: Proposition 3.1 -- Duality is an involution
  quickCheck $ \s -> dualizeStmt (dualizeStmt s) == s
  -- CBV Is Dual To CBN: Proposition 5.1 -- Call-by-name is dual to call-by-value
  quickCheck $ \s -> CallByName.step s == fmap dualizeStmt (CallByValue.step (dualizeStmt s))
  -- CBV Is Dual To CBN: Proposition 5.2 -- Under call-by-value, implication can be defined by ...
  quickCheck $ forAllTwoHoleCtxt $ \ctxt -> forAll (arbitraryTerm ["x"] []) $ \m ->
    CallByValue.step (ctxt (CallByValue.lam "x" m) (CallByValue.colam (Var "y") (CoVar "halt"))) == CallByValue.step (ctxt (Lam "x" m) (CoLam (Var "y") (CoVar "halt")))
  -- CBV Is Dual To CBN: Proposition 5.3 -- Under call-by-name, implication can be defined by ...
  -- NOTE(review): this property pairs CallByName.lam with CallByValue.colam;
  -- verify against the paper whether CallByName.colam was intended here.
  quickCheck $ forAllTwoHoleCtxt $ \ctxt -> forAll (arbitraryTerm ["x"] []) $ \m ->
    normalise CallByName.step (ctxt (CallByName.lam "x" m) (CallByValue.colam (Var "y") (CoVar "halt"))) == normalise CallByName.step (ctxt (Lam "x" m) (CoLam (Var "y") (CoVar "halt")))
  -- CBV Is Dual To CBN: Proposition 6.3 -- The call-by-value and call-by-name CPS translations are dual
  quickCheck $ \s -> CallByValue.cps ids s == CallByName.cps ids (dualizeStmt s)
-- | Repeatedly apply a small-step reduction function until it returns
-- 'Nothing', i.e. until a normal form is reached.  The search is bounded by
-- 'maxSteps' reductions so that divergent terms cannot hang the test run;
-- if the budget is exhausted the result is 'Nothing'.
--
-- (Rewrite of the original, which shadowed the binding @what@ inside its
-- own @case@ and left the step bound as a bare magic number.)
normalise :: (a -> Maybe a) -> a -> Maybe a
normalise step = go maxSteps
  where maxSteps = 50 :: Int
        -- fuel-bounded iteration over the reduction sequence
        go 0 _ = Nothing
        go n cur = case step cur of
                     Just next -> go (n - 1) next
                     Nothing -> Just cur
forAllTwoHoleCtxt :: Testable b => ((Term -> CoTerm -> Stmt) -> b) -> Property
forAllTwoHoleCtxt test = forAll (arbitraryStmt ["f"] ["a"]) $ \stmt -> test $ \f a -> substStmt (extendSubstTerm (coTermSubst "a" a) "f" f) stmt | batterseapower/dual-calculus | Test.hs | bsd-3-clause | 3,480 | 0 | 18 | 795 | 1,295 | 660 | 635 | 57 | 3 |
-- Compiler Toolkit: Sets derived from finite maps
--
-- Author : Manuel M T Chakravarty
-- Created: 2 February 99
--
-- Version $Revision: 1.6 $ from $Date: 2003/04/16 11:11:46 $
--
-- Copyright (c) [1999..2003] Manuel M T Chakravarty
--
-- This file is free software; you can redistribute it and/or modify
-- it under the terms of the GNU General Public License as published by
-- the Free Software Foundation; either version 2 of the License, or
-- (at your option) any later version.
--
-- This file is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-- GNU General Public License for more details.
--
--- DESCRIPTION ---------------------------------------------------------------
--
-- This module provides sets as an abstract data type implemented on top of
-- finite maps.
--
--- DOCU ----------------------------------------------------------------------
--
-- language: Haskell 98
--
--- TODO ----------------------------------------------------------------------
--
module Text.CTK.Sets (
Set, zeroSet, unitSet, listToSet, joinSet, sizeSet, addToSet,
delFromSet, diffSet, isSubSet, isSuperSet, intersectSet, mapSet,
foldSet, filterSet, elemSet, toListSet, powerSet,
-- operations related to the underlying finite maps
--
domSetFM
) where
import Text.CTK.FiniteMaps (FiniteMap, addToFM, delFromFM, diffFM,
filterFM, foldFM, intersectFM, joinCombFM,
joinFM, listToFM, lookupDftFM, lookupFM,
mapFM, sizeFM, toListFM, unitFM, zeroFM)
-- a set is a finite map with a trivial image (EXPORTED ABSTRACT)
--
-- Every element is stored as a key mapped to `()', so all operations below
-- simply delegate to the underlying `FiniteMap'.
newtype (Ord a) =>
        Set a = Set (FiniteMap a ())
                deriving (Eq, Ord)
-- ATTENTION: the ordering is _not_ the subset relation
--
-- | Render a set as @{x1, x2, ...}@; the actual work is done by 'toShowS'.
instance (Show a, Ord a) => Show (Set a) where
  showsPrec = toShowS -- defined below
-- | The empty set.
zeroSet :: Ord a => Set a
zeroSet = Set zeroFM

-- | A singleton set.
unitSet :: Ord a => a -> Set a
unitSet x = Set $ unitFM x ()
-- | Build a set from a list; duplicate elements collapse in the map.
listToSet :: Ord a => [a] -> Set a
listToSet xs = Set (listToFM [(x, ()) | x <- xs])
-- | Number of elements.
sizeSet :: Ord a => Set a -> Int
sizeSet (Set s) = sizeFM s

-- | Insert an element (no effect if already present).
addToSet :: Ord a => a -> Set a -> Set a
addToSet x (Set s) = Set $ addToFM x () s

-- | Remove an element (no effect if absent).
delFromSet :: Ord a => a -> Set a -> Set a
delFromSet x (Set s) = Set $ delFromFM x s

-- | Set union.
joinSet :: Ord a => Set a -> Set a -> Set a
joinSet (Set s) (Set t) = Set $ joinFM s t

-- | Set difference: elements of the first set not in the second.
diffSet :: Ord a => Set a -> Set a -> Set a
diffSet (Set s) (Set t) = Set $ diffFM s t
-- | @isSubSet a b@ holds iff every element of @a@ occurs in @b@,
-- i.e. their difference is empty.
isSubSet :: Ord a => Set a -> Set a -> Bool
isSubSet sub super = diffSet sub super == zeroSet
-- | Flipped 'isSubSet'.
isSuperSet :: Ord a => Set a -> Set a -> Bool
isSuperSet s1 s2 = s2 `isSubSet` s1

-- | Set intersection.
intersectSet :: Ord a => Set a -> Set a -> Set a
intersectSet (Set s) (Set t) = Set $ intersectFM s t
-- | Map a function over a set.  The result can be smaller than the input
-- when @f@ is not injective (images collide on the same key).
mapSet :: (Ord a, Ord b) => (a -> b) -> Set a -> Set b
mapSet f (Set s) = Set (listToFM [(f x, ()) | (x, _) <- toListFM s])
-- | Fold over the elements (element order follows the underlying 'foldFM').
foldSet :: Ord a => (a -> b -> b) -> b -> Set a -> b
foldSet f z (Set s) = foldFM (\x _ y -> f x y) z s

-- | Keep only the elements satisfying the predicate.
filterSet :: Ord a => (a -> Bool) -> Set a -> Set a
filterSet p (Set s) = Set $ filterFM (\x _ -> p x) s
-- | Membership test.
elemSet :: Ord a => a -> Set a -> Bool
elemSet x (Set s) = maybe False (const True) (lookupFM s x)
-- | List the elements (in the key order of the underlying map).
toListSet :: Ord a => Set a -> [a]
toListSet (Set s) = (map fst . toListFM) s
-- compute the power set of the given set (EXPORTED)
--
-- Folds over the elements: starting from {{}}, each element e doubles the
-- collection by joining it with a copy of every accumulated subset
-- extended by e.
powerSet :: Ord a => Set a -> Set (Set a)
powerSet = foldSet addOne (unitSet zeroSet)
          where
            addOne e s = mapSet (addToSet e) s `joinSet` s
-- pretty print routine (used as a method in the `Set' instance of `Show')
--
-- The precedence argument is ignored: sets are always rendered in full as
-- "{e1, e2, ...}" with comma-separated elements.
toShowS :: (Show a, Ord a) => Int -> Set a -> ShowS
toShowS _ (Set s) = showString "{"
                    . (format . map fst . toListFM $ s)
                    . showString "}"
  where
    -- comma-separate, with no trailing separator
    format [] = showString ""
    format [x] = shows x
    format (x:xs) = shows x . showString ", " . format xs
-- Operations relating to the underlying finite maps
-- -------------------------------------------------
-- |Yield the domain of a finite map as a set
--
-- Implemented by forgetting the map's range (all values become `()').
domSetFM :: Ord k => FiniteMap k e -> Set k
domSetFM = Set . mapFM (\_ _ -> ())
| mwotton/ctkl | src/Text/CTK/Sets.hs | bsd-3-clause | 4,515 | 0 | 14 | 1,295 | 1,398 | 734 | 664 | 60 | 3 |
module Formulas where
import Misc
-- We provide a datatype for conditions that can occur in the calculation
-- of preconditions for concurrent guarded workflows. We currently abstract
-- from atomic formulas in the sense that only strings are used to represent them.
-- Additionally, we currently do not actually calculate the weakest precondition
-- of some elementary action (also abstractly represented as a string) with respect
-- to some condition. Furthermore, we include representations Andn and Orn to allow
-- for a better printing of these formulas. Semantically, they have no other meaning
-- than And and Or.
-- | Abstract syntax of conditions.  'Andn'/'Orn' are semantically the same
-- as 'And'/'Or'; they only select a multi-line layout in 'fshow'.
data Formula =
    Atom String                -- ^ atomic formula, represented by its name
  | Wp String Formula          -- ^ symbolic weakest precondition wp(action, formula)
  | And Formula Formula        -- ^ conjunction (single-line layout)
  | Andn Formula Formula       -- ^ conjunction (multi-line layout)
  | Or Formula Formula         -- ^ disjunction (single-line layout)
  | Orn Formula Formula        -- ^ disjunction (multi-line layout)
  | Not Formula                -- ^ negation
  | Imp Formula Formula        -- ^ implication
  deriving (Eq,Ord)
-- The function fshow generates a string representing a formula. It uses
-- an offset to keep track of the position at the end of the last line.
-- The offset is basically used to be able to print formulas containing
-- Andn and Orn more nicely.
-- | Render a formula, threading a column offset.  The offset passed in is
-- the column at which the rendering starts; the returned offset is the
-- column just after the last character emitted.  'Andn'/'Orn' use the
-- offset (via 'ostring') to indent their continuation lines.
-- NOTE(review): the per-case offset increments (e.g. @o1+1@ after a closing
-- parenthesis) encode the exact widths of the emitted delimiters; verify
-- against the produced layouts before changing any of them.
fshow :: Formula -> Int -> (String,Int)
fshow (Atom a) offset = (a,offset + (length a))
fshow (Wp a f) offset = let (s1,o1) = (fshow f (offset + (length a) + 4))
                        in ("wp("++ a ++ "," ++ s1 ++ ")",o1+1)
fshow (And f1 f2) offset = let (s1,o1) = fshow f1 offset
                               (s2,o2) = fshow f2 (o1+4)
                           in (s1 ++ " /\\ " ++ s2 , o2)
fshow (Andn f1 f2) offset = let (s1,o1) = fshow f1 offset
                                (s2,o2) = fshow f2 offset
                            in (s1 ++ "\n" ++
                                ostring(offset) ++ "/\\" ++ "\n" ++
                                ostring(offset) ++ s2,o2)
fshow (Or f1 f2) offset = let (s1,o1) = fshow f1 (offset+1)
                              (s2,o2) = fshow f2 (o1+4)
                          in ("(" ++ s1 ++ " \\/ " ++ s2 ++ ")", o2+1)
fshow (Orn f1 f2) offset = let (s1,o1) = fshow f1 (offset+1)
                               (s2,o2) = fshow f2 (offset+1)
                           in ("(" ++ s1 ++ "\n" ++
                               ostring(offset+1) ++ "\\/" ++ "\n" ++
                               ostring(offset+1) ++ s2 ++ ")",o2+1)
fshow (Not f) offset = let (s1,o1) = fshow f (offset + 5)
                       in ("not (" ++ s1 ++ ")",o1+1)
fshow (Imp f1 f2) offset = let (s1,o1) = fshow f1 (offset+1)
                               (s2,o2) = fshow f2 (o1+5)
                           in ("(" ++ s1 ++ " --> " ++ s2 ++ ")", o2+1)
-- Using the fshow function above, we are able to instantiate the Show-class for Formula.
-- | Pretty-print via 'fshow', starting at column 0 and discarding the
-- final offset.
instance Show Formula where
  show f = fst (fshow f 0)
| Booster2/Booster2 | Workflow_Precond/impl_nondisjoint/Formulas.hs | bsd-3-clause | 2,794 | 0 | 16 | 988 | 837 | 444 | 393 | 40 | 1 |
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE RecursiveDo #-}
module Hadoom.Editor.Mode.MoveSector (moveSectorMode) where
import BasePrelude hiding (union)
import Hadoom.Editor.GUI
import Hadoom.Editor.Render
import Hadoom.Editor.SectorBuilder
import Hadoom.Editor.Util
import Linear
import Linear.Affine
import Reactive.Banana
import Reactive.Banana.Frameworks
import Reactive.Banana.GTK
import qualified Data.IntMap.Strict as IntMap
import qualified Graphics.UI.Gtk as GTK
import {-# SOURCE #-} Hadoom.Editor.Mode.Default
-- | Editor interaction mode active while the user drags a whole sector.
-- On every mouse move the sector's vertices are rebuilt at an offset of
-- (current grid position - drag origin grid position) from their initial
-- coordinates; releasing the right mouse button commits the current
-- 'SectorBuilder' and switches back to 'defaultMode'.  The rendered
-- diagram highlights the dragged sector as selected.
moveSectorMode :: Frameworks t
               => HadoomGUI
               -> SectorBuilder
               -> IntMap.Key
               -> Point V2 Double
               -> Moment t (Behavior t Diagram)
moveSectorMode gui@HadoomGUI{..} initialSectorBuilder sectorId dragOrigin =
  mdo let switch = once switchToDefault
          -- 'active' goes False after the first switch so stale events
          -- from this mode are ignored
          active = stepper True (False <$ switch)
      dragComplete <- filterE ((== GTK.RightButton) . mcButton) .
                      whenE active <$>
                      registerMouseReleased guiMap
      mouseMoved <- whenE active <$> registerMotionNotify guiMap
      let widgetSize =
            pure (V2 30 30 ^*
                  50) -- TODO
          originGrid =
            toGridCoords <$>
            (toDiagramCoords <$> widgetSize <*> pure mapExtents <*>
             pure dragOrigin)
          sectorBuilder =
            stepper initialSectorBuilder
                    (moveSector sectorId initialSectorBuilder <$>
                     (flip (.-.) <$>
                      originGrid <@>
                      (toGridCoords <$>
                       (toDiagramCoords <$> widgetSize <*> pure mapExtents <@>
                        mouseMoved))))
      switchToDefault <- execute ((\sb ->
                                     FrameworksMoment
                                       (trimB =<<
                                        defaultMode gui sb)) <$>
                                  (sectorBuilder <@ dragComplete))
      let diagram =
            renderSectorsWithSelection <$> sectorBuilder <*>
            pure (Just sectorId)
      return (switchB diagram switch)
-- | Translate every vertex belonging to the given sector by @offset@,
-- leaving all other vertices untouched.
moveSector :: IntMap.Key -> SectorBuilder -> V2 Double -> SectorBuilder
moveSector sectorId sectorBuilder offset =
  sectorBuilder {sbVertices = IntMap.mapWithKey adjust (sbVertices sectorBuilder)}
  where
    -- vertex ids that make up the sector being dragged
    sectorVertexIds = sbSectors sectorBuilder IntMap.! sectorId
    adjust vId coords
      | vId `elem` sectorVertexIds = coords .+^ offset
      | otherwise = coords
| ocharles/hadoom | hadoom-editor/Hadoom/Editor/Mode/MoveSector.hs | bsd-3-clause | 2,645 | 0 | 20 | 978 | 557 | 298 | 259 | 63 | 2 |
{-# LANGUAGE FlexibleContexts #-}
module TimingIsEverything (solve) where
import Data.List (find)
import Data.Maybe (fromJust)
import Text.Parsec.Prim (Stream, ParsecT, parse)
import Text.Parsec.Char (endOfLine, string, digit)
import Text.Parsec.Combinator (endBy, many1)
-- | A disc, modelled as a predicate: given the time at which the capsule
-- reaches it, is its slot lined up?
type Disc = Int -> Bool

-- | One or more digits read as an 'Int'.  'read' cannot fail here because
-- the input is guaranteed to be a non-empty digit string.
integerLiteral :: Stream s m Char => ParsecT s u m Int
integerLiteral = read <$> many1 digit
-- | Parse one input line of the form
-- @Disc #K has N positions; at time=0, it is at position I.@
-- and build the predicate \t -> (t + I) `mod` N == 0, i.e. the disc's slot
-- is at position 0 when the capsule arrives at time t.  (The disc number K
-- is parsed but discarded; line order defines it.)
disk :: Stream s m Char => ParsecT s u m Disc
disk = (\n i t -> ((t + i) `mod` n) == 0) <$>
       (string "Disc #" *> many1 digit *> string " has " *> integerLiteral) <*>
       (string " positions; at time=0, it is at position " *> integerLiteral <* string ".")

-- | A newline-terminated list of disc descriptions.
diskList :: Stream s m Char => ParsecT s u m [Disc]
diskList = disk `endBy` endOfLine
-- | Dropping at time @t@, the capsule reaches disc k (1-based) at time
-- @t + k@, so every disc's predicate must hold at its own arrival time.
fallsThroughAllDiscs :: [Disc] -> Int -> Bool
fallsThroughAllDiscs discs dropTime = and (zipWith ($) discs [dropTime + 1 ..])
-- | First non-negative drop time at which the capsule clears every disc.
-- 'fromJust' never sees 'Nothing' here: 'find' over the infinite list
-- [0..] either succeeds or diverges, so a puzzle with no solution loops
-- rather than crashing.
timeToDrop :: [Disc] -> Int
timeToDrop xs = fromJust $ find (fallsThroughAllDiscs xs) [0..]
-- | Parse the puzzle input and print the answers to both parts: the drop
-- time for the discs as given, then (part two) the drop time after
-- appending one more disc with 11 positions starting at position 0.
solve :: String -> IO ()
solve input = do
  let parsed = parse diskList "" input
  case parsed of
    Left err -> print err
    Right ds -> do
      let t = timeToDrop ds
      print t
      -- part two: extra disc, 11 positions, initially at position 0
      let newDisk t = (t `mod` 11) == 0
      let newDs = ds ++ [newDisk]
      let newT = timeToDrop newDs
      print newT
| cjlarose/advent-2016 | src/TimingIsEverything.hs | bsd-3-clause | 1,266 | 0 | 17 | 291 | 513 | 267 | 246 | 32 | 2 |
module Kite.Test.Parser (parserTests) where
import Test.Tasty
import Test.Tasty.HUnit
import Kite.Lexer
import Kite.Parser
import Kite.Syntax
-- | Parser test group: feeds source text through the lexer
-- ('alexScanTokens') and parser ('kiteparser') and compares the resulting
-- AST.  The commented-out cases below are legacy token-level tests kept
-- for reference.
parserTests = testGroup "Parser"
  [testCase "Integer" $
   kiteparser (alexScanTokens "main = ->{3}") @?= [PDecl "main" (PLambda "Void" (PBlock [PReturn (PInteger 3)]))]
  , testCase "Float" $
    kiteparser (alexScanTokens "main = ->{21.1}") @?= [PDecl "main" (PLambda "Void" (PBlock [PReturn (PFloat 21.1)]))]
  -- , testCase "Float" $
  --   kiteparser [Float 21.1] @?= PTerm (PFloat 21.1)
  -- , testCase "String" $
  --   kiteparser [String "yolo"] @?= PTerm (PString "yolo")
  -- , testCase "Symbol" $
  --   kiteparser [Symbol '(', Float 21.1, Symbol ')'] @?= PGroup (PTerm (PFloat 21.1))
  -- , testCase "Keyword return" $
  --   kiteparser [Keyword "return", Identifier "moby"] @?= PReturn (PTerm (PIdentifier "moby"))
  -- , testCase "Function" $
  --   kiteparser [
  --     Symbol '(', Type "Int", Symbol ')',
  --     Operator "->",
  --     Type "Float", Identifier "foo",
  --     Operator "=",
  --     Symbol '(', Type "Int", Identifier "a", Symbol ')',
  --     Operator "->",
  --     Type "Float",
  --     Symbol '{', Integer 2, Symbol ';', Symbol '}'
  --     ] @?=
  --   PBind
  --   (PLambdaType [PPrimType "Int"] (PPrimType "Float")) (PIdentifier "foo")
  --   (PLambda
  --    (PLambdaType
  --     [PTypeArg (PPrimType "Int") (PIdentifier "a")]
  --     (PPrimType "Float"))
  --    (PBlock [PTerm (PInteger 2)]))
  ]
| kite-lang/kite | tests/Kite/Test/Parser.hs | mit | 1,544 | 0 | 17 | 382 | 188 | 111 | 77 | 11 | 1 |
{-# LANGUAGE CPP, ScopedTypeVariables, OverloadedStrings #-}
-----------------------------------------------------------------------------
--
-- Module : IDE.Utils.GUIUtils
-- Copyright : (c) Juergen Nicklisch-Franken, Hamish Mackenzie
-- License : GNU-GPL
--
-- Maintainer : <maintainer at leksah.org>
-- Stability : provisional
-- Portability : portable
--
-- |
--
-------------------------------------------------------------------------------
module IDE.Utils.GUIUtils (
chooseFile
, chooseDir
, chooseSaveFile
, openBrowser
, showDialog
, showErrorDialog
, getCandyState
, setCandyState
, getFullScreenState
, setFullScreenState
, getDarkState
, setDarkState
, getForgetSession
, getBackgroundBuildToggled
, setBackgroundBuildToggled
, getRunUnitTests
, setRunUnitTests
, getMakeModeToggled
, setMakeModeToggled
, getDebugToggled
, setDebugToggled
, getRecentFiles
, getRecentWorkspaces
, getVCS
, stockIdFromType
, mapControlCommand
, treeViewContextMenu
, __
, fontDescription
) where
import Graphics.UI.Gtk
import IDE.Utils.Tool (runProcess)
import Data.Maybe (fromJust, isJust)
import Control.Monad
import IDE.Core.State
--import Graphics.UI.Gtk.Selectors.FileChooser
-- (FileChooserAction(..))
--import Graphics.UI.Gtk.General.Structs
-- (ResponseId(..))
import Control.Monad.IO.Class (liftIO)
import Control.Exception as E
import Data.Text (Text)
import Data.Monoid ((<>))
import qualified Data.Text as T (unpack)
#ifdef LOCALIZATION
import Text.I18N.GetText
import System.IO.Unsafe (unsafePerformIO)
#endif
-- | Run a modal GTK file-chooser dialog and return the chosen path, or
-- 'Nothing' if the user cancelled.  Shared implementation for 'chooseDir',
-- 'chooseFile' and 'chooseSaveFile', which previously triplicated this
-- logic.  Unlike the old copies, the dialog widget is destroyed on *every*
-- response: the old chooseDir/chooseFile leaked the dialog for responses
-- other than accept/cancel/delete.
--
-- No explicit type signature on purpose: the stock-id and prompt argument
-- types are inferred from 'fileChooserDialogNew', so this compiles against
-- either the String- or Text-based gtk2hs API.
runFileChooser action acceptStockId window prompt mbFolder = do
    dialog <- fileChooserDialogNew
                    (Just prompt)
                    (Just window)
                    action
                    [("gtk-cancel"
                     ,ResponseCancel)
                    ,(acceptStockId
                     ,ResponseAccept)]
    -- pre-select the starting folder, if one was supplied
    when (isJust mbFolder) $ fileChooserSetCurrentFolder dialog (fromJust mbFolder) >> return ()
    widgetShow dialog
    response <- dialogRun dialog
    mbResult <- case response of
        ResponseAccept -> fileChooserGetFilename dialog
        _ -> return Nothing
    widgetDestroy dialog
    return mbResult

-- | Ask the user to select an existing directory.
chooseDir :: Window -> Text -> Maybe FilePath -> IO (Maybe FilePath)
chooseDir = runFileChooser FileChooserActionSelectFolder "gtk-open"

-- | Ask the user to select an existing file to open.
chooseFile :: Window -> Text -> Maybe FilePath -> IO (Maybe FilePath)
chooseFile = runFileChooser FileChooserActionOpen "gtk-open"

-- | Ask the user for a file name to save to.
chooseSaveFile :: Window -> Text -> Maybe FilePath -> IO (Maybe FilePath)
chooseSaveFile = runFileChooser FileChooserActionSave "gtk-save"
-- | Open the given URL in the browser configured in the preferences.
-- Failure to spawn the browser is deliberately swallowed: it is reported
-- as a status message rather than raised as an exception.
openBrowser :: Text -> IDEAction
openBrowser url = do
    prefs' <- readIDE prefs
    liftIO (E.catch (do
                runProcess (T.unpack $ browser prefs') [T.unpack url] Nothing Nothing Nothing Nothing Nothing
                return ())
            (\ (_ :: SomeException) -> sysMessage Normal ("Can't find browser executable " <> browser prefs')))
    return ()
-- | Pop up a simple modal message dialog with one OK button and block
-- until the user dismisses it.
showDialog :: Text -> MessageType -> IO ()
showDialog msg msgType = do
    dialog <- messageDialogNew Nothing [] msgType ButtonsOk msg
    void $ dialogRun dialog
    widgetDestroy dialog
-- | 'showDialog' specialised to an error-styled message box.
showErrorDialog :: Text -> IO ()
showErrorDialog msg = showDialog msg MessageError
-- get widget elements (menu & toolbar)
-- | Read the active state of the toggle action registered at the given UI
-- path.  No explicit type signature: the path type is inferred from
-- 'getUIAction', keeping this compatible with either a String- or
-- Text-based API.
getToggleState path = do
    ui <- getUIAction path castToToggleAction
    liftIO $ toggleActionGetActive ui

-- | Set the active state of the toggle action at the given UI path.
setToggleState path b = do
    ui <- getUIAction path castToToggleAction
    liftIO $ toggleActionSetActive ui b

-- The pairs below previously each repeated the two-line
-- getUIAction/toggleAction pattern verbatim; they are now thin wrappers
-- over 'getToggleState'/'setToggleState' with the UI path as the single
-- point of truth.  All public signatures are unchanged.

getCandyState :: PaneMonad alpha => alpha Bool
getCandyState = getToggleState "ui/menubar/_Configuration/Source Candy"

setCandyState :: PaneMonad alpha => Bool -> alpha ()
setCandyState = setToggleState "ui/menubar/_Configuration/Source Candy"

getFullScreenState :: PaneMonad alpha => alpha Bool
getFullScreenState = getToggleState "ui/menubar/_View/_Full Screen"

setFullScreenState :: PaneMonad alpha => Bool -> alpha ()
setFullScreenState = setToggleState "ui/menubar/_View/_Full Screen"

getDarkState :: PaneMonad alpha => alpha Bool
getDarkState = getToggleState "ui/menubar/_View/Dark"

setDarkState :: PaneMonad alpha => Bool -> alpha ()
setDarkState = setToggleState "ui/menubar/_View/Dark"

getForgetSession :: PaneMonad alpha => alpha Bool
getForgetSession = getToggleState "ui/menubar/_Configuration/Forget Session"

-- | Look up a menu item by its UI path; throws an IDE error if the path is
-- unknown to the UI manager.
getMenuItem :: Text -> IDEM MenuItem
getMenuItem path = do
    uiManager' <- getUiManager
    mbWidget <- liftIO $ uiManagerGetWidget uiManager' path
    case mbWidget of
        Nothing -> throwIDE ("State.hs>>getMenuItem: Can't find ui path " <> path)
        Just widget -> return (castToMenuItem widget)

getBackgroundBuildToggled :: PaneMonad alpha => alpha Bool
getBackgroundBuildToggled = getToggleState "ui/toolbar/BuildToolItems/BackgroundBuild"

setBackgroundBuildToggled :: PaneMonad alpha => Bool -> alpha ()
setBackgroundBuildToggled = setToggleState "ui/toolbar/BuildToolItems/BackgroundBuild"

getRunUnitTests :: PaneMonad alpha => alpha Bool
getRunUnitTests = getToggleState "ui/toolbar/BuildToolItems/RunUnitTests"

setRunUnitTests :: PaneMonad alpha => Bool -> alpha ()
setRunUnitTests = setToggleState "ui/toolbar/BuildToolItems/RunUnitTests"

getMakeModeToggled :: PaneMonad alpha => alpha Bool
getMakeModeToggled = getToggleState "ui/toolbar/BuildToolItems/MakeMode"

setMakeModeToggled :: PaneMonad alpha => Bool -> alpha ()
setMakeModeToggled = setToggleState "ui/toolbar/BuildToolItems/MakeMode"

getDebugToggled :: PaneMonad alpha => alpha Bool
getDebugToggled = getToggleState "ui/toolbar/BuildToolItems/Debug"

setDebugToggled :: PaneMonad alpha => Bool -> alpha ()
setDebugToggled = setToggleState "ui/toolbar/BuildToolItems/Debug"
-- | Menu items backing the recent-files, recent-workspaces and
-- version-control submenus.
getRecentFiles , getRecentWorkspaces, getVCS :: IDEM MenuItem
getRecentFiles = getMenuItem "ui/menubar/_File/Open _Recent"
getRecentWorkspaces = getMenuItem "ui/menubar/_Workspace/Open _Recent"
getVCS = getMenuItem "ui/menubar/Version Con_trol" --this could fail, try returning Menu if it does
-- (toolbar)
-- | Map a symbol kind to the stock icon id used in the browser panes.
-- Unrecognised kinds fall back to the generic "ide_other" icon.
stockIdFromType :: DescrType -> StockId
stockIdFromType descrType =
    case descrType of
        Variable    -> "ide_function"
        Newtype     -> "ide_newtype"
        Type        -> "ide_type"
        Data        -> "ide_data"
        Class       -> "ide_class"
        Instance    -> "ide_instance"
        Constructor -> "ide_konstructor"
        Field       -> "ide_slot"
        Method      -> "ide_method"
        _           -> "ide_other"
-- | On OS X the Command key arrives as Alt, so remap it to Control there;
-- on every other platform the modifier passes through unchanged.
#if defined(darwin_HOST_OS)
mapControlCommand Alt = Control
#endif
mapControlCommand a = a
-- | Attach a context menu to a tree view.  The menu is (re)built by
-- @populateMenu@ each time it is shown, and opens either via the keyboard
-- popup-menu signal or a right mouse click.  Returns both signal
-- connection ids so the caller can disconnect them later.
treeViewContextMenu :: TreeViewClass treeView
                    => treeView
                    -> (Menu -> IO ())
                    -> IO (ConnectId treeView, ConnectId treeView)
treeViewContextMenu treeView populateMenu = do
    cid1 <- treeView `on` popupMenuSignal $ showMenu Nothing
    cid2 <- treeView `on` buttonPressEvent $ do
        button <- eventButton
        click <- eventClick
        timestamp <- eventTime
        (x, y) <- eventCoordinates
        case (button, click) of
            (RightButton, SingleClick) -> liftIO $ do
                -- if at most one row is selected, move the selection to the
                -- row under the cursor before popping up the menu (a
                -- multi-row selection is left alone)
                sel <- treeViewGetSelection treeView
                selCount <- treeSelectionCountSelectedRows sel
                when (selCount <= 1) $ do
                    pathInfo <- treeViewGetPathAtPos treeView (floor x, floor y)
                    case pathInfo of
                        Just (path, _, _) -> do
                            treeSelectionUnselectAll sel
                            treeSelectionSelectPath sel path
                        _ -> return ()
                showMenu (Just (button, timestamp))
            _ -> return False
    return (cid1, cid2)
    where
      -- build a fresh menu and pop it up; returns True (event handled)
      showMenu buttonEventDetails = do
          theMenu <- menuNew
          menuAttachToWidget theMenu treeView
          populateMenu theMenu
          menuPopup theMenu buttonEventDetails
          widgetShowAll theMenu
          return True
#ifdef LOCALIZATION
-- | For i18n using hgettext: translate via the process-wide catalogue.
-- NOTE(review): relies on 'getText' being effectively pure once the
-- catalogue is loaded, hence the 'unsafePerformIO'.
__ :: Text -> Text
__ = T.pack . unsafePerformIO . getText . T.unpack
#else
-- | For i18n support. Not included in this build, so it is the identity.
__ :: Text -> Text
__ = id
#endif
-- | Build a Pango font description: parse the supplied description string
-- when given one, otherwise fall back to a fresh Monospace description.
fontDescription :: Maybe Text -> IDEM FontDescription
fontDescription mbFontString = liftIO $
    case mbFontString of
        Just str ->
            fontDescriptionFromString str
        Nothing -> do
            f <- fontDescriptionNew
            fontDescriptionSetFamily f ("Monospace" :: Text)
            return f
| juhp/leksah | src/IDE/Utils/GUIUtils.hs | gpl-2.0 | 11,027 | 4 | 25 | 2,818 | 2,514 | 1,221 | 1,293 | 254 | 4 |
{-# LANGUAGE OverloadedStrings #-}
{-
Copyright (C) 2011-2014 John MacFarlane <jgm@berkeley.edu>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-}
{- |
Module : Text.Pandoc.SelfContained
Copyright : Copyright (C) 2011-2014 John MacFarlane
License : GNU GPL, version 2 or above
Maintainer : John MacFarlane <jgm@berkeley.edu>
Stability : alpha
Portability : portable
Functions for converting an HTML file into one that can be viewed
offline, by incorporating linked images, CSS, and scripts into
the HTML using data URIs.
-}
module Text.Pandoc.SelfContained ( makeSelfContained ) where
import Text.HTML.TagSoup
import Network.URI (isURI, escapeURIString, URI(..), parseURI)
import Data.ByteString.Base64
import qualified Data.ByteString.Char8 as B
import Data.ByteString (ByteString)
import System.FilePath (takeExtension, takeDirectory, (</>))
import Data.Char (toLower, isAscii, isAlphaNum)
import Codec.Compression.GZip as Gzip
import qualified Data.ByteString.Lazy as L
import Text.Pandoc.Shared (renderTags', err, fetchItem')
import Text.Pandoc.MediaBag (MediaBag)
import Text.Pandoc.MIME (MimeType)
import Text.Pandoc.UTF8 (toString, fromString)
import Text.Pandoc.Options (WriterOptions(..))
-- | Characters that may appear literally in a percent-escaped data: URI:
-- ASCII letters and digits only.
isOk :: Char -> Bool
isOk ch = isAlphaNum ch && isAscii ch
-- | Rewrite a single HTML tag so that any external resource it references
-- is embedded as a data: URI.  Media-bearing tags (img, video, ...) get
-- base64-encoded src/href/poster attributes; script and link tags are
-- inlined with percent-escaped content instead (scripts get a utf-8
-- charset appended to the mime type when none is present).  All other
-- tags pass through unchanged.
convertTag :: MediaBag -> Maybe String -> Tag String -> IO (Tag String)
convertTag media sourceURL t@(TagOpen tagname as)
  | tagname `elem`
     ["img", "embed", "video", "input", "audio", "source", "track"] = do
       as' <- mapM processAttribute as
       return $ TagOpen tagname as'
  where processAttribute (x,y) =
           if x == "src" || x == "href" || x == "poster"
              then do
                (raw, mime) <- getRaw media sourceURL (fromAttrib "type" t) y
                let enc = "data:" ++ mime ++ ";base64," ++ toString (encode raw)
                return (x, enc)
              else return (x,y)
convertTag media sourceURL t@(TagOpen "script" as) =
  case fromAttrib "src" t of
       []     -> return t
       src    -> do
           (raw, mime) <- getRaw media sourceURL (fromAttrib "type" t) src
           let mime' = if ';' `elem` mime
                          then mime -- mime type already has charset
                          else mime ++ ";charset=utf-8"
           let enc = "data:" ++ mime' ++ "," ++ escapeURIString isOk (toString raw)
           return $ TagOpen "script" (("src",enc) : [(x,y) | (x,y) <- as, x /= "src"])
convertTag media sourceURL t@(TagOpen "link" as) =
  case fromAttrib "href" t of
       []  -> return t
       src -> do
           (raw, mime) <- getRaw media sourceURL (fromAttrib "type" t) src
           let enc = "data:" ++ mime ++ "," ++ escapeURIString isOk (toString raw)
           return $ TagOpen "link" (("href",enc) : [(x,y) | (x,y) <- as, x /= "href"])
convertTag _ _ t = return t
-- NOTE: This is really crude, it doesn't respect CSS comments.
-- Scans a CSS byte string for url(...) references and replaces each with a
-- base64 data: URI, recursing over the remainder.  Quoted and unquoted
-- url arguments are both handled; relative references are resolved
-- against the directory @d@.
cssURLs :: MediaBag -> Maybe String -> FilePath -> ByteString
        -> IO ByteString
cssURLs media sourceURL d orig =
  case B.breakSubstring "url(" orig of
       (x,y) | B.null y  -> return orig
             | otherwise -> do
                  let (u,v) = B.breakSubstring ")" $ B.drop 4 y
                  let url = toString
                            $ case B.take 1 u of
                                   "\"" -> B.takeWhile (/='"') $ B.drop 1 u
                                   "'"  -> B.takeWhile (/='\'') $ B.drop 1 u
                                   _    -> u
                  let url' = if isURI url
                                then url
                                else d </> url
                  (raw, mime) <- getRaw media sourceURL "" url'
                  rest <- cssURLs media sourceURL d v
                  let enc = "data:" `B.append` fromString mime `B.append`
                               ";base64," `B.append` (encode raw)
                  return $ x `B.append` "url(" `B.append` enc `B.append` rest
-- | Fetch a resource (from the media bag, the filesystem, or a URL) and
-- return its raw bytes together with a mime type.  @.gz@ payloads are
-- transparently decompressed, and fetched CSS is itself made
-- self-contained via 'cssURLs'.
getRaw :: MediaBag -> Maybe String -> MimeType -> String
       -> IO (ByteString, MimeType)
getRaw media sourceURL mimetype src = do
  let ext = map toLower $ takeExtension src
  fetchResult <- fetchItem' media sourceURL src
  (raw, respMime) <- case fetchResult of
                          Left msg -> err 67 $ "Could not fetch " ++ src ++
                                               "\n" ++ show msg
                          Right x -> return x
  -- gzipped resources are embedded decompressed
  let raw' = if ext == ".gz"
                then B.concat $ L.toChunks $ Gzip.decompress $ L.fromChunks
                      $ [raw]
                else raw
  -- prefer the caller-supplied mime type, fall back to the response's.
  -- NOTE(review): this branch raises a pure 'error', unlike the IO-level
  -- 'err' above — consider unifying the failure path.
  let mime = case (mimetype, respMime) of
                  ("",Nothing) -> error
                         $ "Could not determine mime type for `" ++ src ++ "'"
                  (x, Nothing) -> x
                  (_, Just x ) -> x
  -- when the resource was fetched over HTTP(S), nested relative URLs in
  -- its CSS resolve against the resource's own origin
  let cssSourceURL = case parseURI src of
                          Just u
                            | uriScheme u `elem` ["http:","https:"] ->
                                Just $ show u{ uriPath = "",
                                               uriQuery = "",
                                               uriFragment = "" }
                          _ -> Nothing
  result <- if mime == "text/css"
               then cssURLs media cssSourceURL (takeDirectory src) raw'
               else return raw'
  return (result, mime)
-- | Convert HTML into self-contained HTML, incorporating images,
-- scripts, and CSS using data: URIs.
makeSelfContained :: WriterOptions -> String -> IO String
makeSelfContained opts inp =
  renderTags' <$>
    mapM (convertTag (writerMediaBag opts) (writerSourceURL opts)) (parseTags inp)
| sapek/pandoc | src/Text/Pandoc/SelfContained.hs | gpl-2.0 | 6,295 | 0 | 20 | 2,032 | 1,634 | 854 | 780 | 103 | 7 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TypeFamilies #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
{-# OPTIONS_GHC -fno-warn-unused-binds #-}
{-# OPTIONS_GHC -fno-warn-unused-matches #-}
-- Derived from AWS service descriptions, licensed under Apache 2.0.
-- |
-- Module : Network.AWS.CloudFormation.GetTemplateSummary
-- Copyright : (c) 2013-2015 Brendan Hay
-- License : Mozilla Public License, v. 2.0.
-- Maintainer : Brendan Hay <brendan.g.hay@gmail.com>
-- Stability : auto-generated
-- Portability : non-portable (GHC extensions)
--
-- Returns information about a new or existing template. The
-- 'GetTemplateSummary' action is useful for viewing parameter information,
-- such as default parameter values and parameter types, before you create
-- or update a stack.
--
-- You can use the 'GetTemplateSummary' action when you submit a template,
-- or you can get template information for a running or deleted stack.
--
-- For deleted stacks, 'GetTemplateSummary' returns the template
-- information for up to 90 days after the stack has been deleted. If the
-- template does not exist, a 'ValidationError' is returned.
--
-- /See:/ <http://docs.aws.amazon.com/AWSCloudFormation/latest/APIReference/API_GetTemplateSummary.html AWS API Reference> for GetTemplateSummary.
module Network.AWS.CloudFormation.GetTemplateSummary
(
-- * Creating a Request
getTemplateSummary
, GetTemplateSummary
-- * Request Lenses
, gtsTemplateBody
, gtsTemplateURL
, gtsStackName
-- * Destructuring the Response
, getTemplateSummaryResponse
, GetTemplateSummaryResponse
-- * Response Lenses
, gtsrsVersion
, gtsrsCapabilitiesReason
, gtsrsParameters
, gtsrsMetadata
, gtsrsDescription
, gtsrsCapabilities
, gtsrsResponseStatus
) where
import Network.AWS.CloudFormation.Types
import Network.AWS.CloudFormation.Types.Product
import Network.AWS.Prelude
import Network.AWS.Request
import Network.AWS.Response
-- | The input for the GetTemplateSummary action.
--
-- /See:/ 'getTemplateSummary' smart constructor.
data GetTemplateSummary = GetTemplateSummary'
    { _gtsTemplateBody :: !(Maybe Text) -- ^ inline template body ('gtsTemplateBody')
    , _gtsTemplateURL :: !(Maybe Text) -- ^ S3 location of the template ('gtsTemplateURL')
    , _gtsStackName :: !(Maybe Text) -- ^ stack name or unique stack ID ('gtsStackName')
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'GetTemplateSummary' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gtsTemplateBody'
--
-- * 'gtsTemplateURL'
--
-- * 'gtsStackName'
getTemplateSummary
    :: GetTemplateSummary
getTemplateSummary =
    -- all three request fields start out unset; callers set exactly one
    -- of body/URL/stack name via the lenses (fields in declaration order)
    GetTemplateSummary' Nothing Nothing Nothing
-- | Structure containing the template body with a minimum length of 1 byte
-- and a maximum length of 51,200 bytes. For more information about
-- templates, see
-- <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/template-anatomy.html Template Anatomy>
-- in the AWS CloudFormation User Guide.
--
-- Conditional: You must specify only one of the following parameters:
-- 'StackName', 'TemplateBody', or 'TemplateURL'.
gtsTemplateBody :: Lens' GetTemplateSummary (Maybe Text)
gtsTemplateBody = lens _gtsTemplateBody (\rq v -> rq{_gtsTemplateBody = v})
-- | Location of file containing the template body. The URL must point to a
-- template (max size: 460,800 bytes) located in an Amazon S3 bucket. For
-- more information about templates, see
-- <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/template-anatomy.html Template Anatomy>
-- in the AWS CloudFormation User Guide.
--
-- Conditional: You must specify only one of the following parameters:
-- 'StackName', 'TemplateBody', or 'TemplateURL'.
gtsTemplateURL :: Lens' GetTemplateSummary (Maybe Text)
gtsTemplateURL = lens _gtsTemplateURL (\rq v -> rq{_gtsTemplateURL = v})
-- | The name or the stack ID that is associated with the stack, which are
-- not always interchangeable. For running stacks, you can specify either
-- the stack\'s name or its unique stack ID. For deleted stack, you must
-- specify the unique stack ID.
--
-- Conditional: You must specify only one of the following parameters:
-- 'StackName', 'TemplateBody', or 'TemplateURL'.
gtsStackName :: Lens' GetTemplateSummary (Maybe Text)
gtsStackName = lens _gtsStackName (\rq v -> rq{_gtsStackName = v})
instance AWSRequest GetTemplateSummary where
        type Rs GetTemplateSummary =
             GetTemplateSummaryResponse
        -- query-protocol POST against the CloudFormation endpoint
        request = postQuery cloudFormation
        -- parse the wrapped XML result; the two list fields default to
        -- mempty when absent and are decoded element-by-element from
        -- "member" nodes; the final field is the HTTP status code
        response
          = receiveXMLWrapper "GetTemplateSummaryResult"
              (\ s h x ->
                 GetTemplateSummaryResponse' <$>
                   (x .@? "Version") <*> (x .@? "CapabilitiesReason")
                     <*>
                     (x .@? "Parameters" .!@ mempty >>=
                        may (parseXMLList "member"))
                     <*> (x .@? "Metadata")
                     <*> (x .@? "Description")
                     <*>
                     (x .@? "Capabilities" .!@ mempty >>=
                        may (parseXMLList "member"))
                     <*> (pure (fromEnum s)))
instance ToHeaders GetTemplateSummary where
        -- this operation contributes no extra request headers
        toHeaders _ = mempty
instance ToPath GetTemplateSummary where
        -- query-protocol requests always target the service root
        toPath _ = "/"
instance ToQuery GetTemplateSummary where
        -- Action/Version identify the API call; the optional request
        -- fields are serialized only when set
        toQuery GetTemplateSummary'{..}
          = mconcat
              ["Action" =: ("GetTemplateSummary" :: ByteString),
               "Version" =: ("2010-05-15" :: ByteString),
               "TemplateBody" =: _gtsTemplateBody,
               "TemplateURL" =: _gtsTemplateURL,
               "StackName" =: _gtsStackName]
-- | The output for the GetTemplateSummary action.
--
-- /See:/ 'getTemplateSummaryResponse' smart constructor.
data GetTemplateSummaryResponse = GetTemplateSummaryResponse'
    { _gtsrsVersion :: !(Maybe Text) -- ^ template format version ('gtsrsVersion')
    , _gtsrsCapabilitiesReason :: !(Maybe Text) -- ^ resources behind the capabilities ('gtsrsCapabilitiesReason')
    , _gtsrsParameters :: !(Maybe [ParameterDeclaration]) -- ^ declared template parameters ('gtsrsParameters')
    , _gtsrsMetadata :: !(Maybe Text) -- ^ the template's Metadata property ('gtsrsMetadata')
    , _gtsrsDescription :: !(Maybe Text) -- ^ the template's Description property ('gtsrsDescription')
    , _gtsrsCapabilities :: !(Maybe [Capability]) -- ^ required capabilities ('gtsrsCapabilities')
    , _gtsrsResponseStatus :: !Int -- ^ HTTP response status code ('gtsrsResponseStatus')
    } deriving (Eq,Read,Show,Data,Typeable,Generic)
-- | Creates a value of 'GetTemplateSummaryResponse' with the minimum fields required to make a request.
--
-- Use one of the following lenses to modify other fields as desired:
--
-- * 'gtsrsVersion'
--
-- * 'gtsrsCapabilitiesReason'
--
-- * 'gtsrsParameters'
--
-- * 'gtsrsMetadata'
--
-- * 'gtsrsDescription'
--
-- * 'gtsrsCapabilities'
--
-- * 'gtsrsResponseStatus'
getTemplateSummaryResponse
    :: Int -- ^ 'gtsrsResponseStatus'
    -> GetTemplateSummaryResponse
getTemplateSummaryResponse status =
    -- only the status code is mandatory; every payload field starts unset
    GetTemplateSummaryResponse'
    { _gtsrsResponseStatus = status
    , _gtsrsVersion = Nothing
    , _gtsrsCapabilitiesReason = Nothing
    , _gtsrsParameters = Nothing
    , _gtsrsMetadata = Nothing
    , _gtsrsDescription = Nothing
    , _gtsrsCapabilities = Nothing
    }
-- | The AWS template format version, which identifies the capabilities of
-- the template.
gtsrsVersion :: Lens' GetTemplateSummaryResponse (Maybe Text)
gtsrsVersion = lens _gtsrsVersion (\rs v -> rs{_gtsrsVersion = v})
-- | The list of resources that generated the values in the 'Capabilities'
-- response element.
gtsrsCapabilitiesReason :: Lens' GetTemplateSummaryResponse (Maybe Text)
gtsrsCapabilitiesReason = lens _gtsrsCapabilitiesReason (\rs v -> rs{_gtsrsCapabilitiesReason = v})
-- | A list of parameter declarations that describe various properties for
-- each parameter.
gtsrsParameters :: Lens' GetTemplateSummaryResponse [ParameterDeclaration]
gtsrsParameters = lens _gtsrsParameters (\rs v -> rs{_gtsrsParameters = v}) . _Default . _Coerce
-- | The value that is defined for the 'Metadata' property of the template.
gtsrsMetadata :: Lens' GetTemplateSummaryResponse (Maybe Text)
gtsrsMetadata = lens _gtsrsMetadata (\rs v -> rs{_gtsrsMetadata = v})
-- | The value that is defined in the 'Description' property of the template.
gtsrsDescription :: Lens' GetTemplateSummaryResponse (Maybe Text)
gtsrsDescription = lens _gtsrsDescription (\rs v -> rs{_gtsrsDescription = v})
-- | The capabilities found within the template. Currently, AWS
-- CloudFormation supports only the CAPABILITY_IAM capability. If your
-- template contains IAM resources, you must specify the CAPABILITY_IAM
-- value for this parameter when you use the CreateStack or UpdateStack
-- actions with your template; otherwise, those actions return an
-- InsufficientCapabilities error.
gtsrsCapabilities :: Lens' GetTemplateSummaryResponse [Capability]
gtsrsCapabilities = lens _gtsrsCapabilities (\rs v -> rs{_gtsrsCapabilities = v}) . _Default . _Coerce
-- | The response status code.
gtsrsResponseStatus :: Lens' GetTemplateSummaryResponse Int
gtsrsResponseStatus = lens _gtsrsResponseStatus (\rs v -> rs{_gtsrsResponseStatus = v})
| olorin/amazonka | amazonka-cloudformation/gen/Network/AWS/CloudFormation/GetTemplateSummary.hs | mpl-2.0 | 9,203 | 0 | 18 | 1,845 | 1,204 | 723 | 481 | 132 | 1 |
{-# LANGUAGE OverloadedStrings #-}
{- |
Module : Network.MPD.Applicative.StoredPlaylists
Copyright : (c) Joachim Fasting 2012
License : MIT
Maintainer : joachifm@fastmail.fm
Stability : stable
Portability : unportable
Stored playlists.
-}
module Network.MPD.Applicative.StoredPlaylists
( listPlaylist
, listPlaylistInfo
, listPlaylists
, load
, playlistAdd
, playlistClear
, playlistDelete
, playlistMove
, rename
, rm
, save
) where
import Network.MPD.Applicative.Internal
import Network.MPD.Applicative.Util
import Network.MPD.Commands.Arg hiding (Command)
import Network.MPD.Commands.Types
import Network.MPD.Util
-- | List song items in the playlist.
listPlaylist :: PlaylistName -> Command [Path]
listPlaylist name = Command parser ["listplaylist" <@> name]
    where
      -- every value in the response is a song path
      parser = map Path . takeValues <$> getResponse
-- | List song items in the playlist with metadata.
listPlaylistInfo :: PlaylistName -> Command [Song]
listPlaylistInfo name = Command parser ["listplaylistinfo" <@> name]
    where parser = liftParser takeSongs
-- | Get a list of stored playlists.
listPlaylists :: Command [PlaylistName]
listPlaylists = Command p ["listplaylists"]
  where
    p = map PlaylistName . go [] . toAssocList <$> getResponse
    -- XXX: need to fail gracefully here
    -- After each playlist name we get a timestamp
    -- 'go' keeps every playlist name and skips the timestamp pair that
    -- follows it in the assoc list.
    go acc [] = acc
    go acc ((_, b):_:xs) = go (b : acc) xs
    -- a trailing name without its timestamp violates the expected
    -- protocol shape; see the XXX above about graceful failure
    go _ _ = error "listPlaylists: bug"
-- | Load playlist into the current queue.
load :: PlaylistName -> Command ()
load name = Command emptyResponse ["load" <@> name]
-- | Add a database path to the named playlist.
playlistAdd :: PlaylistName -> Path -> Command ()
playlistAdd name song =
    Command emptyResponse ["playlistadd" <@> name <++> song]
-- | Clear the playlist.
playlistClear :: PlaylistName -> Command ()
playlistClear name = Command emptyResponse ["playlistclear" <@> name]
-- | Delete the item at the given position from the playlist.
playlistDelete :: PlaylistName -> Position -> Command ()
playlistDelete plName position =
    Command emptyResponse ["playlistdelete" <@> plName <++> position]
-- | Move a song to a new position within the playlist.
playlistMove :: PlaylistName -> Id -> Position -> Command ()
playlistMove plName songId newPos =
    Command emptyResponse ["playlistmove" <@> plName <++> songId <++> newPos]
-- | Rename the playlist.
rename :: PlaylistName -> PlaylistName -> Command ()
rename oldName newName =
    Command emptyResponse ["rename" <@> oldName <++> newName]
-- | Remove the playlist.
rm :: PlaylistName -> Command ()
rm name = Command emptyResponse ["rm" <@> name]
-- | Save current queue to the named playlist.
save :: PlaylistName -> Command ()
save name = Command emptyResponse ["save" <@> name]
| bens/libmpd-haskell | src/Network/MPD/Applicative/StoredPlaylists.hs | lgpl-2.1 | 2,848 | 0 | 11 | 603 | 629 | 340 | 289 | 49 | 3 |
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE OverloadedStrings #-}
import Control.Arrow
import Control.Lens
import Data.Aeson
import Data.Aeson.TH
import Data.Align
import qualified Data.ByteString.Lazy as LBS
import Data.Either
import Data.Foldable
import qualified Data.Map as Map
import Data.Monoid
import qualified Data.Text as T
import qualified Data.Text.IO as T
import Data.Text (Text)
import Data.These
import System.Environment
import System.FilePath
-- | One parsed benchmark result record.  The JSON field names are the
-- Haskell field names with the @_benchmarkResult_@ prefix dropped (see
-- the 'deriveJSON' splice below).
data BenchmarkResult = BenchmarkResult
  { _benchmarkResult_framework :: Text -- ^ framework under test; used to group results
  , _benchmarkResult_benchmark :: Text -- ^ benchmark name; used as the table row key
  , _benchmarkResult_type :: Text
  , _benchmarkResult_min :: Double
  , _benchmarkResult_max :: Double
  , _benchmarkResult_mean :: Double
  , _benchmarkResult_median :: Double
  , _benchmarkResult_geometricMean :: Double -- ^ the statistic compared in 'table'
  , _benchmarkResult_standardDeviation :: Double
  , _benchmarkResult_values :: [Double] -- ^ raw per-run samples
  }
  deriving (Show, Read, Eq, Ord)
-- Derive To/FromJSON, stripping the record prefix so e.g.
-- '_benchmarkResult_mean' maps to the JSON key "mean".
deriveJSON (defaultOptions { fieldLabelModifier = drop $ length ("_benchmarkResult_" :: String) }) ''BenchmarkResult
-- | Read and decode one JSON results file; a decode failure surfaces as
-- an 'error' carrying the decoder's message (when the list is forced).
loadResult :: FilePath -> IO [BenchmarkResult]
loadResult path = do
  contents <- LBS.readFile path
  pure (either error id (eitherDecode contents))
-- | Compare two benchmark result files and print, per framework present
-- in both files, a markdown table of before/after geometric means.
main :: IO ()
main = do
  args <- getArgs
  -- explicit usage message instead of a cryptic pattern-match failure
  (file1, file2) <- case args of
    [a, b] -> pure (a, b)
    _ -> error "usage: compareBenchmarkResults <before.json> <after.json>"
  results1 <- loadResult file1
  results2 <- loadResult file2
  let byFramework = Map.fromListWith (<>) . map (_benchmarkResult_framework &&& pure)
      befores = byFramework results1
      afters = byFramework results2
      -- only frameworks present in both runs can be compared
      inters = Map.intersectionWith (,) befores afters
  traverse_ T.putStrLn $ Map.mapWithKey table inters
-- | Render one framework's before/after results as a markdown table.
-- Benchmarks present on only one side show \"?\" in the missing column
-- and in the ratio.
table :: Text -> ([BenchmarkResult], [BenchmarkResult]) -> Text
table framework (before, after) =
  let resultMap = Map.fromList . fmap (_benchmarkResult_benchmark &&& _benchmarkResult_geometricMean)
      results1 = resultMap before
      results2 = resultMap after
      showMNum = maybe "?" (T.pack . show)
      title = "### " <> framework
      header = "| Benchmark | Before | After | Ratio |"
      separator = "| --- | --- | --- | --- |"
      -- 'align' pairs the two maps by benchmark name into 'These';
      -- 'preview here'/'preview there' extract each side when present
      formatLine (b, rs) =
        let r1 = preview here rs
            r2 = preview there rs
        in "| " <> b <> " | " <> showMNum r1 <> " | " <> showMNum r2 <> " | " <> showMNum ((/) <$> r2 <*> r1) <> " |"
  in
    T.unlines $ title : header : separator : fmap formatLine (Map.toList $ align results1 results2)
| reflex-frp/reflex-platform | benchmarking/compareBenchmarkResults.hs | bsd-3-clause | 2,307 | 0 | 18 | 448 | 652 | 357 | 295 | 57 | 1 |
{-------------------------------------------------------------------------------
HofmDe test file
(c) 2012 Jan Snajder <jan.snajder@fer.hr>
-------------------------------------------------------------------------------}
import MorphGrammar.Hofm hiding (($$))
import qualified MorphGrammar.Hofm (($$))
import MorphGrammar.Hofm.Language.German hiding (iPatterns,dPatterns)
import qualified MorphGrammar.Hofm.Language.German as G (iPatterns,dPatterns)
-- type specializations
-- Pin the polymorphic grammar combinators to their default instantiations
-- so the test examples below need no per-call type annotations.
($$) = (MorphGrammar.Hofm.$$) :: TransfDefault -> String -> [String]
iPatterns = G.iPatterns :: [IPatternDefault]
dPatterns = G.dPatterns :: [DPatternDefault]
-- Sample transformation: prefix "ge", "i"->"a" infix replacement, drop
-- suffix "en" (presumably singen |-> gesang; confirm against the
-- pfx/rifx/dsfx combinator docs).
t1 = pfx "ge" & rifx "i" "a" & dsfx "en" :: TransfDefault
-- | Apply one derivational pattern to a (lemma, inflection pattern)
-- pair, yielding each derived lemma suffixed with the label of its
-- inflectional pattern (e.g. \"gesang_Nm\").
testDPattern ::
  DPatternDefault -> (String,IPatternDefault) -> [String]
testDPattern d (l1,ip1) = [l2 ++ "_" ++ label (ip2::IPatternDefault) |
  (l2,ips) <- lDerive d (l1,ip1), ip2 <- ips]
-- > testDPattern d1 ("singen",verb)
-- ["gesang_Nm"]
-- | Try every derivational pattern on the given (lemma, inflection)
-- pair, tagging each derived lemma with the pattern's label.
testDPatterns :: (String,IPatternDefault) -> [(String,String)]
testDPatterns lemmaIp =
  [ (derived, label dp) | dp <- dPatterns, derived <- testDPattern dp lemmaIp ]
-- > testDPatterns ("singen",verb)
-- [("gesang_Nm","dVN01"),("gesinge_Nn","dVN02"),("unsingbar_A","dVA01")]
| jsnajder/hofm | test/hofm-de.hs | bsd-3-clause | 1,202 | 0 | 9 | 141 | 302 | 181 | 121 | 14 | 1 |
{-# LANGUAGE
OverloadedStrings
, ExtendedDefaultRules
, FlexibleContexts
#-}
module Pages.Home where
import Application.Types
import Lucid
import Data.Url
import qualified Data.Text as T
import Control.Monad.Reader.Class
-- | The full landing page, rendered as its four sections in order.
homeContent :: ( Monad m
               , MonadReader Env m
               ) => HtmlT (AbsoluteUrlT T.Text m) ()
homeContent = sequence_ [leader, profile, mountain, services]
-- | Hero banner: site title with tagline.
leader :: ( Monad m
          , MonadReader Env m
          ) => HtmlT (AbsoluteUrlT T.Text m) ()
leader =
  div_ [ id_ "leader"
       , class_ "row"] $
    div_ [class_ "column sixteen wide"] $ do
      h1_ [class_ "ui huge header inverted"] $ do
        "Clark Mining Tech"
        -- typo fix in user-facing copy: "though" -> "through"
        div_ [class_ "sub header"] "Delivering Business Value through Technology"
-- | \"Profile\" section: headline, portrait image, and intro copy.
-- NOTE(review): the body copy below is neutral placeholder text awaiting
-- real content; the previous filler consisted of inappropriate movie
-- quotes and was removed.
profile :: ( Monad m
           , MonadReader Env m
           ) => HtmlT (AbsoluteUrlT T.Text m) ()
profile =
  div_ [ id_ "profile"
       , class_ "row"] $
    div_ [class_ "column sixteen wide"] $ do
      h2_ [class_ "ui center aligned header"] "Profile"
      div_ [class_ "ui divider"] ""
      img_ [ id_ "profile-image"
           , class_ "ui small left floated image"
           , src_ "/images/lawrence.jpg"]
      p_ [] "Placeholder profile paragraph. Replace with a short professional biography covering background, experience, and areas of expertise."
      p_ [] "Placeholder profile paragraph. Replace with details of notable projects, industry engagements, and the value delivered to clients."
      h3_ [class_ "ui left aligned header"] "A List of Stuff"
      ul_ [] $ do
        li_ [] "One Thing"
        li_ [] "Two Thing"
        li_ [] "Three Thing"
-- | Full-width banner section with a headline over a two-column blurb.
-- NOTE(review): the body copy below is neutral placeholder text awaiting
-- real content; the previous filler consisted of inappropriate movie
-- quotes and was removed.
mountain :: ( Monad m
            , MonadReader Env m
            ) => HtmlT (AbsoluteUrlT T.Text m) ()
mountain =
  div_ [ id_ "mountain"
       , class_ "row"] $
    div_ [class_ "column sixteen wide"] $
      div_ [class_ "ui grid stackable"] $ do
        div_ [class_ "row"] $
          div_ [class_ "column sixteen wide"] $ do
            h2_ [class_ "ui center aligned header"] "Another Headline"
            -- div_ [class_ "ui divider"] ""
        div_ [class_ "two column row fuzzy-bg"] $ do
          div_ [class_ "column"] $ do
            p_ [] "Placeholder paragraph. Replace with supporting copy for this section."
          div_ [class_ "column"] $ do
            p_ [] "Placeholder paragraph. Replace with supporting copy for this section."
-- | \"Service Offerings\" section: headline plus two columns of offerings.
services :: ( Monad m
            , MonadReader Env m
            ) => HtmlT (AbsoluteUrlT T.Text m) ()
services =
  div_ [ id_ "services"
       , class_ "row stackable"] $ do
    div_ [class_ "column sixteen wide"] $ do
      h2_ [class_ "ui center aligned header"] "Service Offerings"
      div_ [class_ "ui divider"] ""
    col1
    col2
  where
    -- left column: advisory and strategy offerings
    col1 =
      div_ [class_ "column eight wide left aligned"] $ do
        h3_ [] "Private equity placement in technology growth companies"
        ul_ [] $ do
          li_ [] "Market assessments"
          li_ [] "Value proposition development"
          li_ [] "Value-based marketing strategies"
        h3_ [] "Technology audits"
        h3_ [] "Technology business case evaluations/development"
        h3_ [] "Enterprise Mining Technology Strategy [EMTS] development"
    -- right column: implementation and sustainability offerings
    col2 =
      div_ [class_ "column eight wide left aligned"] $ do
        h3_ [] "Technology implementation"
        ul_ [] $ do
          li_ [] "Personnel and equipment tracking systems"
          li_ [] "Underground wired and wireless networks"
          li_ [] "Mine planning software systems"
          li_ [] "Grade control and reconciliation systems"
          li_ [] "Mobile LiDAR-based underground excavation mapping"
        h3_ [] "Solution sustainability programs"
        ul_ [] $ do
          li_ [] "Implementation planning"
          li_ [] "Change management strategies"
          li_ [] "Executive engagement"
        h3_ [] "Value stream analysis"
| athanclark/clark-mining-tech | src/Pages/About/Management.hs | bsd-3-clause | 5,179 | 0 | 15 | 1,426 | 972 | 447 | 525 | 99 | 1 |
{-# LANGUAGE CPP #-}
module Test.E.Arbitrary where
import Test.Tasty.QuickCheck
import Test.E
-- GENERATED START
-- | Uniform choice over all constructors.  'elements' replaces the
-- generated choose-then-case dispatch and its unreachable 'error'
-- branch.  (If this file is regenerated, apply the same change in the
-- generator.)
instance () => Arbitrary E2 where
    arbitrary = elements [E2_1, E2_2]
-- | Uniform choice over all constructors (see note on the E2 instance
-- in the generator).
instance () => Arbitrary E3 where
    arbitrary = elements [E3_1, E3_2, E3_3]
-- | Uniform choice over all constructors.
instance () => Arbitrary E4 where
    arbitrary = elements [E4_1, E4_2, E4_3, E4_4]
-- | Uniform choice over all constructors.
instance () => Arbitrary E8 where
    arbitrary
      = elements [E8_1, E8_2, E8_3, E8_4, E8_5, E8_6, E8_7, E8_8]
-- | Uniform choice over all constructors.
instance () => Arbitrary E16 where
    arbitrary
      = elements
          [ E16_1, E16_2, E16_3, E16_4, E16_5, E16_6, E16_7, E16_8
          , E16_9, E16_10, E16_11, E16_12, E16_13, E16_14, E16_15, E16_16
          ]
-- | Uniform choice over all constructors.
instance () => Arbitrary E17 where
    arbitrary
      = elements
          [ E17_1, E17_2, E17_3, E17_4, E17_5, E17_6, E17_7, E17_8
          , E17_9, E17_10, E17_11, E17_12, E17_13, E17_14, E17_15, E17_16
          , E17_17
          ]
-- | Uniform choice over all constructors.
instance () => Arbitrary E32 where
    arbitrary
      = elements
          [ E32_1, E32_2, E32_3, E32_4, E32_5, E32_6, E32_7, E32_8
          , E32_9, E32_10, E32_11, E32_12, E32_13, E32_14, E32_15, E32_16
          , E32_17, E32_18, E32_19, E32_20, E32_21, E32_22, E32_23, E32_24
          , E32_25, E32_26, E32_27, E32_28, E32_29, E32_30, E32_31, E32_32
          ]
#ifdef ENUM_LARGE
-- | Uniform choice over all 256 constructors.  'elements' replaces the
-- generated choose-then-case dispatch and its unreachable 'error'
-- branch.
instance () => Arbitrary E256 where
    arbitrary
      = elements
          [ E256_1, E256_2, E256_3, E256_4, E256_5, E256_6, E256_7, E256_8
          , E256_9, E256_10, E256_11, E256_12, E256_13, E256_14, E256_15, E256_16
          , E256_17, E256_18, E256_19, E256_20, E256_21, E256_22, E256_23, E256_24
          , E256_25, E256_26, E256_27, E256_28, E256_29, E256_30, E256_31, E256_32
          , E256_33, E256_34, E256_35, E256_36, E256_37, E256_38, E256_39, E256_40
          , E256_41, E256_42, E256_43, E256_44, E256_45, E256_46, E256_47, E256_48
          , E256_49, E256_50, E256_51, E256_52, E256_53, E256_54, E256_55, E256_56
          , E256_57, E256_58, E256_59, E256_60, E256_61, E256_62, E256_63, E256_64
          , E256_65, E256_66, E256_67, E256_68, E256_69, E256_70, E256_71, E256_72
          , E256_73, E256_74, E256_75, E256_76, E256_77, E256_78, E256_79, E256_80
          , E256_81, E256_82, E256_83, E256_84, E256_85, E256_86, E256_87, E256_88
          , E256_89, E256_90, E256_91, E256_92, E256_93, E256_94, E256_95, E256_96
          , E256_97, E256_98, E256_99, E256_100, E256_101, E256_102, E256_103, E256_104
          , E256_105, E256_106, E256_107, E256_108, E256_109, E256_110, E256_111, E256_112
          , E256_113, E256_114, E256_115, E256_116, E256_117, E256_118, E256_119, E256_120
          , E256_121, E256_122, E256_123, E256_124, E256_125, E256_126, E256_127, E256_128
          , E256_129, E256_130, E256_131, E256_132, E256_133, E256_134, E256_135, E256_136
          , E256_137, E256_138, E256_139, E256_140, E256_141, E256_142, E256_143, E256_144
          , E256_145, E256_146, E256_147, E256_148, E256_149, E256_150, E256_151, E256_152
          , E256_153, E256_154, E256_155, E256_156, E256_157, E256_158, E256_159, E256_160
          , E256_161, E256_162, E256_163, E256_164, E256_165, E256_166, E256_167, E256_168
          , E256_169, E256_170, E256_171, E256_172, E256_173, E256_174, E256_175, E256_176
          , E256_177, E256_178, E256_179, E256_180, E256_181, E256_182, E256_183, E256_184
          , E256_185, E256_186, E256_187, E256_188, E256_189, E256_190, E256_191, E256_192
          , E256_193, E256_194, E256_195, E256_196, E256_197, E256_198, E256_199, E256_200
          , E256_201, E256_202, E256_203, E256_204, E256_205, E256_206, E256_207, E256_208
          , E256_209, E256_210, E256_211, E256_212, E256_213, E256_214, E256_215, E256_216
          , E256_217, E256_218, E256_219, E256_220, E256_221, E256_222, E256_223, E256_224
          , E256_225, E256_226, E256_227, E256_228, E256_229, E256_230, E256_231, E256_232
          , E256_233, E256_234, E256_235, E256_236, E256_237, E256_238, E256_239, E256_240
          , E256_241, E256_242, E256_243, E256_244, E256_245, E256_246, E256_247, E256_248
          , E256_249, E256_250, E256_251, E256_252, E256_253, E256_254, E256_255, E256_256
          ]
-- | Uniform choice over all 258 constructors.  'elements' replaces the
-- generated choose-then-case dispatch and its unreachable 'error'
-- branch.
instance () => Arbitrary E258 where
    arbitrary
      = elements
          [ E258_1, E258_2, E258_3, E258_4, E258_5, E258_6, E258_7, E258_8
          , E258_9, E258_10, E258_11, E258_12, E258_13, E258_14, E258_15, E258_16
          , E258_17, E258_18, E258_19, E258_20, E258_21, E258_22, E258_23, E258_24
          , E258_25, E258_26, E258_27, E258_28, E258_29, E258_30, E258_31, E258_32
          , E258_33, E258_34, E258_35, E258_36, E258_37, E258_38, E258_39, E258_40
          , E258_41, E258_42, E258_43, E258_44, E258_45, E258_46, E258_47, E258_48
          , E258_49, E258_50, E258_51, E258_52, E258_53, E258_54, E258_55, E258_56
          , E258_57, E258_58, E258_59, E258_60, E258_61, E258_62, E258_63, E258_64
          , E258_65, E258_66, E258_67, E258_68, E258_69, E258_70, E258_71, E258_72
          , E258_73, E258_74, E258_75, E258_76, E258_77, E258_78, E258_79, E258_80
          , E258_81, E258_82, E258_83, E258_84, E258_85, E258_86, E258_87, E258_88
          , E258_89, E258_90, E258_91, E258_92, E258_93, E258_94, E258_95, E258_96
          , E258_97, E258_98, E258_99, E258_100, E258_101, E258_102, E258_103, E258_104
          , E258_105, E258_106, E258_107, E258_108, E258_109, E258_110, E258_111, E258_112
          , E258_113, E258_114, E258_115, E258_116, E258_117, E258_118, E258_119, E258_120
          , E258_121, E258_122, E258_123, E258_124, E258_125, E258_126, E258_127, E258_128
          , E258_129, E258_130, E258_131, E258_132, E258_133, E258_134, E258_135, E258_136
          , E258_137, E258_138, E258_139, E258_140, E258_141, E258_142, E258_143, E258_144
          , E258_145, E258_146, E258_147, E258_148, E258_149, E258_150, E258_151, E258_152
          , E258_153, E258_154, E258_155, E258_156, E258_157, E258_158, E258_159, E258_160
          , E258_161, E258_162, E258_163, E258_164, E258_165, E258_166, E258_167, E258_168
          , E258_169, E258_170, E258_171, E258_172, E258_173, E258_174, E258_175, E258_176
          , E258_177, E258_178, E258_179, E258_180, E258_181, E258_182, E258_183, E258_184
          , E258_185, E258_186, E258_187, E258_188, E258_189, E258_190, E258_191, E258_192
          , E258_193, E258_194, E258_195, E258_196, E258_197, E258_198, E258_199, E258_200
          , E258_201, E258_202, E258_203, E258_204, E258_205, E258_206, E258_207, E258_208
          , E258_209, E258_210, E258_211, E258_212, E258_213, E258_214, E258_215, E258_216
          , E258_217, E258_218, E258_219, E258_220, E258_221, E258_222, E258_223, E258_224
          , E258_225, E258_226, E258_227, E258_228, E258_229, E258_230, E258_231, E258_232
          , E258_233, E258_234, E258_235, E258_236, E258_237, E258_238, E258_239, E258_240
          , E258_241, E258_242, E258_243, E258_244, E258_245, E258_246, E258_247, E258_248
          , E258_249, E258_250, E258_251, E258_252, E258_253, E258_254, E258_255, E258_256
          , E258_257, E258_258
          ]
-- GENERATED STOP
#endif | tittoassini/typed | test/Test/E/Arbitrary.hs | bsd-3-clause | 23,640 | 0 | 11 | 11,939 | 5,855 | 2,644 | 3,211 | 121 | 0 |
import Distribution.PackageDescription
import Distribution.Simple
import Distribution.Simple.LocalBuildInfo
import System.Cmd
import System.FilePath
-- | Drive the Cabal build with the simple build type, overriding two hooks:
-- tests run through our own driver ('runLMTests'), and the install step is
-- wrapped so it ignores the package's executables.
main :: IO ()
main = defaultMainWithHooks rsaUserHooks
 where
  rsaUserHooks = simpleUserHooks {
     runTests = runLMTests  -- run the compiled test_rsa binary
   , instHook = filter_test $ instHook defaultUserHooks
     -- NOTE(review): base hooks are 'simpleUserHooks' but the install hook
     -- wraps 'defaultUserHooks' -- looks inconsistent; confirm intended.
   }
type Hook a = PackageDescription -> LocalBuildInfo -> UserHooks -> a -> IO ()
-- | Wrap an install/copy hook so that it sees a package description with no
-- executables: only the non-executable components get processed by the
-- wrapped hook. All other arguments pass through unchanged.
filter_test :: Hook a -> Hook a
filter_test hook descr = hook descr { executables = [] }
-- | Custom Cabal test runner: invoke the first executable of the package
-- (expected to be the test binary) from the dist/build tree, ignoring its
-- exit code.
runLMTests :: Args -> Bool -> PackageDescription -> LocalBuildInfo -> IO ()
runLMTests _args _unknown descr _lbi = do
    _ <- system testExe
    return ()
  where
    testExe =
        "dist" </> "build" </> "test_rsa" </> exeName (head (executables descr))
| armoredsoftware/protocol | tpm/mainline/Setup.hs | bsd-3-clause | 773 | 0 | 10 | 140 | 243 | 127 | 116 | 17 | 1 |
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE ExistentialQuantification #-}
-- | <https://www.fpcomplete.com/user/agocorona/the-hardworking-programmer-ii-practical-backtracking-to-undo-actions>
module Transient.Backtrack (onUndo, undo, retry, undoCut,registerUndo,
-- * generalized versions of backtracking with an extra parameter that gives the reason for going back
-- different kinds of backtracking with different reasons can be managed in the same program
onBack, back, forward, backCut,registerBack,
-- * finalization primitives
finish, onFinish, onFinish' ,initFinish , noFinish, killOnFinish ,checkFinalize , FinishReason
) where
import Transient.Base
import Transient.Internals((!>),EventF(..),killChildren,onNothing,runClosure,runContinuation)
import Data.Typeable
import Control.Applicative
import Control.Monad.State
import Unsafe.Coerce
import System.Mem.StableName
import Control.Exception
import Control.Concurrent.STM hiding (retry)
-- | Per-reason backtracking state kept in the session data:
-- whether execution is currently going backwards, the stack of registered
-- continuation points (most recently registered first), and the reason
-- value that triggered the backtracking.
data Backtrack b= Show b =>Backtrack{backtracking :: Bool
                                    ,backStack :: [EventF]
                                    ,backReason :: b}
                                    deriving Typeable
-- | Assures that backtracking will not go further back: the stored state
-- for this reason type is replaced by an empty stack, so a later 'back'
-- with the same reason type stops here.
backCut :: (Typeable reason, Show reason) => reason -> TransientIO ()
backCut reason= Transient $ do
     -- delData is given a Backtrack value for this reason type; presumably
     -- only its type is used to select the slot -- TODO confirm.
     delData $ Backtrack False [] reason
     return $ Just ()
-- | 'backCut' specialised to the unit reason used by 'undo'.
undoCut :: TransientIO ()
undoCut = backCut ()
-- | @onBack forward reason handler@ registers a backtracking point: going
-- forward it runs @forward@; when 'back' is invoked with a reason of the
-- same type, @handler@ is run instead.
{-# NOINLINE onBack #-}
onBack :: (Typeable b, Show b) => TransientIO a -> b -> TransientIO a -> TransientIO a
onBack ac reason bac = registerBack reason $ Transient $ do
    -- Look up the backtracking state for this reason type, falling back to
    -- a fresh, non-backtracking state when none is registered yet.
    Backtrack back _ _ <- getData `onNothing` backStateOf reason
    -- The direction flag decides which branch executes.
    runTrans $ if back then bac else ac
-- | 'onBack' specialised to the unit reason: run the first computation and,
-- when 'undo' backtracks to this point, the second one.
onUndo :: TransientIO a -> TransientIO a -> TransientIO a
onUndo x y= onBack x () y
-- | Register an action that will be executed when backtracking with a
-- reason of the given type. The current continuation ('EventF') is pushed
-- onto the stack stored for that reason type.
{-# NOINLINE registerUndo #-}
registerBack :: (Typeable b, Show b) => b -> TransientIO a -> TransientIO a
registerBack reason f = Transient $ do
   cont@(EventF _ _ x _ _ _ _ _ _ _ _) <- get
   -- Fetch the existing Backtrack slot for this reason type, if any.
   md <- getData `asTypeOf` (Just <$> backStateOf reason)
   ss <- case md of
        Just (bss@(Backtrack b (bs@((EventF _ _ x' _ _ _ _ _ _ _ _):_))_)) -> do
            -- Compare stable names of the closures to avoid registering the
            -- same backtracking point twice in a row.
            addrx <- addr x
            addrx' <- addr x' -- to avoid duplicate backtracking points
            return $ if addrx == addrx' then bss else Backtrack b (cont:bs) reason
        Nothing -> return $ Backtrack False [cont] reason
   setData ss
   runTrans f
   where
   -- Hash of a stable name: an identity proxy for the closure.
   addr x = liftIO $ return . hashStableName =<< (makeStableName $! x)
-- | 'registerBack' specialised to the unit reason used by 'undo'.
registerUndo :: TransientIO a -> TransientIO a
registerUndo f= registerBack () f
-- | Restart the flow forward from this point on: the stored backtracking
-- flag for this reason type is cleared while the stack is preserved.
forward :: (Typeable b, Show b) => b -> TransIO ()
forward reason= Transient $ do
    Backtrack _ stack _<- getData `onNothing` return (Backtrack False [] reason)
    setData $ Backtrack False stack reason
    return $ Just ()
-- | 'forward' for the unit reason used by 'undo'.
retry= forward ()
-- | 'forward' for the finalization reason: stop a running finish event.
noFinish= forward (FinishReason Nothing)
-- | Execute backtracking: it executes the registered actions in reverse
-- order.
--
-- If the backtracking flag is reset (e.g. by 'forward') the flow proceeds
-- forward again from that point on.
--
-- If the backtrack stack is exhausted or 'undoCut' was executed, 'undo'
-- will stop.
back :: (Typeable b, Show b) => b -> TransientIO a
back reason = Transient $ do
  -- Fetch the stack registered for this reason type (empty state if none).
  bs <- getData `onNothing` backStateOf reason
  goBackt bs
  where
  -- No more registered points: backtracking ends with no result.
  goBackt (Backtrack _ [] _)= return Nothing
  goBackt (Backtrack b (stack@(first : bs)) _)= do
        -- Mark the state as backtracking, expose the reason value, then
        -- re-run the most recently registered continuation point.
        setData $ Backtrack True stack reason !> ("REASOOOOOOOOOOOOOON", reason)
        setData reason
        mr <- runClosure first !> "RUNCLOSURE"
        -- The executed handler may have flipped the flag back to forward.
        Backtrack back _ r <- getData `onNothing` backStateOf reason
        case back of
           True -> goBackt $ Backtrack True bs reason -- keep going back
           False -> case mr of
                   Nothing -> return empty -- handler produced no value
                   Just x -> runContinuation first x -- resume forward
-- | A fresh, inactive backtracking state for the given reason: not
-- currently backtracking, with an empty stack of registered points.
backStateOf :: (Monad m, Show a, Typeable a) => a -> m (Backtrack a)
backStateOf = return . Backtrack False []
-- | 'back' with the unit reason: run all actions registered with
-- 'registerUndo' / 'onUndo' in reverse order.
undo :: TransIO a
undo= back ()
------ finalization
-- | Reason carried by the finish event: 'Nothing' for a normal finish, or
-- the exception that caused it.
newtype FinishReason= FinishReason (Maybe SomeException) deriving (Typeable, Show)
-- | Initialize the event variable for finalization.
-- All the following computations in different threads will share it.
-- It also isolates this event from other branches that may have their own
-- finish variable.
initFinish= backCut (FinishReason Nothing)
-- | Set a computation to be called when the finish event happens.
onFinish :: ((Maybe SomeException) ->TransIO ()) -> TransIO ()
onFinish f= onFinish' (return ()) f
-- | Like 'onFinish', but also takes the computation to run while going
-- forward. The handler receives the exception (if any) carried by the
-- finish event.
onFinish' ::TransIO a ->((Maybe SomeException) ->TransIO a) -> TransIO a
onFinish' proc f= proc `onBack` (FinishReason Nothing) $ do
    -- Read the reason stored by 'finish'; default to a normal finish when
    -- none was recorded.
    FinishReason reason <- getData `onNothing` return (FinishReason Nothing)
    f reason
-- | Trigger the finish event: backtrack through all 'onFinish' handlers so
-- resources they guard get closed. The optional exception is passed on to
-- the handlers.
finish :: Maybe SomeException -> TransIO a
finish reason= back (FinishReason reason)
-- | Kill all the processes generated by the parameter when the finish
-- event occurs. A fresh child-thread list is installed in the state so the
-- finish handler only kills threads spawned by this computation.
killOnFinish comp= do
   chs <- liftIO $ newTVarIO []
   onFinish $ const $ liftIO $ killChildren chs
   r <- comp
   -- Record the new child list in the session state; subsequently spawned
   -- threads are tracked there.
   modify $ \ s -> s{children= chs}
   return r
-- | Trigger 'finish' when the stream of data ends: the terminal stream
-- constructors end the computation (after printing the error, if any),
-- while element-carrying ones pass their payload through.
checkFinalize v=
   case v of
      SDone -> finish Nothing >> stop
      SLast x -> return x
      SError e -> liftIO ( print e) >> finish Nothing >> stop
      SMore x -> return x
| geraldus/transient | src/Transient/Backtrackold.hs | mit | 6,229 | 0 | 22 | 1,533 | 1,677 | 862 | 815 | 99 | 4 |
module Multi.Paths where
-- $Id$
import Dot.Dot
import Util.Datei
-- | The final path component: everything after the last \'/\' in the path
-- (the whole string when it contains no slash, the empty string when it
-- ends in a slash).
basename :: FilePath -> FilePath
basename path = go path path
  where
    -- First argument: candidate suffix starting after the last slash seen
    -- so far; second: remaining input to scan.
    go candidate []         = candidate
    go _         ('/' : cs) = go cs cs
    go candidate (_   : cs) = go candidate cs
-- | Prefix the file's path components with the "public_html" directory.
public_html :: Datei -> Datei
public_html d = d { pfad = "public_html" : pfad d }
-- | Prefix the path with the "data" directory and point the name at the
-- "index.text" file.
data_index :: Datei -> Datei
data_index d = d { pfad = "data" : pfad d, name = "index.text" }
-- | Render a dot-convertible value to a file: create the target
-- directories, run 'meng' in the file's home directory, and return the
-- file descriptor with the produced file's basename as its name.
deng :: ( ToDot a )
     => Datei -- without extension
     -> a
     -> IO Datei -- with extension
deng d a = do
    erzeugeVerzeichnisse d
    h <- home d
    f <- meng h a
    return $ d { name = basename f }
| Erdwolf/autotool-bonn | src/Multi/Paths.hs | gpl-2.0 | 547 | 0 | 10 | 149 | 199 | 105 | 94 | 18 | 1 |
--
-- Licensed to the Apache Software Foundation (ASF) under one
-- or more contributor license agreements. See the NOTICE file
-- distributed with this work for additional information
-- regarding copyright ownership. The ASF licenses this file
-- to you under the Apache License, Version 2.0 (the
-- "License"); you may not use this file except in compliance
-- with the License. You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing,
-- software distributed under the License is distributed on an
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-- KIND, either express or implied. See the License for the
-- specific language governing permissions and limitations
-- under the License.
--
module Server where
import Thrift
import ThriftTest
import ThriftTest_Iface
import Data.Map as Map
import TServer
import Control.Exception
import ThriftTest_Types
-- | Stateless handler for the Thrift cross-language test service.
data TestHandler = TestHandler
-- | Echo-style implementations: most handlers print their argument and
-- return it unchanged; a few return fixed structures.
-- NOTE(review): the handlers only match @Just@ arguments -- a missing
-- argument causes a pattern-match failure; presumably acceptable for this
-- test server, but confirm.
instance ThriftTest_Iface TestHandler where
    testVoid a = return ()
    testString a (Just s) = do print s; return s
    testByte a (Just x) = do print x; return x
    testI32 a (Just x) = do print x; return x
    testI64 a (Just x) = do print x; return x
    testDouble a (Just x) = do print x; return x
    testStruct a (Just x) = do print x; return x
    testNest a (Just x) = do print x; return x
    testMap a (Just x) = do print x; return x
    testSet a (Just x) = do print x; return x
    testList a (Just x) = do print x; return x
    testEnum a (Just x) = do print x; return x
    testTypedef a (Just x) = do print x; return x
    testMapMap a (Just x) = return (Map.fromList [(1,Map.fromList [(2,2)])])
    testInsanity a (Just x) = return (Map.fromList [(1,Map.fromList [(ONE,x)])])
    testMulti a a1 a2 a3 a4 a5 a6 = return (Xtruct Nothing Nothing Nothing Nothing)
    testException a c = throwDyn (Xception (Just 1) (Just "bya"))
    testMultiException a c1 c2 = return (Xtruct Nothing Nothing Nothing Nothing)
    testOneway a (Just i) = do print i
-- | Run the test service on port 9090, printing the message of any
-- transport exception instead of crashing.
main = do (run_basic_server TestHandler process 9090) `catchDyn` (\(TransportExn s t) -> print s)
| musixmatch/thrift | test/hs/Server.hs | apache-2.0 | 2,168 | 0 | 13 | 441 | 685 | 342 | 343 | 30 | 1 |
{-# OPTIONS_GHC -fplugin RuleDefiningPlugin #-}
module Plugins07a where
| fmthoma/ghc | testsuite/tests/plugins/Plugins07a.hs | bsd-3-clause | 72 | 0 | 2 | 8 | 5 | 4 | 1 | 2 | 0 |
{- |
Module : $Header$
Description : symbol map analysis for the CspCASL logic.
Copyright : (c) Christian Maeder, DFKI GmbH 2011
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : provisional
Portability : portable
-}
module CspCASL.SymMapAna where
import CspCASL.AS_CspCASL_Process
import CspCASL.Morphism
import CspCASL.SignCSP
import CspCASL.SymbItems
import CspCASL.Symbol
import CASL.Sign
import CASL.AS_Basic_CASL
import CASL.Morphism
import CASL.SymbolMapAnalysis
import Common.DocUtils
import Common.ExtSign
import Common.Id
import Common.Result
import qualified Common.Lib.Rel as Rel
import qualified Common.Lib.MapSet as MapSet
import qualified Data.Map as Map
import qualified Data.Set as Set
import Data.List (partition)
import Data.Maybe
type CspRawMap = Map.Map CspRawSymbol CspRawSymbol
cspInducedFromToMorphism :: CspRawMap -> ExtSign CspCASLSign CspSymbol
-> ExtSign CspCASLSign CspSymbol -> Result CspCASLMorphism
cspInducedFromToMorphism rmap (ExtSign sSig sy) (ExtSign tSig tSy) =
let (crm, rm) = splitSymbolMap rmap
in if Map.null rm then
inducedFromToMorphismExt inducedCspSign
(constMorphExt emptyCspAddMorphism)
composeMorphismExtension isCspSubSign diffCspSig
crm (ExtSign sSig $ getCASLSymbols sy)
$ ExtSign tSig $ getCASLSymbols tSy
else do
mor <- cspInducedFromMorphism rmap sSig
let iSig = mtarget mor
if isSubSig isCspSubSign iSig tSig then do
incl <- sigInclusion emptyCspAddMorphism iSig tSig
composeM composeMorphismExtension mor incl
else
fatal_error
("No signature morphism for csp symbol map found.\n" ++
"The following mapped symbols are missing in the target signature:\n"
++ showDoc (diffSig diffCspSig iSig tSig) "")
$ concatMapRange getRange $ Map.keys rmap
cspInducedFromMorphism :: CspRawMap -> CspCASLSign -> Result CspCASLMorphism
cspInducedFromMorphism rmap sigma = do
let (crm, _) = splitSymbolMap rmap
m <- inducedFromMorphism emptyCspAddMorphism crm sigma
let sm = sort_map m
om = op_map m
pm = pred_map m
csig = extendedInfo sigma
newSRel = Rel.transClosure . sortRel $ mtarget m
-- compute the channel name map (as a Map)
cm <- Map.foldWithKey (chanFun sigma rmap sm)
(return Map.empty) (MapSet.toMap $ chans csig)
-- compute the process name map (as a Map)
proc_Map <- Map.foldWithKey (procFun sigma rmap sm newSRel cm)
(return Map.empty) (MapSet.toMap $ procSet csig)
let em = emptyCspAddMorphism
{ channelMap = cm
, processMap = proc_Map }
return (embedMorphism em sigma $ closeSortRel
$ inducedSignAux inducedCspSign sm om pm em sigma)
{ sort_map = sm
, op_map = om
, pred_map = pm }
chanFun :: CspCASLSign -> CspRawMap -> Sort_map -> Id -> Set.Set SORT
-> Result ChanMap -> Result ChanMap
chanFun sig rmap sm cn ss m =
let sls = Rel.partSet (relatedSorts sig) ss
m1 = foldr (directChanMap rmap sm cn) m sls
in case (Map.lookup (CspKindedSymb ChannelKind cn) rmap,
Map.lookup (CspKindedSymb (CaslKind Implicit) cn) rmap) of
(Just rsy1, Just rsy2) -> let
m2 = Set.fold (insertChanSym sm cn rsy1) m1 ss
in Set.fold (insertChanSym sm cn rsy2) m2 ss
(Just rsy, Nothing) ->
Set.fold (insertChanSym sm cn rsy) m1 ss
(Nothing, Just rsy) ->
Set.fold (insertChanSym sm cn rsy) m1 ss
-- Anything not mapped explicitly is left unchanged
(Nothing, Nothing) -> m1
directChanMap :: CspRawMap -> Sort_map -> Id -> Set.Set SORT
-> Result ChanMap -> Result ChanMap
directChanMap rmap sm cn ss m =
let sl = Set.toList ss
rl = map (\ s -> Map.lookup (ACspSymbol $ toChanSymbol (cn, s)) rmap) sl
(ms, ps) = partition (isJust . fst) $ zip rl sl
in case ms of
l@((Just rsy, _) : rs) ->
foldr (\ (_, s) ->
insertChanSym sm cn
(ACspSymbol $ toChanSymbol
(rawId rsy, mapSort sm s)) s)
(foldr (\ (rsy2, s) ->
insertChanSym sm cn (fromJust rsy2) s) m l)
$ rs ++ ps
_ -> m
insertChanSym :: Sort_map -> Id -> CspRawSymbol -> SORT -> Result ChanMap
-> Result ChanMap
insertChanSym sm cn rsy s m = do
m1 <- m
c1 <- mappedChanSym sm cn s rsy
let ptsy = CspSymbol cn $ ChanAsItemType s
pos = getRange rsy
m2 = Map.insert (cn, s) c1 m1
case Map.lookup (cn, s) m1 of
Nothing -> if cn == c1 then
case rsy of
ACspSymbol _ -> return m1
_ -> hint m1 ("identity mapping of "
++ showDoc ptsy "") pos
else return m2
Just c2 -> if c1 == c2 then
warning m1
("ignoring duplicate mapping of " ++ showDoc ptsy "") pos
else plain_error m1
("conflicting mapping of " ++ showDoc ptsy " to " ++
show c1 ++ " and " ++ show c2) pos
mappedChanSym :: Sort_map -> Id -> SORT -> CspRawSymbol -> Result Id
mappedChanSym sm cn s rsy =
let chanSym = "channel symbol " ++ showDoc (toChanSymbol (cn, s))
" is mapped to "
in case rsy of
ACspSymbol (CspSymbol ide (ChanAsItemType s1)) ->
let s2 = mapSort sm s
in if s1 == s2
then return ide
else plain_error cn
(chanSym ++ "sort " ++ showDoc s1
" but should be mapped to type " ++
showDoc s2 "") $ getRange rsy
CspKindedSymb k ide | elem k [CaslKind Implicit, ChannelKind] ->
return ide
_ -> plain_error cn
(chanSym ++ "symbol of wrong kind: " ++ showDoc rsy "")
$ getRange rsy
procFun :: CspCASLSign -> CspRawMap -> Sort_map -> Rel.Rel SORT -> ChanMap -> Id
-> Set.Set ProcProfile -> Result ProcessMap -> Result ProcessMap
procFun sig rmap sm rel cm pn ps m =
let pls = Rel.partSet (relatedProcs sig) ps
m1 = foldr (directProcMap rmap sm rel cm pn) m pls
-- now try the remaining ones with (un)kinded raw symbol
in case (Map.lookup (CspKindedSymb ProcessKind pn) rmap,
Map.lookup (CspKindedSymb (CaslKind Implicit) pn) rmap) of
(Just rsy1, Just rsy2) -> let
m2 = Set.fold (insertProcSym sm rel cm pn rsy1) m1 ps
in Set.fold (insertProcSym sm rel cm pn rsy2) m2 ps
(Just rsy, Nothing) ->
Set.fold (insertProcSym sm rel cm pn rsy) m1 ps
(Nothing, Just rsy) ->
Set.fold (insertProcSym sm rel cm pn rsy) m1 ps
-- Anything not mapped explicitly is left unchanged
(Nothing, Nothing) -> m1
directProcMap :: CspRawMap -> Sort_map -> Rel.Rel SORT -> ChanMap
-> Id -> Set.Set ProcProfile -> Result ProcessMap -> Result ProcessMap
directProcMap rmap sm rel cm pn ps m =
let pl = Set.toList ps
rl = map (lookupProcSymbol rmap pn) pl
(ms, os) = partition (isJust . fst) $ zip rl pl
in case ms of
l@((Just rsy, _) : rs) ->
foldr (\ (_, p) ->
insertProcSym sm rel cm pn
(ACspSymbol $ toProcSymbol
(rawId rsy, mapProcProfile sm cm p)) p)
(foldr (\ (rsy2, p) ->
insertProcSym sm rel cm pn (fromJust rsy2) p) m l)
$ rs ++ os
_ -> m
lookupProcSymbol :: CspRawMap -> Id -> ProcProfile
-> Maybe CspRawSymbol
lookupProcSymbol rmap pn p = case
filter (\ (k, _) -> case k of
ACspSymbol (CspSymbol i (ProcAsItemType pf)) ->
i == pn && matchProcTypes p pf
_ -> False) $ Map.toList rmap of
[(_, r)] -> Just r
[] -> Nothing
-- in case of ambiguities try to find an exact match
l -> lookup (ACspSymbol $ toProcSymbol (pn, p)) l
insertProcSym :: Sort_map -> Rel.Rel SORT -> ChanMap -> Id -> CspRawSymbol
-> ProcProfile -> Result ProcessMap -> Result ProcessMap
insertProcSym sm rel cm pn rsy pf@(ProcProfile _ al) m = do
m1 <- m
(p1, al1) <- mappedProcSym sm rel cm pn pf rsy
let otsy = toProcSymbol (pn, pf)
pos = getRange rsy
m2 = Map.insert (pn, pf) p1 m1
case Map.lookup (pn, pf) m1 of
Nothing -> if pn == p1 && al == al1 then
case rsy of
ACspSymbol _ -> return m1
_ -> hint m1 ("identity mapping of "
++ showDoc otsy "") pos
else return m2
Just p2 -> if p1 == p2 then
warning m1
("ignoring duplicate mapping of " ++ showDoc otsy "")
pos
else plain_error m1
("conflicting mapping of " ++ showDoc otsy " to " ++
show p1 ++ " and " ++ show p2) pos
mappedProcSym :: Sort_map -> Rel.Rel SORT -> ChanMap -> Id
-> ProcProfile -> CspRawSymbol -> Result (Id, CommAlpha)
mappedProcSym sm rel cm pn pfSrc rsy =
let procSym = "process symbol " ++ showDoc (toProcSymbol (pn, pfSrc))
" is mapped to "
pfMapped@(ProcProfile _ al2) = reduceProcProfile rel
$ mapProcProfile sm cm pfSrc
in case rsy of
ACspSymbol (CspSymbol ide (ProcAsItemType pf)) ->
let pfTar@(ProcProfile _ al1) = reduceProcProfile rel pf
in if compatibleProcTypes rel pfMapped pfTar
then return (ide, al1)
else plain_error (pn, al2)
(procSym ++ "type " ++ showDoc pfTar
"\nbut should be mapped to type " ++
showDoc pfMapped
"\npossibly using a sub-alphabet of " ++
showDoc (closeCspCommAlpha rel al2) ".")
$ getRange rsy
CspKindedSymb k ide | elem k [CaslKind Implicit, ProcessKind] ->
return (ide, al2)
_ -> plain_error (pn, al2)
(procSym ++ "symbol of wrong kind: " ++ showDoc rsy "")
$ getRange rsy
-- | Two process profiles are compatible when their argument sort lists are
-- identical and every communication of the second alphabet is related to
-- one of the first (see 'liamsRelatedCommAlpha').
compatibleProcTypes :: Rel.Rel SORT -> ProcProfile -> ProcProfile -> Bool
compatibleProcTypes rel (ProcProfile argsL alphaL) (ProcProfile argsR alphaR)
  | argsL /= argsR = False
  | otherwise = liamsRelatedCommAlpha rel alphaL alphaR
-- | True when every communication type in the second alphabet is related
-- (via 'liamsRelatedCommTypes') to at least one member of the first.
liamsRelatedCommAlpha :: Rel.Rel SORT -> CommAlpha -> CommAlpha -> Bool
liamsRelatedCommAlpha rel sourceAlpha targetAlpha =
    all coveredBySource (Set.toList targetAlpha)
  where
    coveredBySource tgt =
        any (\ src -> liamsRelatedCommTypes rel src tgt) (Set.toList sourceAlpha)
-- | Relatedness of two communication types: two sorts are related when
-- equal or the first is a subsort (successor in the relation) of the
-- second; two typed channels must agree on both name and sort; mixed
-- combinations are never related.
liamsRelatedCommTypes :: Rel.Rel SORT -> CommType -> CommType -> Bool
liamsRelatedCommTypes rel ct1 ct2 = case (ct1, ct2) of
  (CommTypeSort s1, CommTypeSort s2)
    -> s1 == s2 || s1 `Set.member` Rel.succs rel s2
  (CommTypeChan (TypedChanName c1 s1), CommTypeChan (TypedChanName c2 s2))
    -> c1 == c2 && s1 == s2
  _ -> False
-- | Loose matching of process profiles for symbol-map lookup: argument
-- sorts must coincide, and the alphabets must either overlap or one of
-- them be empty (an empty alphabet matches anything).
matchProcTypes :: ProcProfile -> ProcProfile -> Bool
matchProcTypes (ProcProfile l1 al1) (ProcProfile l2 al2) = l1 == l2
  && (Set.null al2 || Set.null al1 || not (Set.null $ Set.intersection al1 al2))
cspMatches :: CspSymbol -> CspRawSymbol -> Bool
cspMatches (CspSymbol i t) rsy = case rsy of
ACspSymbol (CspSymbol j t2) -> i == j && case (t, t2) of
(CaslSymbType t1, CaslSymbType t3) -> matches (Symbol i t1)
$ ASymbol $ Symbol j t3
(ChanAsItemType s1, ChanAsItemType s2) -> s1 == s2
(ProcAsItemType p1, ProcAsItemType p2) -> matchProcTypes p1 p2
_ -> False
CspKindedSymb k j -> let res = i == j in case (k, t) of
(CaslKind ck, CaslSymbType t1) -> matches (Symbol i t1)
$ AKindedSymb ck j
(ChannelKind, ChanAsItemType _) -> res
(ProcessKind, ProcAsItemType _) -> res
(CaslKind Implicit, _) -> res
_ -> False
-- | All sorts mentioned by a process profile: its argument sorts together
-- with the sort of every communication in its alphabet.
procProfile2Sorts :: ProcProfile -> Set.Set SORT
procProfile2Sorts (ProcProfile argSorts alpha) =
  Set.fromList argSorts `Set.union` Set.map commType2Sort alpha
cspRevealSym :: CspSymbol -> CspCASLSign -> CspCASLSign
cspRevealSym sy sig = let
n = cspSymName sy
r = sortRel sig
ext = extendedInfo sig
cs = chans ext
in case cspSymbType sy of
CaslSymbType t -> revealSym (Symbol n t) sig
ChanAsItemType s -> sig
{ sortRel = Rel.insertKey s r
, extendedInfo = ext { chans = MapSet.insert n s cs }}
ProcAsItemType p@(ProcProfile _ al) -> sig
{ sortRel = Rel.union (Rel.fromKeysSet $ procProfile2Sorts p) r
, extendedInfo = ext
{ chans = Set.fold (\ ct -> case ct of
CommTypeSort _ -> id
CommTypeChan (TypedChanName c s) -> MapSet.insert c s) cs al
, procSet = MapSet.insert n p $ procSet ext }
}
cspGeneratedSign :: Set.Set CspSymbol -> CspCASLSign -> Result CspCASLMorphism
cspGeneratedSign sys sigma = let
symset = Set.unions $ symSets sigma
sigma1 = Set.fold cspRevealSym sigma
{ sortRel = Rel.empty
, opMap = MapSet.empty
, predMap = MapSet.empty
, extendedInfo = emptyCspSign } sys
sigma2 = sigma1
{ sortRel = sortRel sigma `Rel.restrict` sortSet sigma1
, emptySortSet = Set.intersection (sortSet sigma1) $ emptySortSet sigma }
in if not $ Set.isSubsetOf sys symset
then let diffsyms = sys Set.\\ symset in
fatal_error ("Revealing: The following symbols "
++ showDoc diffsyms " are not in the signature")
$ getRange diffsyms
else cspSubsigInclusion sigma2 sigma
cspCogeneratedSign :: Set.Set CspSymbol -> CspCASLSign -> Result CspCASLMorphism
cspCogeneratedSign symset sigma = let
symset0 = Set.unions $ symSets sigma
symset1 = Set.fold cspHideSym symset0 symset
in if Set.isSubsetOf symset symset0
then cspGeneratedSign symset1 sigma
else let diffsyms = symset Set.\\ symset0 in
fatal_error ("Hiding: The following symbols "
++ showDoc diffsyms " are not in the signature")
$ getRange diffsyms
cspHideSym :: CspSymbol -> Set.Set CspSymbol -> Set.Set CspSymbol
cspHideSym sy set1 = let
set2 = Set.delete sy set1
n = cspSymName sy
in case cspSymbType sy of
CaslSymbType SortAsItemType ->
Set.filter (not . cspProfileContains n . cspSymbType) set2
ChanAsItemType s ->
Set.filter (unusedChan n s) set2
_ -> set2
-- | Does a symbol type mention the given sort? Delegates to the CASL check
-- for plain symbols, compares the carrier sort for channels, and looks
-- through all sorts of a process profile.
cspProfileContains :: Id -> CspSymbType -> Bool
cspProfileContains s ty = case ty of
  CaslSymbType t -> profileContainsSort s t
  ChanAsItemType s2 -> s == s2
  ProcAsItemType p -> Set.member s $ procProfile2Sorts p
-- | True when the symbol does not use the typed channel @(c, s)@: only a
-- process symbol can use a channel, via a channel communication of exactly
-- that name and sort in its alphabet.
unusedChan :: Id -> SORT -> CspSymbol -> Bool
unusedChan c s sy = case cspSymbType sy of
    ProcAsItemType (ProcProfile _ al) ->
        Set.fold (\ ct b -> case ct of
          CommTypeSort _ -> b
          CommTypeChan (TypedChanName c2 s2) -> b && (c, s) /= (c2, s2)) True al
    _ -> True
| keithodulaigh/Hets | CspCASL/SymMapAna.hs | gpl-2.0 | 14,427 | 0 | 22 | 4,038 | 4,843 | 2,414 | 2,429 | 322 | 9 |
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
This module converts Template Haskell syntax into HsSyn
-}
{-# LANGUAGE CPP #-}
module Convert( convertToHsExpr, convertToPat, convertToHsDecls,
convertToHsType,
thRdrNameGuesses ) where
import HsSyn as Hs
import HsTypes ( mkHsForAllTy )
import qualified Class
import RdrName
import qualified Name
import Module
import RdrHsSyn
import qualified OccName
import OccName
import SrcLoc
import Type
import qualified Coercion ( Role(..) )
import TysWiredIn
import TysPrim (eqPrimTyCon)
import BasicTypes as Hs
import ForeignCall
import Unique
import ErrUtils
import Bag
import Lexeme
import Util
import FastString
import Outputable
import qualified Data.ByteString as BS
import Control.Monad( unless, liftM, ap )
#if __GLASGOW_HASKELL__ < 709
import Control.Applicative (Applicative(..))
#endif
import Data.Char ( chr )
import Data.Word ( Word8 )
import Data.Maybe( catMaybes )
import Language.Haskell.TH as TH hiding (sigP)
import Language.Haskell.TH.Syntax as TH
-------------------------------------------------------------------
-- The external interface
-- | Convert a list of TH declarations to HsSyn declarations at the given
-- splice location; any 'Nothing' results from 'cvtDec' are dropped, and
-- each declaration's errors are wrapped with a "When splicing" context.
convertToHsDecls :: SrcSpan -> [TH.Dec] -> Either MsgDoc [LHsDecl RdrName]
convertToHsDecls loc ds = initCvt loc (fmap catMaybes (mapM cvt_dec ds))
  where
    cvt_dec d = wrapMsg "declaration" d (cvtDec d)
-- | Convert a TH expression to an HsSyn expression at the splice location.
convertToHsExpr :: SrcSpan -> TH.Exp -> Either MsgDoc (LHsExpr RdrName)
convertToHsExpr loc e
  = initCvt loc $ wrapMsg "expression" e $ cvtl e
-- | Convert a TH pattern to an HsSyn pattern at the splice location.
convertToPat :: SrcSpan -> TH.Pat -> Either MsgDoc (LPat RdrName)
convertToPat loc p
  = initCvt loc $ wrapMsg "pattern" p $ cvtPat p
-- | Convert a TH type to an HsSyn type at the splice location.
convertToHsType :: SrcSpan -> TH.Type -> Either MsgDoc (LHsType RdrName)
convertToHsType loc t
  = initCvt loc $ wrapMsg "type" t $ cvtType t
-------------------------------------------------------------------
newtype CvtM a = CvtM { unCvtM :: SrcSpan -> Either MsgDoc (SrcSpan, a) }
-- Push down the source location;
-- Can fail, with a single error message
-- NB: If the conversion succeeds with (Right x), there should
-- be no exception values hiding in x
-- Reason: so a (head []) in TH code doesn't subsequently
-- make GHC crash when it tries to walk the generated tree
-- Use the loc everywhere, for lack of anything better
-- In particular, we want it on binding locations, so that variables bound in
-- the spliced-in declarations get a location that at least relates to the splice point
-- CvtM threads the current SrcSpan and can fail with one error message;
-- Functor/Applicative are defined via the Monad instance.
instance Functor CvtM where
    fmap = liftM
instance Applicative CvtM where
    pure = return
    (<*>) = ap
instance Monad CvtM where
  return x = CvtM $ \loc -> Right (loc,x)
  -- Bind propagates the (possibly updated) location from the first action
  -- into the continuation, short-circuiting on error.
  (CvtM m) >>= k = CvtM $ \loc -> case m loc of
                                  Left err -> Left err
                                  Right (loc',v) -> unCvtM (k v) loc'
-- | Run a conversion with an initial location, discarding the final one.
initCvt :: SrcSpan -> CvtM a -> Either MsgDoc a
initCvt loc (CvtM m) = fmap snd (m loc)
-- | Force a value to WHNF inside CvtM (guards against hidden exceptions in
-- the converted tree).
force :: a -> CvtM ()
force a = a `seq` return ()
-- | Fail the whole conversion with the given message.
failWith :: MsgDoc -> CvtM a
failWith m = CvtM (\_ -> Left m)
-- | Read the current source location.
getL :: CvtM SrcSpan
getL = CvtM (\loc -> Right (loc,loc))
-- | Replace the current source location.
setL :: SrcSpan -> CvtM ()
setL loc = CvtM (\_ -> Right (loc, ()))
-- | Attach the current location to a value.
returnL :: a -> CvtM (Located a)
returnL x = CvtM (\loc -> Right (loc, L loc x))
-- | 'returnL' wrapped in 'Just' (for optional results).
returnJustL :: a -> CvtM (Maybe (Located a))
returnJustL = fmap Just . returnL
-- | Apply a location-consuming wrapper (e.g. parenthesisation) to a value
-- located at the current span.
wrapParL :: (Located a -> a) -> a -> CvtM a
wrapParL add_par x = CvtM (\loc -> Right (loc, add_par (L loc x)))
-- | Decorate any failure of the inner conversion with a
-- "When splicing a TH <what>: <item>" context.
wrapMsg :: (Show a, TH.Ppr a) => String -> a -> CvtM b -> CvtM b
-- E.g wrapMsg "declaration" dec thing
wrapMsg what item (CvtM m)
  = CvtM (\loc -> case m loc of
                     Left err -> Left (err $$ getPprStyle msg)
                     Right v -> Right v)
  where
        -- Show the item in pretty syntax normally,
        -- but with all its constructors if you say -dppr-debug
    msg sty = hang (ptext (sLit "When splicing a TH") <+> text what <> colon)
                 2 (if debugStyle sty
                    then text (show item)
                    else text (pprint item))
-- | Run a conversion and attach the location current at entry to its
-- result (the inner action may still advance the location for later code).
wrapL :: CvtM a -> CvtM (Located a)
wrapL (CvtM m) = CvtM (\loc -> case m loc of
                          Left err -> Left err
                          Right (loc',v) -> Right (loc',L loc v))
-------------------------------------------------------------------
cvtDecs :: [TH.Dec] -> CvtM [LHsDecl RdrName]
cvtDecs = fmap catMaybes . mapM cvtDec
cvtDec :: TH.Dec -> CvtM (Maybe (LHsDecl RdrName))
cvtDec (TH.ValD pat body ds)
| TH.VarP s <- pat
= do { s' <- vNameL s
; cl' <- cvtClause (Clause [] body ds)
; returnJustL $ Hs.ValD $ mkFunBind s' [cl'] }
| otherwise
= do { pat' <- cvtPat pat
; body' <- cvtGuard body
; ds' <- cvtLocalDecs (ptext (sLit "a where clause")) ds
; returnJustL $ Hs.ValD $
PatBind { pat_lhs = pat', pat_rhs = GRHSs body' ds'
, pat_rhs_ty = placeHolderType, bind_fvs = placeHolderNames
, pat_ticks = ([],[]) } }
cvtDec (TH.FunD nm cls)
| null cls
= failWith (ptext (sLit "Function binding for")
<+> quotes (text (TH.pprint nm))
<+> ptext (sLit "has no equations"))
| otherwise
= do { nm' <- vNameL nm
; cls' <- mapM cvtClause cls
; returnJustL $ Hs.ValD $ mkFunBind nm' cls' }
cvtDec (TH.SigD nm typ)
= do { nm' <- vNameL nm
; ty' <- cvtType typ
; returnJustL $ Hs.SigD (TypeSig [nm'] ty' PlaceHolder) }
cvtDec (TH.InfixD fx nm)
-- fixity signatures are allowed for variables, constructors, and types
-- the renamer automatically looks for types during renaming, even when
-- the RdrName says it's a variable or a constructor. So, just assume
-- it's a variable or constructor and proceed.
= do { nm' <- vcNameL nm
; returnJustL (Hs.SigD (FixSig (FixitySig [nm'] (cvtFixity fx)))) }
cvtDec (PragmaD prag)
= cvtPragmaD prag
cvtDec (TySynD tc tvs rhs)
= do { (_, tc', tvs') <- cvt_tycl_hdr [] tc tvs
; rhs' <- cvtType rhs
; returnJustL $ TyClD $
SynDecl { tcdLName = tc'
, tcdTyVars = tvs', tcdFVs = placeHolderNames
, tcdRhs = rhs' } }
cvtDec (DataD ctxt tc tvs constrs derivs)
= do { (ctxt', tc', tvs') <- cvt_tycl_hdr ctxt tc tvs
; cons' <- mapM cvtConstr constrs
; derivs' <- cvtDerivs derivs
; let defn = HsDataDefn { dd_ND = DataType, dd_cType = Nothing
, dd_ctxt = ctxt'
, dd_kindSig = Nothing
, dd_cons = cons', dd_derivs = derivs' }
; returnJustL $ TyClD (DataDecl { tcdLName = tc', tcdTyVars = tvs'
, tcdDataDefn = defn
, tcdFVs = placeHolderNames }) }
cvtDec (NewtypeD ctxt tc tvs constr derivs)
= do { (ctxt', tc', tvs') <- cvt_tycl_hdr ctxt tc tvs
; con' <- cvtConstr constr
; derivs' <- cvtDerivs derivs
; let defn = HsDataDefn { dd_ND = NewType, dd_cType = Nothing
, dd_ctxt = ctxt'
, dd_kindSig = Nothing
, dd_cons = [con']
, dd_derivs = derivs' }
; returnJustL $ TyClD (DataDecl { tcdLName = tc', tcdTyVars = tvs'
, tcdDataDefn = defn
, tcdFVs = placeHolderNames }) }
cvtDec (ClassD ctxt cl tvs fds decs)
= do { (cxt', tc', tvs') <- cvt_tycl_hdr ctxt cl tvs
; fds' <- mapM cvt_fundep fds
; (binds', sigs', fams', ats', adts') <- cvt_ci_decs (ptext (sLit "a class declaration")) decs
; unless (null adts')
(failWith $ (ptext (sLit "Default data instance declarations are not allowed:"))
$$ (Outputable.ppr adts'))
; at_defs <- mapM cvt_at_def ats'
; returnJustL $ TyClD $
ClassDecl { tcdCtxt = cxt', tcdLName = tc', tcdTyVars = tvs'
, tcdFDs = fds', tcdSigs = sigs', tcdMeths = binds'
, tcdATs = fams', tcdATDefs = at_defs, tcdDocs = []
, tcdFVs = placeHolderNames }
-- no docs in TH ^^
}
where
cvt_at_def :: LTyFamInstDecl RdrName -> CvtM (LTyFamDefltEqn RdrName)
-- Very similar to what happens in RdrHsSyn.mkClassDecl
cvt_at_def decl = case RdrHsSyn.mkATDefault decl of
Right def -> return def
Left (_, msg) -> failWith msg
cvtDec (InstanceD ctxt ty decs)
= do { let doc = ptext (sLit "an instance declaration")
; (binds', sigs', fams', ats', adts') <- cvt_ci_decs doc decs
; unless (null fams') (failWith (mkBadDecMsg doc fams'))
; ctxt' <- cvtContext ctxt
; L loc ty' <- cvtType ty
; let inst_ty' = L loc $ mkHsForAllTy Implicit [] ctxt' $ L loc ty'
; returnJustL $ InstD $ ClsInstD $
ClsInstDecl inst_ty' binds' sigs' ats' adts' Nothing }
cvtDec (ForeignD ford)
= do { ford' <- cvtForD ford
; returnJustL $ ForD ford' }
cvtDec (DataFamilyD tc tvs kind)
= do { (_, tc', tvs') <- cvt_tycl_hdr [] tc tvs
; result <- cvtMaybeKindToFamilyResultSig kind
; returnJustL $ TyClD $ FamDecl $
FamilyDecl DataFamily tc' tvs' result Nothing }
cvtDec (DataInstD ctxt tc tys constrs derivs)
= do { (ctxt', tc', typats') <- cvt_tyinst_hdr ctxt tc tys
; cons' <- mapM cvtConstr constrs
; derivs' <- cvtDerivs derivs
; let defn = HsDataDefn { dd_ND = DataType, dd_cType = Nothing
, dd_ctxt = ctxt'
, dd_kindSig = Nothing
, dd_cons = cons', dd_derivs = derivs' }
; returnJustL $ InstD $ DataFamInstD
{ dfid_inst = DataFamInstDecl { dfid_tycon = tc', dfid_pats = typats'
, dfid_defn = defn
, dfid_fvs = placeHolderNames } }}
cvtDec (NewtypeInstD ctxt tc tys constr derivs)
= do { (ctxt', tc', typats') <- cvt_tyinst_hdr ctxt tc tys
; con' <- cvtConstr constr
; derivs' <- cvtDerivs derivs
; let defn = HsDataDefn { dd_ND = NewType, dd_cType = Nothing
, dd_ctxt = ctxt'
, dd_kindSig = Nothing
, dd_cons = [con'], dd_derivs = derivs' }
; returnJustL $ InstD $ DataFamInstD
{ dfid_inst = DataFamInstDecl { dfid_tycon = tc', dfid_pats = typats'
, dfid_defn = defn
, dfid_fvs = placeHolderNames } }}
cvtDec (TySynInstD tc eqn)
= do { tc' <- tconNameL tc
; eqn' <- cvtTySynEqn tc' eqn
; returnJustL $ InstD $ TyFamInstD
{ tfid_inst = TyFamInstDecl { tfid_eqn = eqn'
, tfid_fvs = placeHolderNames } } }
cvtDec (OpenTypeFamilyD tc tvs result injectivity)
= do { (_, tc', tvs') <- cvt_tycl_hdr [] tc tvs
; result' <- cvtFamilyResultSig result
; injectivity' <- traverse cvtInjectivityAnnotation injectivity
; returnJustL $ TyClD $ FamDecl $
FamilyDecl OpenTypeFamily tc' tvs' result' injectivity' }
cvtDec (ClosedTypeFamilyD tc tyvars result injectivity eqns)
= do { (_, tc', tvs') <- cvt_tycl_hdr [] tc tyvars
; result' <- cvtFamilyResultSig result
; eqns' <- mapM (cvtTySynEqn tc') eqns
; injectivity' <- traverse cvtInjectivityAnnotation injectivity
; returnJustL $ TyClD $ FamDecl $
FamilyDecl (ClosedTypeFamily (Just eqns')) tc' tvs' result'
injectivity' }
cvtDec (TH.RoleAnnotD tc roles)
= do { tc' <- tconNameL tc
; let roles' = map (noLoc . cvtRole) roles
; returnJustL $ Hs.RoleAnnotD (RoleAnnotDecl tc' roles') }
cvtDec (TH.StandaloneDerivD cxt ty)
= do { cxt' <- cvtContext cxt
; L loc ty' <- cvtType ty
; let inst_ty' = L loc $ mkHsForAllTy Implicit [] cxt' $ L loc ty'
; returnJustL $ DerivD $
DerivDecl { deriv_type = inst_ty', deriv_overlap_mode = Nothing } }
cvtDec (TH.DefaultSigD nm typ)
= do { nm' <- vNameL nm
; ty' <- cvtType typ
; returnJustL $ Hs.SigD $ GenericSig [nm'] ty' }
----------------
-- | Convert a TH type-family equation (LHS argument types plus RHS type)
-- into a located 'TyFamEqn' for the given, already-converted, family tycon.
cvtTySynEqn :: Located RdrName -> TySynEqn -> CvtM (LTyFamInstEqn RdrName)
cvtTySynEqn tc (TySynEqn lhs rhs)
  = do  { lhs' <- mapM cvtType lhs
        ; rhs' <- cvtType rhs
        ; returnL $ TyFamEqn { tfe_tycon = tc
                             , tfe_pats  = mkHsWithBndrs lhs'
                             , tfe_rhs   = rhs' } }
----------------
-- | Convert and partition the declarations occurring inside a class or
-- instance declaration.  Anything other than bindings, signatures and
-- associated-family declarations/instances is rejected with 'failWith'.
cvt_ci_decs :: MsgDoc -> [TH.Dec]
            -> CvtM (LHsBinds RdrName,
                     [LSig RdrName],
                     [LFamilyDecl RdrName],
                     [LTyFamInstDecl RdrName],
                     [LDataFamInstDecl RdrName])
-- Convert the declarations inside a class or instance decl
-- ie signatures, bindings, and associated types
cvt_ci_decs doc decs
  = do  { decs' <- cvtDecs decs
        -- Successive partitionWith passes peel off one declaration kind
        -- at a time; whatever is left over is illegal in this context
        ; let (ats', bind_sig_decs') = partitionWith is_tyfam_inst decs'
        ; let (adts', no_ats')       = partitionWith is_datafam_inst bind_sig_decs'
        ; let (sigs', prob_binds')   = partitionWith is_sig no_ats'
        ; let (binds', prob_fams')   = partitionWith is_bind prob_binds'
        ; let (fams', bads)          = partitionWith is_fam_decl prob_fams'
        ; unless (null bads) (failWith (mkBadDecMsg doc bads))
          --We use FromSource as the origin of the bind
          -- because the TH declaration is user-written
        ; return (listToBag binds', sigs', fams', ats', adts') }
----------------
-- | Convert the header of a type/class declaration: its context, its
-- tycon name, and its type-variable binders.
cvt_tycl_hdr :: TH.Cxt -> TH.Name -> [TH.TyVarBndr]
             -> CvtM ( LHsContext RdrName
                     , Located RdrName
                     , LHsTyVarBndrs RdrName)
cvt_tycl_hdr cxt tc tvs
  = do { cxt' <- cvtContext cxt
       ; tc'  <- tconNameL tc
       ; tvs' <- cvtTvs tvs
       ; return (cxt', tc', tvs')
       }
-- | Convert the header of a family-instance declaration: its context,
-- the family tycon name, and the instance's type patterns.
cvt_tyinst_hdr :: TH.Cxt -> TH.Name -> [TH.Type]
               -> CvtM ( LHsContext RdrName
                       , Located RdrName
                       , HsWithBndrs RdrName [LHsType RdrName])
cvt_tyinst_hdr cxt tc tys
  = do { cxt' <- cvtContext cxt
       ; tc'  <- tconNameL tc
       ; tys' <- mapM cvtType tys
       ; return (cxt', tc', mkHsWithBndrs tys') }
-------------------------------------------------------------------
--              Partitioning declarations
-------------------------------------------------------------------
-- Each predicate below returns Left (the payload) when the declaration
-- has the kind in question, and Right (the untouched declaration)
-- otherwise, for use with partitionWith.

-- | Select family declarations.
is_fam_decl :: LHsDecl RdrName -> Either (LFamilyDecl RdrName) (LHsDecl RdrName)
is_fam_decl (L loc (TyClD (FamDecl { tcdFam = d }))) = Left (L loc d)
is_fam_decl decl = Right decl

-- | Select type-family instances.
is_tyfam_inst :: LHsDecl RdrName -> Either (LTyFamInstDecl RdrName) (LHsDecl RdrName)
is_tyfam_inst (L loc (Hs.InstD (TyFamInstD { tfid_inst = d }))) = Left (L loc d)
is_tyfam_inst decl = Right decl

-- | Select data-family instances.
is_datafam_inst :: LHsDecl RdrName -> Either (LDataFamInstDecl RdrName) (LHsDecl RdrName)
is_datafam_inst (L loc (Hs.InstD (DataFamInstD { dfid_inst = d }))) = Left (L loc d)
is_datafam_inst decl = Right decl

-- | Select type signatures.
is_sig :: LHsDecl RdrName -> Either (LSig RdrName) (LHsDecl RdrName)
is_sig (L loc (Hs.SigD sig)) = Left (L loc sig)
is_sig decl = Right decl

-- | Select value bindings.
is_bind :: LHsDecl RdrName -> Either (LHsBind RdrName) (LHsDecl RdrName)
is_bind (L loc (Hs.ValD bind)) = Left (L loc bind)
is_bind decl = Right decl
-- | Error message listing declarations that are illegal in the context
-- described by @doc@.
mkBadDecMsg :: Outputable a => MsgDoc -> [a] -> MsgDoc
mkBadDecMsg doc bads
  = sep [ ptext (sLit "Illegal declaration(s) in") <+> doc <> colon
        , nest 2 (vcat (map Outputable.ppr bads)) ]
---------------------------------------------------
--      Data types
-- Can't handle GADTs yet
---------------------------------------------------
-- | Convert a TH data-constructor declaration.  A 'ForallC' wrapper is
-- flattened into the inner constructor by prepending its binders and
-- context to whatever the inner constructor already carries.
cvtConstr :: TH.Con -> CvtM (LConDecl RdrName)
cvtConstr (NormalC c strtys)
  = do  { c'   <- cNameL c
        ; cxt' <- returnL []
        ; tys' <- mapM cvt_arg strtys
        ; returnL $ mkSimpleConDecl c' noExistentials cxt' (PrefixCon tys') }
cvtConstr (RecC c varstrtys)
  = do  { c'    <- cNameL c
        ; cxt'  <- returnL []
        ; args' <- mapM cvt_id_arg varstrtys
        ; returnL $ mkSimpleConDecl c' noExistentials cxt'
                                    (RecCon (noLoc args')) }
cvtConstr (InfixC st1 c st2)
  = do  { c'   <- cNameL c
        ; cxt' <- returnL []
        ; st1' <- cvt_arg st1
        ; st2' <- cvt_arg st2
        ; returnL $ mkSimpleConDecl c' noExistentials cxt' (InfixCon st1' st2') }
cvtConstr (ForallC tvs ctxt con)
  = do  { tvs'        <- cvtTvs tvs
        ; L loc ctxt' <- cvtContext ctxt
        ; L _ con'    <- cvtConstr con
        -- Merge the explicit binders and context into the inner
        -- constructor's own binders and context
        ; returnL $ con' { con_qvars = mkHsQTvs (hsQTvBndrs tvs' ++ hsQTvBndrs (con_qvars con'))
                         , con_cxt = L loc (ctxt' ++ (unLoc $ con_cxt con')) } }
-- | Convert a constructor argument: a strictness annotation paired with
-- its type.  Strict and unpacked arguments become 'HsBangTy' wrappers.
cvt_arg :: (TH.Strict, TH.Type) -> CvtM (LHsType RdrName)
cvt_arg (NotStrict, ty) = cvtType ty
cvt_arg (IsStrict,  ty)
  = do { ty' <- cvtType ty
       ; returnL $ HsBangTy (HsSrcBang Nothing NoSrcUnpack SrcStrict) ty' }
cvt_arg (Unpacked,  ty)
  = do { ty' <- cvtType ty
       ; returnL $ HsBangTy (HsSrcBang Nothing SrcUnpack SrcStrict) ty' }

-- | Convert a record-constructor field: its name, strictness and type.
cvt_id_arg :: (TH.Name, TH.Strict, TH.Type) -> CvtM (LConDeclField RdrName)
cvt_id_arg (i, str, ty)
  = do  { i'  <- vNameL i
        ; ty' <- cvt_arg (str,ty)
        ; return $ noLoc (ConDeclField { cd_fld_names = [i']
                                       , cd_fld_type  = ty'
                                       , cd_fld_doc   = Nothing}) }
-- | Convert a deriving clause; an empty class list means no deriving
-- clause at all, hence 'Nothing'.
cvtDerivs :: [TH.Name] -> CvtM (Maybe (Located [LHsType RdrName]))
cvtDerivs [] = return Nothing
cvtDerivs cs = do { cs' <- mapM cvt_one cs
                  ; return (Just (noLoc cs')) }
        where
          cvt_one c = do { c' <- tconName c
                         ; returnL $ HsTyVar c' }

-- | Convert a functional dependency (LHS tyvars determine RHS tyvars).
cvt_fundep :: FunDep -> CvtM (Located (Class.FunDep (Located RdrName)))
cvt_fundep (FunDep xs ys) = do { xs' <- mapM tName xs
                               ; ys' <- mapM tName ys
                               ; returnL (map noLoc xs', map noLoc ys') }

-- | Constructors converted here carry no existentials of their own;
-- 'ForallC' existentials are merged separately in 'cvtConstr'.
noExistentials :: [LHsTyVarBndr RdrName]
noExistentials = []
------------------------------------------
--      Foreign declarations
------------------------------------------
-- | Convert a TH foreign import/export declaration.  Import entity
-- strings are parsed with 'parseCImport', except for the @prim@ and
-- @javascript@ conventions, whose entities are taken verbatim.
cvtForD :: Foreign -> CvtM (ForeignDecl RdrName)
cvtForD (ImportF callconv safety from nm ty)
  -- the prim and javascript calling conventions do not support headers
  -- and are inserted verbatim, analogous to mkImport in RdrHsSyn
  | callconv == TH.Prim || callconv == TH.JavaScript
  = mk_imp (CImport (noLoc (cvt_conv callconv)) (noLoc safety') Nothing
                    (CFunction (StaticTarget from (mkFastString from) Nothing
                                             True))
                    (noLoc from))
  | Just impspec <- parseCImport (noLoc (cvt_conv callconv)) (noLoc safety')
                                 (mkFastString (TH.nameBase nm))
                                 from (noLoc from)
  = mk_imp impspec
  | otherwise
  = failWith $ text (show from) <+> ptext (sLit "is not a valid ccall impent")
  where
    -- Build the ForeignImport once the import specification is known
    mk_imp impspec
      = do { nm' <- vNameL nm
           ; ty' <- cvtType ty
           ; return (ForeignImport nm' ty' noForeignImportCoercionYet impspec)
           }
    safety' = case safety of
                Unsafe        -> PlayRisky
                Safe          -> PlaySafe
                Interruptible -> PlayInterruptible
cvtForD (ExportF callconv as nm ty)
  = do { nm' <- vNameL nm
       ; ty' <- cvtType ty
       ; let e = CExport (noLoc (CExportStatic as
                                               (mkFastString as)
                                               (cvt_conv callconv)))
                         (noLoc as)
       ; return $ ForeignExport nm' ty' noForeignExportCoercionYet e }
-- | Map a TH calling convention onto GHC's 'CCallConv'.
cvt_conv :: TH.Callconv -> CCallConv
cvt_conv conv = case conv of
  TH.CCall      -> CCallConv
  TH.StdCall    -> StdCallConv
  TH.CApi       -> CApiConv
  TH.Prim       -> PrimCallConv
  TH.JavaScript -> JavaScriptCallConv
------------------------------------------
--      Pragmas
------------------------------------------
-- | Convert a TH pragma.  Returns 'Nothing' for LINE pragmas, which
-- produce no declaration and only update the conversion monad's notion
-- of the current source location.
cvtPragmaD :: Pragma -> CvtM (Maybe (LHsDecl RdrName))
cvtPragmaD (InlineP nm inline rm phases)
  = do { nm' <- vNameL nm
       ; let dflt = dfltActivation inline
       ; let ip   = InlinePragma { inl_src    = "{-# INLINE"
                                 , inl_inline = cvtInline inline
                                 , inl_rule   = cvtRuleMatch rm
                                 , inl_act    = cvtPhases phases dflt
                                 , inl_sat    = Nothing }
       ; returnJustL $ Hs.SigD $ InlineSig nm' ip }
cvtPragmaD (SpecialiseP nm ty inline phases)
  = do { nm' <- vNameL nm
       ; ty' <- cvtType ty
       -- A SPECIALISE pragma may carry its own inline specification;
       -- without one there is no inline spec and no phase restriction
       ; let (inline', dflt) = case inline of
               Just inline1 -> (cvtInline inline1, dfltActivation inline1)
               Nothing      -> (EmptyInlineSpec,   AlwaysActive)
       ; let ip = InlinePragma { inl_src    = "{-# INLINE"
                               , inl_inline = inline'
                               , inl_rule   = Hs.FunLike
                               , inl_act    = cvtPhases phases dflt
                               , inl_sat    = Nothing }
       ; returnJustL $ Hs.SigD $ SpecSig nm' [ty'] ip }
cvtPragmaD (SpecialiseInstP ty)
  = do { ty' <- cvtType ty
       ; returnJustL $ Hs.SigD $ SpecInstSig "{-# SPECIALISE" ty' }
cvtPragmaD (RuleP nm bndrs lhs rhs phases)
  = do { let nm' = mkFastString nm
       ; let act = cvtPhases phases AlwaysActive
       ; bndrs' <- mapM cvtRuleBndr bndrs
       ; lhs'   <- cvtl lhs
       ; rhs'   <- cvtl rhs
       ; returnJustL $ Hs.RuleD
            $ HsRules "{-# RULES" [noLoc $ HsRule (noLoc (nm,nm')) act bndrs'
                                                  lhs' placeHolderNames
                                                  rhs' placeHolderNames]
       }
cvtPragmaD (AnnP target exp)
  = do { exp' <- cvtl exp
       ; target' <- case target of
           ModuleAnnotation  -> return ModuleAnnProvenance
           TypeAnnotation n  -> do
             n' <- tconName n
             return (TypeAnnProvenance (noLoc n'))
           ValueAnnotation n -> do
             n' <- vcName n
             return (ValueAnnProvenance (noLoc n'))
       ; returnJustL $ Hs.AnnD $ HsAnnotation "{-# ANN" target' exp'
       }
cvtPragmaD (LineP line file)
  = do { setL (srcLocSpan (mkSrcLoc (fsLit file) line 1))
       -- No declaration; subsequent conversions pick up the new location
       ; return Nothing
       }
-- | Default phase activation for an inline specification: NOINLINE
-- pragmas are never active by default, everything else always is.
dfltActivation :: TH.Inline -> Activation
dfltActivation inl = case inl of
  TH.NoInline -> NeverActive
  _           -> AlwaysActive

-- | Map a TH inline specification onto GHC's 'Hs.InlineSpec'.
cvtInline :: TH.Inline -> Hs.InlineSpec
cvtInline inl = case inl of
  TH.NoInline  -> Hs.NoInline
  TH.Inline    -> Hs.Inline
  TH.Inlinable -> Hs.Inlinable

-- | Map a TH rule-match specification onto GHC's 'RuleMatchInfo'.
cvtRuleMatch :: TH.RuleMatch -> RuleMatchInfo
cvtRuleMatch rm = case rm of
  TH.ConLike -> Hs.ConLike
  TH.FunLike -> Hs.FunLike

-- | Convert a TH phase-control annotation, using the supplied default
-- activation when no explicit phase is given.
cvtPhases :: TH.Phases -> Activation -> Activation
cvtPhases phases dflt = case phases of
  AllPhases     -> dflt
  FromPhase i   -> ActiveAfter i
  BeforePhase i -> ActiveBefore i
-- | Convert a RULES-pragma binder (plain or type-annotated).
cvtRuleBndr :: TH.RuleBndr -> CvtM (Hs.LRuleBndr RdrName)
cvtRuleBndr (RuleVar n)
  = do { n' <- vNameL n
       ; return $ noLoc $ Hs.RuleBndr n' }
cvtRuleBndr (TypedRuleVar n ty)
  = do { n'  <- vNameL n
       ; ty' <- cvtType ty
       ; return $ noLoc $ Hs.RuleBndrSig n' $ mkHsWithBndrs ty' }
---------------------------------------------------
--              Declarations
---------------------------------------------------
-- | Convert the local declarations of a @let@/@where@; only value
-- bindings and signatures are legal there, anything else is reported
-- against @doc@.
cvtLocalDecs :: MsgDoc -> [TH.Dec] -> CvtM (HsLocalBinds RdrName)
cvtLocalDecs doc ds
  | null ds
  = return EmptyLocalBinds
  | otherwise
  = do { ds' <- cvtDecs ds
       ; let (binds, prob_sigs) = partitionWith is_bind ds'
       ; let (sigs, bads) = partitionWith is_sig prob_sigs
       ; unless (null bads) (failWith (mkBadDecMsg doc bads))
       ; return (HsValBinds (ValBindsIn (listToBag binds) sigs)) }
-- | Convert one clause of a function definition: its patterns, its
-- guards/body and its where-bindings.
cvtClause :: TH.Clause -> CvtM (Hs.LMatch RdrName (LHsExpr RdrName))
cvtClause (Clause ps body wheres)
  = do  { ps' <- cvtPats ps
        ; g'  <- cvtGuard body
        ; ds' <- cvtLocalDecs (ptext (sLit "a where clause")) wheres
        ; returnL $ Hs.Match Nothing ps' Nothing (GRHSs g' ds') }
-------------------------------------------------------------------
--              Expressions
-------------------------------------------------------------------
-- | Convert a TH expression into an HsSyn expression, attaching the
-- conversion monad's current source location to the result.
cvtl :: TH.Exp -> CvtM (LHsExpr RdrName)
cvtl e = wrapL (cvt e)
  where
    cvt (VarE s)   = do { s' <- vName s; return $ HsVar s' }
    cvt (ConE s)   = do { s' <- cName s; return $ HsVar s' }
    cvt (LitE l)
      | overloadedLit l = do { l' <- cvtOverLit l; return $ HsOverLit l' }
      | otherwise       = do { l' <- cvtLit l;     return $ HsLit l' }
    cvt (AppE x y)     = do { x' <- cvtl x; y' <- cvtl y; return $ HsApp x' y' }
    cvt (LamE ps e)    = do { ps' <- cvtPats ps; e' <- cvtl e
                            ; return $ HsLam (mkMatchGroup FromSource [mkSimpleMatch ps' e']) }
    cvt (LamCaseE ms)  = do { ms' <- mapM cvtMatch ms
                            ; return $ HsLamCase placeHolderType
                                                 (mkMatchGroup FromSource ms')
                            }
    cvt (TupE [e])     = do { e' <- cvtl e; return $ HsPar e' }
                              -- Note [Dropping constructors]
                              -- Singleton tuples treated like nothing (just parens)
    cvt (TupE es)        = do { es' <- mapM cvtl es
                              ; return $ ExplicitTuple (map (noLoc . Present) es')
                                                       Boxed }
    cvt (UnboxedTupE es) = do { es' <- mapM cvtl es
                              ; return $ ExplicitTuple
                                           (map (noLoc . Present) es') Unboxed }
    cvt (CondE x y z)  = do { x' <- cvtl x; y' <- cvtl y; z' <- cvtl z;
                            ; return $ HsIf (Just noSyntaxExpr) x' y' z' }
    cvt (MultiIfE alts)
      | null alts      = failWith (ptext (sLit "Multi-way if-expression with no alternatives"))
      | otherwise      = do { alts' <- mapM cvtpair alts
                            ; return $ HsMultiIf placeHolderType alts' }
    cvt (LetE ds e)    = do { ds' <- cvtLocalDecs (ptext (sLit "a let expression")) ds
                            ; e' <- cvtl e; return $ HsLet ds' e' }
    cvt (CaseE e ms)   = do { e' <- cvtl e; ms' <- mapM cvtMatch ms
                            ; return $ HsCase e' (mkMatchGroup FromSource ms') }
    cvt (DoE ss)       = cvtHsDo DoExpr ss
    cvt (CompE ss)     = cvtHsDo ListComp ss
    cvt (ArithSeqE dd) = do { dd' <- cvtDD dd; return $ ArithSeq noPostTcExpr Nothing dd' }
    cvt (ListE xs)
      -- A list of character literals becomes a string literal
      | Just s <- allCharLs xs = do { l' <- cvtLit (StringL s); return (HsLit l') }
             -- Note [Converting strings]
      | otherwise = do { xs' <- mapM cvtl xs
                       ; return $ ExplicitList placeHolderType Nothing xs'
                       }

    -- Infix expressions
    cvt (InfixE (Just x) s (Just y)) = do { x' <- cvtl x; s' <- cvtl s; y' <- cvtl y
                                          ; wrapParL HsPar $
                                            OpApp (mkLHsPar x') s' undefined (mkLHsPar y') }
                                            -- Parenthesise both arguments and result,
                                            -- to ensure this operator application does
                                            -- does not get re-associated
                                            -- See Note [Operator association]
    cvt (InfixE Nothing  s (Just y)) = do { s' <- cvtl s; y' <- cvtl y
                                          ; wrapParL HsPar $ SectionR s' y' }
                                            -- See Note [Sections in HsSyn] in HsExpr
    cvt (InfixE (Just x) s Nothing ) = do { x' <- cvtl x; s' <- cvtl s
                                          ; wrapParL HsPar $ SectionL x' s' }
    cvt (InfixE Nothing  s Nothing ) = do { s' <- cvtl s; return $ HsPar s' }
                                          -- Can I indicate this is an infix thing?
                                          -- Note [Dropping constructors]
    cvt (UInfixE x s y)  = do { x' <- cvtl x
                              -- keep an existing OpApp unparenthesised so the
                              -- rebuilt tree stays left-biased
                              ; let x'' = case x' of
                                            L _ (OpApp {}) -> x'
                                            _ -> mkLHsPar x'
                              ; cvtOpApp x'' s y } -- Note [Converting UInfix]
    cvt (ParensE e)      = do { e' <- cvtl e; return $ HsPar e' }
    cvt (SigE e t)       = do { e' <- cvtl e; t' <- cvtType t
                              ; return $ ExprWithTySig e' t' PlaceHolder }
    cvt (RecConE c flds) = do { c' <- cNameL c
                              ; flds' <- mapM cvtFld flds
                              ; return $ RecordCon c' noPostTcExpr (HsRecFields flds' Nothing)}
    cvt (RecUpdE e flds) = do { e' <- cvtl e
                              ; flds' <- mapM cvtFld flds
                              ; return $ RecordUpd e' (HsRecFields flds' Nothing) [] [] [] }
    cvt (StaticE e)      = fmap HsStatic $ cvtl e
{- Note [Dropping constructors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When we drop constructors from the input (for instance, when we encounter @TupE [e]@)
we must insert parentheses around the argument. Otherwise, @UInfix@ constructors in @e@
could meet @UInfix@ constructors containing the @TupE [e]@. For example:
UInfixE x * (TupE [UInfixE y + z])
If we drop the singleton tuple but don't insert parentheses, the @UInfixE@s would meet
and the above expression would be reassociated to
OpApp (OpApp x * y) + z
which we don't want.
-}
-- | Convert a record-field assignment (field name = expression).
cvtFld :: (TH.Name, TH.Exp) -> CvtM (LHsRecField RdrName (LHsExpr RdrName))
cvtFld (v,e)
  = do  { v' <- vNameL v; e' <- cvtl e
        ; return (noLoc $ HsRecField { hsRecFieldId = v', hsRecFieldArg = e'
                                     , hsRecPun = False}) }

-- | Convert an arithmetic-sequence range, e.g. @[x..]@, @[x,y..z]@.
cvtDD :: Range -> CvtM (ArithSeqInfo RdrName)
cvtDD (FromR x)           = do { x' <- cvtl x; return $ From x' }
cvtDD (FromThenR x y)     = do { x' <- cvtl x; y' <- cvtl y; return $ FromThen x' y' }
cvtDD (FromToR x y)       = do { x' <- cvtl x; y' <- cvtl y; return $ FromTo x' y' }
cvtDD (FromThenToR x y z) = do { x' <- cvtl x; y' <- cvtl y; z' <- cvtl z; return $ FromThenTo x' y' z' }
{- Note [Operator association]
We must be quite careful about adding parens:
* Infix (UInfix ...) op arg Needs parens round the first arg
* Infix (Infix ...) op arg Needs parens round the first arg
* UInfix (UInfix ...) op arg No parens for first arg
* UInfix (Infix ...) op arg Needs parens round first arg
Note [Converting UInfix]
~~~~~~~~~~~~~~~~~~~~~~~~
When converting @UInfixE@, @UInfixP@, and @UInfixT@ values, we want to readjust
the trees to reflect the fixities of the underlying operators:
UInfixE x * (UInfixE y + z) ---> (x * y) + z
This is done by the renamer (see @mkOppAppRn@, @mkConOppPatRn@, and
@mkHsOpTyRn@ in RnTypes), which expects that the input will be completely
right-biased for types and left-biased for everything else. So we left-bias the
trees of @UInfixP@ and @UInfixE@ and right-bias the trees of @UInfixT@.
Sample input:
UInfixE
(UInfixE x op1 y)
op2
(UInfixE z op3 w)
Sample output:
OpApp
(OpApp
(OpApp x op1 y)
op2
z)
op3
w
The functions @cvtOpApp@, @cvtOpAppP@, and @cvtOpAppT@ are responsible for this
biasing.
-}
{- | @cvtOpApp x op y@ converts @op@ and @y@ and produces the operator application @x `op` y@.
The produced tree of infix expressions will be left-biased, provided @x@ is.
We can see that @cvtOpApp@ is correct as follows. The inductive hypothesis
is that @cvtOpApp x op y@ is left-biased, provided @x@ is. It is clear that
this holds for both branches (of @cvtOpApp@), provided we assume it holds for
the recursive calls to @cvtOpApp@.
When we call @cvtOpApp@ from @cvtl@, the first argument will always be left-biased
since we have already run @cvtl@ on it.
-}
cvtOpApp :: LHsExpr RdrName -> TH.Exp -> TH.Exp -> CvtM (HsExpr RdrName)
-- Rotate a right-nested UInfixE chain into left-nested OpApps;
-- see the Haddock comment above and Note [Converting UInfix]
cvtOpApp x op1 (UInfixE y op2 z)
  = do { l <- wrapL $ cvtOpApp x op1 y
       ; cvtOpApp l op2 z }
cvtOpApp x op y
  = do { op' <- cvtl op
       ; y'  <- cvtl y
       -- the fixity slot of OpApp is filled in later by the renamer
       ; return (OpApp x op' undefined y') }
-------------------------------------
--      Do notation and statements
-------------------------------------
-- | Convert a do-block or comprehension.  The final statement must be a
-- plain expression statement, which becomes the block's 'LastStmt'.
cvtHsDo :: HsStmtContext Name.Name -> [TH.Stmt] -> CvtM (HsExpr RdrName)
cvtHsDo do_or_lc stmts
  | null stmts = failWith (ptext (sLit "Empty stmt list in do-block"))
  | otherwise
  = do  { stmts' <- cvtStmts stmts
        -- snocView is total here: the guard above rules out the empty list
        ; let Just (stmts'', last') = snocView stmts'
        ; last'' <- case last' of
                      L loc (BodyStmt body _ _ _) -> return (L loc (mkLastStmt body))
                      _ -> failWith (bad_last last')
        ; return $ HsDo do_or_lc (stmts'' ++ [last'']) placeHolderType }
  where
    bad_last stmt = vcat [ ptext (sLit "Illegal last statement of") <+> pprAStmtContext do_or_lc <> colon
                         , nest 2 $ Outputable.ppr stmt
                         , ptext (sLit "(It should be an expression.)") ]
-- | Convert a list of do/comprehension statements.
cvtStmts :: [TH.Stmt] -> CvtM [Hs.LStmt RdrName (LHsExpr RdrName)]
cvtStmts = mapM cvtStmt

-- | Convert a single do/comprehension statement.
cvtStmt :: TH.Stmt -> CvtM (Hs.LStmt RdrName (LHsExpr RdrName))
cvtStmt (NoBindS e)    = do { e' <- cvtl e; returnL $ mkBodyStmt e' }
cvtStmt (TH.BindS p e) = do { p' <- cvtPat p; e' <- cvtl e; returnL $ mkBindStmt p' e' }
cvtStmt (TH.LetS ds)   = do { ds' <- cvtLocalDecs (ptext (sLit "a let binding")) ds
                            ; returnL $ LetStmt ds' }
cvtStmt (TH.ParS dss)  = do { dss' <- mapM cvt_one dss; returnL $ ParStmt dss' noSyntaxExpr noSyntaxExpr }
                       where
                         -- each sub-list becomes one branch of the parallel comprehension
                         cvt_one ds = do { ds' <- cvtStmts ds; return (ParStmtBlock ds' undefined noSyntaxExpr) }
-- | Convert one case/\\case alternative: pattern, guards/body and
-- where-bindings.
cvtMatch :: TH.Match -> CvtM (Hs.LMatch RdrName (LHsExpr RdrName))
cvtMatch (TH.Match p body decs)
  = do  { p' <- cvtPat p
        ; g' <- cvtGuard body
        ; decs' <- cvtLocalDecs (ptext (sLit "a where clause")) decs
        ; returnL $ Hs.Match Nothing [p'] Nothing (GRHSs g' decs') }
-- | Convert a clause's right-hand side: either a plain body or a list
-- of guarded alternatives.
cvtGuard :: TH.Body -> CvtM [LGRHS RdrName (LHsExpr RdrName)]
cvtGuard (GuardedB pairs) = mapM cvtpair pairs
cvtGuard (NormalB e)      = do { e' <- cvtl e; g' <- returnL $ GRHS [] e'; return [g'] }

-- | Convert a single (guard, rhs) pair; a boolean guard becomes a
-- one-statement guard list, a pattern guard keeps its full statements.
cvtpair :: (TH.Guard, TH.Exp) -> CvtM (LGRHS RdrName (LHsExpr RdrName))
cvtpair (NormalG ge,rhs) = do { ge' <- cvtl ge; rhs' <- cvtl rhs
                              ; g' <- returnL $ mkBodyStmt ge'
                              ; returnL $ GRHS [g'] rhs' }
cvtpair (PatG gs,rhs)    = do { gs' <- cvtStmts gs; rhs' <- cvtl rhs
                              ; returnL $ GRHS gs' rhs' }
-- | Convert an overloaded literal (integer, rational or string).
-- Forcing the payload raises any exception hidden inside it here,
-- rather than at some later, harder-to-diagnose point.
cvtOverLit :: Lit -> CvtM (HsOverLit RdrName)
cvtOverLit (IntegerL i)
  = do { force i; return $ mkHsIntegral (show i) i placeHolderType}
cvtOverLit (RationalL r)
  = do { force r; return $ mkHsFractional (cvtFractionalLit r) placeHolderType}
cvtOverLit (StringL s)
  = do { let { s' = mkFastString s }
       ; force s'
       ; return $ mkHsIsString s s' placeHolderType
       }
cvtOverLit _ = panic "Convert.cvtOverLit: Unexpected overloaded literal"
-- An Integer is like an (overloaded) '3' in a Haskell source program
-- Similarly 3.5 for fractionals
{- Note [Converting strings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If we get (ListE [CharL 'x', CharL 'y']) we'd like to convert to
a string literal for "xy". Of course, we might hope to get
(LitE (StringL "xy")), but not always, and allCharLs fails quickly
if it isn't a literal string
-}
-- | Recognise a list expression made up entirely of character literals
-- and return them as a 'String'.  Deliberately fails on the empty list,
-- so that @[] :: [Int]@ is not mistaken for @\"\"@.
-- See Note [Converting strings]
allCharLs :: [TH.Exp] -> Maybe String
allCharLs (LitE (CharL c0) : rest) = walk [c0] rest
  where
    -- accumulate in reverse, reversing once at the end
    walk acc []                       = Just (reverse acc)
    walk acc (LitE (CharL c) : more)  = walk (c : acc) more
    walk _   _                        = Nothing
allCharLs _ = Nothing
-- | Convert a non-overloaded literal.  Payloads are forced so that any
-- exception buried in them is raised here rather than later.
cvtLit :: Lit -> CvtM HsLit
cvtLit (IntPrimL i)    = do { force i; return $ HsIntPrim (show i) i }
cvtLit (WordPrimL w)   = do { force w; return $ HsWordPrim (show w) w }
cvtLit (FloatPrimL f)  = do { force f; return $ HsFloatPrim (cvtFractionalLit f) }
cvtLit (DoublePrimL f) = do { force f; return $ HsDoublePrim (cvtFractionalLit f) }
cvtLit (CharL c)       = do { force c; return $ HsChar (show c) c }
cvtLit (CharPrimL c)   = do { force c; return $ HsCharPrim (show c) c }
cvtLit (StringL s)     = do { let { s' = mkFastString s }
                            ; force s'
                            ; return $ HsString s s' }
cvtLit (StringPrimL s) = do { let { s' = BS.pack s }
                            ; force s'
                            ; return $ HsStringPrim (w8ToString s) s' }
cvtLit _ = panic "Convert.cvtLit: Unexpected literal"
        -- cvtLit should not be called on IntegerL, RationalL
        -- That precondition is established right here in
        -- Convert.hs, hence panic
-- | Decode a list of bytes into a 'String', one 'Char' per 'Word8'
-- (i.e. a Latin-1 view of the bytes).  Used to render the source text
-- of primitive string literals.
w8ToString :: [Word8] -> String
w8ToString = map (chr . fromIntegral)
-- | Convert a list of TH patterns.
cvtPats :: [TH.Pat] -> CvtM [Hs.LPat RdrName]
cvtPats = mapM cvtPat

-- | Convert a single TH pattern, attaching the current source location.
cvtPat :: TH.Pat -> CvtM (Hs.LPat RdrName)
cvtPat = wrapL . cvtp
-- | Convert a TH pattern into an (unlocated) HsSyn pattern.
cvtp :: TH.Pat -> CvtM (Hs.Pat RdrName)
cvtp (TH.LitP l)
  | overloadedLit l    = do { l' <- cvtOverLit l
                            ; return (mkNPat (noLoc l') Nothing) }
                                  -- Not right for negative patterns;
                                  -- need to think about that!
  | otherwise          = do { l' <- cvtLit l; return $ Hs.LitPat l' }
cvtp (TH.VarP s)       = do { s' <- vName s; return $ Hs.VarPat s' }
cvtp (TupP [p])        = do { p' <- cvtPat p; return $ ParPat p' } -- Note [Dropping constructors]
cvtp (TupP ps)         = do { ps' <- cvtPats ps; return $ TuplePat ps' Boxed   [] }
cvtp (UnboxedTupP ps)  = do { ps' <- cvtPats ps; return $ TuplePat ps' Unboxed [] }
cvtp (ConP s ps)       = do { s' <- cNameL s; ps' <- cvtPats ps
                            ; return $ ConPatIn s' (PrefixCon ps') }
cvtp (InfixP p1 s p2)  = do { s' <- cNameL s; p1' <- cvtPat p1; p2' <- cvtPat p2
                            -- parenthesise both sides so the application
                            -- cannot be re-associated later
                            ; wrapParL ParPat $
                              ConPatIn s' (InfixCon (mkParPat p1') (mkParPat p2')) }
                            -- See Note [Operator association]
cvtp (UInfixP p1 s p2) = do { p1' <- cvtPat p1; cvtOpAppP p1' s p2 } -- Note [Converting UInfix]
cvtp (ParensP p)       = do { p' <- cvtPat p; return $ ParPat p' }
cvtp (TildeP p)        = do { p' <- cvtPat p; return $ LazyPat p' }
cvtp (BangP p)         = do { p' <- cvtPat p; return $ BangPat p' }
cvtp (TH.AsP s p)      = do { s' <- vNameL s; p' <- cvtPat p; return $ AsPat s' p' }
cvtp TH.WildP          = return $ WildPat placeHolderType
cvtp (RecP c fs)       = do { c' <- cNameL c; fs' <- mapM cvtPatFld fs
                            ; return $ ConPatIn c'
                                     $ Hs.RecCon (HsRecFields fs' Nothing) }
cvtp (ListP ps)        = do { ps' <- cvtPats ps
                            ; return $ ListPat ps' placeHolderType Nothing }
cvtp (SigP p t)        = do { p' <- cvtPat p; t' <- cvtType t
                            ; return $ SigPatIn p' (mkHsWithBndrs t') }
cvtp (ViewP e p)       = do { e' <- cvtl e; p' <- cvtPat p
                            ; return $ ViewPat e' p' placeHolderType }
-- | Convert a record-pattern field (field name = pattern).
cvtPatFld :: (TH.Name, TH.Pat) -> CvtM (LHsRecField RdrName (LPat RdrName))
cvtPatFld (s,p)
  = do  { s' <- vNameL s; p' <- cvtPat p
        ; return (noLoc $ HsRecField { hsRecFieldId = s', hsRecFieldArg = p'
                                     , hsRecPun = False}) }
{- | @cvtOpAppP x op y@ converts @op@ and @y@ and produces the operator application @x `op` y@.
The produced tree of infix patterns will be left-biased, provided @x@ is.
See the @cvtOpApp@ documentation for how this function works.
-}
cvtOpAppP :: Hs.LPat RdrName -> TH.Name -> TH.Pat -> CvtM (Hs.Pat RdrName)
-- Rotate a right-nested UInfixP chain into left-nested ConPatIns;
-- see the Haddock comment above and Note [Converting UInfix]
cvtOpAppP x op1 (UInfixP y op2 z)
  = do { l <- wrapL $ cvtOpAppP x op1 y
       ; cvtOpAppP l op2 z }
cvtOpAppP x op y
  = do { op' <- cNameL op
       ; y'  <- cvtPat y
       ; return (ConPatIn op' (InfixCon x y')) }
-----------------------------------------------------------
--      Types and type variables
-- | Convert a list of type-variable binders into 'LHsTyVarBndrs'.
cvtTvs :: [TH.TyVarBndr] -> CvtM (LHsTyVarBndrs RdrName)
cvtTvs tvs = do { tvs' <- mapM cvt_tv tvs; return (mkHsQTvs tvs') }

-- | Convert a single type-variable binder (plain or kind-annotated).
cvt_tv :: TH.TyVarBndr -> CvtM (LHsTyVarBndr RdrName)
cvt_tv (TH.PlainTV nm)
  = do { nm' <- tName nm
       ; returnL $ UserTyVar nm' }
cvt_tv (TH.KindedTV nm ki)
  = do { nm' <- tName nm
       ; ki' <- cvtKind ki
       ; returnL $ KindedTyVar (noLoc nm') ki' }
-- | Map a TH role annotation onto GHC's 'Coercion.Role'; an inferred
-- role carries no information, hence 'Nothing'.
cvtRole :: TH.Role -> Maybe Coercion.Role
cvtRole role = case role of
  TH.NominalR          -> Just Coercion.Nominal
  TH.RepresentationalR -> Just Coercion.Representational
  TH.PhantomR          -> Just Coercion.Phantom
  TH.InferR            -> Nothing
-- | Convert a TH context (a list of predicates).
cvtContext :: TH.Cxt -> CvtM (LHsContext RdrName)
cvtContext tys = do { preds' <- mapM cvtPred tys; returnL preds' }

-- | Predicates are just types in TH's representation.
cvtPred :: TH.Pred -> CvtM (LHsType RdrName)
cvtPred = cvtType

-- | Convert a TH type (error messages say "type").
cvtType :: TH.Type -> CvtM (LHsType RdrName)
cvtType = cvtTypeKind "type"
-- | Work-horse for converting TH types and kinds.  The @ty_str@
-- argument ("type" or "kind") is used only in error messages.  The type
-- is first split into a head and its (already converted) arguments,
-- then dispatched on the head.
cvtTypeKind :: String -> TH.Type -> CvtM (LHsType RdrName)
cvtTypeKind ty_str ty
  = do { (head_ty, tys') <- split_ty_app ty
       ; case head_ty of
           TupleT n
             | length tys' == n         -- Saturated
             -> if n==1 then return (head tys') -- Singleton tuples treated
                                                -- like nothing (ie just parens)
                else returnL (HsTupleTy HsBoxedOrConstraintTuple tys')
             | n == 1
             -> failWith (ptext (sLit ("Illegal 1-tuple " ++ ty_str ++ " constructor")))
             | otherwise
             -- Unsaturated: fall back to an application of the tycon
             -> mk_apps (HsTyVar (getRdrName (tupleTyCon Boxed n))) tys'
           UnboxedTupleT n
             | length tys' == n         -- Saturated
             -> if n==1 then return (head tys') -- Singleton tuples treated
                                                -- like nothing (ie just parens)
                else returnL (HsTupleTy HsUnboxedTuple tys')
             | otherwise
             -> mk_apps (HsTyVar (getRdrName (tupleTyCon Unboxed n))) tys'
           ArrowT
             | [x',y'] <- tys' -> returnL (HsFunTy x' y')
             | otherwise       -> mk_apps (HsTyVar (getRdrName funTyCon)) tys'
           ListT
             | [x'] <- tys'    -> returnL (HsListTy x')
             | otherwise       -> mk_apps (HsTyVar (getRdrName listTyCon)) tys'
           VarT nm -> do { nm' <- tName nm;    mk_apps (HsTyVar nm') tys' }
           ConT nm -> do { nm' <- tconName nm; mk_apps (HsTyVar nm') tys' }

           ForallT tvs cxt ty
             | null tys'
             -> do { tvs' <- cvtTvs tvs
                   ; cxt' <- cvtContext cxt
                   ; ty'  <- cvtType ty
                   ; returnL $ mkExplicitHsForAllTy (hsQTvBndrs tvs') cxt' ty'
                   }

           SigT ty ki
             -> do { ty' <- cvtType ty
                   ; ki' <- cvtKind ki
                   ; mk_apps (HsKindSig ty' ki') tys'
                   }

           LitT lit
             -> returnL (HsTyLit (cvtTyLit lit))

           WildCardT Nothing
             -> mk_apps mkAnonWildCardTy tys'

           WildCardT (Just nm)
             -> do { nm' <- tName nm; mk_apps (mkNamedWildCardTy nm') tys' }

           InfixT t1 s t2
             -> do { s'  <- tconName s
                   ; t1' <- cvtType t1
                   ; t2' <- cvtType t2
                   ; mk_apps (HsTyVar s') [t1', t2']
                   }

           UInfixT t1 s t2
             -> do { t2' <- cvtType t2
                   ; cvtOpAppT t1 s t2'
                   } -- Note [Converting UInfix]

           ParensT t
             -> do { t' <- cvtType t
                   ; returnL $ HsParTy t'
                   }

           PromotedT nm -> do { nm' <- cName nm; mk_apps (HsTyVar nm') tys' }
                 -- Promoted data constructor; hence cName

           PromotedTupleT n
             | n == 1
             -> failWith (ptext (sLit ("Illegal promoted 1-tuple " ++ ty_str)))
             | m == n   -- Saturated
             -> do { let kis = replicate m placeHolderKind
                   ; returnL (HsExplicitTupleTy kis tys')
                   }
             where
               m = length tys'

           PromotedNilT
             -> returnL (HsExplicitListTy placeHolderKind [])

           PromotedConsT  -- See Note [Representing concrete syntax in types]
                          -- in Language.Haskell.TH.Syntax
             | [ty1, L _ (HsExplicitListTy _ tys2)] <- tys'
             -> returnL (HsExplicitListTy placeHolderKind (ty1:tys2))
             | otherwise
             -> mk_apps (HsTyVar (getRdrName consDataCon)) tys'

           StarT
             -> returnL (HsTyVar (getRdrName liftedTypeKindTyCon))

           ConstraintT
             -> returnL (HsTyVar (getRdrName constraintKindTyCon))

           EqualityT
             | [x',y'] <- tys' -> returnL (HsEqTy x' y')
             | otherwise       -> mk_apps (HsTyVar (getRdrName eqPrimTyCon)) tys'

           _ -> failWith (ptext (sLit ("Malformed " ++ ty_str)) <+> text (show ty))
    }
-- | Apply a head type to a list of argument types, left-associatively.
mk_apps :: HsType RdrName -> [LHsType RdrName] -> CvtM (LHsType RdrName)
mk_apps head_ty []       = returnL head_ty
mk_apps head_ty (ty:tys) = do { head_ty' <- returnL head_ty
                              ; mk_apps (HsAppTy head_ty' ty) tys }

-- | Split a chain of type applications into its head and its
-- (converted) arguments, in left-to-right order.
split_ty_app :: TH.Type -> CvtM (TH.Type, [LHsType RdrName])
split_ty_app ty = go ty []
  where
    go (AppT f a) as' = do { a' <- cvtType a; go f (a':as') }
    go f as           = return (f,as)

-- | Convert a type-level literal (natural number or symbol).
cvtTyLit :: TH.TyLit -> HsTyLit
cvtTyLit (NumTyLit i) = HsNumTy (show i) i
cvtTyLit (StrTyLit s) = HsStrTy s (fsLit s)
{- | @cvtOpAppT x op y@ converts @op@ and @y@ and produces the operator
application @x `op` y@. The produced tree of infix types will be right-biased,
provided @y@ is.
See the @cvtOpApp@ documentation for how this function works.
-}
cvtOpAppT :: TH.Type -> TH.Name -> LHsType RdrName -> CvtM (LHsType RdrName)
-- Rotate a left-nested UInfixT chain into right-nested HsOpTys;
-- see the Haddock comment above and Note [Converting UInfix]
cvtOpAppT (UInfixT x op2 y) op1 z
  = do { l <- cvtOpAppT y op1 z
       ; cvtOpAppT x op2 l }
cvtOpAppT x op y
  = do { op' <- tconNameL op
       ; x'  <- cvtType x
       ; returnL (mkHsOpTy x' op' y) }
-- | Convert a TH kind (kinds share the type syntax; error messages
-- say "kind").
cvtKind :: TH.Kind -> CvtM (LHsKind RdrName)
cvtKind = cvtTypeKind "kind"

-- | Convert Maybe Kind to a type family result signature. Used with data
-- families where naming of the result is not possible (thus only kind or no
-- signature is possible).
cvtMaybeKindToFamilyResultSig :: Maybe TH.Kind
                              -> CvtM (LFamilyResultSig RdrName)
cvtMaybeKindToFamilyResultSig Nothing   = returnL Hs.NoSig
cvtMaybeKindToFamilyResultSig (Just ki) = do { ki' <- cvtKind ki
                                             ; returnL (Hs.KindSig ki') }
-- | Convert type family result signature. Used with both open and closed type
-- families.
cvtFamilyResultSig :: TH.FamilyResultSig -> CvtM (Hs.LFamilyResultSig RdrName)
cvtFamilyResultSig TH.NoSig           = returnL Hs.NoSig
cvtFamilyResultSig (TH.KindSig ki)    = do { ki' <- cvtKind ki
                                           ; returnL (Hs.KindSig ki') }
cvtFamilyResultSig (TH.TyVarSig bndr) = do { tv <- cvt_tv bndr
                                           ; returnL (Hs.TyVarSig tv) }

-- | Convert injectivity annotation of a type family.
cvtInjectivityAnnotation :: TH.InjectivityAnn
                         -> CvtM (Hs.LInjectivityAnn RdrName)
cvtInjectivityAnnotation (TH.InjectivityAnn annLHS annRHS)
  = do { annLHS' <- tNameL annLHS
       ; annRHS' <- mapM tNameL annRHS
       ; returnL (Hs.InjectivityAnn annLHS' annRHS') }
-----------------------------------------------------------
-- | Convert a TH fixity declaration (precedence plus direction).
cvtFixity :: TH.Fixity -> Hs.Fixity
cvtFixity (TH.Fixity prec dir) = Hs.Fixity prec (convDir dir)
  where
    -- map TH associativity onto GHC's
    convDir d = case d of
      TH.InfixL -> Hs.InfixL
      TH.InfixR -> Hs.InfixR
      TH.InfixN -> Hs.InfixN
-----------------------------------------------------------
-----------------------------------------------------------
-- some useful things

-- | Is this a literal that Haskell treats as overloaded (elaborating
-- via fromInteger/fromRational)?
overloadedLit :: Lit -> Bool
overloadedLit lit = case lit of
  IntegerL {}  -> True
  RationalL {} -> True
  _            -> False

-- | Build a 'FractionalLit', rendering its source text by way of 'Double'.
cvtFractionalLit :: Rational -> FractionalLit
cvtFractionalLit rat =
  FL { fl_text = show (fromRational rat :: Double), fl_value = rat }
--------------------------------------------------------------------
--      Turning Name back into RdrName
--------------------------------------------------------------------

-- variable names
-- One converter pair per GHC name space; the @L@ variants additionally
-- attach the conversion monad's current source location.
vNameL, cNameL, vcNameL, tNameL, tconNameL :: TH.Name -> CvtM (Located RdrName)
vName,  cName,  vcName,  tName,  tconName  :: TH.Name -> CvtM RdrName

-- Variable names
vNameL n = wrapL (vName n)
vName n = cvtName OccName.varName n

-- Constructor function names; this is Haskell source, hence srcDataName
cNameL n = wrapL (cName n)
cName n = cvtName OccName.dataName n

-- Variable *or* constructor names; check by looking at the first char
vcNameL n = wrapL (vcName n)
vcName n = if isVarName n then vName n else cName n

-- Type variable names
tNameL n = wrapL (tName n)
tName n = cvtName OccName.tvName n

-- Type Constructor names
tconNameL n = wrapL (tconName n)
tconName n = cvtName OccName.tcClsName n
-- | Convert a TH name into a 'RdrName' in the given name space,
-- rejecting occurrences that are not lexically valid there.  The
-- result is forced so that any buried exception surfaces now.
cvtName :: OccName.NameSpace -> TH.Name -> CvtM RdrName
cvtName ctxt_ns (TH.Name occ flavour)
  | not (okOcc ctxt_ns occ_str) = failWith (badOcc ctxt_ns occ_str)
  | otherwise
  = do { loc <- getL
       ; let rdr_name = thRdrName loc ctxt_ns occ_str flavour
       ; force rdr_name
       ; return rdr_name }
  where
    occ_str = TH.occString occ

-- | Is this string a lexically valid occurrence in the given name space?
okOcc :: OccName.NameSpace -> String -> Bool
okOcc ns str
  | OccName.isVarNameSpace ns     = okVarOcc str
  | OccName.isDataConNameSpace ns = okConOcc str
  | otherwise                     = okTcOcc  str
-- Determine the name space of a name in a type
--
-- | Does this TH name begin like a variable, i.e. with a lower-case
-- identifier character or a variable-symbol character?
isVarName :: TH.Name -> Bool
isVarName (TH.Name occ _) =
  case TH.occString occ of
    c : _ -> startsVarId c || startsVarSym c
    []    -> False
-- | Error message for a name that is lexically illegal in the given
-- name space.
badOcc :: OccName.NameSpace -> String -> SDoc
badOcc ctxt_ns occ
  = ptext (sLit "Illegal") <+> pprNameSpace ctxt_ns
        <+> ptext (sLit "name:") <+> quotes (text occ)
thRdrName :: SrcSpan -> OccName.NameSpace -> String -> TH.NameFlavour -> RdrName
-- This turns a TH Name into a RdrName; used for both binders and occurrences
-- See Note [Binders in Template Haskell]
-- The passed-in name space tells what the context is expecting;
--      use it unless the TH name knows what name-space it comes
--      from, in which case use the latter
--
-- We pass in a SrcSpan (gotten from the monad) because this function
-- is used for *binders* and if we make an Exact Name we want it
-- to have a binding site inside it.  (cf Trac #5434)
--
-- ToDo: we may generate silly RdrNames, by passing a name space
--       that doesn't match the string, like VarName ":+",
--       which will give confusing error messages later
--
-- The strict applications ensure that any buried exceptions get forced
thRdrName loc ctxt_ns th_occ th_name
  = case th_name of
     TH.NameG th_ns pkg mod -> thOrigRdrName th_occ th_ns pkg mod
     TH.NameQ mod  -> (mkRdrQual  $! mk_mod mod) $! occ
     TH.NameL uniq -> nameRdrName $! (((Name.mkInternalName $! mk_uniq uniq) $! occ) loc)
     TH.NameU uniq -> nameRdrName $! (((Name.mkSystemNameAt $! mk_uniq uniq) $! occ) loc)
     TH.NameS | Just name <- isBuiltInOcc_maybe occ -> nameRdrName $! name
              | otherwise                           -> mkRdrUnqual $! occ
              -- We check for built-in syntax here, because the TH
              -- user might have written a (NameS "(,,)"), for example
  where
    -- the occurrence in the name space the context expects
    occ :: OccName.OccName
    occ = mk_occ ctxt_ns th_occ
thOrigRdrName :: String -> TH.NameSpace -> PkgName -> ModName -> RdrName
thOrigRdrName occ th_ns pkg mod = (mkOrig $! (mkModule (mk_pkg pkg) (mk_mod mod))) $! (mk_occ (mk_ghc_ns th_ns) occ)
-- | All the 'RdrName's a TH name could plausibly stand for.  For a NameG
-- the answer is exact; otherwise the candidate name spaces are guessed
-- from the lexical shape of the occurrence string.
thRdrNameGuesses :: TH.Name -> [RdrName]
thRdrNameGuesses (TH.Name occ flavour)
  -- This special case for NameG ensures that we don't generate duplicates in the output list
  | TH.NameG th_ns pkg mod <- flavour = [ thOrigRdrName occ_str th_ns pkg mod]
  | otherwise = [ thRdrName noSrcSpan gns occ_str flavour
                | gns <- guessed_nss]
  where
    -- guessed_ns are the name spaces guessed from looking at the TH name
    guessed_nss | isLexCon (mkFastString occ_str) = [OccName.tcName, OccName.dataName]
                | otherwise = [OccName.varName, OccName.tvName]
    occ_str = TH.occString occ

-- The packing and unpacking is rather turgid :-(

-- | Build an 'OccName' in the given name space.
mk_occ :: OccName.NameSpace -> String -> OccName.OccName
mk_occ ns occ = OccName.mkOccName ns occ

-- | Translate a TH name space to the corresponding GHC one.
mk_ghc_ns :: TH.NameSpace -> OccName.NameSpace
mk_ghc_ns TH.DataName = OccName.dataName
mk_ghc_ns TH.TcClsName = OccName.tcClsName
mk_ghc_ns TH.VarName = OccName.varName

mk_mod :: TH.ModName -> ModuleName
mk_mod mod = mkModuleName (TH.modString mod)

mk_pkg :: TH.PkgName -> PackageKey
mk_pkg pkg = stringToPackageKey (TH.pkgString pkg)

mk_uniq :: Int -> Unique
mk_uniq u = mkUniqueGrimily u
{-
Note [Binders in Template Haskell]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this TH term construction:
do { x1 <- TH.newName "x" -- newName :: String -> Q TH.Name
; x2 <- TH.newName "x" -- Builds a NameU
; x3 <- TH.newName "x"
; let x = mkName "x" -- mkName :: String -> TH.Name
-- Builds a NameS
; return (LamE (..pattern [x1,x2]..) $
LamE (VarPat x3) $
..tuple (x1,x2,x3,x)) }
It represents the term \[x1,x2]. \x3. (x1,x2,x3,x)
a) We don't want to complain about "x" being bound twice in
the pattern [x1,x2]
b) We don't want x3 to shadow the x1,x2
c) We *do* want 'x' (dynamically bound with mkName) to bind
to the innermost binding of "x", namely x3.
d) When pretty printing, we want to print a unique with x1,x2
etc, else they'll all print as "x" which isn't very helpful
When we convert all this to HsSyn, the TH.Names are converted with
thRdrName. To achieve (b) we want the binders to be Exact RdrNames.
Achieving (a) is a bit awkward, because
- We must check for duplicate and shadowed names on Names,
not RdrNames, *after* renaming.
See Note [Collect binders only after renaming] in HsUtils
- But to achieve (a) we must distinguish between the Exact
RdrNames arising from TH and the Unqual RdrNames that would
come from a user writing \[x,x] -> blah
So in Convert.thRdrName we translate
TH Name RdrName
--------------------------------------------------------
NameU (arising from newName) --> Exact (Name{ System })
NameS (arising from mkName) --> Unqual
Notice that the NameUs generate *System* Names. Then, when
figuring out shadowing and duplicates, we can filter out
System Names.
This use of System Names fits with other uses of System Names, eg for
temporary variables "a". Since there are lots of things called "a" we
usually want to print the name with the unique, and that is indeed
the way System Names are printed.
There's a small complication of course; see Note [Looking up Exact
RdrNames] in RnEnv.
-}
| acowley/ghc | compiler/hsSyn/Convert.hs | bsd-3-clause | 55,913 | 1 | 18 | 17,653 | 16,071 | 8,085 | 7,986 | 884 | 28 |
-- Check that "->" is an instance of Eval
module ShouldSucceed where

-- Method-less instance: only its existence is under test.
instance Show (a->b)

-- | Functions cannot be compared for equality; this instance exists only
-- to satisfy superclass constraints, and any actual comparison errors out.
instance (Eq b) => Eq (a -> b) where
  (==) f g = error "attempt to compare functions"

-- Since Eval is a superclass of Num this fails
-- unless -> is an instance of Eval
-- Numeric operations are lifted pointwise: (f + g) a = f a + g a,
-- and literals become constant functions.
instance (Num b) => Num (a -> b) where
  f + g = \a -> f a + g a
  f - g = \a -> f a - g a
  f * g = \a -> f a * g a
  negate f = \a -> negate (f a)
  abs f = \a -> abs (f a)
  signum f = \a -> signum (f a)
  fromInteger n = \a -> fromInteger n
| hvr/jhc | regress/tests/1_typecheck/2_pass/ghc/tc088.hs | mit | 659 | 0 | 9 | 293 | 244 | 124 | 120 | 12 | 0 |
module Deriving where
-- Mutually recursive sum types exercising derived Eq.
data A = A1 A | A2 B deriving (Eq)
data B = B1 Int | B2 A deriving (Eq)
--s = show (B2 (A2 (B1 5)))

-- Mutually recursive, parameterised types exercising derived Eq/Show.
-- Note that T3 swaps the b and c parameters when recursing into T1.
data T1 a b c = C1 a (T2 a b c) deriving (Eq,Show)
data T2 a b c = C2 (T3 a b c) | C2b (N b) deriving (Eq,Show)
data T3 a b c = C3 (N c) (T1 a c b) deriving (Eq,Show)

data N b = N b deriving Show

-- All N values compare equal, regardless of payload.
instance Eq (N b) where _==_=True
| forste/haReFork | tools/base/tests/Derivings.hs | bsd-3-clause | 360 | 0 | 8 | 98 | 205 | 116 | 89 | 8 | 0 |
{-# LANGUAGE Trustworthy #-}
-----------------------------------------------------------------------------
-- |
-- Module : Data.Array.ST.Safe
-- Copyright : (c) The University of Glasgow 2011
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : non-portable (uses Data.Array.MArray)
--
-- Mutable boxed and unboxed arrays in the 'Control.Monad.ST.ST' monad.
--
-- Safe API only of "Data.Array.ST".
--
-----------------------------------------------------------------------------
module Data.Array.ST.Safe (
   -- * Boxed arrays
   STArray,             -- instance of: Eq, MArray
   runSTArray,
   -- * Unboxed arrays
   STUArray,            -- instance of: Eq, MArray
   runSTUArray,
   -- * Overloaded mutable array interface
   module Data.Array.MArray.Safe,
 ) where

-- Re-export only the safe subset; the unsafe operations of
-- "Data.Array.ST" are deliberately not re-exported from this module.
import Data.Array.ST
import Data.Array.MArray.Safe
| beni55/haste-compiler | libraries/ghc-7.8/array/Data/Array/ST/Safe.hs | bsd-3-clause | 941 | 0 | 5 | 175 | 65 | 52 | 13 | 9 | 0 |
-- Test trying to use a function bound in the list comprehension as the transform function
{-# OPTIONS_GHC -XRankNTypes -XTransformListComp #-}
module RnFail048 where
-- | A list of polymorphic list-transformers (needs RankNTypes).
functions :: [forall a. [a] -> [a]]
functions = [take 4, take 5]

-- The 'then f' clause tries to use the comprehension-bound 'f' as the
-- TransformListComp transform function, where it is not in scope --
-- this is exactly the rename failure the test expects.
output = [() | f <- functions, then f]
| forked-upstream-packages-for-ghcjs/ghc | testsuite/tests/rename/should_fail/rnfail050.hs | bsd-3-clause | 277 | 0 | 8 | 52 | 70 | 41 | 29 | -1 | -1 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE PolyKinds #-}
module PolyKinds03 where
-- | Poly-kinded proxy: t may be of any kind.
data Proxy t

-- | Minimal stand-in for a runtime type representation.
data TypeRep = TypeRep

-- | A poly-kinded Typeable-alike; instances may be given at any kind.
class MyTypeable t where
  myTypeOf :: Proxy t -> TypeRep

instance MyTypeable Int where myTypeOf _ = TypeRep  -- at kind *
instance MyTypeable [] where myTypeOf _ = TypeRep   -- at kind * -> *
| urbanslug/ghc | testsuite/tests/polykinds/PolyKinds03.hs | bsd-3-clause | 315 | 0 | 8 | 79 | 72 | 39 | 33 | -1 | -1 |
module Y2016.M06.D29.Exercise where
import Control.Scan.CSV
{--
Okay, today's problem is a tough one! What we have to do is to read in a file
with the following format:
ID|Count|Some other stupid number|Code|Description
And then write out the exact file we read as input, but instead named as:
ABC-VALID-97.txt
or as
XYZ-INVALID-22.txt
based on the Code value.
The Code values are G, B, I, W, D
For the values "G" and "I" write the output as ABC-VALID-97.txt, for the other
values write out the file as "XYZ-INVALID-22.txt"
... now I suppose you can run this in GHCI or some other Haskell interpreter (as
opposed to yesterday's exercise where you had to access the program from the
shell). But, either way, the input to the program is the input file name, and
the output of this exercise will be a copy of the file named as per above.
Have at it!
--}
-- | Read the named pipe-separated file and write an exact copy named
-- ABC-VALID-97.txt when the Code field is G or I, or XYZ-INVALID-22.txt
-- for the other codes (B, W, D).  Left 'undefined': filling this in is
-- the exercise.
dispatcher :: FilePath -> IO ()
dispatcher = undefined
-- hint: scanning the input file is easy using the above import.
-- With dispatcher defined, apply "goodie.txt"; dispatcher should created
-- an ABC-VALID file. Now apply "baddie.txt" (both in this repository),
-- dispatcher should create an XYZ-INVALID file.
-- goodie.txt URL:
-- https://raw.githubusercontent.com/geophf/1HaskellADay/master/exercises/HAD/Y2016/M06/D29/goodie.txt
-- baddie.txt URL:
-- https://raw.githubusercontent.com/geophf/1HaskellADay/master/exercises/HAD/Y2016/M06/D29/baddie.txt
| geophf/1HaskellADay | exercises/HAD/Y2016/M06/D29/Exercise.hs | mit | 1,429 | 0 | 7 | 227 | 42 | 29 | 13 | 4 | 1 |
{-# Language RecordWildCards #-}
{-# LANGUAGE OverloadedStrings #-}
module QC () where
import SymVector
import SymMap
import SymVerify
import SymBoilerPlate
import Test.QuickCheck
import Test.QuickCheck.Monadic
import Data.Aeson
import Data.Aeson.Encode.Pretty
import Control.Monad
import Data.ByteString.Lazy.Char8 as C (putStrLn)
main :: IO ()
main = quickCheck prop_runState

-- | Run the generated state machine on an arbitrary initial state and
-- schedule; any states produced are logged as JSON.  The final
-- 'assert True' means the property only fails if something crashes.
prop_runState :: State -> [Pid_pre] -> Property
prop_runState s plist = monadicIO $ do
  let l = runState s emptyVec emptyVec plist []
  if null l
    then return ()
    else run (log_states l)
  assert True

-- | Pretty-print each state as a JSON document on stdout.
log_states :: [State] -> IO ()
log_states l = forM_ l (C.putStrLn . encodePretty . toJSON)

instance (Arbitrary a) => Arbitrary (Val a) where
  arbitrary = oneof [ return VUnit
                    , return VUnInit
                    , VInt <$> arbitrary
                    , VString <$> arbitrary
                    , VPid <$> arbitrary
                    , VInL <$> arbitrary
                    , VInR <$> arbitrary
                    , VPair <$> arbitrary <*> arbitrary ]

instance (Arbitrary a) => Arbitrary (Vec a) where
  arbitrary = do a <- arbitrary
                 return $ mkVec a

instance Arbitrary Pid_pre where
  arbitrary = elements [PIDR0, PIDR1]

-- Only x_3 is random; all program counters and buffer pointers start at 0.
instance Arbitrary State where
  arbitrary
    = State <$> return 0 <*> return 0 <*> return 0 <*> return 0 <*>
        return 0
        <*> return 0
        <*> arbitrary
-- data State = State{pidR0Pc :: Int, pidR1Pc :: Int,
-- pidR0PtrR0 :: Int, pidR1PtrR0 :: Int, pidR0PtrW0 :: Int,
-- pidR1PtrW0 :: Int, x_3 :: Val Pid_pre}
-- deriving Show
-- States round-trip through JSON objects keyed by their field names.
instance FromJSON State where
  parseJSON (Object s) = State <$>
                         s .: "pidR0Pc" <*>
                         s .: "pidR1Pc" <*>
                         s .: "pidR0PtrR0" <*>
                         s .: "pidR1PtrR0" <*>
                         s .: "pidR0PtrW0" <*>
                         s .: "pidR1PtrW0" <*>
                         s .: "x_3"
  parseJSON _ = mzero

instance ToJSON State where
  toJSON s@State{..} = object [ "pidR0Pc" .= pidR0Pc
                              , "pidR1Pc" .= pidR1Pc
                              , "pidR0PtrR0" .= pidR0PtrR0
                              , "pidR1PtrR0" .= pidR1PtrR0
                              , "pidR0PtrW0" .= pidR0PtrW0
                              , "pidR1PtrW0" .= pidR1PtrW0
                              , "x_3" .= x_3 ]

-- data Pid_pre = PIDR0
--              | PIDR1
--              deriving Show

-- Pids are (de)serialised as their constructor names; any other string
-- falls through the guards to the catch-all equation and yields mzero.
instance FromJSON Pid_pre where
  parseJSON (String s)
    | s == "PIDR0" = return PIDR0
    | s == "PIDR1" = return PIDR1
  parseJSON _ = mzero

instance ToJSON Pid_pre where
  toJSON PIDR0 = String "PIDR0"
  toJSON PIDR1 = String "PIDR1"
-- data Pid_pre p1 = PIDR0
-- | PIDR2 p1
-- deriving Show
-- NOTE(review): this instance refers to a *parameterised* Pid_pre (see
-- the commented-out declaration above) and to an unimported alias 'H'
-- (presumably Data.HashMap.Strict), and it overlaps the earlier
-- 'FromJSON Pid_pre' instance.  It looks like a stale leftover from a
-- different generated variant and cannot compile as-is -- TODO confirm
-- and remove or repair.
instance FromJSON p1 => FromJSON (Pid_pre p1) where
  parseJSON (Object s)
    = case H.toList s of
        [(key,value)] | key == "PIDR0" -> return PIDR0
                      | key == "PIDR2" -> PIDR2 <$> parseJSON value
  parseJSON _ = mzero
| abakst/symmetry | checker/tests/qc-log-test/QC.hs | mit | 3,256 | 0 | 19 | 1,273 | 787 | 408 | 379 | 76 | 2 |
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Network.Test.Common
( -- * Client server configuration
ClientServer(..)
, setClientAction
, setServerAction
, tcp
, unix
, unixWithUnlink
, udp
, withPort
-- * Run a ClientServer configuration
, test
, tcpTest
, udpTest
-- * Common constants
, serverAddr
, serverAddr6
, unixAddr
, testMsg
, lazyTestMsg
) where
import Control.Concurrent (ThreadId, forkIO, myThreadId)
import Control.Concurrent.MVar (MVar, newEmptyMVar, putMVar, takeMVar, readMVar)
import qualified Control.Exception as E
import Control.Monad
import Data.ByteString (ByteString)
import Network.Socket
import System.Directory
import qualified Data.ByteString.Lazy as L
import System.Timeout (timeout)
import Test.Hspec
-- | IPv4 loopback address used by all TCP/UDP tests.
serverAddr :: String
serverAddr = "127.0.0.1"

-- | IPv6 loopback address.
serverAddr6 :: String
serverAddr6 = "::1"

-- | Payload exchanged between test clients and servers.
testMsg :: ByteString
testMsg = "This is a test message."

-- | Lazy variant of 'testMsg'.  Derived from it (instead of repeating
-- the literal) so the strict and lazy messages can never drift apart.
lazyTestMsg :: L.ByteString
lazyTestMsg = L.fromStrict testMsg

-- | Path of the Unix domain socket used by the Unix-socket tests.
unixAddr :: String
unixAddr = "/tmp/network-test"
-- | Establish a connection between client and server and then run
-- 'clientAct' and 'serverAct', in different threads. Both actions
-- get passed a connected 'Socket', used for communicating between
-- client and server. 'unixTest' makes sure that the 'Socket' is
-- closed after the actions have run.
-- | Like 'unix', but the cleanup action unlinks the socket file (if it
-- exists) both before binding and after the server finishes.
unixWithUnlink :: String -> ((Socket, SockAddr) -> IO b) -> (ClientServer Socket b)
unixWithUnlink address = unix address unlink
  where
    unlink file = do
      exist <- doesFileExist file
      when exist $ removeFile file

-- | Build a Unix-domain-socket 'ClientServer' configuration.  The server
-- accepts exactly one connection, runs the server action on it, then
-- shuts the accepted socket down and runs the cleanup action.
unix
    :: String             -- ^ address
    -> (String -> IO ())  -- ^ clean up action
    -> ((Socket, SockAddr) -> IO b) -- ^ server action
    -> (ClientServer Socket b)
unix address cleanupAct serverAct = defaultClientServer
    { clientSetup = do
        sock <- socket AF_UNIX Stream defaultProtocol
        connect sock (SockAddrUnix address)
        return sock
    , serverSetup = do
        sock <- socket AF_UNIX Stream defaultProtocol
        cleanupAct address -- just in case a previous run left the file behind
        bind sock (SockAddrUnix address)
        listen sock 1
        return sock
    , serverAction = \sock ->
        -- bracket guarantees the accepted socket is torn down even if
        -- the server action throws.
        E.bracket (accept sock) (killClientSock . fst) serverAct
    }
  where
    killClientSock sock = do
        shutdown sock ShutdownBoth
        close sock
        cleanupAct address
-- | Establish a connection between client and server and then run
-- 'clientAct' and 'serverAct', in different threads. Both actions
-- get passed a connected 'Socket', used for communicating between
-- client and server. 'tcpTest' makes sure that the 'Socket' is
-- closed after the actions have run.
-- | Connect a TCP client and server over loopback and run the two
-- actions concurrently on the connected sockets.
tcpTest :: (Socket -> IO a) -> (Socket -> IO b) -> IO ()
tcpTest client server = withPort $ test . setClientAction client . tcp server

-- | Build a TCP 'ClientServer' configuration.  The server binds an
-- ephemeral port and publishes it through the 'MVar'; the client blocks
-- on that 'MVar' before connecting.  On non-Windows platforms the
-- expected socket flags (non-blocking, close-on-exec) are asserted.
tcp :: (Socket -> IO b) -> MVar PortNumber -> ClientServer Socket ()
tcp serverAct portVar = defaultClientServer
    { clientSetup = do
        let hints = defaultHints { addrSocketType = Stream }
        serverPort <- readMVar portVar
        addr:_ <- getAddrInfo (Just hints) (Just serverAddr) (Just $ show serverPort)
        sock <- socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr)
#if !defined(mingw32_HOST_OS)
        withFdSocket sock $ \fd -> do
          getNonBlock fd `shouldReturn` True
          getCloseOnExec fd `shouldReturn` False
#endif
        connect sock $ addrAddress addr
        return sock
    , serverSetup = do
        let hints = defaultHints {
                addrFlags = [AI_PASSIVE]
              , addrSocketType = Stream
              }
        addr:_ <- getAddrInfo (Just hints) (Just serverAddr) Nothing
        sock <- socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr)
        withFdSocket sock $ \fd -> do
#if !defined(mingw32_HOST_OS)
            getNonBlock fd `shouldReturn` True
            getCloseOnExec fd `shouldReturn` False
#endif
            setSocketOption sock ReuseAddr 1
            setCloseOnExecIfNeeded fd
#if !defined(mingw32_HOST_OS)
            getCloseOnExec fd `shouldReturn` True
#endif
        bind sock $ addrAddress addr
        listen sock 1
        -- Publish the ephemeral port the OS actually assigned.
        serverPort <- socketPort sock
        putMVar portVar serverPort
        return sock
    , serverAction = \sock -> do
        (clientSock, _) <- accept sock
#if !defined(mingw32_HOST_OS)
        withFdSocket sock $ \fd -> do
          getNonBlock fd `shouldReturn` True
          getCloseOnExec fd `shouldReturn` True
#endif
        _ <- serverAct clientSock
        close clientSock
    }
-- | Create an unconnected 'Socket' for sending UDP and receiving
-- datagrams and then run 'clientAct' and 'serverAct'.
-- | Create unconnected UDP sockets for the client and server and run
-- both actions concurrently; the client also receives the server's port.
udpTest :: (Socket -> PortNumber -> IO a) -> (Socket -> IO b) -> IO ()
udpTest client server =
    withPort $ test . setServerAction server . udp client

-- | Build a UDP 'ClientServer' configuration.  The server binds an
-- ephemeral port and publishes it via the 'MVar'; the client action is
-- handed that port so it can address its datagrams.
udp
    :: (Socket -> PortNumber -> IO a)
    -> MVar PortNumber
    -> ClientServer a Socket
udp clientAct portVar = defaultClientServer
    { clientSetup = socket AF_INET Datagram defaultProtocol
    , clientAction = \sock -> do
        serverPort <- readMVar portVar
        clientAct sock serverPort
    , serverSetup = do
        let hints = defaultHints {
                addrFlags = [AI_PASSIVE]
              , addrSocketType = Datagram
              }
        addr:_ <- getAddrInfo (Just hints) (Just serverAddr) Nothing
        sock <- socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr)
        setSocketOption sock ReuseAddr 1
        bind sock $ addrAddress addr
        serverPort <- socketPort sock
        putMVar portVar serverPort
        return sock
    }
-- | A client/server test configuration: how to create each side's socket
-- and what to run on it once the pair is connected.
data ClientServer a b
    = ClientServer
    { clientSetup :: IO Socket        -- ^ create/connect the client socket
    , clientAction :: Socket -> IO a  -- ^ run on the client side
    , serverSetup :: IO Socket        -- ^ create, bind and listen
    , serverAction :: Socket -> IO b  -- ^ run on the server side
    }

-- | Replace the client action of a configuration.
setClientAction
    :: (Socket -> IO b)
    -> ClientServer a c
    -> ClientServer b c
setClientAction f c = c { clientAction = f }

-- | Replace the server action of a configuration.
setServerAction
    :: (Socket -> IO c)
    -> ClientServer a b
    -> ClientServer a c
setServerAction f c = c { serverAction = f }

-- | Baseline configuration: both setup actions fail loudly until
-- overridden, and both actions default to doing nothing.
defaultClientServer :: ClientServer Socket Socket
defaultClientServer = ClientServer
    { clientSetup =
        E.throwIO $ userError "no client setup defined"
    , clientAction = return
    , serverSetup = E.throwIO $ userError "no server setup defined"
    , serverAction = return
    }
-- | Run a client/server pair and synchronize them so that the server
-- is started before the client and the specified server action is
-- finished before the client closes the 'Socket'.
-- | Run a client/server pair.  The server is forked first; the client
-- only starts once the server signals readiness on the barrier, and it
-- waits for the server action to finish before tearing down.  Each
-- side's action must complete within one second ('timeout' pattern
-- match fails otherwise).
test :: ClientServer a b -> IO ()
test conf = do
    tid <- myThreadId
    barrier <- newEmptyMVar
    _ <- forkIO $ server tid barrier
    client tid barrier
  where
    server tid barrier =
        bracketWithReraise tid (serverSetup conf) close $ \sock -> do
            serverReady
            Just _ <- timeout 1000000 $ (serverAction conf) sock
            putMVar barrier ()
      where
        -- | Signal to the client that it can proceed.
        serverReady = putMVar barrier ()
    client tid barrier = do
        takeMVar barrier
        -- Transfer exceptions to the main thread.
        bracketWithReraise tid (clientSetup conf) close $ \res -> do
            Just _ <- timeout 1000000 $ (clientAction conf) res
            takeMVar barrier
-- | Allocate a fresh, empty 'MVar' for communicating the server's port
-- number and hand it to the given action.
withPort :: (MVar PortNumber -> IO a) -> IO a
withPort action = newEmptyMVar >>= action
-- | Like 'bracket' but catches and reraises the exception in another
-- thread, specified by the first argument.
-- | Like 'E.bracket', but any exception escaping the bracketed action is
-- rethrown in the given thread (the main test thread), so failures in
-- forked client/server threads fail the test instead of being lost.
bracketWithReraise :: ThreadId -> IO a -> (a -> IO b) -> (a -> IO ()) -> IO ()
bracketWithReraise tid setup teardown thing =
    E.bracket setup teardown thing
      `E.catch` \ (e :: E.SomeException) -> E.throwTo tid e
| CloudI/CloudI | src/api/haskell/external/network-3.1.0.1/tests/Network/Test/Common.hs | mit | 7,755 | 0 | 17 | 1,954 | 2,001 | 1,017 | 984 | 175 | 1 |
{-# LANGUAGE DeriveDataTypeable, TemplateHaskell #-}
import Hypervisor.DomainInfo
import Hypervisor.XenStore
import Hypervisor.Debug
import Data.List
import Data.Maybe
import qualified Data.IntMap.Strict as M
import Control.Monad
import Control.Applicative
import Control.Distributed.Process
import Control.Distributed.Process.Node
import Control.Distributed.Process.Closure
import Control.Distributed.Process.Serializable
import Network.Transport.IVC
import Data.Binary (encode, decode)
import Data.Typeable
-- | Serialisable wrapper for a map function.
data Mapper a b = Mapper { unMapper :: a -> b }
  deriving Typeable

-- | Serialisable wrapper for a fold (accumulator) function.
data Folder b a = Folder { unFolder :: b -> a -> b }
  deriving Typeable

-- | Worker entry point: unpack the mapper and folder closures, map over
-- the local slice, fold the results from the given seed, and send the
-- partial result back to the master process.
exec :: ([Float],
         Closure (Mapper Float (Float, Int)),
         Closure (Folder (M.IntMap (Float, Int)) (Float, Int)),
         M.IntMap (Float, Int),
         ProcessId) -> Process ()
exec (xs, f, g, init, master) = do
  f' <- unMapper <$> unClosure f
  g' <- unFolder <$> unClosure g
  send master (foldl' g' init (f' <$> xs))

-- | Assignment step: pair each point with the index of its nearest mean.
f :: [Float] -> Mapper Float (Float, Int)
f means = Mapper (\x ->
  minimumBy (\(x1, idx1) (x2, idx2) -> compare (abs (x1 - means !! idx1))
                                               (abs (x2 - means !! idx2)))
            (zip (cycle [x]) [0..length means - 1]))

-- | Accumulate a point into the (sum, count) slot of its cluster.
g :: Folder (M.IntMap (Float, Int)) (Float, Int)
g = Folder (\m (x, idx) -> M.adjust (\(sum, cnt) -> (sum + x, cnt + 1)) idx m)

-- Reducer
-- | Merge two partial (sum, count) maps cluster-wise.
h :: M.IntMap (Float, Int) -> M.IntMap (Float, Int) -> M.IntMap (Float, Int)
h = M.unionWith (\(sum1, cnt1) (sum2, cnt2) -> (sum1 + sum2, cnt1 + cnt2))

$(remotable ['exec, 'f, 'g])

-- | A (very) restricted RDD: raw data, optionally with pending maps.
data RDD = Raw [Float]
         | Composed (Closure (Mapper Float (Float, Int))) RDD
         deriving (Show)

parallelize :: [Float] -> RDD
parallelize xs = Raw xs

-- | Attach a mapper to an RDD (applied lazily, during 'fold').
($$) :: Closure (Mapper Float (Float, Int)) -> RDD -> RDD
f $$ rdd = Composed f rdd

infixr 4 $$
-- | Distribute a map+fold over the workers: slice the raw data evenly,
-- spawn one 'exec' per worker, then collect the partial maps and reduce
-- them with 'h'.
-- NOTE(review): only matches @Composed f (Raw xs)@ -- other RDD shapes
-- crash with a pattern-match failure.  The monitor-notification branch
-- below is dead while the 'monitor cpid' call stays commented out.
fold :: [NodeId] -> RDD
     -> Closure (Folder (M.IntMap (Float, Int)) (Float, Int))
     -> (M.IntMap (Float, Int) -> M.IntMap (Float, Int) -> M.IntMap (Float, Int))
     -> M.IntMap (Float, Int)
     -> Process (M.IntMap (Float, Int))
fold workers (Composed f (Raw xs)) g h init = do
  us <- getSelfPid
  g' <- unFolder <$> unClosure g
  cpids <- forM (zip workers [0..numOfWorkers-1]) $ \(worker, i) -> do
    cpid <- spawn worker ($(mkClosure 'exec) (take slice (drop (i * slice) xs),
                                              f, g, init, us))
    -- monitor cpid
    return cpid
  let go :: (M.IntMap (Float, Int)) -> Int
         -> [(ProcessId, Int)] -> Int -> Process (M.IntMap (Float, Int))
      go partial finished map nworkerIdx | finished == numOfWorkers = return $ partial
                                         | otherwise = do
        (partial', finished', map', nworker') <- receiveWait [
            match (\result ->
              return (h partial result, finished + 1, map, nworkerIdx)),
            -- Respawn a crashed worker's slice on the next worker
            -- (round-robin); only reachable if monitoring is re-enabled.
            matchIf (\(ProcessMonitorNotification _ _ reason) -> reason /= DiedNormal)
                    (\(ProcessMonitorNotification _ pid reason) -> do
              liftIO . writeDebugConsole $ show reason ++ "\n"
              let Just i = lookup pid map
                  nworker = workers !! nworkerIdx
              cpid <- spawn nworker ($(mkClosure 'exec)
                                       (take slice (drop (i * slice) xs), f, g, us))
              monitor cpid
              return (partial, finished, (cpid, i) : map,
                      (nworkerIdx + 1) `mod` numOfWorkers))]
        go partial' finished' map' nworker'
  go init 0 (zip cpids [0..numOfWorkers-1]) 0
  where
    numOfWorkers = length workers
    -- Ceiling division so every element lands in some slice.
    slice = (length xs - 1) `div` numOfWorkers + 1
-- | Master loop: run five iterations of 2-means over 'rdd', starting
-- from means [0, 1], logging the means after each iteration.
driver :: [NodeId] -> Process ()
driver workers = do
  void $ go 0 ([0, 1] :: [Float])
  where
    go :: Int -> [Float] -> Process [Float]
    go n means | n == 5 = return means
               | otherwise = do
      let rdd' = ($(mkClosure 'f) means) $$ rdd
      result <- fold workers rdd' $(mkStaticClosure 'g) h
                  (M.fromList [(0, (0, 0)), (1, (0, 0))])
      -- New mean of each cluster = accumulated sum / count.
      let means' = (\(_, (sum, cnt)) -> sum / fromIntegral cnt) <$> M.toList result
      liftIO . writeDebugConsole $
        "means at iteration " ++ show n ++ " : " ++ show means' ++ "\n"
      go (n + 1) means'

-- | The input data set.
rdd = parallelize [0..9]
-- | Node 0 acts as master: wait until the other (num-1) nodes have
-- registered their NodeIds in the xenstore, decode them, and run the
-- driver over them.  Every other node registers itself under
-- /process/workerN and then blocks forever as a worker.
initialProcess :: XenStore -> Int -> Int -> Process ()
initialProcess xs 0 num = do
  workers <- liftIO $ do
    keys <- waitForKeys xs "/process" (num-1) -- should be changed to /workers
    forM keys $ \key -> decode . read <$> xsRead xs ("/process/" ++ key)
  driver workers
initialProcess xs index num = do
  us <- processNodeId <$> getSelfPid
  liftIO $ xsWrite xs ("/process/worker" ++ show index) (show (encode us))
  receiveWait []

main :: IO ()
main = do
  xs <- initXenStore
  Right transport <- createTransport xs
  -- Wait for three domains; our position in the sorted list is our rank.
  doms <- sort <$> waitForDoms xs 3
  me <- xsGetDomId xs
  let Just index = elemIndex me doms
  node <- newLocalNode transport (Main.__remoteTable initRemoteTable)
  runProcess node $ initialProcess xs index (length doms)
| hackern/network-transport-ivc | tests/KMeans/Main.hs | mit | 5,049 | 0 | 30 | 1,398 | 2,111 | 1,112 | 999 | 113 | 1 |
module Stations.FMR where
import Program
import Text.HTML.TagSoup as TagSoup
import Data.Char (isDigit)
import Data.List (isInfixOf)
import Debug.Trace (trace)
-- | Does this tag open a programme section (left or right column div)?
isProgramSection :: Tag String -> Bool
isProgramSection tag = (tag ~== "<div class=progLeft>") || (tag ~== "<div class=progRight>")

-- | The first progTitle paragraph of a programme section.
-- NOTE(review): partial -- (!! 0) errors when no progTitle tag exists.
titleSection :: [Tag String] -> [Tag String]
titleSection tags = (partitions (~== "<p class=progTitle>") tags) !! 0

-- | Text of the first text node inside the first span of the given class.
-- NOTE(review): partial -- 'head' errors when no such span exists.
spanContent :: String -> [Tag String] -> String
spanContent className tags =
  innerText $ take 1 $ filter isTagText parts
  where parts = head (partitions (~== ("<span class=" ++ className)) tags)

-- | Programme title: text of the second tag of the title section.
progTitle :: [Tag String] -> String
progTitle tags = innerText $ take 1 $ drop 1 tags

-- | Programme genre: text of the sixth tag of the title section.
progType :: [Tag String] -> String
progType tags = innerText $ take 1 $ drop 5 tags
-- | Split a schedule string such as @"10:00 - 11:00"@ into its start and
-- end components: the first and third whitespace-separated words.
-- (Still errors, as before, on strings with fewer than three words.)
progTimes :: String -> (String, String)
progTimes schedule = (ws !! 0, ws !! 2)
  where
    ws = words schedule
-- | A programme is fortnightly when its time string carries more than
-- three words (i.e. something extra after "HH:MM - HH:MM").
isFortnightly :: String -> Bool
isFortnightly = (> 3) . length . words
-- | Assemble a 'Program' from the tags of one programme section.
makeProgram :: [Tag String] -> Program
makeProgram progTag =
  let
    timeString = (spanContent "hourTitle" progTag)
    (startTime, endTime) = progTimes timeString
  in
    Program {
      day = (spanContent "dayTitle" progTag)
    , startTime = startTime
    , endTime = endTime
    , fortnightly = (isFortnightly timeString)
    , title = (progTitle $ titleSection progTag)
    , genre = (progType $ titleSection progTag)
    }

-- | Parse a schedule page into its list of programmes.
programs :: String -> [Program]
programs contents =
  let tags = TagSoup.parseTags contents
      progList = TagSoup.partitions isProgramSection tags
  in
    map makeProgram progList
| amarandon/radio-ld | Stations/FMR.hs | mit | 1,672 | 0 | 12 | 380 | 546 | 290 | 256 | 41 | 1 |
-- | Print a greeting.  Note the explicit "\n" in the string on top of
-- putStrLn's own newline: the output ends with a blank line.
main = putStrLn "Hello, World !!\n"
| skywind3000/language | haskell/hello.hs | mit | 41 | 0 | 5 | 10 | 9 | 4 | 5 | 1 | 1 |
{- |
Module : DataAssociation.Definitions
Description : Definitions for rules mining.
License : MIT
Stability : development
Definitions for rules mining.
-}
module DataAssociation.Definitions (
Itemset(..)
, MinSupport(..)
, MinConfidence(..)
, AssocRule(..)
) where
import Data.Function ( on )
import Control.Arrow ( (&&&) )
-- | An itemset.
class (Eq (set item), Show (set item), Show item) =>
  Itemset set item where
    -- | a `contains` b
    contains :: set item -> set item -> Bool
    -- | Does the set contain the given item?
    containsItem :: set item -> item -> Bool
    -- | Number of items in the set.
    setSize :: set item -> Int
    -- | The items as a list.
    listItems :: set item -> [item]
    -- | returns the elements contained in the first argument
    --   and not the second
    itemsetDiff :: set item -> set item -> [item]
    insertItem :: item -> set item -> set item
    deleteItemAt :: Int -> set item -> set item
    -- | splits first (n-1) elements and the last
    splitInit :: set item -> (set item, item)
    -- | creates an itemset from a list of items
    newItemset :: [item] -> set item

-- | A container for the /minimum support/ parameter.
newtype MinSupport = MinSupport Float deriving (Show, Read)

-- | A container for the /minimum confidence/ parameter.
newtype MinConfidence = MinConfidence Float deriving (Show, Read)

-- | Association Rule
data AssocRule set item = AssocRule{ ruleFrom :: set item    -- ^ implicating itemset
                                   , ruleFollows :: set item -- ^ implication
                                   , confidence :: Float
                                   , support :: Float
                                   }
--                      deriving (Ord, Eq)

-- Equality and ordering consider only the rule's two itemsets, never its
-- confidence/support statistics.
instance (Eq (set item)) =>
  Eq (AssocRule set item) where
    (==) = (==) `on` (ruleFrom &&& ruleFollows)

instance (Ord (set item)) =>
  Ord (AssocRule set item) where
    compare = compare `on` (ruleFrom &&& ruleFollows)

-- Rendered as "from ==> follows".
instance (Show item, Itemset set item) =>
  Show (AssocRule set item) where
    show (AssocRule from follows _ _) = show (listItems from) ++ " ==> " ++ show (listItems follows)
| fehu/min-dat--a-priori | core/src/DataAssociation/Definitions.hs | mit | 2,203 | 0 | 10 | 719 | 533 | 297 | 236 | -1 | -1 |
{-# LANGUAGE NoImplicitPrelude, OverloadedStrings #-}
{-# LANGUAGE CPP #-}
module IHaskell.Convert.LhsToIpynb (lhsToIpynb) where
import IHaskellPrelude
import qualified Data.Text as T
import qualified Data.Text.Lazy as LT
import qualified Data.ByteString.Lazy as LBS
import Data.Aeson ((.=), encode, object, Value(Array, Bool, Number, String, Null))
import Data.Char (isSpace)
import qualified Data.Vector as V
import qualified Data.List as List
import IHaskell.Flags (LhsStyle(LhsStyle))
#if MIN_VERSION_aeson(2,0,0)
import qualified Data.Aeson.KeyMap as KeyMap
import qualified Data.Aeson.Key as Key
#else
#endif
-- | Convert a literate-Haskell file into a Jupyter notebook: classify
-- each input line according to the style's markers, group runs of lines
-- into cells, and write the encoded nbformat JSON to the target path.
lhsToIpynb :: LhsStyle LText -> FilePath -> FilePath -> IO ()
lhsToIpynb sty from to = do
  classed <- classifyLines sty . LT.lines . LT.pack <$> readFile from
  LBS.writeFile to . encode . encodeCells $ groupClassified classed

-- | One input line tagged with its classification.
data CellLine a = CodeLine a
                | OutputLine a
                | MarkdownLine a
  deriving Show
-- | Does this classified line belong to a code cell?
isCode :: CellLine t -> Bool
isCode line = case line of
  CodeLine _ -> True
  _          -> False

-- | Does this classified line belong to a code cell's output?
isOutput :: CellLine t -> Bool
isOutput line = case line of
  OutputLine _ -> True
  _            -> False

-- | Does this classified line belong to a markdown cell?
isMD :: CellLine t -> Bool
isMD line = case line of
  MarkdownLine _ -> True
  _              -> False

-- | Is this a markdown line with no content at all?
isEmptyMD :: (Eq a, Monoid a) => CellLine a -> Bool
isEmptyMD line = case line of
  MarkdownLine a -> a == mempty
  _              -> False

-- | Strip the classification tag, returning the underlying line.
untag :: CellLine t -> t
untag line = case line of
  CodeLine a     -> a
  OutputLine a   -> a
  MarkdownLine a -> a
-- | A notebook cell: code together with its output lines, or markdown.
data Cell a = Code a a
            | Markdown a
  deriving Show

-- | Wrap the encoded cells in the top-level notebook JSON object.
encodeCells :: [Cell [LText]] -> Value
encodeCells xs = object $
  "cells" .= Array (V.fromList (map cellToVal xs)) : boilerplate

-- | Encode one cell in nbformat-4 JSON.  A code cell carries at most one
-- display_data output; the list comprehension's @take 1 o@ means the
-- outputs array is empty when there are no output lines.
cellToVal :: Cell [LText] -> Value
cellToVal (Code i o) = object
  [ "cell_type" .= String "code"
  , "execution_count" .= Null
  , "metadata" .= object ["collapsed" .= Bool False]
  , "source" .= arrayFromTxt i
  , "outputs" .= Array
    (V.fromList
       [object
          [ "text" .= arrayFromTxt o
          , "metadata" .= object []
          , "output_type" .= String "display_data"
          ] | _ <- take 1 o])
  ]
cellToVal (Markdown txt) = object
  [ "cell_type" .= String "markdown"
  , "metadata" .= object ["hidden" .= Bool False]
  , "source" .= arrayFromTxt txt
  ]

-- | arrayFromTxt makes a JSON array of string s, one element per input
-- line, each with a trailing newline appended.
arrayFromTxt :: [LText] -> Value
arrayFromTxt i = Array (V.fromList $ map stringify i)
  where
    stringify = String . LT.toStrict . flip LT.snoc '\n'
-- | ihaskell needs this boilerplate at the upper level to interpret the json describing cells and
-- output correctly.  (The key type changed from Text to Key in aeson 2.)
#if MIN_VERSION_aeson(2,0,0)
boilerplate :: [(Key.Key, Value)]
#else
boilerplate :: [(T.Text, Value)]
#endif
boilerplate =
  ["metadata" .= object [kernelspec, lang], "nbformat" .= Number 4, "nbformat_minor" .= Number 0]
  where
    -- Kernel identification shown by Jupyter.
    kernelspec = "kernelspec" .= object
      [ "display_name" .= String "Haskell"
      , "language" .= String "haskell"
      , "name" .= String "haskell"
      ]
    -- VERSION_ghc is substituted by CPP at build time.
    lang = "language_info" .= object ["name" .= String "haskell", "version" .= String VERSION_ghc]
-- | Group classified lines into cells: a run of code lines, optionally
-- followed by blank markdown lines and then output lines, becomes one
-- code cell; runs of markdown lines become markdown cells.  A stray
-- output line with no preceding code is demoted to markdown.
groupClassified :: [CellLine LText] -> [Cell [LText]]
groupClassified (CodeLine a:x)
  | (c, x1) <- List.span isCode x,
    (_, x2) <- List.span isEmptyMD x1,
    (o, x3) <- List.span isOutput x2
  = Code (a : map untag c) (map untag o) : groupClassified x3
groupClassified (MarkdownLine a:x)
  | (m, x1) <- List.span isMD x = Markdown (a : map untag m) : groupClassified x1
groupClassified (OutputLine a:x) = Markdown [a] : groupClassified x
groupClassified [] = []

-- | Tag each line by its prefix: the style's code marker, its output
-- marker, or neither (markdown).  A line that equals the marker itself
-- (modulo surrounding blanks) also counts, as an empty code/output line.
classifyLines :: LhsStyle LText -> [LText] -> [CellLine LText]
classifyLines sty@(LhsStyle c o _ _ _ _) (l:ls) =
  case (sp c, sp o) of
    (Just a, Nothing)  -> CodeLine a : classifyLines sty ls
    (Nothing, Just a)  -> OutputLine a : classifyLines sty ls
    (Nothing, Nothing) -> MarkdownLine l : classifyLines sty ls
    -- A line cannot match both the code and the output prefix.
    _ -> error "IHaskell.Convert.classifyLines"
  where
    sp x = LT.stripPrefix (dropSpace x) (dropSpace l) `mplus` blankCodeLine x
    blankCodeLine x = if LT.strip x == LT.strip l
                        then Just ""
                        else Nothing
    dropSpace = LT.dropWhile isSpace
classifyLines _ [] = []
| gibiansky/IHaskell | src/IHaskell/Convert/LhsToIpynb.hs | mit | 4,754 | 0 | 16 | 1,541 | 1,446 | 762 | 684 | 93 | 5 |
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -Wno-orphans #-}
module Unison.Server.CodebaseServer where
import Control.Concurrent (newEmptyMVar, putMVar, readMVar)
import Control.Concurrent.Async (race)
import Data.ByteString.Char8 (unpack)
import Control.Exception (ErrorCall (..), throwIO)
import qualified Network.URI.Encode as URI
import Control.Lens ((&), (.~))
import Data.Aeson ()
import qualified Data.ByteString as Strict
import qualified Data.ByteString.Base64 as Base64
import qualified Data.ByteString.Char8 as C8
import qualified Data.ByteString.Lazy as Lazy
import qualified Data.ByteString.Lazy.UTF8 as BLU
import Data.OpenApi (Info (..), License (..), OpenApi, URL (..))
import qualified Data.OpenApi.Lens as OpenApi
import Data.Proxy (Proxy (..))
import qualified Data.Text as Text
import qualified Data.Text.Encoding as Text
import GHC.Generics ()
import Network.HTTP.Media ((//), (/:))
import Network.HTTP.Types.Status (ok200)
import Network.Wai (responseLBS)
import Network.Wai.Handler.Warp
( Port,
defaultSettings,
runSettings,
setBeforeMainLoop,
setHost,
setPort,
withApplicationSettings,
)
import Servant
( MimeRender (..),
serve,
throwError,
)
import Servant.API
( Accept (..),
Capture,
CaptureAll,
Get,
JSON,
Raw,
(:>),
type (:<|>) (..),
)
import Servant.Docs
( DocIntro (DocIntro),
ToSample (..),
docsWithIntros,
markdown,
singleSample,
)
import Servant.OpenApi (HasOpenApi (toOpenApi))
import Servant.Server
( Application,
Handler,
Server,
ServerError (..),
Tagged (Tagged),
err401,
err404,
)
import Servant.Server.StaticFiles (serveDirectoryWebApp)
import System.Directory (canonicalizePath, doesFileExist)
import System.Environment (getExecutablePath)
import System.FilePath ((</>))
import qualified System.FilePath as FilePath
import System.Random.Stateful (getStdGen, newAtomicGenM, uniformByteStringM)
import Unison.Codebase (Codebase)
import qualified Unison.Codebase.Runtime as Rt
import Unison.Parser (Ann)
import Unison.Prelude
import Unison.Server.Endpoints.FuzzyFind (FuzzyFindAPI, serveFuzzyFind)
import Unison.Server.Endpoints.GetDefinitions
( DefinitionsAPI,
serveDefinitions,
)
import Unison.Server.Endpoints.ListNamespace (NamespaceAPI, serveNamespace)
import Unison.Server.Types (mungeString)
import Unison.Var (Var)
-- HTML content type
-- | Phantom content-type tag for serving @text/html@ responses.
data HTML = HTML
-- | A pre-rendered HTML page, served verbatim.
newtype RawHtml = RawHtml { unRaw :: Lazy.ByteString }
instance Accept HTML where
  contentType _ = "text" // "html" /: ("charset", "utf-8")
instance MimeRender HTML RawHtml where
  mimeRender _ = unRaw
-- Servant route types for the codebase server.
type OpenApiJSON = "openapi.json" :> Get '[JSON] OpenApi
type DocAPI = UnisonAPI :<|> OpenApiJSON :<|> Raw
type UnisonAPI = NamespaceAPI :<|> DefinitionsAPI :<|> FuzzyFindAPI
-- | The web UI serves index.html for every sub-route (client-side routing).
type WebUI = CaptureAll "route" Text :> Get '[HTML] RawHtml
type ServerAPI = ("ui" :> WebUI) :<|> ("api" :> DocAPI)
-- | Everything except /static requires the auth token as the first path segment.
type AuthedServerAPI = ("static" :> Raw) :<|> (Capture "token" Text :> ServerAPI)
instance ToSample Char where
  toSamples _ = singleSample 'x'
-- BaseUrl and helpers
-- | Address of a running codebase server, including its auth token.
data BaseUrl = BaseUrl
  { urlHost :: String,
    urlToken :: Strict.ByteString,
    urlPort :: Port
  }
data BaseUrlPath = UI | Api
instance Show BaseUrl where
  -- renders as host:port/token, with the token URI-encoded
  show url = urlHost url <> ":" <> show (urlPort url) <> "/" <> (URI.encode . unpack . urlToken $ url)
-- | Full URL for the UI or API root of a server.
urlFor :: BaseUrlPath -> BaseUrl -> String
urlFor path baseUrl =
  case path of
    UI -> show baseUrl <> "/ui"
    Api -> show baseUrl <> "/api"
-- | Compare the caller-supplied token against the server's expected
-- token; respond 401 when they differ.
handleAuth :: Strict.ByteString -> Text -> Handler ()
handleAuth expectedToken gotToken =
  if Text.decodeUtf8 expectedToken == gotToken
    then pure ()
    else throw401 "Authentication token missing or incorrect."
  where throw401 msg = throwError $ err401 { errBody = msg }
-- | OpenAPI description of the Unison API with metadata attached.
openAPI :: OpenApi
openAPI = toOpenApi api & OpenApi.info .~ infoObject
-- | Title/description/license metadata shown in the OpenAPI document.
infoObject :: Info
infoObject = mempty
  { _infoTitle = "Unison Codebase Manager API"
  , _infoDescription =
    Just "Provides operations for querying and manipulating a Unison codebase."
  , _infoLicense = Just . License "MIT" . Just $ URL
    "https://github.com/unisonweb/unison/blob/trunk/LICENSE"
  , _infoVersion = "1.0"
  }
-- | Markdown rendering of the API docs, served at the docs endpoint.
docsBS :: Lazy.ByteString
docsBS = mungeString . markdown $ docsWithIntros [intro] api
  where
    intro = DocIntro (Text.unpack $ _infoTitle infoObject)
               (toList $ Text.unpack <$> _infoDescription infoObject)
-- Proxies used to pick out the route types at the value level.
docAPI :: Proxy DocAPI
docAPI = Proxy
api :: Proxy UnisonAPI
api = Proxy
serverAPI :: Proxy AuthedServerAPI
serverAPI = Proxy
-- | Assemble the WAI 'Application' for the codebase server: the UI
-- directory, the runtime, the codebase, and the expected auth token.
app
  :: Var v
  => Rt.Runtime v
  -> Codebase IO v Ann
  -> FilePath
  -> Strict.ByteString
  -> Application
app rt codebase uiPath expectedToken =
  serve serverAPI $ server rt codebase uiPath expectedToken
-- | Produce a fresh authentication token: 24 random bytes,
-- base64-encoded.
genToken :: IO Strict.ByteString
genToken = do
  rng <- newAtomicGenM =<< getStdGen
  bytes <- uniformByteStringM 24 rng
  pure (Base64.encode bytes)
-- | A one-shot rendezvous: 'notify' publishes a value and 'waitFor'
-- blocks until one has been published.
data Waiter a = Waiter
  { notify :: a -> IO ()
  , waitFor :: IO a
  }

-- | Build a 'Waiter' backed by an initially-empty 'MVar'.
mkWaiter :: IO (Waiter a)
mkWaiter = do
  box <- newEmptyMVar
  pure Waiter { notify = putMVar box, waitFor = readMVar box }
-- | Environment variable naming the directory of the compiled web UI.
ucmUIVar :: String
ucmUIVar = "UCM_WEB_UI"
-- | Environment variable overriding the listen port.
ucmPortVar :: String
ucmPortVar = "UCM_PORT"
-- | Environment variable overriding the listen host.
ucmHostVar :: String
ucmHostVar = "UCM_HOST"
-- | Environment variable supplying a fixed auth token.
ucmTokenVar :: String
ucmTokenVar = "UCM_TOKEN"
-- | Startup options for the codebase server; every field is optional
-- and falls back to a generated/derived default in 'startServer'.
data CodebaseServerOpts = CodebaseServerOpts
  { token :: Maybe String
  , host :: Maybe String
  , port :: Maybe Int
  , codebaseUIPath :: Maybe FilePath
  } deriving (Show, Eq)
-- The auth token required for accessing the server is passed to the function k
-- | Start the codebase server. With an explicit port we start Warp and
-- invoke 'onStart' once the main loop is up; without one we let Warp
-- pick a free port via 'withApplicationSettings'.
startServer
  :: Var v
  => CodebaseServerOpts
  -> Rt.Runtime v
  -> Codebase IO v Ann
  -> (BaseUrl -> IO ())
  -> IO ()
startServer opts rt codebase onStart = do
  -- the `canonicalizePath` resolves symlinks
  exePath <- canonicalizePath =<< getExecutablePath
  -- UI directory defaults to "ui" next to the executable
  envUI <- canonicalizePath $ fromMaybe (FilePath.takeDirectory exePath </> "ui") (codebaseUIPath opts)
  -- NOTE: this binding shadows the record selector 'token'
  token <- case token opts of
    Just t -> return $ C8.pack t
    _ -> genToken
  -- partially applied: still awaiting the port
  let baseUrl = BaseUrl "http://127.0.0.1" token
  let settings = defaultSettings
        & maybe id setPort (port opts)
        & maybe id (setHost . fromString) (host opts)
  let a = app rt codebase envUI token
  case port opts of
    Nothing -> withApplicationSettings settings (pure a) (onStart . baseUrl)
    Just p -> do
      started <- mkWaiter
      let settings' = setBeforeMainLoop (notify started ()) settings
      -- run the server and the onStart callback concurrently; the
      -- server returning first is treated as an error
      result <- race (runSettings settings' a)
                     (waitFor started *> onStart (baseUrl p))
      case result of
        Left () -> throwIO $ ErrorCall "Server exited unexpectedly!"
        Right x -> pure x
-- | Serve the compiled web UI's @index.html@ from the given directory,
-- or respond 404 with a hint about configuring the UI when the file is
-- missing.
serveIndex :: FilePath -> Handler RawHtml
serveIndex path = do
  let index = path </> "index.html"
  exists <- liftIO $ doesFileExist index
  if exists
    -- reuse 'index' instead of rebuilding the path (the original
    -- recomputed @path </> "index.html"@ here)
    then RawHtml <$> liftIO (Lazy.readFile index)
    else noIndex
  where
    -- renamed from 'fail', which shadowed Monad's fail
    noIndex = throwError $ err404
      { errBody =
          BLU.fromString
            $ "No codebase UI configured."
            <> " Set the "
            <> ucmUIVar
            <> " environment variable to the directory where the UI is installed."
      }
-- | Serve the single-page UI for any route, after authentication.
serveUI :: Handler () -> FilePath -> Server WebUI
serveUI tryAuth path _ = tryAuth *> serveIndex path
-- | Wire up the full authenticated server: static assets are public,
-- everything else checks the captured token before handling.
server
  :: Var v
  => Rt.Runtime v
  -> Codebase IO v Ann
  -> FilePath
  -> Strict.ByteString
  -> Server AuthedServerAPI
server rt codebase uiPath token =
  serveDirectoryWebApp (uiPath </> "static")
    :<|> ((\t ->
            serveUI (tryAuth t) uiPath
              :<|> ( ( (serveNamespace (tryAuth t) codebase)
                      :<|> (serveDefinitions (tryAuth t) rt codebase)
                      :<|> (serveFuzzyFind (tryAuth t) codebase)
                    )
                      :<|> serveOpenAPI
                      :<|> Tagged serveDocs
                  )
          )
          )
 where
  -- raw WAI handler serving the generated markdown docs as text/plain
  serveDocs _ respond = respond $ responseLBS ok200 [plain] docsBS
  serveOpenAPI = pure openAPI
  plain = ("Content-Type", "text/plain")
  tryAuth = handleAuth token
| unisonweb/platform | parser-typechecker/src/Unison/Server/CodebaseServer.hs | mit | 8,255 | 0 | 19 | 1,814 | 2,318 | 1,285 | 1,033 | -1 | -1 |
{- Generated by DrIFT (Automatic class derivations for Haskell) -}
{-# LINE 1 "src/FrontEnd/Class.hs" #-}
{-# LANGUAGE NoMonoLocalBinds, NamedFieldPuns #-}
module FrontEnd.Class(
ClassHierarchy(),
ClassRecord(..),
ClassType(..),
Inst(..),
InstanceEnv(..),
addInstanceToHierarchy,
asksClassRecord,
augmentClassHierarchy,
chToClassHead,
checkForCircularDeps,
checkForDuplicateInstaces,
classRecords,
defaultInstanceName,
derivableClasses,
emptyInstance,
enumDerivableClasses,
findClassInsts,
findClassRecord,
instanceName,
instanceToTopDecls,
makeClassHierarchy,
makeInstanceEnv,
noNewtypeDerivable,
printClassHierarchy,
printClassSummary,
scatterAliasInstances
) where
import Control.Monad.Identity
import Control.Monad.Writer(Monoid(..))
import Data.Generics(mkQ,something)
import Data.List(nub)
import Data.Maybe
import Debug.Trace
import Text.PrettyPrint.HughesPJ(render,Doc())
import Text.Printf
import Util.Graph
import qualified Data.List
import qualified Data.Map as Map
import qualified Text.PrettyPrint.HughesPJ as PPrint
import Data.Binary
import Doc.DocLike
import Doc.PPrint
import FrontEnd.HsSyn
import FrontEnd.KindInfer
import FrontEnd.SrcLoc
import FrontEnd.Syn.Traverse
import FrontEnd.Tc.Kind
import FrontEnd.Tc.Type
import FrontEnd.Warning
import Name.Names
import Options (verbose)
import Support.FreeVars
import Support.MapBinaryInstance
import Support.Tickle
import Util.Gen
import Util.Inst()
-- | A name paired with its assumed type.
type Assump = (Name,Sigma)
-- | A single class instance: its source location, whether it was
-- derived, its instance head, and any associated-type definitions.
data Inst = Inst {
    instSrcLoc :: SrcLoc,
    instDerived :: !Bool, -- ^ whether this instance was derived
    instHead :: Qual Pred,
    instAssocs :: [(Tycon,[Tyvar],[Tyvar],Sigma)]
    } deriving(Eq,Ord,Show)
    {-! derive: Binary !-}
instance PPrint a (Qual Pred) => PPrint a Inst where
    pprint Inst { instHead = h, instAssocs = [], instDerived = d } = (if d then text "*" else text " ") <> pprint h
    pprint Inst { instHead = h, instAssocs = as, instDerived = d } = (if d then text "*" else text " ") <> pprint h <+> text "where" <$> vcat [ text " type" <+> pprint n <+> text "_" <+> hsep (map pprint ts) <+> text "=" <+> pprint sigma | (n,_,ts,sigma) <- as]
-- | Placeholder instance; 'instHead' errors if forced without being overridden.
emptyInstance = Inst { instDerived = False, instSrcLoc = bogusASrcLoc, instHead = error "emptyInstance", instAssocs = [] }
-- augment hierarchy with just instances with full class definitions
augmentClassHierarchy :: ClassHierarchy -> ClassHierarchy -> ClassHierarchy
augmentClassHierarchy (CH full _) (CH res is) = ans where
    ans = CH (Map.mapWithKey f is) is
    -- lazy pattern: errors only if a class with instances has no record
    f cn _ = r where Just r = Map.lookup cn (Map.union res full)
data ClassType = ClassNormal | ClassTypeFamily | ClassDataFamily | ClassAlias
    deriving(Eq,Ord)
-- Bool is true if data declaration instead of type declaration
data AssociatedType = Assoc !Tycon !Bool [Tyvar] Kind
    deriving(Eq,Show)
    {-! derive: Binary !-}
{-! derive: Binary !-}
-- | Everything known about one class (or type/data family) definition.
data ClassRecord = ClassRecord {
    className :: !Class, -- ^ can be a TypeConstructor if we are a type or data family
    classSrcLoc :: !SrcLoc,
    classArgs :: [Tyvar],
    classSupers :: [Class], -- TODO: should be Pred
    classAlias :: Maybe (Qual [Pred]),
    classAssumps :: [(Name,Sigma)], -- ^ method signatures
    classAssocs :: [AssociatedType]
    } deriving (Show,Eq)
    {-! derive: Binary !-}
-- | Maps (associated type name, instance head type) to its definition.
newtype InstanceEnv = InstanceEnv {
    instanceEnv :: Map.Map (Name,Name) ([Tyvar],[Tyvar],Type) }
-- | Collect all associated-type definitions from every instance into
-- an 'InstanceEnv' for lookup during typechecking.
makeInstanceEnv :: ClassHierarchy -> InstanceEnv
makeInstanceEnv (CH _ is) = InstanceEnv $ Map.fromList (concatMap f (Map.toList is)) where
    f (cr,is) = concatMap (g cr) is
    g cr Inst { instHead = _ :=> IsIn _cname tt, instAssocs = as } | _cname == cr = ans where
        ans = [ ((tyconName tc,getTypeHead tt),(is,rs,e)) | (tc,is,rs,e) <- as]
    -- an instance filed under the wrong class is an internal error
    g cr x = error $ "makeInstanceEnv: " ++ show (cr,x)
-- | Name of the outermost type constructor of an instance head.
getTypeHead th = case fromTAp th of
    (TArrow {},_) -> tc_Arrow
    (TCon c,_) -> tyconName c
    _ -> error $ "getTypeHead: " ++ show th
-- | Class records plus the instances recorded for each class.
data ClassHierarchy = CH {
    chRecordMap :: Map.Map Class ClassRecord,
    chInstMap :: Map.Map Class [Inst]
    }
instance Binary ClassHierarchy where
    get = do
        m1 <- getMap
        m2 <- getMap
        return (CH m1 m2)
    put (CH m1 m2) = do
        putMap m1
        putMap m2
instance Monoid ClassHierarchy where
    mempty = CH mempty mempty
    -- records are left-biased; instance lists are unioned
    mappend (CH a b) (CH c d) =
        CH (Map.union a c) (Map.unionWith Data.List.union b d)
-- | All class records in the hierarchy.
classRecords :: ClassHierarchy -> [ClassRecord]
classRecords (CH recs _) = Map.elems recs

-- | Look up a class record, failing loudly when the class is unknown.
findClassRecord (CH recs _) cn =
    maybe (error $ "findClassRecord: " ++ show cn) id (Map.lookup cn recs)

-- | Apply a function to the record of the given class, failing loudly
-- when the class is unknown.
asksClassRecord (CH recs _) cn f =
    maybe (error $ "asksClassRecord: " ++ show cn) f (Map.lookup cn recs)

-- | Instances recorded for a class, or @[]@ when there are none.
findClassInsts (CH _ imap) cn = Map.findWithDefault [] cn imap

-- | Render an instance for diagnostics.
showInst :: Inst -> String
showInst = PPrint.render . pprint
-- | Turn a type signature declaration into one assumption per name it
-- declares. Lazy pattern: only ever called on 'HsTypeSig'.
aHsTypeSigToAssumps :: KindEnv -> HsDecl -> [(Name,Type)]
aHsTypeSigToAssumps kt ~sig@(HsTypeSig _ names qualType) = [ (toName Val n,typ) | n <- names] where
    Identity typ = hsQualTypeToSigma kt qualType
-- | Prepend the class constraint to a method's signature, renaming the
-- class variable to the (uniquified) one actually used in the type.
qualifyMethod :: [HsAsst] -> HsDecl -> HsDecl
qualifyMethod ~[HsAsst c [n]] ~(HsTypeSig sloc names (HsQualType oc t))
    = HsTypeSig sloc names (HsQualType (HsAsst c [n']:oc) t) where
        -- find the occurrence of the class variable inside the type
        Just n' = (something (mkQ mzero f)) t
        f (HsTyVar n') | removeUniquifier n' == removeUniquifier n = return n'
        f _ = mzero
-- | Print a one-paragraph summary (supers + instances) per class.
printClassSummary :: ClassHierarchy -> IO ()
printClassSummary (CH h is) = mapM_ f (Map.toList h) where
    --h' = [ (n,fromJust $ Map.lookup n h) | n <- (map fst [ (cn, classSupers ss) | (cn,ss) <- Map.toList h]) ]
    f (cname, ClassRecord { .. }) = do
        putStrLn $ "-- class: " ++ show cname
        let insts = fromMaybe [] (Map.lookup cname is)
        unless (null classSupers) $ putStrLn $ "super classes:" ++ unwords (map show classSupers)
        unless (null insts) $ putStrLn $ "instances: " ++ (intercalate ", " (map showInst insts))
    -- putStrLn ""
    -- f (cname, (ClassAliasRecord { classSupers = supers, classInsts = insts, classClasses = classes })) = do
    --   putStrLn $ "-- class: " ++ show cname
    --   unless (null supers) $ putStrLn $ "super classes:" ++ unwords (map show supers)
    --   unless (null insts) $ putStrLn $ "instances: " ++ (intercalate ", " (map showInst insts))
    --   unless (null classes) $ putStrLn $ "alias for: " ++ unwords (map show classes)
    --   putStrLn ""
-- | Dump every class in the hierarchy (supers, instances, method
-- signatures, associated types, alias info) to stdout, for debugging.
printClassHierarchy :: ClassHierarchy -> IO ()
printClassHierarchy (CH h is) = mapM_ printClassDetails $ Map.toList h where
    printClassDetails :: (Name, ClassRecord) -> IO ()
    printClassDetails (cname, cr) = do
        let args = classArgs cr; supers = classSupers cr;
            methodAssumps = classAssumps cr
            assocs = classAssocs cr
        let insts = fromMaybe [] (Map.lookup cname is)
        putStrLn "..........."
        putStrLn $ "class: " ++ hsep (pprint cname:map pprintParen args)
        putStr $ "super classes:"
        -- unwords is equivalent to intercalate " " here
        pnone supers $ do putStrLn $ " " ++ unwords (map show supers)
        putStr $ "instances:"
        pnone insts $ putStr $ "\n" ++ (showListAndSepInWidth showInst 80 ", " insts)
        -- 'when True' is a debug toggle kept from the original code
        when True $ do
            putStr $ "method signatures:"
            pnone methodAssumps $ putStr $ "\n" ++ (unlines $ map pretty methodAssumps)
            putStr $ "associated types:"
            pnone assocs $ putStrLn $ "\n" ++ (unlines $ map (render . passoc) assocs)
        -- total replacement for 'when (isJust ...) + irrefutable let'
        case classAlias cr of
            Just x -> do
                putStr $ "alias for:"
                putStrLn (pprint x)
            Nothing -> return ()
        putStr "\n"
    -- run the action, or print " none" when the guarding list is empty
    pnone [] _ = putStrLn " none"
    pnone _ act = act
    passoc (Assoc nk isData as kt) = text (if isData then "data" else "type") <+>
        pprint nk <+> hsep (map pprint as) <+> text "::" <+> pprint kt
-- this does not check for duplicates, use checkForDuplicateInstaces after all
-- instances have been added to do so.
addInstanceToHierarchy :: Inst -> ClassHierarchy -> ClassHierarchy
addInstanceToHierarchy inst@Inst { instHead = cntxt :=> ~(IsIn className _) } (CH r i) =
    CH r (Map.insertWith Data.List.union className [inst] i)
-- Kind inference has already been done so we don't need to check for kind
-- errors here.
-- | Convert an instance or standalone-deriving declaration into the
-- internal 'Inst' form; other declarations yield no instances.
hsInstDeclToInst :: Monad m => KindEnv -> HsDecl -> m [Inst]
hsInstDeclToInst kt (HsInstDecl sloc qType decls)
    = return [emptyInstance { instSrcLoc = sloc, instDerived = False,
        instHead = cntxt :=> IsIn className convertedArgType, instAssocs = assocs }]
    where
    (cntxt, (className, [convertedArgType])) = chToClassHead kt qType
    -- keep only associated types with an actual definition
    assocs = [ (tc,as,bs,s) | (tc,as,bs,~(Just s)) <- createInstAssocs kt decls ]
hsInstDeclToInst kt (HsDeclDeriving sloc qType)
    = return [emptyInstance { instSrcLoc = sloc, instDerived = True,
        instHead = cntxt :=> IsIn className convertedArgType }]
    where (cntxt, (className, [convertedArgType])) = chToClassHead kt qType
hsInstDeclToInst _ _ = return []
-- | Verbose tracing; the 'False &&' is a compile-time off switch.
vtrace s v | False && verbose = trace s v
vtrace s v | otherwise = v
-- | Split a class head into its context predicates and the applied
-- class name with its argument types.
chToClassHead :: KindEnv -> HsClassHead -> ([Pred],(Name,[Type]))
chToClassHead kt qt@HsClassHead { .. } =
    vtrace ("chToClassHead" <+> show qt) $
    let res = (map (hsAsstToPred kt) hsClassHeadContext,(hsClassHead,
         map (runIdentity . hsTypeToType (kiHsQualType kt (HsQualType hsClassHeadContext (HsTyTuple [])))) hsClassHeadArgs))
    in vtrace ("=" <+> show res) res
-- | Collect associated-type declarations from a class body.
createClassAssocs kt decls = [ Assoc (ctc n) False (map ct as) (ctype t) | HsTypeDecl { hsDeclName = n, hsDeclTArgs = as, hsDeclType = t } <- decls ] where
    ctc n = let nn = toName TypeConstructor n in Tycon nn (kindOf nn kt)
    ct (HsTyVar n) = let nn = toName TypeVal n in tyvar nn (kindOf nn kt)
    ct _ = error "Class.createClassAssocs: bad1."
    ctype HsTyAssoc = kindStar
    ctype _ = error "Class.createClassAssocs: bad2."
    -- ctype t =  Just $ runIdentity $ hsTypeToType kt t
-- | Collect associated-type definitions from an instance body; the
-- first type argument carries the instance head's type arguments.
createInstAssocs kt decls = [ (ctc n,map ct (czas ca),map ct as,ctype t) | HsTypeDecl { hsDeclName = n, hsDeclTArgs = (ca:as), hsDeclType = t } <- decls ] where
    ctc n = let nn = toName TypeConstructor n in Tycon nn (kindOf nn kt)
    ct (HsTyVar n) = let nn = toName TypeVal n in tyvar nn (kindOf nn kt)
    ct _ = error "Class.createInstAssocs: bad."
    czas ca = let (HsTyCon {},zas) = fromHsTypeApp ca in zas
    ctype HsTyAssoc = Nothing
    ctype t = Just $ runIdentity $ hsTypeToType kt t
-- | Unwind a type application into its head and argument list.
fromHsTypeApp t = f t [] where
    f (HsTyApp a b) rs = f a (b:rs)
    f t rs = (t,rs)
-- | Lift the method bindings of an instance (or the defaults of a
-- class) to top-level declarations with mangled names.
instanceToTopDecls :: KindEnv -> ClassHierarchy -> HsDecl -> (([HsDecl],[Assump]))
instanceToTopDecls kt ch@(CH classHierarchy _) (HsInstDecl _ qualType methods)
   = unzip $ concatMap (methodToTopDecls kt [] crecord qualType) $ methodGroups where
   methodGroups = groupEquations (filter (not . isHsPragmaProps) methods)
   (_,(className,_)) = chToClassHead kt qualType
   crecord = case Map.lookup className classHierarchy of
        Nothing -> error $ "instanceToTopDecls: could not find class " ++ show className ++ "in class hierarchy"
        Just crecord -> crecord
instanceToTopDecls kt ch@(CH classHierarchy _) (HsClassDecl _ chead methods)
   = unzip $ map (defaultMethodToTopDecls kt methodSigs chead) $ methodGroups where
   className = hsClassHead chead
   --HsQualType _ (HsTyApp (HsTyCon className) _) = qualType
   methodGroups = groupEquations (filter (\x -> isHsPatBind x || isHsFunBind x) methods)
   methodSigs = case Map.lookup (toName ClassName className) classHierarchy of
        Nothing -> error $ "defaultInstanceToTopDecls: could not find class " ++ show className ++ "in class hierarchy"
        Just sigs -> classAssumps sigs
instanceToTopDecls _ _ _ = mempty
-- Mangled names for lifted instance methods and class defaults.
instanceName n t = toName Val ("Instance@",'i':show n ++ "." ++ show t)
defaultInstanceName n = toName Val ("Instance@",'i':show n ++ ".default")
-- aliasDefaultInstanceName :: Name -> Class -> Name
-- aliasDefaultInstanceName n ca = toName Val ("Instance@",'i':show n ++ ".default."++show ca)
-- | Lift one instance method to a top-level declaration, instantiating
-- the class variable in its signature to the instance's argument type.
methodToTopDecls :: Monad m
    => KindEnv         -- ^ the kindenv
    -> [Pred]          -- ^ random extra predicates to add
    -> ClassRecord     -- ^ the class we are lifting methods from
    -> HsClassHead
    -> (Name, HsDecl)
    -> m (HsDecl,Assump)
methodToTopDecls kt preds crecord qt (methodName, methodDecls) = do
    let (cntxt,(className,[argType])) = chToClassHead kt qt
        newMethodName = instanceName methodName (getTypeHead argType)
    sigFromClass <- case [ s | (n, s) <- classAssumps crecord, n == methodName] of
        [x] -> return x
        _ -> fail $ "sigFromClass: " ++ (pprint className <+> pprint (classAssumps crecord))
            ++ " " ++ show methodName
    let instantiatedSig = newMethodSig' kt methodName (preds ++ cntxt) sigFromClass argType
        renamedMethodDecls = renameOneDecl newMethodName methodDecls
    return (renamedMethodDecls,(newMethodName, instantiatedSig))
-- | Lift a class's default method binding to a top-level declaration,
-- keeping the method's signature from the class.
defaultMethodToTopDecls :: KindEnv -> [Assump] -> HsClassHead -> (Name, HsDecl) -> (HsDecl,Assump)
defaultMethodToTopDecls kt methodSigs HsClassHead { .. } (methodName, methodDecls)
   = (renamedMethodDecls,(newMethodName,sigFromClass)) where
    newMethodName = defaultInstanceName methodName
    sigFromClass = case [ s | (n, s) <- methodSigs, n == methodName] of
        [x] -> x
        _ -> error $ "sigFromClass: " ++ show methodSigs ++ " " ++ show methodName
         -- = newMethodSig cntxt newMethodName sigFromClass argType
    renamedMethodDecls = renameOneDecl newMethodName methodDecls
{-
aliasDefaultMethodToTopDecls :: KindEnv -> [Assump] -> Class -> (Name, HsDecl) -> (HsDecl,Assump)
aliasDefaultMethodToTopDecls kt methodSigs aliasName (methodName, methodDecls)
= (renamedMethodDecls,(newMethodName,sigFromClass)) where
newMethodName = aliasDefaultInstanceName methodName aliasName
sigFromClass = case [ s | (n, s) <- methodSigs, n == methodName] of
[x] -> x
_ -> error $ "sigFromClass: " ++ show methodSigs ++ " " ++ show methodName
-- = newMethodSig cntxt newMethodName sigFromClass argType
renamedMethodDecls = renameOneDecl newMethodName methodDecls
-}
-- | Rename the defined name of a single binding (function or simple
-- pattern binding) to the given new name.
renameOneDecl :: Name -> HsDecl -> HsDecl
renameOneDecl newName (HsFunBind matches)
   = HsFunBind  (map (renameOneMatch newName) matches)
-- all pattern bindings are simple by this stage
-- (ie no compound patterns)
renameOneDecl newName (HsPatBind sloc (HsPVar patName) rhs wheres)
   = HsPatBind sloc (HsPVar (nameName newName)) rhs wheres
renameOneDecl _ _ = error "Class.renameOneDecl"
-- | Rename the function name of one equation.
renameOneMatch :: Name -> HsMatch -> HsMatch
renameOneMatch newName (HsMatch sloc oldName pats rhs wheres)
   = HsMatch sloc (nameName newName) pats rhs wheres
-- | Instantiate a class method's signature for a particular instance:
-- substitute the instance type for the class variable, swap in the new
-- context, and freshen the remaining type variables.
newMethodSig' :: KindEnv -> Name -> [Pred] -> Sigma -> Type -> Sigma
newMethodSig' kt methodName newCntxt qt' instanceType = newQualType  where
    TForAll _ ((IsIn _ classArg:restContext) :=> t) = qt'
    -- the assumption is that the context is non-empty and that
    -- the class and variable that we are interested in are at the
    -- front of the old context - the method of inserting instance types into
    -- the class hierarchy should ensure this
    --((className, classArg):restContxt) = cntxt
    -- suffix appended to type variable names to keep them unique
    foo = "_" ++ (show methodName ++ show (getTypeHead instanceType)) ++ "@@"
    --newQualType = everywhere (mkT at) $ tForAll (nub $ freeVars qt) qt
    newQualType = tForAll vs nqt where
        vs = nub $ freeVars nqt
        nqt = map (tickle f) (newCntxt ++ restContext) :=> f t
        -- f substitutes the instance type for the class variable and
        -- renames every other type variable with the 'foo' suffix
        f t | t == classArg = f instanceType
        f (TVar t) = TVar (at t)
        f (TForAll ta (ps :=> t)) = tickle f (TForAll (map at ta) (ps :=> t))
        f (TExists ta (ps :=> t)) = tickle f (TExists (map at ta) (ps :=> t))
        f t = tickle f t
        at (Tyvar n k) = tyvar (updateName (++ foo) n) k
        updateName f n = toName nt (md,f nm) where
            (nt,(md::String,nm)) = fromName n
    -- qt = (newCntxt ++ restContext) :=> t
{-
    qt = (newCntxt ++ restContext) :=> (everywhere (mkT ct) t)
    ct n | n == classArg = instanceType
    ct n = n
-}
-- collect assumptions of all class methods
--classMethodAssumps :: ClassHierarchy -> [Assump]
--classMethodAssumps hierarchy = concatMap classAssumps $ classRecords hierarchy
--------------------------------------------------------------------------------
-- | Currently the identity: class-alias scattering is disabled.
scatterAliasInstances :: ClassHierarchy -> ClassHierarchy
scatterAliasInstances = id
{-
scatterAliasInstances ch =
let cas = [cr | cr@(ClassAliasRecord {}) <- classRecords ch]
--ch `seq` liftIO $ putStrLn ("scatterAliasInstances: " ++ show cas)
instances = concatMap scatterInstancesOf cas
ret = foldr (modifyClassRecord $ \cr -> cr
{ classInsts = [],
classMethodMap = Map.fromList [(meth, cls) | cls <- classClasses cr,
(meth,_) <- classAssumps (findClassRecord ch cls)]
})
(ch `mappend` classHierarchyFromRecords instances)
(map className cas)
-- liftIO $ mapM_ print (classRecords ret)
in ret
scatterInstancesOf :: ClassRecord -> [ClassRecord]
scatterInstancesOf cr = map extract (classClasses cr)
where
extract c =
(newClassRecord c) { classInsts =
[Inst sl d ((cxt ++ [IsIn c2 xs | c2 <- classClasses cr, c2 /= c]) :=> IsIn c xs) []
| Inst sl d (cxt :=> IsIn _ xs) [] <- classInsts cr] }
-}
--------------------------------------------------------------------------------
--failSl sl m = fail $ show sl ++ ": " ++ m
-- | Build a hierarchy (with no instances) from a list of records.
classHierarchyFromRecords rs =
    CH (Map.fromList [ (className x,x)| x <- rs ]) mempty
-- | Extract the variable from a (possibly kind-annotated) type variable.
fromHsTyVar (HsTyVar v) = return v
fromHsTyVar (HsTyExpKind (Located _ t) _) = fromHsTyVar t
fromHsTyVar _ = fail "fromHsTyVar"
-- We give all instance declarations the benefit of the doubt here, assuming
-- they are correct. It is up to the typechecking pass to find any errors.
-- | Build the class hierarchy for a module: one record per class
-- declaration, plus every instance and standalone-deriving declaration.
makeClassHierarchy :: MonadWarn m
    => ClassHierarchy -> KindEnv -> [HsDecl] -> m ClassHierarchy
makeClassHierarchy (CH ch _is) kt ds = mconcat `liftM` mapM f ds where
    f HsClassDecl { .. } = do
        let qualifiedMethodAssumps = concatMap (aHsTypeSigToAssumps kt . qualifyMethod newClassContext) (filter isHsTypeSig hsDeclDecls)
            newClassContext = [HsAsst hsClassHead args]
            args = [ a | ~(Just a) <- map fromHsTyVar hsClassHeadArgs ]
        return $ classHierarchyFromRecords [ClassRecord {
            classArgs,
            classAssocs,
            classAlias = Nothing,
            className = toName ClassName hsClassHead,
            classSrcLoc = hsDeclSrcLoc,
            classSupers = [ toName ClassName x | ~(HsAsst x _) <- hsClassHeadContext],
            classAssumps = qualifiedMethodAssumps }]
        where
        --cntxt = hsClassHeadContext chead
        HsClassHead { .. } = hsDeclClassHead
        classAssocs = createClassAssocs kt hsDeclDecls
        (_,(_,classArgs')) = chToClassHead kt hsDeclClassHead
        classArgs = [ v | ~(TVar v) <- classArgs' ]
    f decl@(HsInstDecl {}) = hsInstDeclToInst kt decl >>= \insts -> do
        return $ foldl (flip addInstanceToHierarchy) mempty insts
    f decl@(HsDeclDeriving {}) = hsInstDeclToInst kt decl >>= \insts -> do
        return $ foldl (flip addInstanceToHierarchy) mempty insts
    f _ = return mempty
-- f decl@(HsClassAliasDecl {}) = trace ("makeClassHierarchy: "++show decl) $ do
-- tell [ClassAliasRecord { className = toName ClassName (hsDeclName decl),
-- classArgs = [v | ~(TVar v) <- map (runIdentity . hsTypeToType kt) (hsDeclTypeArgs decl)],
-- classSrcLoc = hsDeclSrcLoc decl,
-- classSupers = [toName ClassName n | HsAsst n _ <- (hsDeclContext decl)],
-- classClasses = [toName ClassName n | HsAsst n _ <- (hsDeclClasses decl)],
-- classInsts = [],
-- classMethodMap = Map.empty
-- }]
-- | Warn about cycles in the superclass relation and return the graph
-- of class records for further use.
checkForCircularDeps :: MonadWarn m
    => ClassHierarchy -> m (Graph ClassRecord)
checkForCircularDeps CH { .. } = do
    let g = newGraph (Map.elems chRecordMap) className classSupers
        s ClassRecord { .. } = show classSrcLoc ++ ": class " ++ show classSupers ++ " => " ++ show className
        -- a Right SCC with members is a genuine cycle
        f (Right (c:cs)) = do
            warn (classSrcLoc c) InvalidDecl $ "Superclasses form cycle:" ++ unlines (map s (c:cs))
        f _ = return ()
    mapM_ f (scc g)
    return g
-- | Warn when a (class, head type) pair has more than one instance,
-- combining imported and locally defined instances. All-derived
-- duplicates are tolerated.
checkForDuplicateInstaces :: MonadWarn m
    => ClassHierarchy    -- ^ imported class hierarchy
    -> ClassHierarchy    -- ^ locally defined hierarchy
    -> m ClassHierarchy  -- ^ possibly simplified local hierarchy
checkForDuplicateInstaces iCh (CH ch is) = mapM_ f (Map.toList is) >> return (CH ch is) where
    f (className,is) = do
        let is' = findClassInsts iCh className ++ is
            sgu = sortGroupUnderFG fst snd [ ((cn,getTypeHead tt), i) |
                i@Inst { instSrcLoc = sl, instHead = _ :=> IsIn cn tt } <- is' ]
        mapM_ g sgu
    g (_,[_]) = return ()
    g (_,sls) | all instDerived sls = return ()
    g ((ch,th),sls) = warn (instSrcLoc $ head sls) DuplicateInstances $
        printf "instance (%s (%s ..)) defined multiple times: %s"
            (show ch) (show th) (show $ map instSrcLoc sls)
-- | Pair each list with the cumulative length up to and including it,
-- starting from the given offset.
accLen :: Int -> [[a]] -> [(Int, [a])]
accLen _ [] = []
accLen width (x:xs) = (newWidth, x) : accLen newWidth xs
  where newWidth = width + length x

-- | Concatenate strings into lines of at most @width@ characters. A
-- single string longer than @width@ still occupies its own line.
groupStringsToWidth :: Int -> [String] -> [String]
groupStringsToWidth width ss = go (accLen 0 ss) where
    go :: [(Int, String)] -> [String]
    go [] = []
    go xs = concatMap snd headSegments : go (accLen 0 (map snd rest))
      where
        (headSegments, rest) = case span ((<= width) . fst) xs of
            -- first string alone exceeds the width: emit it on its own
            -- line rather than looping forever (no head/tail needed)
            ([], s:more) -> ([s], more)
            split        -> split

-- | Render each element with @f@, join with @sep@, and wrap the result
-- into lines no wider than @width@; empty input yields the empty string.
showListAndSepInWidth :: (a -> String) -> Int -> String -> [a] -> String
showListAndSepInWidth _ _ _ [] = []
showListAndSepInWidth f width sep things =
    unlines (groupStringsToWidth width pieces)
  where
    -- every element but the last carries the trailing separator
    pieces = map ((++ sep) . f) (init things) ++ [f (last things)]
-- | Render any pretty-printable value to a String.
pretty  :: PPrint Doc a => a -> String
pretty  = render . pprint
{-
nameOfTyCon :: NameType -> HsType -> Name
nameOfTyCon t (HsTyCon n) = toName t n
nameOfTyCon t (HsTyTuple xs) = nameTuple t (length xs)
nameOfTyCon t (HsTyFun _ _) = tc_Arrow
nameOfTyCon _ t = error $ "nameOfTyCon: " ++ show t
-}
-- | Pair each binding with the name it defines, dropping type
-- declarations. (Does not actually merge equations for one name.)
groupEquations :: [HsDecl] -> [(Name, HsDecl)]
groupEquations [] = []
groupEquations (HsTypeDecl {}:ds) = groupEquations ds
groupEquations (d:ds) = (getDeclName d, d) : groupEquations ds
-- | Classes that can appear in a deriving clause.
derivableClasses :: [Name]
derivableClasses = [
    class_Eq,
    class_Ord,
    class_Enum,
    class_Bounded,
    class_Show,
    class_Read,
    class_Ix
    ]
-- can be automatically derived when
-- the class is an enumeration
enumDerivableClasses :: [Name]
enumDerivableClasses = [
    class_Eq,
    class_Ord,
    class_Enum,
    class_Ix
    ]
-- classes that cannot be derived by the generalized
-- newtype deriving mechanism.
noNewtypeDerivable :: [Name]
noNewtypeDerivable = [
    class_Show,
    class_Read
    ]
-- classes that behave identically to their component when they have a single
-- unary constructor but are not newtypes
{-
unaryPassDerivable :: [Name]
unaryPassDerivable = [
class_Ix,
class_Eq,
class_Ord,
class_Bounded
]
-}
{-* Generated by DrIFT : Look, but Don't Touch. *-}
-- DrIFT-generated Binary instance; field order matches the Inst constructor.
instance Data.Binary.Binary Inst where
    put (Inst aa ab ac ad) = do
            Data.Binary.put aa
            Data.Binary.put ab
            Data.Binary.put ac
            Data.Binary.put ad
    get = do
    aa <- get
    ab <- get
    ac <- get
    ad <- get
    return (Inst aa ab ac ad)
-- DrIFT-generated Binary instance for AssociatedType.
instance Data.Binary.Binary AssociatedType where
    put (Assoc aa ab ac ad) = do
            Data.Binary.put aa
            Data.Binary.put ab
            Data.Binary.put ac
            Data.Binary.put ad
    get = do
    aa <- get
    ab <- get
    ac <- get
    ad <- get
    return (Assoc aa ab ac ad)
-- DrIFT-generated Binary instance for ClassRecord.
instance Data.Binary.Binary ClassRecord where
    put (ClassRecord aa ab ac ad ae af ag) = do
            Data.Binary.put aa
            Data.Binary.put ab
            Data.Binary.put ac
            Data.Binary.put ad
            Data.Binary.put ae
            Data.Binary.put af
            Data.Binary.put ag
    get = do
    aa <- get
    ab <- get
    ac <- get
    ad <- get
    ae <- get
    af <- get
    ag <- get
    return (ClassRecord aa ab ac ad ae af ag)
-- Imported from other files :-
| m-alvarez/jhc | drift_processed/FrontEnd/Class.hs | mit | 24,974 | 211 | 22 | 5,994 | 6,956 | 3,689 | 3,267 | -1 | -1 |
import Test.QuickCheck
-- | A three-valued example type for demonstrating Arbitrary.
data Color = Red | Green | Blue deriving Show

instance Arbitrary Color where
  -- 'elements' picks uniformly from the list, matching the original
  -- choose-(0,2) distribution while staying total: the original 'case'
  -- had no fallback branch and was non-exhaustive.
  arbitrary = elements [Red, Green, Blue]

-- | Draw a small batch of random colors at increasing sizes.
example1 :: IO [Color]
example1 = sample' arbitrary
-- [Red,Green,Red,Blue,Red,Red,Red,Blue,Green,Red,Red]
| riwsky/wiwinwlh | src/arbitrary.hs | mit | 331 | 0 | 11 | 81 | 106 | 56 | 50 | 11 | 1 |
{-# LANGUAGE OverloadedStrings #-}
import Control.Monad.State
import Data.Text
import Pipes ((~>))
import Pipes.Core hiding (Client)
import Network.Discord.Types
import Network.Discord.Rest
import Network.Discord.Gateway
-- | Minimal bot identity; only supplies the auth token.
data LogClient = LogClient
instance Client LogClient where
  getAuth _ = Bot "TOKEN"
-- | Connect to the Discord gateway and mirror every event (and every
-- non-bot message) into a fixed channel.
main :: IO ()
main = do
  gateway <- getGateway
  runWebsocket gateway LogClient $ do
    DiscordState {getWebSocket=ws} <- get
    (eventCore ~> \event -> case event of
      Ready (Init v u _ _ _) -> liftIO . putStrLn $ "Connected to gateway v"++show v
        ++ " as user " ++ show u
      -- ignore messages authored by bots to avoid feedback loops
      MessageCreate msg@(Message {messageAuthor = User{userIsBot = bot}}) ->
        unless bot $
          fetch'
            $ CreateMessage 188134500411244545 (pack $ show msg) Nothing
      -- every other event is logged to the same channel
      ev -> fetch'
        $ CreateMessage 188134500411244545 (pack $ show ev) Nothing
      ) ws
| jano017/Discord.hs | examples/logbot.hs | mit | 884 | 1 | 25 | 206 | 290 | 149 | 141 | 26 | 3 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE TemplateHaskell #-}
module Db.Internal where
import BasePrelude
import Control.Error (headMay)
import Control.Lens (makeLenses, makePrisms, view)
import Control.Monad.Except (ExceptT, MonadError, runExceptT)
import Control.Monad.Reader (MonadReader, ReaderT, runReaderT)
import Control.Monad.Trans (MonadIO)
import Data.Profunctor.Product.Default (Default)
import Database.PostgreSQL.Simple (Connection, QueryError, SqlError, close)
import Opaleye (Column, PGBool, Query, QueryRunner,
Table, Unpackspec, runDelete, runInsert,
runInsertReturning, runQuery, runUpdate)
import Utils (wrapExceptions)
-- | Errors a database action can raise, wrapping the two
-- postgresql-simple exception types.
data DbError
  = DbQueryError QueryError
  | DbSqlError SqlError
  deriving (Show)
makePrisms ''DbError
-- | Environment for database actions; currently just the connection.
data DbEnv = DbEnv
  { _dbEnvConnection :: Connection
  }
makeLenses ''DbEnv
-- | The database monad: reader access to 'DbEnv', short-circuiting
-- 'DbError's, over IO.
newtype Db a = Db
  { unDb :: ExceptT DbError (ReaderT DbEnv IO) a
  } deriving
  ( Functor
  , Applicative
  , Monad
  , MonadReader DbEnv
  , MonadError DbError
  , MonadIO
  )
-- | Execute a 'Db' action against the given environment, returning
-- either the first 'DbError' raised or the result.
runDb :: DbEnv -> Db a -> IO (Either DbError a)
runDb env action = runReaderT (runExceptT (unDb action)) env

-- | Release the connection held by a 'DbEnv'.
closeDbEnv :: DbEnv -> IO ()
closeDbEnv env = close (view dbEnvConnection env)
-- | Run an Opaleye query, returning all rows.
liftQuery
  :: ( Default QueryRunner columnsW haskells )
  => Query columnsW
  -> Db [haskells]
liftQuery q = withConnection (`runQuery` q)
-- | Run a query and keep only the first row, if any.
liftQueryFirst
  :: ( Default QueryRunner columnsW haskells )
  => Query columnsW
  -> Db (Maybe haskells)
liftQueryFirst = fmap headMay . liftQuery
-- | Insert one row, returning the affected-row count.
liftInsert
  :: Table columnsW columnsR
  -> columnsW
  -> Db Int64
liftInsert t c = withConnection (\ con -> runInsert con t c)
-- | Insert one row and return the requested projection of the
-- inserted row(s).
liftInsertReturning
  :: ( Default QueryRunner returned haskells
     , Default Unpackspec returned returned
     )
  => Table columnsW columnsR
  -> (columnsR -> returned)
  -> columnsW
  -> Db [haskells]
liftInsertReturning t f c = withConnection (\ con -> runInsertReturning con t c f)
-- | Like 'liftInsertReturning' but keeps only the first returned row.
-- NOTE(review): partial — 'head' errors if the insert returns no rows;
-- callers rely on RETURNING always yielding one row here.
liftInsertReturningFirst
  :: ( Default QueryRunner returned haskells
     , Default Unpackspec returned returned
     )
  => Table columnsW columnsR
  -> (columnsR -> returned)
  -> columnsW
  -> Db haskells
liftInsertReturningFirst t f = fmap head . liftInsertReturning t f
-- | Update rows matching the predicate, returning the affected count.
liftUpdate
  :: Table columnsW columnsR
  -> (columnsR -> columnsW)
  -> (columnsR -> Column PGBool)
  -> Db Int64
liftUpdate t f w = withConnection (\ con -> runUpdate con t f w)
-- | Delete rows matching the predicate, returning the affected count.
liftDelete
  :: Table columnsW columnsR
  -> (columnsR -> Column PGBool)
  -> Db Int64
liftDelete t w = withConnection (\ con -> runDelete con t w)
-- | Run an IO action with the environment's connection, converting the
-- postgresql-simple exceptions into 'DbError's.
withConnection :: (Connection -> IO a) -> Db a
withConnection f = do
  c <- view dbEnvConnection
  wrapExceptions (f c)
    [ Handler (pure . DbSqlError)
    , Handler (pure . DbQueryError)
    ]
| benkolera/talk-stacking-your-monads | code/src/Db/Internal.hs | mit | 3,030 | 0 | 11 | 734 | 872 | 465 | 407 | 87 | 1 |
{-# LANGUAGE OverloadedStrings #-}
module YesodCoreTest.InternalRequest (internalRequestTest) where
import Data.List (nub)
import Data.Maybe (isJust, isNothing)
import System.Random (StdGen, mkStdGen)

import Network.Wai as W
import Network.Wai.Test

import Yesod.Internal.TestApi (randomString, parseWaiRequest')
import Yesod.Request (Request (..))

import Test.Hspec
-- | Sanity checks for the random-string generator used for session
-- tokens.
randomStringSpecs :: Spec
randomStringSpecs = describe "Yesod.Internal.Request.randomString" $ do
  it "looks reasonably random" looksRandom
  it "does not repeat itself" $ noRepeat 10 100
-- NOTE: this testcase may break on other systems/architectures if
-- mkStdGen is not identical everywhere (is it?).
-- | Golden test: a fixed seed must reproduce this exact 20-character
-- string (see the portability caveat in the comment above).
looksRandom :: Bool
looksRandom = randomString 20 (mkStdGen 0) == "VH9SkhtptqPs6GqtofVg"
-- | Distinct seeds @1..n@ must produce @n@ distinct strings of the
-- given length.
noRepeat :: Int -> Int -> Bool
noRepeat len n = length distinct == n where
  distinct = nub [ randomString len (mkStdGen seed) | seed <- [1 .. n] ]
-- For convenience instead of "(undefined :: StdGen)".
-- | Placeholder generator for call sites that never force it; the
-- error message points back here if one ever does.
g :: StdGen
g = error "test/YesodCoreTest/InternalRequest.g"
-- | Behaviour of the request CSRF token under the four relevant
-- session configurations.
tokenSpecs :: Spec
tokenSpecs = describe "Yesod.Internal.Request.parseWaiRequest (reqToken)" $ do
  it "is Nothing if sessions are disabled" noDisabledToken
  it "ignores pre-existing token if sessions are disabled" ignoreDisabledToken
  it "uses preexisting token in session" useOldToken
  it "generates a new token for sessions without token" generateToken
-- | With sessions disabled and no session data, no token is created.
noDisabledToken :: Bool
noDisabledToken = isNothing (reqToken r) where
  r = parseWaiRequest' defaultRequest [] False 0 g
-- | A pre-existing "_TOKEN" session entry is ignored when sessions
-- are disabled.
ignoreDisabledToken :: Bool
ignoreDisabledToken = isNothing (reqToken r) where
  r = parseWaiRequest' defaultRequest [("_TOKEN", "old")] False 0 g
-- | A "_TOKEN" value already stored in the session is reused verbatim
-- rather than regenerated.
useOldToken :: Bool
useOldToken = maybe False ("old" ==) (reqToken r) where
  r = parseWaiRequest' defaultRequest [("_TOKEN", "old")] True 0 g
-- | With sessions enabled, a token is always present in the parsed
-- request.  (The session key here is "_TOKEN", so no token is
-- supplied and one must be generated.)
generateToken :: Bool
generateToken = isJust (reqToken r) where
  r = parseWaiRequest' defaultRequest [("_TOKEN", "old")] True 0 g
-- | Language-negotiation behaviour: each source of a language
-- preference, plus their relative priority.
langSpecs :: Spec
langSpecs = describe "Yesod.Internal.Request.parseWaiRequest (reqLangs)" $ do
  it "respects Accept-Language" respectAcceptLangs
  it "respects sessions" respectSessionLang
  it "respects cookies" respectCookieLang
  it "respects queries" respectQueryLang
  it "prioritizes correctly" prioritizeLangs
-- | Accept-Language entries are kept in order, with the bare primary
-- tag ("en") appended after its regioned form ("en-US").
respectAcceptLangs :: Bool
respectAcceptLangs = reqLangs r == ["en-US", "es", "en"] where
  r = parseWaiRequest' defaultRequest
        { requestHeaders = [("Accept-Language", "en-US, es")] } [] False 0 g
-- | A "_LANG" entry in the session sets the language list.
respectSessionLang :: Bool
respectSessionLang = reqLangs r == ["en"] where
  r = parseWaiRequest' defaultRequest [("_LANG", "en")] False 0 g
-- | A "_LANG" cookie sets the language list.
respectCookieLang :: Bool
respectCookieLang = reqLangs r == ["en"] where
  r = parseWaiRequest' defaultRequest
        { requestHeaders = [("Cookie", "_LANG=en")]
        } [] False 0 g
-- | A "_LANG" query parameter sets the language list (primary tag
-- again appended after the regioned form).
respectQueryLang :: Bool
respectQueryLang = reqLangs r == ["en-US", "en"] where
  r = parseWaiRequest' defaultRequest { queryString = [("_LANG", Just "en-US")] } [] False 0 g
-- | Priority when every source is present:
-- query > cookie > session > Accept-Language.
prioritizeLangs :: Bool
prioritizeLangs = reqLangs r == ["en-QUERY", "en-COOKIE", "en-SESSION", "en", "es"] where
  r = parseWaiRequest' defaultRequest
        { requestHeaders = [ ("Accept-Language", "en, es")
                           , ("Cookie", "_LANG=en-COOKIE")
                           ]
        , queryString = [("_LANG", Just "en-QUERY")]
        } [("_LANG", "en-SESSION")] False 0 g
-- | Top-level spec aggregating every internal-request test group.
internalRequestTest :: Spec
internalRequestTest = describe "Test.InternalRequestTest" $
  sequence_ [randomStringSpecs, tokenSpecs, langSpecs]
| piyush-kurur/yesod | yesod-core/test/YesodCoreTest/InternalRequest.hs | mit | 3,455 | 0 | 12 | 623 | 856 | 462 | 394 | 71 | 1 |
import Suck
import Data.Map
import Test.HUnit
-- Pipeline helpers: run successive stages of the Suck model builder
-- over the extracted <div id="body"> contents of an HTML string.
bodyWords = words . extractBody
htmlToPrimModel = toPrimModel . extractBody
htmlToFreqModel = toFreqModel . htmlToPrimModel
htmlToProcessedModel = toProcessedModel . htmlToFreqModel
-- HUnit entry point.  Each "expected ~=? actual" case pairs one of
-- the fixtures below with the pipeline stage it exercises; testString4
-- checks that nested divs around the body are handled like testString3.
main = runTestTT $ TestList [
    testBody1 ~=? bodyWords testString1
  , testBody2 ~=? bodyWords testString2
  , testBody3 ~=? bodyWords testString3
  , testModel1 ~=? htmlToPrimModel testString1
  , testModel2 ~=? htmlToPrimModel testString2
  , testFreqModel1 ~=? htmlToFreqModel testString1
  , testFreqModel2 ~=? htmlToFreqModel testString2
  , testProcessedModel2 ~=? htmlToProcessedModel testString2
  , testModel1Double ~=? mergePrimModels [testModel1, testModel1]
  , testBody3 ~=? bodyWords testString4]
-- Fixture 1: three-word body and its expected models at each stage.
testString1 = unlines [
    "<div id=\"body\">"
  , "<p>Hello World Today</p>"
  , "</div>"]
testBody1 = ["Hello", "World", "Today"]
-- Prim model: bigram -> following words.
testModel1 = fromList [(("Hello", "World"), ["Today"])]
-- Expected result of merging testModel1 with itself.
testModel1Double = fromList [(("Hello", "World"), ["Today", "Today"])]
-- Freq model: bigram -> (count, word) pairs.
testFreqModel1 = fromList [(("Hello", "World"), [(1, "Today")])]
-- Fixture 2: four-word body, so two bigrams appear.
testString2 = unlines [
    "<div id=\"body\">"
  , "<p>Hello World Today I</p>"
  , "</div>"]
testBody2 = ["Hello", "World", "Today", "I"]
testModel2 = fromList [
    (("Hello", "World"), ["Today"])
  , (("World", "Today"), ["I"])]
testFreqModel2 = fromList [
    (("Hello", "World"), [(1, "Today")])
  , (("World", "Today"), [(1, "I")])]
-- Processed model as produced from testFreqModel2.
testProcessedModel2 = [("World", [(1, 1)]), ("Today", [])]
-- Fixture 3: realistic page — only the <div id="body"> contents
-- should be extracted; the contrib/abstract divs must be skipped.
testString3 = unlines [
    "<div class=\"contrib\">"
  , "<a href=\"/\">Author</a>"
  , "<div class=\"abstract\">"
  , "<strong>Abstract</strong><p> if not the unconscious. </p>"
  , "</div> "
  , "<div id=\"body\">"
  , "<blockquote class=\"disp-quote\">"
  , "<p>Human soul, let us see.</p>"
  , "</div>"]
testBody3 = words "Human soul, let us see."
-- Fixture 4: same expected body as fixture 3, but with the body div
-- wrapped in an extra anonymous <div>.
testString4 = unlines [
    "<div class=\"contrib\">"
  , "<a href=\"/\">Author</a>"
  , "<div class=\"abstract\">"
  , "<strong>Abstract</strong><p> if not the unconscious. </p>"
  , "</div> "
  , "<div>"
  , "<div id=\"body\">"
  , "<blockquote class=\"disp-quote\">"
  , "<p>Human soul, let us see.</p>"
  , "</div>"
  , "</div>"]
| pscollins/cmsc-22311-lab-2 | test_suck.hs | gpl-2.0 | 2,549 | 1 | 10 | 743 | 541 | 320 | 221 | 61 | 1 |
{-# LANGUAGE TemplateHaskell, DeriveDataTypeable #-}
{-# LANGUAGE DeriveDataTypeable #-}
module PL.Struktur where
import PL.Signatur
import PL.Util
import Autolib.TES.Identifier
import Autolib.FiniteMap
import Autolib.Size
import Autolib.Set
import Autolib.Reader
import Autolib.ToDoc
import Autolib.Reporter
import Data.Typeable
-- | A k-ary relation over universe elements: the set of argument
-- tuples for which the predicate holds.
data Predicate u = Predicate { unPredicate :: Set [u] }
    deriving ( Eq, Ord, Typeable )
-- Size of a predicate = number of tuples in the relation.
instance Size ( Predicate u ) where
    size ( Predicate s ) = cardinality s
instance ToDoc u => ToDoc ( Predicate u ) where
    toDoc ( Predicate ts ) = as_set ts
-- | Thin wrapper giving tuples their own printing/parsing instances.
data Tuple a = Tuple [a]
instance ToDoc a => ToDoc ( Tuple a ) where
    toDoc ( Tuple xs ) = parens $ Autolib.ToDoc.sepBy comma $ map toDoc xs
-- Render a set of tuples as "{ (..), (..), .. }".
as_set ts = braces $ Autolib.ToDoc.sepBy comma $ do
    t <- setToList $ ts
    return $ toDoc $ Tuple t
instance ( Ord u, Reader u ) => Reader ( Predicate u ) where
    reader = fmap Predicate from_set
-- | A k-ary function over universe elements, as a finite map from
-- argument tuples to results.
data Function u = Function { unFunction :: FiniteMap [u] u }
    deriving ( Eq, Ord, Typeable )
-- Size of a function = number of entries in its graph.
instance Size ( Function u ) where
    size ( Function f ) = sizeFM f
-- Print the graph as a set of (arg.., result) tuples.
instance ( Ord u, ToDoc u ) => ToDoc ( Function u ) where
    toDoc ( Function fm ) = as_set $ mkSet $ do
        ( xs, y ) <- fmToList fm
        return $ xs ++ [y]
-- Parse the graph back: the last tuple component is the result.
-- NOTE(review): 'init'/'last' are partial — a parsed empty tuple "()"
-- would crash here; confirm the input format excludes that case.
instance ( Ord u, Reader u ) => Reader ( Function u ) where
    reader = do
        tuples <- from_set
        return $ Function $ listToFM $ do
            tu <- setToList $ tuples
            return ( init tu, last tu )
-- | Parse "{ t1, t2, .. }" where each t is a tuple of elements.
from_set :: ( Ord u, Reader u ) => Parser ( Set [u] )
from_set = fmap mkSet
    $ my_braces $ ( `Autolib.Reader.sepBy` my_comma )
    $ tuple
-- | Parse "(x, y, ..)" or a single bare element as a one-tuple.
tuple :: Reader u => Parser [u]
tuple = do my_parens $ ( `Autolib.Reader.sepBy` my_comma ) $ reader
    <|> do x <- reader ; return [x]
-- | A first-order structure: a universe plus interpretations for the
-- relation and function symbols of a signature.
data Ord u => Struktur u =
    Struktur { universum :: Set u
             , predicates :: FiniteMap Identifier ( Predicate u )
             , functions :: FiniteMap Identifier ( Function u )
             }
    deriving ( Eq, Ord, Typeable )
-- Size = universe cardinality plus sizes of all interpretations.
instance Ord u => Size ( Struktur u ) where
    size s = cardinality ( universum s )
           + sum ( do ( r, rr ) <- fmToList $ predicates s ; return $ size rr )
           + sum ( do ( f, ff ) <- fmToList $ functions s ; return $ size ff )
-- | Check that the structure fits a signature: each function and
-- relation symbol is interpreted, with correct arity, no extra
-- symbols, and only universe elements are used.  (Messages in the
-- rejections below are German user-facing text and stay as-is.)
instance ( ToDoc u, Ord u ) => Signed ( Struktur u ) where
    check sig s = do
        let dom = universum s
        checkit "Funktionssymbol"
            ( keysFM . unFunction ) ( funktionen sig ) ( functions s ) dom
        checkit "Relationssymbol"
            ( setToList . unPredicate ) ( relationen sig ) ( functions s ) dom
checkit tag get_args arities values dom = do
    required_symbols_are_present tag get_args arities values
    no_additional_symbols tag arities values
    check_domain_for tag dom values
-- For every interpreted symbol, verify it only mentions universe
-- elements; 'msg' is the rejection header shown on failure.
check_domain_for tag dom values = sequence_ $ do
    ( k, v ) <- fmToList values
    let msg = vcat
            [ text "Interpretation für" <+> text tag <+> toDoc k
            , text "verwendet Elemente außerhalb des Universums:"
            ]
    return $ check_domain msg dom v
-- | Interpretations whose mentioned elements can be checked against a
-- universe; rejects with the given header 'Doc' on violation.
class Check_Domain con where
    check_domain :: ( Ord u, ToDoc u )
        => Doc -> Set u -> con u
        -> Reporter ()
-- A function graph entry is ok if its result and all arguments lie in
-- the universe.
instance Check_Domain Function where
    check_domain tag dom fun = sequence_ $ do
        ( ks, v ) <- fmToList $ unFunction fun
        let ok = all ( `elementOf` dom ) $ v : ks
        return $ when ( not ok ) $ reject $ vcat
            [ tag
            , nest 4 $ toDoc ( Tuple $ ks ++ [v] )
            ]
-- A relation tuple is ok if all its components lie in the universe.
instance Check_Domain Predicate where
    check_domain tag dom pred = sequence_ $ do
        ks <- setToList $ unPredicate pred
        let ok = all ( `elementOf` dom ) ks
        return $ when ( not ok ) $ reject $ vcat
            [ tag
            , nest 4 $ toDoc ( Tuple ks )
            ]
-- Reject any interpreted symbol that does not occur in the signature.
no_additional_symbols tag arities values = sequence_ $ do
    ( f, _ ) <- fmToList values
    return $ case lookupFM arities f of
        Just _ -> return ()
        Nothing -> reject $ hsep [ text tag, toDoc f, text "ist nicht in Signatur" ]
-- Every signature symbol must be interpreted, and every tuple in its
-- interpretation must have the declared arity; 'get_args' extracts
-- the argument lists from an interpretation.
required_symbols_are_present tag get_args arities values = sequence_ $ do
    ( f, arity ) <- fmToList $ arities
    return $ do
        this <- find_or_complain tag values f
        sequence_ $ do
            arg <- get_args this
            return $ do
                when ( length arg /= arity ) $ reject $ vcat
                    [ text "Interpretation für" <+> text tag <+> toDoc f
                    , text "Falsche Stelligkeit für" <+> toDoc arg
                    ]
-- | The empty structure over a signature: the given universe, with
-- every relation interpreted as the empty set and every function as
-- the empty (nowhere-defined) map.
empty :: Ord u
      => Signatur
      -> Set u
      -> Struktur u
empty sig uni =
    Struktur { universum = uni
             , predicates = listToFM $ do
                   ( p, a ) <- fmToList $ relationen sig
                   return ( p, Predicate emptySet )
             , functions = listToFM $ do
                   ( f, a ) <- fmToList $ funktionen sig
                   return ( f, Function emptyFM )
             }
-- Template Haskell: derive Reader and ToDoc instances for 'Struktur'.
$(derives [makeReader, makeToDoc] [''Struktur])
-- local variables:
-- mode: haskell
-- end:
| Erdwolf/autotool-bonn | src/PL/Struktur.hs | gpl-2.0 | 4,954 | 170 | 13 | 1,443 | 1,729 | 923 | 806 | 122 | 2 |
module LexML.Linker.Municipios where
import Data.Char
import qualified Data.Map as M
import qualified Data.Set as S
import Data.Map ( (!) )
import LexML.Linker.ParserBase
import Control.Monad
import Control.Monad.Trans
import LexML.URN.Atalhos
import LexML.Linker.LexerPrim
-- | One word of a (possibly multi-word) municipality name.
type ComponenteNome = String
-- | A municipality name split into its words, e.g. ["bom","jesus"].
type NomeComposto = [ComponenteNome]
-- | Two-letter lowercase Brazilian state abbreviation, e.g. "sp".
type AbreviacaoEstado = String
lista1 :: [(NomeComposto,AbreviacaoEstado)]
lista1 = [
(["abadia","de","goiás"],"go")
, (["abadia","dos","dourados"],"mg")
, (["abadiânia"],"go")
, (["abaeté"],"mg")
, (["abaetetuba"],"pa")
, (["abaiara"],"ce")
, (["abaíra"],"ba")
, (["abaré"],"ba")
, (["abatiá"],"pr")
, (["abdon","batista"],"sc")
, (["abel","figueiredo"],"pa")
, (["abelardo","luz"],"sc")
, (["abre","campo"],"mg")
, (["abreu","e","lima"],"pe")
, (["abreulândia"],"to")
, (["acaiaca"],"mg")
, (["açailândia"],"ma")
, (["acajutiba"],"ba")
, (["acará"],"pa")
, (["acarapé"],"ce")
, (["acaraú"],"ce")
, (["acari"],"rn")
, (["acauã"],"pi")
, (["aceguá"],"rs")
, (["acopiara"],"ce")
, (["acorizal"],"mt")
, (["acrelândia"],"ac")
, (["acreúna"],"go")
, (["açucena"],"mg")
, (["açu"],"rn")
, (["adamantina"],"sp")
, (["adelândia"],"go")
, (["adolfo"],"sp")
, (["adrianópolis"],"pr")
, (["adustina"],"ba")
, (["afogados","da","ingazeira"],"pe")
, (["afonso","bezerra"],"rn")
, (["afonso","cláudio"],"es")
, (["afonso","cunha"],"ma")
, (["afrânio"],"pe")
, (["afuá"],"pa")
, (["agrestina"],"pe")
, (["agricolândia"],"pi")
, (["agrolândia"],"sc")
, (["agronômica"],"sc")
, (["água","azul","do","norte"],"pa")
, (["água","boa"],"mg")
, (["água","boa"],"mt")
, (["água","branca"],"al")
, (["água","branca"],"pb")
, (["água","branca"],"pi")
, (["água","clara"],"ms")
, (["água","comprida"],"mg")
, (["água","doce","do","norte"],"es")
, (["água","doce"],"sc")
, (["água","doce","do","maranhão"],"ma")
, (["água","fria"],"ba")
, (["água","fria","de","goiás"],"go")
, (["aguaí"],"sp")
, (["água","limpa"],"go")
, (["aguanil"],"mg")
, (["água","nova"],"rn")
, (["água","preta"],"pe")
, (["água","santa"],"rs")
, (["águas","belas"],"pe")
, (["águas","da","prata"],"sp")
, (["águas","de","chapecó"],"sc")
, (["águas","de","lindóia"],"sp")
, (["águas","de","santa","bárbara"],"sp")
, (["águas","de","são","pedro"],"sp")
, (["águas","formosas"],"mg")
, (["águas","frias"],"sc")
, (["águas","lindas"],"go")
, (["águas","mornas"],"sc")
, (["águas","vermelhas"],"mg")
, (["agudo"],"rs")
, (["agudos"],"sp")
, (["agudos","do","sul"],"pr")
, (["águia","branca"],"es")
, (["aguiar"],"pb")
, (["aguiarnópolis"],"to")
, (["aimorés"],"mg")
, (["aiquara"],"ba")
, (["aiuaba"],"ce")
, (["aiuruoca"],"mg")
, (["ajuricaba"],"rs")
, (["alagoa"],"mg")
, (["alagoa","grande"],"pb")
, (["alagoa","nova"],"pb")
, (["alagoinha"],"pb")
, (["alagoinha"],"pe")
, (["alagoinha"],"pi")
, (["alagoinhas"],"ba")
, (["alambari"],"sp")
, (["albertina"],"mg")
, (["alcântara"],"ma")
, (["alcântaras"],"ce")
, (["alcantil"],"pb")
, (["alcinópolis"],"ms")
, (["alcobaça"],"ba")
, (["aldeias","altas"],"ma")
, (["alecrim"],"rs")
, (["alegre"],"es")
, (["alegrete"],"rs")
, (["alegrete","do","piauí"],"pi")
, (["alegria"],"rs")
, (["além","paraíba"],"mg")
, (["alenquer"],"pa")
, (["alexandria"],"rn")
, (["alexânia"],"go")
, (["alfenas"],"mg")
, (["alfredo","chaves"],"es")
, (["alfredo","marcondes"],"sp")
, (["alfredo","vasconcelos"],"mg")
, (["alfredo","wagner"],"sc")
, (["algodão","de","jandaíra"],"pb")
, (["alhandra"],"pb")
, (["aliança"],"pe")
, (["aliança","do","tocantins"],"to")
, (["almadina"],"ba")
, (["almas"],"to")
, (["almenara"],"mg")
, (["almeirim"],"pa")
, (["almino","afonso"],"rn")
, (["almirante","tamandaré"],"pr")
, (["almirante","tamandaré","do","sul"],"rs")
, (["aloândia"],"go")
, (["alpercata"],"mg")
, (["alpestre"],"rs")
, (["alpinópolis"],"mg")
, (["alta","floresta"],"mt")
, (["alta","floresta","d'oeste"],"ro")
, (["altair"],"sp")
, (["altamira"],"pa")
, (["altamira","do","maranhão"],"ma")
, (["altamira","do","paraná"],"pr")
, (["altaneira"],"ce")
, (["alterosa"],"mg")
, (["altinho"],"pe")
, (["altinópolis"],"sp")
, (["alto","alegre","do","maranhão"],"ma")
, (["alto","alegre","do","pindaré"],"ma")
, (["alto","alegre","dos","parecis"],"ro")
, (["alto","alegre"],"rr")
, (["alto","alegre"],"rs")
, (["alto","alegre"],"sp")
, (["alto","araguaia"],"mt")
, (["alto","bela","vista"],"sc")
, (["alto","caparaó"],"mg")
, (["alto","da","boa","vista"],"mt")
, (["alto","do","rodrigues"],"rn")
, (["alto","feliz"],"rs")
, (["alto","garças"],"mt")
, (["alto","horizonte"],"go")
, (["alto","jequitibá"],"mg")
, (["alto","longá"],"pi")
, (["altônia"],"pr")
, (["alto","paraguai"],"mt")
, (["alto","paraíso"],"pr")
, (["alto","paraíso"],"ro")
, (["alto","paraíso","de","goiás"],"go")
, (["alto","paraná"],"pr")
, (["alto","parnaíba"],"ma")
, (["alto","piquiri"],"pr")
, (["alto","rio","doce"],"mg")
, (["alto","rio","novo"],"es")
, (["alto","santo"],"ce")
, (["altos"],"pi")
, (["alto","taquari"],"mt")
, (["alumínio"],"sp")
, (["alvarães"],"am")
, (["alvarenga"],"mg")
, (["álvares","florence"],"sp")
, (["álvares","machado"],"sp")
, (["álvaro","de","carvalho"],"sp")
, (["alvinlândia"],"sp")
, (["alvinópolis"],"mg")
, (["alvorada"],"rs")
, (["alvorada"],"to")
, (["alvorada","de","minas"],"mg")
, (["alvorada","d'oeste"],"ro")
, (["alvorada","do","gurguéia"],"pi")
, (["alvorada","do","norte"],"go")
, (["alvorada","do","sul"],"pr")
, (["amajari"],"rr")
, (["amambaí"],"ms")
, (["amapá"],"ap")
, (["amapá","do","maranhão"],"ma")
, (["amapari"],"ap")
, (["amaporã"],"pr")
, (["amaraji"],"pe")
, (["amaral","ferrador"],"rs")
, (["amaralina"],"go")
, (["amarante"],"pi")
, (["amarante","do","maranhão"],"ma")
, (["amargosa"],"ba")
, (["amaturá"],"am")
, (["amélia","rodrigues"],"ba")
, (["américa","dourada"],"ba")
, (["americana"],"sp")
, (["americano","do","brasil"],"go")
, (["américo","brasiliense"],"sp")
, (["américo","de","campos"],"sp")
, (["ametista","do","sul"],"rs")
, (["amontada"],"ce")
, (["amorinópolis"],"go")
, (["amparo","de","são","francisco"],"se")
, (["amparo","do","serra"],"mg")
, (["amparo"],"pb")
, (["amparo"],"sp")
, (["ampére"],"pr")
, (["anadia"],"al")
, (["anagé"],"ba")
, (["anahy"],"pr")
, (["anajás"],"pa")
, (["anajatuba"],"ma")
, (["analândia"],"sp")
, (["anamã"],"am")
, (["ananás"],"to")
, (["ananindeua"],"pa")
, (["anápolis"],"go")
, (["anapu"],"pa")
, (["anapurus"],"ma")
, (["anastácio"],"ms")
, (["anaurilândia"],"ms")
, (["anchieta"],"es")
, (["anchieta"],"sc")
, (["andaraí"],"ba")
, (["andirá"],"pr")
, (["andorinha"],"ba")
, (["andradas"],"mg")
, (["andradina"],"sp")
, (["andré","da","rocha"],"rs")
, (["andrelândia"],"mg")
, (["angatuba"],"sp")
, (["angelândia"],"mg")
, (["angélica"],"ms")
, (["angelim"],"pe")
, (["angelina"],"sc")
, (["angical"],"ba")
, (["angical","do","piauí"],"pi")
, (["angico"],"to")
, (["angicos"],"rn")
, (["angra","dos","reis"],"rj")
, (["anguera"],"ba")
, (["ângulo"],"pr")
, (["anhangüera"],"go")
, (["anhembi"],"sp")
, (["anhumas"],"sp")
, (["anicuns"],"go")
, (["anísio","de","abreu"],"pi")
, (["anita","garibaldi"],"sc")
, (["anitápolis"],"sc")
, (["anori"],"am")
, (["anta","gorda"],"rs")
, (["antas"],"ba")
, (["antonina"],"pr")
, (["antonina","do","norte"],"ce")
, (["antônio","almeida"],"pi")
, (["antônio","cardoso"],"ba")
, (["antônio","carlos"],"mg")
, (["antônio","carlos"],"sc")
, (["antônio","dias"],"mg")
, (["antônio","gonçalves"],"ba")
, (["antônio","joão"],"ms")
, (["antônio","martins"],"rn")
, (["antônio","olinto"],"pr")
, (["antônio","prado"],"rs")
, (["antônio","prado","de","minas"],"mg")
, (["aparecida"],"pb")
, (["aparecida"],"sp")
, (["aparecida","de","goiânia"],"go")
, (["aparecida","d'oeste"],"sp")
, (["aparecida","do","rio","doce"],"go")
, (["aparecida","do","rio","negro"],"to")
, (["aparecida","do","taboado"],"ms")
, (["aperibé"],"rj")
, (["apiacá"],"es")
, (["apiacás"],"mt")
, (["apiaí"],"sp")
, (["apicum","-","açu"],"ma")
, (["apiúna"],"sc")
, (["apodi"],"rn")
, (["aporá"],"ba")
, (["aporé"],"go")
, (["apuarema"],"ba")
, (["apucarana"],"pr")
, (["apuí"],"am")
, (["apuiarés"],"ce")
, (["aquidabã"],"se")
, (["aquidauana"],"ms")
, (["aquiraz"],"ce")
, (["arabutã"],"sc")
, (["araçagi"],"pb")
, (["araçaí"],"mg")
, (["aracaju"],"se")
, (["araçariguama"],"sp")
, (["araças"],"ba")
, (["aracati"],"ce")
, (["aracatu"],"ba")
, (["araçatuba"],"sp")
, (["araci"],"ba")
, (["aracitaba"],"mg")
, (["aracoiaba"],"ce")
, (["araçoiaba","da","serra"],"sp")
, (["aracoiaba"],"pe")
, (["aracruz"],"es")
, (["araçuaí"],"mg")
, (["araçu"],"go")
, (["aragarças"],"go")
, (["aragoiânia"],"go")
, (["aragominas"],"to")
, (["araguacema"],"to")
, (["araguaçu"],"to")
, (["araguaiana"],"mt")
, (["araguaína"],"to")
, (["araguainha"],"mt")
, (["araguanã"],"ma")
, (["araguanã"],"to")
, (["araguapaz"],"go")
, (["araguari"],"mg")
, (["araguatins"],"to")
, (["araioses"],"ma")
, (["aral","moreira"],"ms")
, (["aramari"],"ba")
, (["arambaré"],"rs")
, (["arame"],"ma")
, (["aramina"],"sp")
, (["arandu"],"sp")
, (["arantina"],"mg")
, (["arapeí"],"sp")
, (["arapiraca"],"al")
, (["arapoema"],"to")
, (["araponga"],"mg")
, (["arapongas"],"pr")
, (["araporã"],"mg")
, (["arapoti"],"pr")
, (["arapuá"],"mg")
, (["arapuã"],"pr")
, (["araputanga"],"mt")
, (["araquari"],"sc")
, (["araranguá"],"sc")
, (["arara"],"pb")
, (["araraquara"],"sp")
, (["araras"],"sp")
, (["ararendá"],"ce")
, (["araricá"],"rs")
, (["arari"],"ma")
, (["araripe"],"ce")
, (["araripina"],"pe")
, (["araruama"],"rj")
, (["araruna"],"pb")
, (["araruna"],"pr")
, (["arataca"],"ba")
, (["aratiba"],"rs")
, (["aratuba"],"ce")
, (["aratuípe"],"ba")
, (["arauá"],"se")
, (["araucária"],"pr")
, (["araújos"],"mg")
, (["araxá"],"mg")
, (["arceburgo"],"mg")
, (["arco","íris"],"sp")
, (["arcos"],"mg")
, (["arcoverde"],"pe")
, (["areado"],"mg")
, (["areal"],"rj")
, (["arealva"],"sp")
, (["areia"],"pb")
, (["areia","branca"],"rn")
, (["areia","branca"],"se")
, (["areia","de","baraúnas"],"pb")
, (["areial"],"pb")
, (["areias"],"sp")
, (["areiópolis"],"sp")
, (["arenápolis"],"mt")
, (["arenópolis"],"go")
, (["arês"],"rn")
, (["argirita"],"mg")
, (["aricanduva"],"mg")
, (["arinos"],"mg")
, (["aripuanã"],"mt")
, (["ariquemes"],"ro")
, (["ariranha"],"sp")
, (["ariranha","do","ivaí"],"pr")
, (["armação","dos","búzios"],"rj")
, (["armazém"],"sc")
, (["arneiroz"],"ce")
, (["aroazes"],"pi")
, (["aroeiras"],"pb")
, (["arraial"],"pi")
, (["arraial","do","cabo"],"rj")
, (["arraias"],"to")
, (["arroio","do","meio"],"rs")
, (["arroio","do","padre"],"rs")
, (["arroio","do","sal"],"rs")
, (["arroio","dos","ratos"],"rs")
, (["arroio","do","tigre"],"rs")
, (["arroio","grande"],"rs")
, (["arroio","trinta"],"sc")
, (["artur","nogueira"],"sp")
, (["aruanã"],"go")
, (["arujá"],"sp")
, (["arvoredo"],"sc")
, (["arvorezinha"],"rs")
, (["ascurra"],"sc")
, (["aspásia"],"sp")
, (["assaí"],"pr")
, (["assaré"],"ce")
, (["assis"],"sp")
, (["assis","brasil"],"ac")
, (["assis","chateaubriand"],"pr")
, (["assunção","do","piauí"],"pi")
, (["assunção"],"pb")
, (["astolfo","dutra"],"mg")
, (["astorga"],"pr")
, (["atalaia"],"al")
, (["atalaia"],"pr")
, (["atalaia","do","norte"],"am")
, (["atalanta"],"sc")
, (["ataléia"],"mg")
, (["atibaia"],"sp")
, (["atílio","vivacqua"],"es")
, (["augustinópolis"],"to")
, (["augusto","corrêa"],"pa")
, (["augusto","de","lima"],"mg")
, (["augusto","pestana"],"rs")
, (["áurea"],"rs")
, (["aurelino","leal"],"ba")
, (["auriflama"],"sp")
, (["aurilândia"],"go")
, (["aurora"],"ce")
, (["aurora"],"sc")
, (["aurora","do","pará"],"pa")
, (["aurora","do","tocantins"],"to")
, (["autazes"],"am")
, (["avaí"],"sp")
, (["avanhandava"],"sp")
, (["avaré"],"sp")
, (["aveiro"],"pa")
, (["avelino","lopes"],"pi")
, (["avelinópolis"],"go")
, (["axixá"],"ma")
, (["axixá","do","tocantins"],"to")
, (["babaçulândia"],"to")
, (["bacabal"],"ma")
, (["bacabeira"],"ma")
, (["bacuri"],"ma")
, (["bacurituba"],"ma")
, (["bady","bassitt"],"sp")
, (["baependi"],"mg")
, (["bagé"],"rs")
, (["bagre"],"pa")
, (["baía","da","traição"],"pb")
, (["baía","formosa"],"rn")
, (["baianópolis"],"ba")
, (["baião"],"pa")
, (["baixa","grande"],"ba")
, (["baixa","grande","do","ribeiro"],"pi")
, (["baixio"],"ce")
, (["baixo","guandu"],"es")
, (["balbinos"],"sp")
, (["baldim"],"mg")
, (["baliza"],"go")
, (["balneário","arroio","do","silva"],"sc")
, (["balneário","barra","do","sul"],"sc")
, (["balneário","camboriú"],"sc")
, (["balneário","gaivota"],"sc")
, (["balneário","pinhal"],"rs")
, (["bálsamo"],"sp")
, (["balsa","nova"],"pr")
, (["balsas"],"ma")
, (["bambuí"],"mg")
, (["banabuiú"],"ce")
, (["bananal"],"sp")
, (["bananeiras"],"pb")
, (["bandeira"],"mg")
, (["bandeira","do","sul"],"mg")
, (["bandeirante"],"sc")
, (["bandeirantes","do","tocantins"],"to")
, (["bandeirantes"],"ms")
, (["bandeirantes"],"pr")
, (["bannach"],"pa")
, (["banzaê"],"ba")
, (["barão"],"rs")
, (["barão","de","antonina"],"sp")
, (["barão","de","cocais"],"mg")
, (["barão","de","cotegipe"],"rs")
, (["barão","de","grajaú"],"ma")
, (["barão","de","melgaço"],"mt")
, (["barão","de","monte","alto"],"mg")
, (["barão","do","triunfo"],"rs")
, (["baraúna"],"pb")
, (["baraúna"],"rn")
, (["barbacena"],"mg")
, (["barbalha"],"ce")
, (["barbosa"],"sp")
, (["barbosa","ferraz"],"pr")
, (["barcarena"],"pa")
, (["barcelona"],"rn")
, (["barcelos"],"am")
, (["bariri"],"sp")
, (["barra"],"ba")
, (["barra","bonita"],"sc")
, (["barra","bonita"],"sp")
, (["barracão"],"pr")
, (["barracão"],"rs")
, (["barra","da","estiva"],"ba")
, (["barra","d'alcântara"],"pi")
, (["barra","de","guabiraba"],"pe")
, (["barra","de","santana"],"pb")
, (["barra","de","santa","rosa"],"pb")
, (["barra","de","santo","antônio"],"al")
, (["barra","de","são","francisco"],"es")
, (["barra","de","são","miguel"],"al")
, (["barra","de","são","miguel"],"pb")
, (["barra","do","bugres"],"mt")
, (["barra","do","chapéu"],"sp")
, (["barra","do","choça"],"ba")
, (["barra","do","corda"],"ma")
, (["barra","do","garças"],"mt")
, (["barra","do","guarita"],"rs")
, (["barra","do","jacaré"],"pr")
, (["barra","do","mendes"],"ba")
, (["barra","do","ouro"],"to")
, (["barra","do","piraí"],"rj")
, (["barra","do","quaraí"],"rs")
, (["barra","do","ribeiro"],"rs")
, (["barra","do","rio","azul"],"rs")
, (["barra","do","rocha"],"ba")
, (["barra","dos","coqueiros"],"se")
, (["barra","do","turvo"],"sp")
, (["barra","funda"],"rs")
, (["barra","longa"],"mg")
, (["barra","mansa"],"rj")
, (["barras"],"pi")
, (["barra","velha"],"sc")
, (["barreira"],"ce")
, (["barreiras"],"ba")
, (["barreiras","do","piauí"],"pi")
, (["barreirinha"],"am")
, (["barreirinhas"],"ma")
, (["barreiros"],"pe")
, (["barretos"],"sp")
, (["barrinha"],"sp")
, (["barro"],"ce")
, (["barro","alto"],"ba")
, (["barro","alto"],"go")
, (["barrocas"],"ba")
, (["barro","duro"],"pi")
, (["barrolândia"],"to")
, (["barroquinha"],"ce")
, (["barros","cassal"],"rs")
, (["barroso"],"mg")
, (["barueri"],"sp")
, (["bastos"],"sp")
, (["bataguaçu"],"ms")
, (["bataiporã"],"ms")
, (["batalha"],"al")
, (["batalha"],"pi")
, (["batatais"],"sp")
, (["baturité"],"ce")
, (["bauru"],"sp")
, (["bayeux"],"pb")
, (["bebedouro"],"sp")
, (["beberibe"],"ce")
, (["bela","cruz"],"ce")
, (["belágua"],"ma")
, (["bela","vista","do","maranhão"],"ma")
, (["bela","vista"],"ms")
, (["bela","vista","da","caroba"],"pr")
, (["bela","vista","de","goiás"],"go")
, (["bela","vista","de","minas"],"mg")
, (["bela","vista","do","paraíso"],"pr")
, (["bela","vista","do","piauí"],"pi")
, (["bela","vista","do","toldo"],"sc")
, (["belém"],"al")
, (["belém"],"pa")
, (["belém"],"pb")
, (["belém","de","maria"],"pe")
, (["belém","de","são","francisco"],"pe")
, (["belém","do","brejo","do","cruz"],"pb")
, (["belém","do","piauí"],"pi")
, (["belford","roxo"],"rj")
, (["belmiro","braga"],"mg")
, (["belmonte"],"ba")
, (["belmonte"],"sc")
, (["belo","campo"],"ba")
, (["belo","horizonte"],"mg")
, (["belo","jardim"],"pe")
, (["belo","monte"],"al")
, (["belo","oriente"],"mg")
, (["belo","vale"],"mg")
, (["belterra"],"pa")
, (["beneditinos"],"pi")
, (["benedito","leite"],"ma")
, (["benedito","novo"],"sc")
, (["benevides"],"pa")
, (["benjamin","constant"],"am")
, (["benjamin","constant","do","sul"],"rs")
, (["bento","de","abreu"],"sp")
, (["bento","fernandes"],"rn")
, (["bento","gonçalves"],"rs")
, (["bequimão"],"ma")
, (["berilo"],"mg")
, (["berizal"],"mg")
, (["bernardino","de","campos"],"sp")
, (["bernardino","batista"],"pb")
, (["bernardo","do","mearim"],"ma")
, (["bernardo","sayão"],"to")
, (["bertioga"],"sp")
, (["bertolínia"],"pi")
, (["bertópolis"],"mg")
, (["beruri"],"am")
, (["betânia"],"pe")
, (["betânia","do","piauí"],"pi")
, (["betim"],"mg")
, (["bezerros"],"pe")
, (["bias","fortes"],"mg")
, (["bicas"],"mg")
, (["biguaçu"],"sc")
, (["bilac"],"sp")
, (["biquinhas"],"mg")
, (["birigüi"],"sp")
, (["biritiba","-","mirim"],"sp")
, (["biritinga"],"ba")
, (["bituruna"],"pr")
, (["blumenau"],"sc")
, (["boa","esperança"],"es")
, (["boa","esperança"],"mg")
, (["boa","esperança"],"pr")
, (["boa","esperança","do","iguaçu"],"pr")
, (["boa","esperança","do","sul"],"sp")
, (["boa","hora"],"pi")
, (["boa","nova"],"ba")
, (["boa","ventura"],"pb")
, (["boa","ventura","de","são","roque"],"pr")
, (["boa","viagem"],"ce")
, (["boa","vista"],"pb")
, (["boa","vista"],"rr")
, (["boa","vista","da","aparecida"],"pr")
, (["boa","vista","das","missões"],"rs")
, (["boa","vista","do","buricá"],"rs")
, (["boa","vista","do","gurupi"],"ma")
, (["boa","vista","do","incra"],"rs")
, (["boa","vista","do","ramos"],"am")
, (["boa","vista","do","sul"],"rs")
, (["boa","vista","do","tupim"],"ba")
, (["boca","da","mata"],"al")
, (["boca","do","acre"],"am")
, (["bocaina","de","minas"],"mg")
, (["bocaina","do","sul"],"sc")
, (["bocaina"],"pi")
, (["bocaina"],"sp")
, (["bocaiúva"],"mg")
, (["bocaiúva","do","sul"],"pr")
, (["bodocó"],"pe")
, (["bodoquena"],"ms")
, (["bodo"],"rn")
, (["bofete"],"sp")
, (["boituva"],"sp")
, (["bombinhas"],"sc")
, (["bom","conselho"],"pe")
, (["bom","despacho"],"mg")
, (["bom","jardim"],"ma")
, (["bom","jardim"],"pe")
, (["bom","jardim"],"rj")
, (["bom","jardim","da","serra"],"sc")
, (["bom","jardim","de","goiás"],"go")
, (["bom","jardim","de","minas"],"mg")
, (["bom","jesus"],"pb")
, (["bom","jesus"],"pi")
, (["bom","jesus"],"rn")
, (["bom","jesus"],"rs")
, (["bom","jesus"],"sc")
, (["bom","jesus","da","lapa"],"ba")
, (["bom","jesus","da","penha"],"mg")
, (["bom","jesus","da","serra"],"ba")
, (["bom","jesus","das","selvas"],"ma")
, (["bom","jesus","de","goiás"],"go")
, (["bom","jesus","do","amparo"],"mg")
, (["bom","jesus","do","araguaia"],"mt")
, (["bom","jesus","do","galho"],"mg")
, (["bom","jesus","do","itabapoana"],"rj")
, (["bom","jesus","do","norte"],"es")
, (["bom","jesus","do","oeste"],"sc")
, (["bom","jesus","dos","perdões"],"sp")
, (["bom","jesus","do","sul"],"pr")
, (["bom","jesus","do","tocantins"],"pa")
, (["bom","jesus","do","tocantins"],"to")
, (["bom","lugar"],"ma")
, (["bom","princípio"],"rs")
, (["bom","princípio","do","piauí"],"pi")
, (["bom","progresso"],"rs")
, (["bom","repouso"],"mg")
, (["bom","retiro"],"sc")
, (["bom","retiro","do","sul"],"rs")
, (["bom","sucesso"],"mg")
, (["bom","sucesso"],"pb")
, (["bom","sucesso"],"pr")
, (["bom","sucesso","de","itararé"],"sp")
, (["bom","sucesso","do","sul"],"pr")
, (["bonfim"],"mg")
, (["bonfim"],"rr")
, (["bonfim","do","piauí"],"pi")
, (["bonfinópolis"],"go")
, (["bonfinópolis","de","minas"],"mg")
, (["boninal"],"ba")
, (["bonito"],"ba")
, (["bonito"],"ms")
, (["bonito"],"pa")
, (["bonito"],"pe")
, (["bonito","de","minas"],"mg")
, (["bonito","de","santa","fé"],"pb")
, (["bonópolis"],"go")
, (["boqueirão"],"pb")
, (["boqueirão","do","leão"],"rs")
, (["boqueirão","do","piauí"],"pi")
, (["boquim"],"se")
, (["boquira"],"ba")
, (["boracéia"],"sp")
, (["borá"],"sp")
, (["borba"],"am")
, (["borborema"],"pb")
, (["borborema","(são","paulo)"],"sp")
, (["borda","da","mata"],"mg")
, (["borebi"],"sp")
, (["borrazópolis"],"pr")
, (["bossoroca"],"rs")
, (["botelhos"],"mg")
, (["botucatu"],"sp")
, (["botumirim"],"mg")
, (["botuporã"],"ba")
, (["botuverá"],"sc")
, (["bozano"],"rs")
, (["braço","do","norte"],"sc")
, (["braço","do","trombudo"],"sc")
, (["braga"],"rs")
, (["bragança"],"pa")
, (["bragança","paulista"],"sp")
, (["braganey"],"pr")
, (["branquinha"],"al")
, (["brasilândia"],"ms")
, (["brasilândia","de","minas"],"mg")
, (["brasilândia","do","sul"],"pr")
, (["brasilândia","do","tocantins"],"to")
, (["brasiléia"],"ac")
, (["brasileira"],"pi")
, (["brasília"],"df")
, (["brasília","de","minas"],"mg")
, (["brasil","novo"],"pa")
, (["brasnorte"],"mt")
, (["brasópolis"],"mg")
, (["brás","pires"],"mg")
, (["braúna"],"sp")
, (["braúnas"],"mg")
, (["brazabrantes"],"go")
, (["brejão"],"pe")
, (["brejetuba"],"es")
, (["brejinho"],"pe")
, (["brejinho"],"rn")
, (["brejinho","de","nazaré"],"to")
, (["brejo"],"ma")
, (["brejo","alegre"],"sp")
, (["brejo","da","madre","de","deus"],"pe")
, (["brejo","de","areia"],"ma")
, (["brejo","do","cruz"],"pb")
, (["brejo","do","piauí"],"pi")
, (["brejo","dos","santos"],"pb")
, (["brejões"],"ba")
, (["brejo","grande","do","araguaia"],"pa")
, (["brejo","grande"],"se")
, (["brejolândia"],"ba")
, (["brejo","santo"],"ce")
, (["breu","branco"],"pa")
, (["breves"],"pa")
, (["britânia"],"go")
, (["brochier"],"rs")
, (["brodósqui"],"sp")
, (["brotas"],"sp")
, (["brotas","de","macaúbas"],"ba")
, (["brumadinho"],"mg")
, (["brumado"],"ba")
, (["brunópolis"],"sc")
, (["brusque"],"sc")
, (["bueno","brandão"],"mg")
, (["buenópolis"],"mg")
, (["buenos","aires"],"pe")
, (["buerarema"],"ba")
, (["bugre"],"mg")
, (["buíque"],"pe")
, (["bujari"],"ac")
, (["bujaru"],"pa")
, (["buri"],"sp")
, (["buritama"],"sp")
, (["buriti"],"ma")
, (["buriti","alegre"],"go")
, (["buriti","bravo"],"ma")
, (["buriticupu"],"ma")
, (["buriti","de","goiás"],"go")
, (["buriti","dos","lopes"],"pi")
, (["buriti","dos","montes"],"pi")
, (["buriti","do","tocantins"],"to")
, (["buritinópolis"],"go")
, (["buritirama"],"ba")
, (["buritirana"],"ma")
, (["buritis"],"mg")
, (["buritis"],"ro")
, (["buritizal"],"sp")
, (["buritizeiro"],"mg")
, (["butiá"],"rs")
, (["caapiranga"],"am")
, (["caaporã"],"pb")
, (["caarapó"],"ms")
, (["caatiba"],"ba")
, (["cabaceiras"],"pb")
, (["cabaceiras","do","paraguaçu"],"ba")
, (["cabeceiras"],"go")
, (["cabeceira","grande"],"mg")
, (["cabeceiras","do","piauí"],"pi")
, (["cabedelo"],"pb")
, (["cabixi"],"ro")
, (["cabo","de","santo","agostinho"],"pe")
, (["cabo","frio"],"rj")
, (["cabo","verde"],"mg")
, (["cabrália","paulista"],"sp")
, (["cabreúva"],"sp")
, (["cabrobó"],"pe")
, (["caçador"],"sc")
, (["caçapava"],"sp")
, (["caçapava","do","sul"],"rs")
, (["cacaulândia"],"ro")
, (["cacequi"],"rs")
, (["cáceres"],"mt")
, (["cachoeira"],"ba")
, (["cachoeira","alta"],"go")
, (["cachoeira","da","prata"],"mg")
, (["cachoeira","de","goiás"],"go")
, (["cachoeira","de","minas"],"mg")
, (["cachoeira","de","pajeú"],"mg")
, (["cachoeira","do","arari"],"pa")
, (["cachoeira","do","piriá"],"pa")
, (["cachoeira","dos","índios"],"pb")
, (["cachoeira","do","sul"],"rs")
, (["cachoeira","dourada"],"go")
, (["cachoeira","dourada"],"mg")
, (["cachoeira","grande"],"ma")
, (["cachoeira","paulista"],"sp")
, (["cachoeiras","de","macacu"],"rj")
, (["cachoeirinha"],"pe")
, (["cachoeirinha"],"rs")
, (["cachoeirinha","(tocantins)"],"to")
, (["cachoeiro","de","itapemirim"],"es")
, (["cacimba","de","areia"],"pb")
, (["cacimba","de","dentro"],"pb")
, (["cacimbas"],"pb")
, (["cacimbinhas"],"al")
, (["cacique","doble"],"rs")
, (["cacoal"],"ro")
, (["caconde"],"sp")
, (["caçu"],"go")
, (["caculé"],"ba")
, (["caém"],"ba")
, (["caetanópolis"],"mg")
, (["caetanos"],"ba")
, (["caeté"],"mg")
, (["caetés"],"pe")
, (["caetité"],"ba")
, (["cafarnaum"],"ba")
, (["cafeara"],"pr")
, (["cafelândia"],"pr")
, (["cafelândia"],"sp")
, (["cafezal","do","sul"],"pr")
, (["caiabu"],"sp")
, (["caiana"],"mg")
, (["caiapônia"],"go")
, (["caibaté"],"rs")
, (["caibi"],"sc")
, (["caiçara"],"pb")
, (["caiçara"],"rs")
, (["caiçara","do","norte"],"rn")
, (["caiçara","do","rio","do","vento"],"rn")
, (["caicó"],"rn")
, (["caieiras"],"sp")
, (["cairu"],"ba")
, (["caiuá"],"sp")
, (["cajamar"],"sp")
, (["cajapió"],"ma")
, (["cajari"],"ma")
, (["cajati"],"sp")
, (["cajazeiras"],"pb")
, (["cajazeiras","do","piauí"],"pi")
, (["cajazeirinhas"],"pb")
, (["cajobi"],"sp")
, (["cajueiro"],"al")
, (["cajueiro","da","praia"],"pi")
, (["cajuri"],"mg")
, (["cajuru"],"sp")
, (["calçado"],"pe")
, (["calçoene"],"ap")
, (["caldas"],"mg")
, (["caldas","brandão"],"pb")
, (["caldas","novas"],"go")
, (["caldazinha"],"go")
, (["caldeirão","grande"],"ba")
, (["caldeirão","grande","do","piauí"],"pi")
, (["califórnia"],"pr")
, (["calmon"],"sc")
, (["calumbi"],"pe")
, (["camacan"],"ba")
, (["camaçari"],"ba")
, (["camacho"],"mg")
, (["camalaú"],"pb")
, (["camamu"],"ba")
, (["camanducaia"],"mg")
, (["camapuã"],"ms")
, (["camaquã"],"rs")
, (["camaragibe"],"pe")
, (["camargo"],"rs")
, (["cambará"],"pr")
, (["cambará","do","sul"],"rs")
, (["cambé"],"pr")
, (["cambira"],"pr")
, (["camboriú"],"sc")
, (["cambuci"],"rj")
, (["cambuí"],"mg")
, (["cambuquira"],"mg")
, (["cametá"],"pa")
, (["camocim"],"ce")
, (["camocim","de","são","félix"],"pe")
, (["campanário"],"mg")
, (["campanha"],"mg")
, (["campestre"],"al")
, (["campestre"],"mg")
, (["campestre","da","serra"],"rs")
, (["campestre","de","goiás"],"go")
, (["campestre","do","maranhão"],"ma")
, (["campinaçu"],"go")
, (["campina","da","lagoa"],"pr")
, (["campina","das","missões"],"rs")
, (["campina","do","monte","alegre"],"sp")
, (["campina","do","simão"],"pr")
, (["campina","grande","do","sul"],"pr")
, (["campina","grande"],"pb")
, (["campinápolis"],"mt")
, (["campinas"],"sp")
, (["campinas","do","piauí"],"pi")
, (["campinas","do","sul"],"rs")
, (["campina","verde"],"mg")
, (["campinorte"],"go")
, (["campo","alegre"],"al")
, (["campo","alegre","de","goiás"],"go")
, (["campo","alegre","de","lourdes"],"ba")
, (["campo","alegre","do","fidalgo"],"pi")
, (["campo","alegre"],"sc")
, (["campo","azul"],"mg")
, (["campo","belo","do","sul"],"sc")
, (["campo","belo"],"mg")
, (["campo","bom"],"rs")
, (["campo","bonito"],"pr")
, (["campo","do","brito"],"se")
, (["campo","do","meio"],"mg")
, (["campo","do","tenente"],"pr")
, (["campo","erê"],"sc")
, (["campo","florido"],"mg")
, (["campo","formoso"],"ba")
, (["campo","grande"],"al")
, (["campo","grande","do","piauí"],"pi")
, (["campo","grande"],"ms")
, (["campo","grande"],"rn")
, (["campo","largo","do","piauí"],"pi")
, (["campo","largo"],"pr")
, (["campo","limpo","de","goiás"],"go")
, (["campo","limpo","paulista"],"sp")
, (["campo","magro"],"pr")
, (["campo","maior"],"pi")
, (["campo","mourão"],"pr")
, (["campo","novo","de","rondônia"],"ro")
, (["campo","novo","do","parecis"],"mt")
, (["campo","novo"],"rs")
, (["campo","redondo"],"rn")
, (["campos","altos"],"mg")
, (["campos","belos"],"go")
, (["campos","borges"],"rs")
, (["campos","de","júlio"],"mt")
, (["campos","do","jordão"],"sp")
, (["campos","dos","goytacazes"],"rj")
, (["campos","gerais"],"mg")
, (["campos","lindos"],"to")
, (["campos","novos","paulista"],"sp")
, (["campos","novos"],"sc")
, (["campos","sales"],"ce")
, (["campos","verdes"],"go")
, (["campo","verde"],"mt")
, (["camutanga"],"pe")
, (["canaã"],"mg")
, (["canaã","dos","carajás"],"pa")
, (["canabrava","do","norte"],"mt")
, (["cananéia"],"sp")
, (["canapi"],"al")
, (["canápolis"],"ba")
, (["canápolis"],"mg")
, (["canarana"],"ba")
, (["canarana"],"mt")
, (["canas"],"sp")
, (["cana","verde"],"mg")
, (["canavieira"],"pi")
, (["canavieiras"],"ba")
, (["candeal"],"ba")
, (["candeias"],"ba")
, (["candeias"],"mg")
, (["candeias","do","jamari"],"ro")
, (["candelária"],"rs")
, (["candiba"],"ba")
, (["cândido","de","abreu"],"pr")
, (["cândido","godói"],"rs")
, (["cândido","mendes"],"ma")
, (["cândido","mota"],"sp")
, (["cândido","rodrigues"],"sp")
, (["cândido","sales"],"ba")
, (["candiota"],"rs")
, (["candói"],"pr")
, (["canela"],"rs")
, (["canelinha"],"sc")
, (["canguaretama"],"rn")
, (["canguçu"],"rs")
, (["canhoba"],"se")
, (["canhotinho"],"pe")
, (["canindé"],"ce")
, (["canindé","de","são","francisco"],"se")
, (["canitar"],"sp")
, (["canoas"],"rs")
, (["canoinhas"],"sc")
, (["cansanção"],"ba")
, (["cantagalo"],"mg")
, (["cantagalo"],"pr")
, (["cantagalo"],"rj")
, (["cantanhede"],"ma")
, (["cantá"],"rr")
, (["canto","do","buriti"],"pi")
, (["canudos"],"ba")
, (["canudos","do","vale"],"rs")
, (["canutama"],"am")
, (["capanema"],"pa")
, (["capanema"],"pr")
, (["capão","alto"],"sc")
, (["capão","bonito"],"sp")
, (["capão","bonito","do","sul"],"rs")
, (["capão","da","canoa"],"rs")
, (["capão","do","cipó"],"rs")
, (["capão","do","leão"],"rs")
, (["caparaó"],"mg")
, (["capela"],"al")
, (["capela"],"se")
, (["capela","de","santana"],"rs")
, (["capela","do","alto","alegre"],"ba")
, (["capela","do","alto"],"sp")
, (["capela","nova"],"mg")
, (["capelinha"],"mg")
, (["capetinga"],"mg")
, (["capim","branco"],"mg")
, (["capim","grosso"],"ba")
, (["capim"],"pb")
, (["capinópolis"],"mg")
, (["capinzal"],"sc")
, (["capinzal","do","norte"],"ma")
, (["capistrano"],"ce")
, (["capitão"],"rs")
, (["capitão","andrade"],"mg")
, (["capitão","de","campos"],"pi")
, (["capitão","enéas"],"mg")
, (["capitão","gervásio","oliveira"],"pi")
, (["capitão","leônidas","marques"],"pr")
, (["capitão","poço"],"pa")
, (["capitólio"],"mg")
, (["capivari"],"sp")
, (["capivari","de","baixo"],"sc")
, (["capivari","do","sul"],"rs")
, (["capixaba"],"ac")
, (["capoeiras"],"pe")
, (["caputira"],"mg")
, (["caraá"],"rs")
, (["caracaraí"],"rr")
, (["caracol"],"ms")
, (["caracol"],"pi")
, (["caraguatatuba"],"sp")
, (["caraíbas"],"ba")
, (["caraí"],"mg")
, (["carambeí"],"pr")
, (["caranaíba"],"mg")
, (["carandaí"],"mg")
, (["carangola"],"mg")
, (["carapebus"],"rj")
, (["carapicuíba"],"sp")
, (["caratinga"],"mg")
, (["carauari"],"am")
, (["caraúbas"],"pb")
, (["caraúbas","do","piauí"],"pi")
, (["caravelas"],"ba")
, (["carazinho"],"rs")
, (["carbonita"],"mg")
, (["cardeal","da","silva"],"ba")
, (["cardoso"],"sp")
, (["cardoso","moreira"],"rj")
, (["careaçu"],"mg")
, (["careiro"],"am")
, (["careiro","da","várzea"],"am")
, (["cariacica"],"es")
, (["caridade"],"ce")
, (["caridade","do","piauí"],"pi")
, (["carinhanha"],"ba")
, (["carira"],"se")
, (["cariré"],"ce")
, (["caririaçu"],"ce")
, (["cariri","do","tocantins"],"to")
, (["cariús"],"ce")
, (["carlinda"],"mt")
, (["carlópolis"],"pr")
, (["carlos","barbosa"],"rs")
, (["carlos","chagas"],"mg")
, (["carlos","gomes"],"rs")
, (["carmésia"],"mg")
, (["carmo"],"rj")
, (["carmo","da","cachoeira"],"mg")
, (["carmo","da","mata"],"mg")
, (["carmo","de","minas"],"mg")
, (["carmo","do","cajuru"],"mg")
, (["carmo","do","paranaíba"],"mg")
, (["carmo","do","rio","claro"],"mg")
, (["carmo","do","rio","verde"],"go")
, (["carmolândia"],"to")
, (["carmópolis"],"se")
, (["carmópolis","de","minas"],"mg")
, (["carnaíba"],"pe")
, (["carnaúba","dos","dantas"],"rn")
, (["carnaubais"],"rn")
, (["carnaubal"],"ce")
, (["carnaubeira","da","penha"],"pe")
, (["carneirinho"],"mg")
, (["carneiros"],"al")
, (["caroebe"],"rr")
, (["carolina"],"ma")
, (["carpina"],"pe")
, (["carrancas"],"mg")
, (["carrapateira"],"pb")
, (["carrasco","bonito"],"to")
, (["caruaru"],"pe")
, (["carutapera"],"ma")
, (["carvalhópolis"],"mg")
, (["carvalhos"],"mg")
, (["casa","branca"],"sp")
, (["casa","grande"],"mg")
, (["casa","nova"],"ba")
, (["casca"],"rs")
, (["cascalho","rico"],"mg")
, (["cascavel"],"ce")
, (["cascavel"],"pr")
, (["caseara"],"to")
, (["caseiros"],"rs")
, (["casimiro","de","abreu"],"rj")
, (["casinhas"],"pe")
, (["casserengue"],"pb")
, (["cássia"],"mg")
, (["cássia","dos","coqueiros"],"sp")
, (["cassilândia"],"ms")
, (["castanhal"],"pa")
, (["castanheira"],"mt")
, (["castanheiras"],"ro")
, (["castelândia"],"go")
, (["castelo"],"es")
, (["castelo","do","piauí"],"pi")
, (["castilho"],"sp")
, (["castro"],"pr")
, (["castro","alves"],"ba")
, (["cataguases"],"mg")
, (["catalão"],"go")
, (["catanduva"],"sp")
, (["catanduvas"],"pr")
, (["catanduvas"],"sc")
, (["catarina"],"ce")
, (["catas","altas"],"mg")
, (["catas","altas","da","noruega"],"mg")
, (["catende"],"pe")
, (["catiguá"],"sp")
, (["catingueira"],"pb")
, (["catolândia"],"ba")
, (["catolé","do","rocha"],"pb")
, (["catu"],"ba")
, (["catuípe"],"rs")
, (["catuji"],"mg")
, (["caturaí"],"go")
, (["caturama"],"ba")
, (["caturité"],"pb")
, (["catuti"],"mg")
, (["caucaia"],"ce")
, (["cavalcante"],"go")
, (["caxambu"],"mg")
, (["caxambu","do","sul"],"sc")
, (["caxias"],"ma")
, (["caxias","do","sul"],"rs")
, (["caxingó"],"pi")
, (["ceará","-","mirim"],"rn")
, (["cedral"],"ma")
, (["cedral"],"sp")
, (["cedro"],"ce")
, (["cedro"],"pe")
, (["cedro","de","são","joão"],"se")
, (["cedro","do","abaeté"],"mg")
, (["celso","ramos"],"sc")
, (["centenário"],"rs")
, (["centenário"],"to")
, (["centenário","do","sul"],"pr")
, (["central"],"ba")
, (["central","de","minas"],"mg")
, (["central","do","maranhão"],"ma")
, (["centralina"],"mg")
, (["centro","do","guilherme"],"ma")
, (["centro","novo","do","maranhão"],"ma")
, (["cerejeiras"],"ro")
, (["ceres"],"go")
, (["cerqueira","césar"],"sp")
, (["cerquilho"],"sp")
, (["cerrito"],"rs")
, (["cerro","azul"],"pr")
, (["cerro","branco"],"rs")
, (["cerro","corá"],"rn")
, (["cerro","grande","do","sul"],"rs")
, (["cerro","grande"],"rs")
, (["cerro","largo"],"rs")
, (["cerro","negro"],"sc")
, (["cesário","lange"],"sp")
, (["céu","azul"],"pr")
, (["cezarina"],"go")
, (["chácara"],"mg")
, (["chã","de","alegria"],"pe")
, (["chã","grande"],"pe")
, (["chalé"],"mg")
, (["chapada"],"rs")
, (["chapada","de","areia"],"to")
, (["chapada","da","natividade"],"to")
, (["chapada","do","norte"],"mg")
, (["chapada","dos","guimarães"],"mt")
, (["chapada","gaúcha"],"mg")
, (["chapadão","do","céu"],"go")
, (["chapadão","do","lageado"],"sc")
, (["chapadão","do","sul"],"ms")
, (["chapadinha"],"ma")
, (["chapecó"],"sc")
, (["chã","preta"],"al")
, (["charqueada"],"sp")
, (["charqueadas"],"rs")
, (["charrua"],"rs")
, (["chaval"],"ce")
, (["chavantes"],"sp")
, (["chaves"],"pa")
, (["chiador"],"mg")
, (["chiapeta"],"rs")
, (["chopinzinho"],"pr")
, (["choró"],"ce")
, (["chorozinho"],"ce")
, (["chorrochó"],"ba")
, (["chuí"],"rs")
, (["chupinguaia"],"ro")
, (["chuvisca"],"rs")
, (["cianorte"],"pr")
, (["cícero","dantas"],"ba")
, (["cidade","gaúcha"],"pr")
, (["cidade","ocidental"],"go")
, (["cidelândia"],"ma")
, (["cidreira"],"rs")
, (["cipó"],"ba")
, (["cipotânea"],"mg")
, (["ciríaco"],"rs")
, (["claraval"],"mg")
, (["claro","dos","poções"],"mg")
, (["cláudia"],"mt")
, (["cláudio"],"mg")
, (["clementina"],"sp")
, (["clevelândia"],"pr")
, (["coaraci"],"ba")
, (["coari"],"am")
, (["cocal"],"pi")
, (["cocal","de","telha"],"pi")
, (["cocal","dos","alves"],"pi")
, (["cocal","do","sul"],"sc")
, (["cocalinho"],"mt")
, (["cocalzinho","de","goiás"],"go")
, (["cocos"],"ba")
, (["codajás"],"am")
, (["codó"],"ma")
, (["coelho","neto"],"ma")
, (["coimbra"],"mg")
, (["coité","do","nóia"],"al")
, (["coivaras"],"pi")
, (["colares"],"pa")
, (["colatina"],"es")
, (["colíder"],"mt")
, (["colina"],"sp")
, (["colinas"],"ma")
, (["colinas"],"rs")
, (["colinas","do","sul"],"go")
, (["colinas","do","tocantins"],"to")
, (["colméia"],"to")
, (["colniza"],"mt")
, (["colômbia"],"sp")
, (["colombo"],"pr")
, (["colônia","do","gurguéia"],"pi")
, (["colônia","do","piauí"],"pi")
, (["colônia","leopoldina"],"al")
, (["colorado"],"pr")
, (["colorado"],"rs")
, (["colorado","do","oeste"],"ro")
, (["coluna"],"mg")
, (["combinado"],"to")
, (["comendador","gomes"],"mg")
, (["comendador","levy","gasparian"],"rj")
, (["comercinho"],"mg")
, (["comodoro"],"mt")
, (["conceição"],"pb")
, (["conceição","da","aparecida"],"mg")
, (["conceição","da","barra","de","minas"],"mg")
, (["conceição","da","barra"],"es")
, (["conceição","da","feira"],"ba")
, (["conceição","das","alagoas"],"mg")
, (["conceição","das","pedras"],"mg")
, (["conceição","de","ipanema"],"mg")
, (["conceição","de","macabu"],"rj")
, (["conceição","do","almeida"],"ba")
, (["conceição","do","araguaia"],"pa")
, (["conceição","do","canindé"],"pi")
, (["conceição","do","castelo"],"es")
, (["conceição","do","coité"],"ba")
, (["conceição","do","jacuípe"],"ba")
, (["conceição","do","lago","-","açu"],"ma")
, (["conceição","do","mato","dentro"],"mg")
, (["conceição","do","pará"],"mg")
, (["conceição","do","rio","verde"],"mg")
, (["conceição","dos","ouros"],"mg")
, (["conceição","do","tocantins"],"to")
, (["conchal"],"sp")
, (["conchas"],"sp")
, (["concórdia"],"sc")
, (["concórdia","do","pará"],"pa")
, (["condado"],"pb")
, (["condado"],"pe")
, (["conde"],"ba")
, (["conde"],"pb")
, (["condeúba"],"ba")
, (["condor"],"rs")
, (["cônego","marinho"],"mg")
, (["confins"],"mg")
, (["confresa"],"mt")
, (["congonhal"],"mg")
, (["congonhas"],"mg")
, (["congonhas","do","norte"],"mg")
, (["congonhinhas"],"pr")
, (["congo"],"pb")
, (["conquista"],"mg")
, (["conquista","d'oeste"],"mt")
, (["conselheiro","lafaiete"],"mg")
, (["conselheiro","mairinck"],"pr")
, (["conselheiro","pena"],"mg")
, (["consolação"],"mg")
, (["constantina"],"rs")
, (["contagem"],"mg")
, (["contenda"],"pr")
, (["contendas","do","sincorá"],"ba")
, (["coqueiral"],"mg")
, (["coqueiro","baixo"],"rs")
, (["coqueiros","do","sul"],"rs")
, (["coqueiro","seco"],"al")
, (["coração","de","jesus"],"mg")
, (["coração","de","maria"],"ba")
, (["corbélia"],"pr")
, (["cordeirópolis"],"sp")
, (["cordeiro"],"rj")
, (["cordeiros"],"ba")
, (["cordilheira","alta"],"sc")
, (["cordisburgo"],"mg")
, (["cordislândia"],"mg")
, (["coreaú"],"ce")
, (["coremas"],"pb")
, (["corguinho"],"ms")
, (["coribe"],"ba")
, (["corinto"],"mg")
, (["cornélio","procópio"],"pr")
, (["coroaci"],"mg")
, (["coroados"],"sp")
, (["coroatá"],"ma")
, (["coromandel"],"mg")
, (["coronel","barros"],"rs")
, (["coronel","bicaco"],"rs")
, (["coronel","domingos","soares"],"pr")
, (["coronel","ezequiel"],"rn")
, (["coronel","fabriciano"],"mg")
, (["coronel","freitas"],"sc")
, (["coronel","joão","pessoa"],"rn")
, (["coronel","joão","sá"],"ba")
, (["coronel","josé","dias"],"pi")
, (["coronel","macedo"],"sp")
, (["coronel","martins"],"sc")
, (["coronel","murta"],"mg")
, (["coronel","pacheco"],"mg")
, (["coronel","pilar"],"rs")
, (["coronel","sapucaia"],"ms")
, (["coronel","vivida"],"pr")
, (["coronel","xavier","chaves"],"mg")
, (["córrego","danta"],"mg")
, (["córrego","do","bom","jesus"],"mg")
, (["córrego","do","ouro"],"go")
, (["córrego","fundo"],"mg")
, (["córrego","novo"],"mg")
, (["correia","pinto"],"sc")
, (["corrente"],"pi")
, (["correntes"],"pe")
, (["correntina"],"ba")
, (["cortês"],"pe")
, (["corumbá"],"ms")
, (["corumbá","de","goiás"],"go")
, (["corumbaíba"],"go")
, (["corumbataí"],"sp")
, (["corumbataí","do","sul"],"pr")
, (["corumbiara"],"ro")
, (["corupá"],"sc")
, (["coruripe"],"al")
, (["cosmópolis"],"sp")
, (["cosmorama"],"sp")
, (["costa","marques"],"ro")
, (["costa","rica"],"ms")
, (["cotegipe"],"ba")
, (["cotia"],"sp")
, (["cotiporã"],"rs")
, (["cotriguaçu"],"mt")
, (["couto","de","magalhães"],"to")
, (["couto","de","magalhães","de","minas"],"mg")
, (["coxilha"],"rs")
, (["coxim"],"ms")
, (["coxixola"],"pb")
, (["craíbas"],"al")
, (["crateús"],"ce")
, (["crato"],"ce")
, (["cravinhos"],"sp")
, (["cravolândia"],"ba")
, (["criciúma"],"sc")
, (["crisólita"],"mg")
, (["crisópolis"],"ba")
, (["crissiumal"],"rs")
, (["cristal"],"rs")
, (["cristais"],"mg")
, (["cristais","paulista"],"sp")
, (["cristalândia"],"to")
, (["cristalândia","do","piauí"],"pi")
, (["cristal","do","sul"],"rs")
, (["cristália"],"mg")
, (["cristalina"],"go")
, (["cristiano","otoni"],"mg")
, (["cristianópolis"],"go")
, (["cristina"],"mg")
, (["cristinápolis"],"se")
, (["cristino","castro"],"pi")
, (["cristópolis"],"ba")
, (["crixás","do","tocantins"],"to")
, (["crixás"],"go")
, (["croatá"],"ce")
, (["cromínia"],"go")
, (["crucilândia"],"mg")
, (["cruz"],"ce")
, (["cruzália"],"sp")
, (["cruz","alta"],"rs")
, (["cruzaltense"],"rs")
, (["cruz","das","almas"],"ba")
, (["cruz","do","espírito","santo"],"pb")
, (["cruzeiro"],"sp")
, (["cruzeiro","da","fortaleza"],"mg")
, (["cruzeiro","do","iguaçu"],"pr")
, (["cruzeiro","do","oeste"],"pr")
, (["cruzeiro","do","sul"],"ac")
, (["cruzeiro","do","sul","(paraná)"],"pr")
, (["cruzeiro","do","sul"],"rs")
, (["cruzeta"],"rn")
, (["cruzília"],"mg")
, (["cruz","machado"],"pr")
, (["cruzmaltina"],"pr")
, (["cubatão"],"sp")
, (["cubati"],"pb")
, (["cuiabá"],"mt")
, (["cuité"],"pb")
, (["cuité","de","mamanguape"],"pb")
, (["cuitegi"],"pb")
, (["cujubim"],"ro")
, (["cumari"],"go")
, (["cumaru"],"pe")
, (["cumaru","do","norte"],"pa")
, (["cumbe"],"se")
, (["cunha"],"sp")
, (["cunha","porã"],"sc")
, (["cunhataí"],"sc")
, (["cuparaque"],"mg")
, (["cupira"],"pe")
, (["curaçá"],"ba")
, (["curimatá"],"pi")
, (["curionópolis"],"pa")
, (["curitibanos"],"sc")
, (["curitiba"],"pr")
, (["curiúva"],"pr")
, (["currais"],"pi")
, (["currais","novos"],"rn")
, (["curral","de","cima"],"pb")
, (["curral","de","dentro"],"mg")
, (["curralinho"],"pa")
, (["curralinhos"],"pi")
, (["curral","novo","do","piauí"],"pi")
, (["curral","velho"],"pb")
, (["curuá"],"pa")
, (["curuçá"],"pa")
, (["cururupu"],"ma")
, (["curvelândia"],"mt")
, (["curvelo"],"mg")
, (["custódia"],"pe")
, (["cutias"],"ap")
, (["damianópolis"],"go")
, (["damião"],"pb")
, (["damolândia"],"go")
, (["darcinópolis"],"to")
, (["dário","meira"],"ba")
, (["datas"],"mg")
, (["david","canabarro"],"rs")
, (["davinópolis"],"go")
, (["davinópolis"],"ma")
, (["delfim","moreira"],"mg")
, (["delfinópolis"],"mg")
, (["delmiro","gouveia"],"al")
, (["delta"],"mg")
, (["demerval","lobão"],"pi")
, (["denise"],"mt")
, (["deodápolis"],"ms")
, (["deputado","irapuan","pinheiro"],"ce")
, (["derrubadas"],"rs")
, (["descalvado"],"sp")
, (["descanso"],"sc")
, (["descoberto"],"mg")
, (["desterro"],"pb")
, (["desterro","de","entre","rios"],"mg")
, (["desterro","do","melo"],"mg")
, (["dezesseis","de","novembro"],"rs")
, (["diadema"],"sp")
, (["diamante"],"pb")
, (["diamante","do","norte"],"pr")
, (["diamante","d'oeste"],"pr")
, (["diamante","do","sul"],"pr")
, (["diamantina"],"mg")
, (["diamantino"],"mt")
, (["dianópolis"],"to")
, (["dias","d'ávila"],"ba")
, (["dilermando","de","aguiar"],"rs")
, (["diogo","de","vasconcelos"],"mg")
, (["dionísio"],"mg")
, (["dionísio","cerqueira"],"sc")
, (["diorama"],"go")
, (["dirce","reis"],"sp")
, (["dirceu","arcoverde"],"pi")
, (["divina","pastora"],"se")
, (["divinésia"],"mg")
, (["divino"],"mg")
, (["divino","das","laranjeiras"],"mg")
, (["divino","de","são","lourenço"],"es")
, (["divinolândia","de","minas"],"mg")
, (["divinolândia"],"sp")
, (["divinópolis"],"mg")
, (["divinópolis"],"to")
, (["divinópolis","de","goiás"],"go")
, (["divisa","alegre"],"mg")
, (["divisa","nova"],"mg")
, (["divisópolis"],"mg")
, (["dobrada"],"sp")
, (["dois","córregos"],"sp")
, (["dois","irmãos"],"rs")
, (["dois","irmãos","das","missões"],"rs")
, (["dois","irmãos","do","buriti"],"ms")
, (["dois","irmãos","do","tocantins"],"to")
, (["dois","lajeados"],"rs")
, (["dois","riachos"],"al")
, (["dois","vizinhos"],"pr")
, (["dolcinópolis"],"sp")
, (["dom","aquino"],"mt")
, (["dom","basílio"],"ba")
, (["dom","bosco"],"mg")
, (["dom","cavati"],"mg")
, (["dom","eliseu"],"pa")
, (["dom","expedito","lopes"],"pi")
, (["dom","feliciano"],"rs")
, (["domingos","martins"],"es")
, (["domingos","mourão"],"pi")
, (["dom","inocêncio"],"pi")
, (["dom","joaquim"],"mg")
, (["dom","macedo","costa"],"ba")
, (["dom","pedrito"],"rs")
, (["dom","pedro","de","alcântara"],"rs")
, (["dom","pedro"],"ma")
, (["dom","silvério"],"mg")
, (["dom","viçoso"],"mg")
, (["dona","emma"],"sc")
, (["dona","eusébia"],"mg")
, (["dona","francisca"],"rs")
, (["dona","inês"],"pb")
, (["dores","de","campos"],"mg")
, (["dores","de","guanhães"],"mg")
, (["dores","do","indaiá"],"mg")
, (["dores","do","rio","preto"],"es")
, (["dores","do","turvo"],"mg")
, (["doresópolis"],"mg")
, (["dormentes"],"pe")
, (["douradina"],"ms")
, (["douradina"],"pr")
, (["douradoquara"],"mg")
, (["dourado"],"sp")
, (["dourados"],"ms")
, (["doutor","camargo"],"pr")
, (["doutor","maurício","cardoso"],"rs")
, (["doutor","pedrinho"],"sc")
, (["doutor","ricardo"],"rs")
, (["doutor","severiano"],"rn")
, (["doutor","ulysses"],"pr")
, (["doverlândia"],"go")
, (["dracena"],"sp")
, (["duartina"],"sp")
, (["duas","barras"],"rj")
, (["duas","estradas"],"pb")
, (["dueré"],"to")
, (["dumont"],"sp")
, (["duque","bacelar"],"ma")
, (["duque","de","caxias"],"rj")
, (["durandé"],"mg")
, (["echaporã"],"sp")
, (["ecoporanga"],"es")
, (["edealina"],"go")
, (["edéia"],"go")
, (["eirunepé"],"am")
, (["eldorado"],"ms")
, (["eldorado"],"sp")
, (["eldorado","dos","carajás"],"pa")
, (["eldorado","do","sul"],"rs")
, (["elesbão","veloso"],"pi")
, (["elias","fausto"],"sp")
, (["eliseu","martins"],"pi")
, (["elisiário"],"sp")
, (["elísio","medrado"],"ba")
, (["elói","mendes"],"mg")
, (["emas"],"pb")
, (["embaúba"],"sp")
, (["embu"],"sp")
, (["embu","-","guaçu"],"sp")
, (["emilianópolis"],"sp")
, (["encantado"],"rs")
, (["encanto"],"rn")
, (["encruzilhada"],"ba")
, (["encruzilhada","do","sul"],"rs")
, (["enéas","marques"],"pr")
, (["engenheiro","beltrão"],"pr")
, (["engenheiro","caldas"],"mg")
, (["engenheiro","coelho"],"sp")
, (["engenheiro","navarro"],"mg")
, (["engenheiro","paulo","de","frontin"],"rj")
, (["engenho","velho"],"rs")
, (["entre","folhas"],"mg")
, (["entre","ijuís"],"rs")
, (["entre","rios"],"ba")
, (["entre","rios"],"sc")
, (["entre","rios","de","minas"],"mg")
, (["entre","rios","do","oeste"],"pr")
, (["entre","rios","do","sul"],"rs")
, (["envira"],"am")
, (["epitaciolândia"],"ac")
, (["equador"],"rn")
, (["erebango"],"rs")
, (["ererê"],"ce")
, (["erexim"],"rs")
, (["érico","cardoso"],"ba")
, (["ermo"],"sc")
, (["ernestina"],"rs")
, (["erval","grande"],"rs")
, (["ervália"],"mg")
, (["erval","seco"],"rs")
, (["erval","velho"],"sc")
, (["escada"],"pe")
, (["esmeralda"],"rs")
, (["esmeraldas"],"mg")
, (["espera","feliz"],"mg")
, (["esperança"],"pb")
, (["esperança","do","sul"],"rs")
, (["esperança","nova"],"pr")
, (["esperantina"],"pi")
, (["esperantina"],"to")
, (["esperantinópolis"],"ma")
, (["espigão","alto","do","iguaçu"],"pr")
, (["espigão","d'oeste"],"ro")
, (["espinosa"],"mg")
, (["espírito","santo"],"rn")
, (["espírito","santo","do","dourado"],"mg")
, (["espírito","santo","do","pinhal"],"sp")
, (["espírito","santo","do","turvo"],"sp")
, (["esplanada"],"ba")
, (["espumoso"],"rs")
, (["estação"],"rs")
, (["estância"],"se")
, (["estância","velha"],"rs")
, (["esteio"],"rs")
, (["estiva"],"mg")
, (["estiva","gerbi"],"sp")
, (["estreito"],"ma")
, (["estrela"],"rs")
, (["estrela","dalva"],"mg")
, (["estrela","de","alagoas"],"al")
, (["estrela","d'oeste"],"sp")
, (["estrela","do","indaiá"],"mg")
, (["estrela","do","norte"],"go")
, (["estrela","do","norte"],"sp")
, (["estrela","do","sul"],"mg")
, (["estrela","velha"],"rs")
, (["euclides","da","cunha"],"ba")
, (["euclides","da","cunha","paulista"],"sp")
, (["eugênio","de","castro"],"rs")
, (["eugenópolis"],"mg")
, (["eunápolis"],"ba")
, (["eusébio"],"ce")
, (["ewbank","da","câmara"],"mg")
, (["extrema"],"mg")
, (["extremoz"],"rn")
, (["exu"],"pe")
, (["fagundes"],"pb")
, (["fagundes","varela"],"rs")
, (["faina"],"go")
, (["fama"],"mg")
, (["faria","lemos"],"mg")
, (["farias","brito"],"ce")
, (["farol"],"pr")
, (["faro"],"pa")
, (["farroupilha"],"rs")
, (["fartura"],"sp")
, (["fartura","do","piauí"],"pi")
, (["fátima"],"ba")
, (["fátima"],"to")
, (["fátima","do","sul"],"ms")
, (["faxinal"],"pr")
, (["faxinal","dos","guedes"],"sc")
, (["faxinal","do","soturno"],"rs")
, (["faxinalzinho"],"rs")
, (["fazenda","nova"],"go")
, (["fazenda","rio","grande"],"pr")
, (["fazenda","vilanova"],"rs")
, (["feijó"],"ac")
, (["feira","da","mata"],"ba")
, (["feira","de","santana"],"ba")
, (["feira","grande"],"al")
, (["feira","nova","do","maranhão"],"ma")
, (["feira","nova"],"pe")
, (["feira","nova"],"se")
, (["felício","dos","santos"],"mg")
, (["felipe","guerra"],"rn")
, (["felisburgo"],"mg")
, (["felixlândia"],"mg")
, (["feliz","deserto"],"al")
, (["feliz","natal"],"mt")
, (["feliz"],"rs")
, (["fênix"],"pr")
, (["fernandes","pinheiro"],"pr")
, (["fernandes","tourinho"],"mg")
, (["fernando","de","noronha"],"pe")
, (["fernando","falcão"],"ma")
, (["fernando","pedroza"],"rn")
, (["fernandópolis"],"sp")
, (["fernando","prestes"],"sp")
, (["fernão"],"sp")
, (["ferraz","de","vasconcelos"],"sp")
, (["ferreira","gomes"],"ap")
, (["ferreiros"],"pe")
, (["ferros"],"mg")
, (["fervedouro"],"mg")
, (["figueira"],"pr")
, (["figueirão"],"ms")
, (["figueirópolis"],"to")
, (["figueirópolis","d'oeste"],"mt")
, (["filadélfia"],"ba")
, (["filadélfia"],"to")
, (["firmino","alves"],"ba")
, (["firminópolis"],"go")
, (["flexeiras"],"al")
, (["floraí"],"pr")
, (["florânia"],"rn")
, (["flora","rica"],"sp")
, (["flor","da","serra","do","sul"],"pr")
, (["flor","do","sertão"],"sc")
, (["floreal"],"sp")
, (["flores"],"pe")
, (["flores","da","cunha"],"rs")
, (["flores","de","goiás"],"go")
, (["flores","do","piauí"],"pi")
, (["floresta","azul"],"ba")
, (["floresta","do","araguaia"],"pa")
, (["floresta","do","piauí"],"pi")
, (["floresta"],"pe")
, (["floresta","(paraná)"],"pr")
, (["florestal"],"mg")
, (["florestópolis"],"pr")
, (["floriano"],"pi")
, (["floriano","peixoto"],"rs")
, (["florianópolis"],"sc")
, (["flórida"],"pr")
, (["flórida","paulista"],"sp")
, (["florínia"],"sp")
, (["fonte","nova"],"am")
, (["fontoura","xavier"],"rs")
, (["formiga"],"mg")
, (["formigueiro"],"rs")
, (["formosa"],"go")
, (["formosa","da","serra","negra"],"ma")
, (["formosa","do","oeste"],"pr")
, (["formosa","do","rio","preto"],"ba")
, (["formosa","do","sul"],"sc")
, (["formoso"],"go")
, (["formoso"],"mg")
, (["formoso","do","araguaia"],"to")
, (["forquetinha"],"rs")
, (["forquilha"],"ce")
, (["forquilhinha"],"sc")
, (["fortaleza"],"ce")
, (["fortaleza","de","minas"],"mg")
, (["fortaleza","dos","nogueiras"],"ma")
, (["fortaleza","dos","valos"],"rs")
, (["fortaleza","do","tabocão"],"to")
, (["fortim"],"ce")
, (["fortuna","de","minas"],"mg")
, (["fortuna"],"ma")
, (["foz","do","iguaçu"],"pr")
, (["foz","do","jordão"],"pr")
, (["fraiburgo"],"sc")
, (["franca"],"sp")
, (["francinópolis"],"pi")
, (["francisco","alves"],"pr")
, (["francisco","ayres"],"pi")
, (["francisco","badaró"],"mg")
, (["francisco","beltrão"],"pr")
, (["francisco","dantas"],"rn")
, (["francisco","dumont"],"mg")
, (["francisco","macedo"],"pi")
, (["francisco","morato"],"sp")
, (["franciscópolis"],"mg")
, (["francisco","sá"],"mg")
, (["francisco","santos"],"pi")
, (["franco","da","rocha"],"sp")
, (["frecheirinha"],"ce")
, (["frederico","westphalen"],"rs")
, (["frei","gaspar"],"mg")
, (["frei","inocêncio"],"mg")
, (["frei","lagonegro"],"mg")
, (["frei","martinho"],"pb")
, (["frei","miguelinho"],"pe")
, (["frei","paulo"],"se")
, (["frei","rogério"],"sc")
, (["fronteira"],"mg")
, (["fronteira","dos","vales"],"mg")
, (["fronteiras"],"pi")
, (["fruta","de","leite"],"mg")
, (["frutal"],"mg")
, (["frutuoso","gomes"],"rn")
, (["fundão"],"es")
, (["funilândia"],"mg")
, (["gabriel","monteiro"],"sp")
, (["gado","bravo"],"pb")
, (["gália"],"sp")
, (["galiléia"],"mg")
, (["galinhos"],"rn")
, (["galvão"],"sc")
, (["gameleira"],"pe")
, (["gameleira","de","goiás"],"go")
, (["gameleiras"],"mg")
, (["gandu"],"ba")
, (["garanhuns"],"pe")
, (["gararu"],"se")
, (["garça"],"sp")
, (["garibaldi"],"rs")
, (["garopaba"],"sc")
, (["garrafão","do","norte"],"pa")
, (["garruchos"],"rs")
, (["garuva"],"sc")
, (["gaspar"],"sc")
, (["gastão","vidigal"],"sp")
, (["gaúcha","do","norte"],"mt")
, (["gaurama"],"rs")
, (["gavião"],"ba")
, (["gavião","peixoto"],"sp")
, (["geminiano"],"pi")
, (["general","câmara"],"rs")
, (["general","carneiro"],"mt")
, (["general","carneiro","(paraná)"],"pr")
, (["general","maynard"],"se")
, (["general","salgado"],"sp")
, (["general","sampaio"],"ce")
, (["gentil"],"rs")
, (["gentio","do","ouro"],"ba")
, (["getulina"],"sp")
, (["getúlio","vargas"],"rs")
, (["gilbués"],"pi")
, (["girau","do","ponciano"],"al")
, (["giruá"],"rs")
, (["glaucilândia"],"mg")
, (["glicério"],"sp")
, (["glória"],"ba")
, (["glória","de","dourados"],"ms")
, (["glória","d'oeste"],"mt")
, (["glória","do","goitá"],"pe")
, (["glorinha"],"rs")
, (["godofredo","viana"],"ma")
, (["godoy","moreira"],"pr")
, (["goiabeira"],"mg")
, (["goiana"],"pe")
, (["goianá"],"mg")
, (["goianápolis"],"go")
, (["goiandira"],"go")
, (["goianésia"],"go")
, (["goianésia","do","pará"],"pa")
, (["goiânia"],"go")
, (["goianinha"],"rn")
, (["goianira"],"go")
, (["goianorte"],"to")
, (["goiás"],"go")
, (["goiás"],"ma")
, (["goiatins"],"to")
, (["goiatuba"],"go")
, (["goioerê"],"pr")
, (["goioxim"],"pr")
, (["gonçalves"],"mg")
, (["gonçalves","dias"],"ma")
, (["gongogi"],"ba")
, (["gonzaga"],"mg")
, (["gouvêia"],"mg")
, (["gouvelândia"],"go")
, (["governador","archer"],"ma")
, (["governador","celso","ramos"],"sc")
, (["governador","dix","-","sept","rosado"],"rn")
, (["governador","edison","lobão"],"ma")
, (["governador","eugênio","barros"],"ma")
, (["governador","jorge","teixeira"],"ro")
, (["governador","lindenberg"],"es")
, (["governador","lomanto","júnior"],"ba")
, (["governador","luiz","rocha"],"ma")
, (["governador","mangabeira"],"ba")
, (["governador","newton","bello"],"ma")
, (["governador","nunes","freire"],"ma")
, (["governador","valadares"],"mg")
, (["graça"],"ce")
, (["graça","aranha"],"ma")
, (["gracho","cardoso"],"se")
, (["grajaú"],"ma")
, (["gramado","dos","loureiros"],"rs")
, (["gramado"],"rs")
, (["gramado","xavier"],"rs")
, (["grandes","rios"],"pr")
, (["granito"],"pe")
, (["granja"],"ce")
, (["granjeiro"],"ce")
, (["grão","mogol"],"mg")
, (["grão","pará"],"sc")
, (["gravatá"],"pe")
, (["gravataí"],"rs")
, (["gravatal"],"sc")
, (["groaíras"],"ce")
, (["grossos"],"rn")
, (["grupiara"],"mg")
, (["guabiju"],"rs")
, (["guabiruba"],"sc")
, (["guaçuí"],"es")
, (["guadalupe"],"pi")
, (["guaíba"],"rs")
, (["guaiçara"],"sp")
, (["guaimbê"],"sp")
, (["guairaçá"],"pr")
, (["guaíra"],"pr")
, (["guaíra"],"sp")
, (["guaiúba"],"ce")
, (["guajará"],"am")
, (["guajará","-","mirim"],"ro")
, (["guajeru"],"ba")
, (["guamaré"],"rn")
, (["guamiranga"],"pr")
, (["guanambi"],"ba")
, (["guanhães"],"mg")
, (["guapé"],"mg")
, (["guapiaçu"],"sp")
, (["guapiara"],"sp")
, (["guapimirim"],"rj")
, (["guapirama"],"pr")
, (["guapó"],"go")
, (["guaporema"],"pr")
, (["guaporé"],"rs")
, (["guarabira"],"pb")
, (["guaraçaí"],"sp")
, (["guaraciaba"],"mg")
, (["guaraciaba"],"sc")
, (["guaraciaba","do","norte"],"ce")
, (["guaraciama"],"mg")
, (["guaraci"],"pr")
, (["guaraci"],"sp")
, (["guaraíta"],"go")
, (["guaraí"],"to")
, (["guaramiranga"],"ce")
, (["guaramirim"],"sc")
, (["guaranésia"],"mg")
, (["guaraniaçu"],"pr")
, (["guarani"],"mg")
, (["guarani","das","missões"],"rs")
, (["guarani","de","goiás"],"go")
, (["guarani","d'oeste"],"sp")
, (["guarantã"],"sp")
, (["guarantã","do","norte"],"mt")
, (["guarapari"],"es")
, (["guarapuava"],"pr")
, (["guaraqueçaba"],"pr")
, (["guarará"],"mg")
, (["guararapes"],"sp")
, (["guararema"],"sp")
, (["guará"],"sp")
, (["guaratinga"],"ba")
, (["guaratinguetá"],"sp")
, (["guaratuba"],"pr")
, (["guarda","-","mor"],"mg")
, (["guareí"],"sp")
, (["guariba"],"sp")
, (["guaribas"],"pi")
, (["guarinos"],"go")
, (["guarujá"],"sp")
, (["guarujá","do","sul"],"sc")
, (["guarulhos"],"sp")
, (["guatambu"],"sc")
, (["guatapará"],"sp")
, (["guaxupé"],"mg")
, (["guia","lopes","da","laguna"],"ms")
, (["guidoval"],"mg")
, (["guimarães"],"ma")
, (["guimarânia"],"mg")
, (["guiratinga"],"mt")
, (["guiricema"],"mg")
, (["gurinhatã"],"mg")
, (["gurinhém"],"pb")
, (["gurjão"],"pb")
, (["gurupá"],"pa")
, (["gurupi"],"to")
, (["guzolândia"],"sp")
, (["harmonia"],"rs")
, (["heitoraí"],"go")
, (["heliodora"],"mg")
, (["heliópolis"],"ba")
, (["herculândia"],"sp")
, (["herval","d'oeste"],"sc")
, (["herveiras"],"rs")
, (["hidrolândia"],"ce")
, (["hidrolândia"],"go")
, (["hidrolina"],"go")
, (["holambra"],"sp")
, (["honório","serpa"],"pr")
, (["horizonte"],"ce")
, (["horizontina"],"rs")
, (["hortolândia"],"sp")
, (["hugo","napoleão"],"pi")
, (["hulha","negra"],"rs")
, (["humaitá"],"am")
, (["humaitá"],"rs")
, (["humberto","de","campos"],"ma")
, (["iacanga"],"sp")
, (["iaciara"],"go")
, (["iacri"],"sp")
, (["iaçu"],"ba")
, (["iapu"],"mg")
, (["iaras"],"sp")
, (["iati"],"pe")
, (["ibaiti"],"pr")
, (["ibarama"],"rs")
, (["ibaretama"],"ce")
, (["ibateguara"],"al")
, (["ibaté"],"sp")
, (["ibatiba"],"es")
, (["ibema"],"pr")
, (["ibertioga"],"mg")
, (["ibiaçá"],"rs")
, (["ibiá"],"mg")
, (["ibiaí"],"mg")
, (["ibiam"],"sc")
, (["ibiapina"],"ce")
, (["ibiara"],"pb")
, (["ibiassucê"],"ba")
, (["ibicaraí"],"ba")
, (["ibicaré"],"sc")
, (["ibicoara"],"ba")
, (["ibicuí"],"ba")
, (["ibicuitinga"],"ce")
, (["ibimirim"],"pe")
, (["ibipeba"],"ba")
, (["ibipitanga"],"ba")
, (["ibiporã"],"pr")
, (["ibiquera"],"ba")
, (["ibiracatu"],"mg")
, (["ibiraci"],"mg")
, (["ibiraçu"],"es")
, (["ibiraiaras"],"rs")
, (["ibirajuba"],"pe")
, (["ibirama"],"sc")
, (["ibirapitanga"],"ba")
, (["ibirapuã"],"ba")
, (["ibirapuitã"],"rs")
, (["ibirarema"],"sp")
, (["ibirá"],"sp")
, (["ibirataia"],"ba")
, (["ibirité"],"mg")
, (["ibirubá"],"rs")
, (["ibitiara"],"ba")
, (["ibitinga"],"sp")
, (["ibitirama"],"es")
, (["ibititá"],"ba")
, (["ibitiúra","de","minas"],"mg")
, (["ibituruna"],"mg")
, (["ibiúna"],"sp")
, (["ibotirama"],"ba")
, (["icapuí"],"ce")
, (["icaraí","de","minas"],"mg")
, (["icaraíma"],"pr")
, (["içara"],"sc")
, (["icatu"],"ma")
, (["icém"],"sp")
, (["ichu"],"ba")
, (["icó"],"ce")
, (["iconha"],"es")
, (["ielmo","marinho"],"rn")
, (["iepê"],"sp")
, (["igaci"],"al")
, (["igaporá"],"ba")
, (["igaraçu","do","tietê"],"sp")
, (["igaraci"],"pb")
, (["igarapava"],"sp")
, (["igarapé","-","açu"],"pa")
, (["igarapé","do","meio"],"ma")
, (["igarapé","grande"],"ma")
, (["igarapé"],"mg")
, (["igarapé","-","mirim"],"pa")
, (["igarassu"],"pe")
, (["igaratá"],"sp")
, (["igaratinga"],"mg")
, (["igrapiúna"],"ba")
, (["igreja","nova"],"al")
, (["igrejinha"],"rs")
, (["iguaba","grande"],"rj")
, (["iguaí"],"ba")
, (["iguape"],"sp")
, (["iguaraci"],"pe")
, (["iguaraçu"],"pr")
, (["iguatama"],"mg")
, (["iguatemi"],"ms")
, (["iguatu"],"ce")
, (["iguatu"],"pr")
, (["ijaci"],"mg")
, (["ijuí"],"rs")
, (["ilhabela"],"sp")
, (["ilha","comprida"],"sp")
, (["ilha","das","flores"],"se")
, (["ilha","grande"],"pi")
, (["ilha","solteira"],"sp")
, (["ilhéus"],"ba")
, (["ilhota"],"sc")
, (["ilicínea"],"mg")
, (["ilópolis"],"rs")
, (["imaculada"],"pb")
, (["imaruí"],"sc")
, (["imbaú"],"pr")
, (["imbé","de","minas"],"mg")
, (["imbé"],"rs")
, (["imbituba"],"sc")
, (["imbituva"],"pr")
, (["imbuia"],"sc")
, (["imigrante"],"rs")
, (["imperatriz"],"ma")
, (["inaciolândia"],"go")
, (["inácio","martins"],"pr")
, (["inajá"],"pe")
, (["inajá"],"pr")
, (["inconfidentes"],"mg")
, (["indaiabira"],"mg")
, (["indaial"],"sc")
, (["indaiatuba"],"sp")
, (["independência","(ceará)"],"ce")
, (["independência"],"rs")
, (["indiana"],"sp")
, (["indianópolis"],"mg")
, (["indianópolis"],"pr")
, (["indiaporã"],"sp")
, (["indiara"],"go")
, (["indiaroba"],"se")
, (["indiavaí"],"mt")
, (["ingá"],"pb")
, (["ingaí"],"mg")
, (["ingazeira"],"pe")
, (["inhacorá"],"rs")
, (["inhambupe"],"ba")
, (["inhangapi"],"pa")
, (["inhapi"],"al")
, (["inhapim"],"mg")
, (["inhaúma"],"mg")
, (["inhuma"],"pi")
, (["inhumas"],"go")
, (["inimutaba"],"mg")
, (["inocência"],"ms")
, (["inúbia","paulista"],"sp")
, (["iomerê"],"sc")
, (["ipaba"],"mg")
, (["ipameri"],"go")
, (["ipanema"],"mg")
, (["ipanguaçu"],"rn")
, (["ipaporanga"],"ce")
, (["ipatinga"],"mg")
, (["ipaussu"],"sp")
, (["ipaumirim"],"ce")
, (["ipecaetá"],"ba")
, (["iperó"],"sp")
, (["ipê"],"rs")
, (["ipeúna"],"sp")
, (["ipiaçu"],"mg")
, (["ipiaú"],"ba")
, (["ipiguá"],"sp")
, (["ipirá"],"ba")
, (["ipiranga"],"pr")
, (["ipiranga","de","goiás"],"go")
, (["ipiranga","do","norte"],"mt")
, (["ipiranga","do","piauí"],"pi")
, (["ipiranga","do","sul"],"rs")
, (["ipira"],"sc")
, (["ipixuna"],"am")
, (["ipixuna","do","pará"],"pa")
, (["ipojuca"],"pe")
, (["iporá"],"go")
, (["iporã"],"pr")
, (["iporã","do","oeste"],"sc")
, (["iporanga"],"sp")
, (["ipu"],"ce")
, (["ipuã"],"sp")
, (["ipuaçu"],"sc")
, (["ipubi"],"pe")
, (["ipueira"],"rn")
, (["ipueiras"],"ce")
, (["ipueiras","(tocantins)"],"to")
, (["ipuiúna"],"mg")
, (["ipumirim"],"sc")
, (["ipupiara"],"ba")
, (["iracema"],"ce")
, (["iracema"],"rr")
, (["iracema","do","oeste"],"pr")
, (["iracemápolis"],"sp")
, (["iraceminha"],"sc")
, (["iraí","de","minas"],"mg")
, (["iraí"],"rs")
, (["irajuba"],"ba")
, (["iramaia"],"ba")
, (["iranduba"],"am")
, (["irani"],"sc")
, (["irapuã"],"sp")
, (["irapuru"],"sp")
, (["iraquara"],"ba")
, (["irará"],"ba")
, (["irati"],"pr")
, (["irati"],"sc")
, (["irauçuba"],"ce")
, (["irecê"],"ba")
, (["iretama"],"pr")
, (["irineópolis"],"sc")
, (["irituia"],"pa")
, (["irupi"],"es")
, (["isaías","coelho"],"pi")
, (["israelândia"],"go")
, (["itá"],"sc")
, (["itaara"],"rs")
, (["itabaiana"],"pb")
, (["itabaiana"],"se")
, (["itabaianinha"],"se")
, (["itabela"],"ba")
, (["itaberaba"],"ba")
, (["itaberá"],"sp")
, (["itaberaí"],"go")
, (["itabi"],"se")
, (["itabira"],"mg")
, (["itabirinha","de","mantena"],"mg")
, (["itabirito"],"mg")
, (["itaboraí"],"rj")
, (["itabuna"],"ba")
, (["itacajá"],"to")
, (["itacambira"],"mg")
, (["itacarambi"],"mg")
, (["itacaré"],"ba")
, (["itacoatiara"],"am")
, (["itacuruba"],"pe")
, (["itacurubi"],"rs")
, (["itaeté"],"ba")
, (["itagi"],"ba")
, (["itagibá"],"ba")
, (["itagimirim"],"ba")
, (["itaguaçu","da","bahia"],"ba")
, (["itaguaçu"],"es")
, (["itaguajé"],"pr")
, (["itaguaí"],"rj")
, (["itaguara"],"mg")
, (["itaguari"],"go")
, (["itaguaru"],"go")
, (["itaguatins"],"to")
, (["itaíba"],"pe")
, (["itaiçaba"],"ce")
, (["itainópolis"],"pi")
, (["itaiópolis"],"sc")
, (["itaipava","do","grajaú"],"ma")
, (["itaipé"],"mg")
, (["itaipulândia"],"pr")
, (["itaí"],"sp")
, (["itaitinga"],"ce")
, (["itaituba"],"pa")
, (["itajá"],"go")
, (["itajá"],"rn")
, (["itajaí"],"sc")
, (["itajobi"],"sp")
, (["itajubá"],"mg")
, (["itaju"],"sp")
, (["itaju","do","colônia"],"ba")
, (["itajuípe"],"ba")
, (["italva"],"rj")
, (["itamaracá"],"pe")
, (["itamaraju"],"ba")
, (["itamarandiba"],"mg")
, (["itamarati"],"am")
, (["itamarati","de","minas"],"mg")
, (["itamari"],"ba")
, (["itambacuri"],"mg")
, (["itambaracá"],"pr")
, (["itambé"],"ba")
, (["itambé"],"pe")
, (["itambé"],"pr")
, (["itambé","do","mato","dentro"],"mg")
, (["itamogi"],"mg")
, (["itamonte"],"mg")
, (["itanagra"],"ba")
, (["itanhaém"],"sp")
, (["itanhandu"],"mg")
, (["itanhangá"],"mt")
, (["itanhém"],"ba")
, (["itanhomi"],"mg")
, (["itaobim"],"mg")
, (["itaocara"],"rj")
, (["itaóca"],"sp")
, (["itapaci"],"go")
, (["itapagé"],"ce")
, (["itapagipe"],"mg")
, (["itaparica"],"ba")
, (["itapé"],"ba")
, (["itapebi"],"ba")
, (["itapecerica"],"mg")
, (["itapecerica","da","serra"],"sp")
, (["itapecuru","mirim"],"ma")
, (["itapejara","d'oeste"],"pr")
, (["itapema"],"sc")
, (["itapemirim"],"es")
, (["itaperuçu"],"pr")
, (["itaperuna"],"rj")
, (["itapetim"],"pe")
, (["itapetinga"],"ba")
, (["itapetininga"],"sp")
, (["itapeva"],"mg")
, (["itapeva"],"sp")
, (["itapevi"],"sp")
, (["itapicuru"],"ba")
, (["itapipoca"],"ce")
, (["itapiranga"],"am")
, (["itapiranga"],"sc")
, (["itapirapuã"],"go")
, (["itapirapuã","paulista"],"sp")
, (["itapira"],"sp")
, (["itapiratins"],"to")
, (["itapissuma"],"pe")
, (["itapitanga"],"ba")
, (["itapiúna"],"ce")
, (["itapoá"],"sc")
, (["itápolis"],"sp")
, (["itaporã"],"ms")
, (["itaporã","do","tocantins"],"to")
, (["itaporanga"],"pb")
, (["itaporanga"],"sp")
, (["itaporanga","d'ajuda"],"se")
, (["itapororoca"],"pb")
, (["itapuã","do","oeste"],"ro")
, (["itapuca"],"rs")
, (["itapuí"],"sp")
, (["itapuranga"],"go")
, (["itapura"],"sp")
, (["itaquaquecetuba"],"sp")
, (["itaquara"],"ba")
, (["itaquiraí"],"ms")
, (["itaqui"],"rs")
, (["itaquitinga"],"pe")
, (["itarana"],"es")
, (["itarantim"],"ba")
, (["itararé"],"sp")
, (["itarema"],"ce")
, (["itariri"],"sp")
, (["itarumã"],"go")
, (["itati"],"rs")
, (["itatiaia"],"rj")
, (["itatiaiuçu"],"mg")
, (["itatiba"],"sp")
, (["itatiba","do","sul"],"rs")
, (["itatim"],"ba")
, (["itatinga"],"sp")
, (["itatira"],"ce")
, (["itatuba"],"pb")
, (["itaú"],"rn")
, (["itaubal"],"ap")
, (["itaúba"],"mt")
, (["itauçu"],"go")
, (["itaú","de","minas"],"mg")
, (["itaueira"],"pi")
, (["itaúna"],"mg")
, (["itaúna","do","sul"],"pr")
, (["itaverava"],"mg")
, (["itinga"],"mg")
, (["itinga","do","maranhão"],"ma")
, (["itiquira"],"mt")
, (["itirapina"],"sp")
, (["itirapuã"],"sp")
, (["itiruçu"],"ba")
, (["itiúba"],"ba")
, (["itobi"],"sp")
, (["itororó"],"ba")
, (["itu"],"sp")
, (["ituaçu"],"ba")
, (["ituberá"],"ba")
, (["itueta"],"mg")
, (["ituiutaba"],"mg")
, (["itumbiara"],"go")
, (["itumirim"],"mg")
, (["itupeva"],"sp")
, (["itupiranga"],"pa")
, (["ituporanga"],"sc")
, (["iturama"],"mg")
, (["itutinga"],"mg")
, (["ituverava"],"sp")
, (["iuiú"],"ba")
, (["iúna"],"es")
, (["ivaí"],"pr")
, (["ivaiporã"],"pr")
, (["ivaté"],"pr")
, (["ivatuba"],"pr")
, (["ivinhema"],"ms")
, (["ivolândia"],"go")
, (["ivorá"],"rs")
, (["ivoti"],"rs")
, (["jaboatão","dos","guararapes"],"pe")
, (["jaborá"],"sc")
, (["jaborandi"],"ba")
, (["jaborandi"],"sp")
, (["jaboticaba"],"rs")
, (["jaboticabal"],"sp")
, (["jaboticatubas"],"mg")
, (["jaboti"],"pr")
, (["jaçanã"],"rn")
, (["jacaraci"],"ba")
, (["jacaraú"],"pb")
, (["jacareacanga"],"pa")
, (["jacaré","dos","homens"],"al")
, (["jacareí"],"sp")
, (["jacarezinho"],"pr")
, (["jaci"],"sp")
, (["jaciara"],"mt")
, (["jacinto"],"mg")
, (["jacinto","machado"],"sc")
, (["jacobina"],"ba")
, (["jacobina","do","piauí"],"pi")
, (["jacuí"],"mg")
, (["jacuípe"],"al")
, (["jacuizinho"],"rs")
, (["jacundá"],"pa")
, (["jacupiranga"],"sp")
, (["jacutinga"],"mg")
, (["jacutinga"],"rs")
, (["jaguapitã"],"pr")
, (["jaguaquara"],"ba")
, (["jaguaraçu"],"mg")
, (["jaguarão"],"rs")
, (["jaguarari"],"ba")
, (["jaguaré"],"es")
, (["jaguaretama"],"ce")
, (["jaguariaíva"],"pr")
, (["jaguaribara"],"ce")
, (["jaguaribe"],"ce")
, (["jaguaripe"],"ba")
, (["jaguari"],"rs")
, (["jaguariúna"],"sp")
, (["jaguaruana"],"ce")
, (["jaguaruna"],"sc")
, (["jaíba"],"mg")
, (["jaicós"],"pi")
, (["jales"],"sp")
, (["jambeiro"],"sp")
, (["jampruca"],"mg")
, (["janaúba"],"mg")
, (["jandaia"],"go")
, (["jandaia","do","sul"],"pr")
, (["jandaíra"],"ba")
, (["jandaíra"],"rn")
, (["jandira"],"sp")
, (["janduís"],"rn")
, (["jangada"],"mt")
, (["janiópolis"],"pr")
, (["januária"],"mg")
, (["januário","cicco"],"rn")
, (["japaraíba"],"mg")
, (["japaratinga"],"al")
, (["japaratuba"],"se")
, (["japeri"],"rj")
, (["japi"],"rn")
, (["japira"],"pr")
, (["japoatã"],"se")
, (["japonvar"],"mg")
, (["japurá"],"am")
, (["japurá"],"pr")
, (["japorã"],"ms")
, (["jaqueira"],"pe")
, (["jaquirana"],"rs")
, (["jaraguá"],"go")
, (["jaraguá","do","sul"],"sc")
, (["jaraguari"],"ms")
, (["jaramataia"],"al")
, (["jardim"],"ce")
, (["jardim"],"ms")
, (["jardim","alegre"],"pr")
, (["jardim","de","angicos"],"rn")
, (["jardim","de","piranhas"],"rn")
, (["jardim","do","mulato"],"pi")
, (["jardim","do","seridó"],"rn")
, (["jardim","olinda"],"pr")
, (["jardinópolis"],"sc")
, (["jardinópolis"],"sp")
, (["jari"],"rs")
, (["jarinu"],"sp")
, (["jaru"],"ro")
, (["jataí"],"go")
, (["jataizinho"],"pr")
, (["jataúba"],"pe")
, (["jateí"],"ms")
, (["jati"],"ce")
, (["jatobá"],"ma")
, (["jatobá"],"pe")
, (["jatobá","do","piauí"],"pi")
, (["jaú"],"sp")
, (["jaú","do","tocantins"],"to")
, (["jaupaci"],"go")
, (["jauru"],"mt")
, (["jeceaba"],"mg")
, (["jenipapo","de","minas"],"mg")
, (["jenipapo","dos","vieiras"],"ma")
, (["jequeri"],"mg")
, (["jequiá","da","praia"],"al")
, (["jequié"],"ba")
, (["jequitaí"],"mg")
, (["jequitibá"],"mg")
, (["jequitinhonha"],"mg")
, (["jeremoabo"],"ba")
, (["jericó"],"pb")
, (["jeriquara"],"sp")
, (["jerônimo","monteiro"],"es")
, (["jerumenha"],"pi")
, (["jesuânia"],"mg")
, (["jesuítas"],"pr")
, (["jesúpolis"],"go")
, (["jijoca","de","jericoacoara"],"ce")
, (["ji","-","paraná"],"ro")
, (["jiquiriçá"],"ba")
, (["jitaúna"],"ba")
, (["joaçaba"],"sc")
, (["joaíma"],"mg")
, (["joanésia"],"mg")
, (["joanópolis"],"sp")
, (["joão","alfredo"],"pe")
, (["joão","câmara"],"rn")
, (["joão","costa"],"pi")
, (["joão","dias"],"rn")
, (["joão","dourado"],"ba")
, (["joão","lisboa"],"ma")
, (["joão","monlevade"],"mg")
, (["joão","neiva"],"es")
, (["joão","pessoa"],"pb")
, (["joão","pinheiro"],"mg")
, (["joão","ramalho"],"sp")
, (["joaquim","felício"],"mg")
, (["joaquim","gomes"],"al")
, (["joaquim","nabuco"],"pe")
, (["joaquim","pires"],"pi")
, (["joaquim","távora"],"pr")
, (["joca","marques"],"pi")
, (["jóia"],"rs")
, (["joinville"],"sc")
, (["jordânia"],"mg")
, (["jordão"],"ac")
, (["josé","boiteux"],"sc")
, (["josé","bonifácio"],"sp")
, (["josé","da","penha"],"rn")
, (["josé","de","freitas"],"pi")
, (["josé","gonçalves","de","minas"],"mg")
, (["joselândia"],"ma")
, (["josenópolis"],"mg")
, (["josé","raydan"],"mg")
, (["joviânia"],"go")
, (["juara"],"mt")
, (["juarez","távora"],"pb")
, (["juarina"],"to")
, (["juatuba"],"mg")
, (["juazeirinho"],"pb")
, (["juazeiro"],"ba")
, (["juazeiro","do","norte"],"ce")
, (["juazeiro","do","piauí"],"pi")
, (["jucás"],"ce")
, (["jucati"],"pe")
, (["jucuruçu"],"ba")
, (["jucurutu"],"rn")
, (["juína"],"mt")
, (["juiz","de","fora"],"mg")
, (["júlio","borges"],"pi")
, (["júlio","de","castilhos"],"rs")
, (["júlio","mesquita"],"sp")
, (["jumirim"],"sp")
, (["junco","do","maranhão"],"ma")
, (["junco","do","seridó"],"pb")
, (["jundiá"],"al")
, (["jundiaí"],"sp")
, (["jundiá"],"rn")
, (["jundiaí","do","sul"],"pr")
, (["junqueiro"],"al")
, (["junqueirópolis"],"sp")
, (["jupi"],"pe")
, (["jupiá"],"sc")
, (["juquiá"],"sp")
, (["juquitiba"],"sp")
, (["juramento"],"mg")
, (["juranda"],"pr")
, (["jurema"],"pe")
, (["jurema"],"pi")
, (["juripiranga"],"pb")
, (["juru"],"pb")
, (["juruá"],"am")
, (["juruaia"],"mg")
, (["juruena"],"mt")
, (["juruti"],"pa")
, (["juscimeira"],"mt")
, (["jussara"],"ba")
, (["jussara"],"go")
, (["jussara"],"pr")
, (["jussari"],"ba")
, (["jussiape"],"ba")
, (["jutaí"],"am")
, (["juti"],"ms")
, (["juvenília"],"mg")
, (["kaloré"],"pr")
, (["lábrea"],"am")
, (["lacerdópolis"],"sc")
, (["ladainha"],"mg")
, (["ladário"],"ms")
, (["lafaiete","coutinho"],"ba")
, (["lagamar"],"mg")
, (["lagarto"],"se")
, (["lages"],"sc")
, (["lagoa"],"pb")
, (["lagoa","alegre"],"pi")
, (["lagoa","bonita","do","sul"],"rs")
, (["lagoa","da","canoa"],"al")
, (["lagoa","da","confusão"],"to")
, (["lagoa","d'anta"],"rn")
, (["lagoa","da","prata"],"mg")
, (["lagoa","de","dentro"],"pb")
, (["lagoa","do","itaenga"],"pe")
, (["lagoa","de","pedras"],"rn")
, (["lagoa","de","são","francisco"],"pi")
, (["lagoa","de","velhos"],"rn")
, (["lagoa","do","barro","do","piauí"],"pi")
, (["lagoa","do","carro"],"pe")
, (["lagoa","do","mato"],"ma")
, (["lagoa","do","ouro"],"pe")
, (["lagoa","do","piauí"],"pi")
, (["lagoa","dos","gatos"],"pe")
, (["lagoa","do","sítio"],"pi")
, (["lagoa","dos","patos"],"mg")
, (["lagoa","dos","três","cantos"],"rs")
, (["lagoa","do","tocantins"],"to")
, (["lagoa","dourada"],"mg")
, (["lagoa","formosa"],"mg")
, (["lagoa","grande","do","maranhão"],"ma")
, (["lagoa","grande"],"mg")
, (["lagoa","grande"],"pe")
, (["lagoa","nova"],"rn")
, (["lagoão"],"rs")
, (["lagoa","real"],"ba")
, (["lagoa","salgada"],"rn")
, (["lagoa","santa"],"go")
, (["lagoa","santa"],"mg")
, (["lagoa","seca"],"pb")
, (["lagoa","vermelha"],"rs")
, (["lago","da","pedra"],"ma")
, (["lago","do","junco"],"ma")
, (["lago","dos","rodrigues"],"ma")
, (["lagoinha"],"sp")
, (["lagoinha","do","piauí"],"pi")
, (["lago","verde"],"ma")
, (["laguna","carapã"],"ms")
, (["laguna"],"sc")
, (["laje"],"ba")
, (["lajeado"],"rs")
, (["lajeado"],"to")
, (["lajeado","do","bugre"],"rs")
, (["lajeado","grande"],"sc")
, (["lajeado","novo"],"ma")
, (["lajedão"],"ba")
, (["lajedinho"],"ba")
, (["lajedo"],"pe")
, (["lajedo","do","tabocal"],"ba")
, (["laje","do","muriaé"],"rj")
, (["lajes"],"rn")
, (["lajes","pintadas"],"rn")
, (["lajinha"],"mg")
, (["lamarão"],"ba")
, (["lambari"],"mg")
, (["lambari","d'oeste"],"mt")
, (["lamim"],"mg")
, (["landri","sales"],"pi")
, (["lapa"],"pr")
, (["lapão"],"ba")
, (["laranja","da","terra"],"es")
, (["laranjal"],"mg")
, (["laranjal"],"pr")
, (["laranjal","do","jari"],"ap")
, (["laranjal","paulista"],"sp")
, (["laranjeiras"],"se")
, (["laranjeiras","do","sul"],"pr")
, (["lassance"],"mg")
, (["lastro"],"pb")
, (["laurentino"],"sc")
, (["lauro","de","freitas"],"ba")
, (["lauro","müller"],"sc")
, (["lavandeira"],"to")
, (["lavínia"],"sp")
, (["lavras"],"mg")
, (["lavras","da","mangabeira"],"ce")
, (["lavras","do","sul"],"rs")
, (["lavrinhas"],"sp")
, (["leandro","ferreira"],"mg")
, (["lebon","régis"],"sc")
, (["leme"],"sp")
, (["leme","do","prado"],"mg")
, (["lençóis"],"ba")
, (["lençóis","paulista"],"sp")
, (["leoberto","leal"],"sc")
, (["leopoldina"],"mg")
, (["leopoldo","de","bulhões"],"go")
, (["leópolis"],"pr")
, (["liberato","salzano"],"rs")
, (["liberdade"],"mg")
, (["licínio","de","almeida"],"ba")
, (["lidianópolis"],"pr")
, (["lima","campos"],"ma")
, (["lima","duarte"],"mg")
, (["limeira"],"sp")
, (["limeira","do","oeste"],"mg")
, (["limoeiro"],"pe")
, (["limoeiro","de","anadia"],"al")
, (["limoeiro","do","ajuru"],"pa")
, (["limoeiro","do","norte"],"ce")
, (["lindoeste"],"pr")
, (["lindóia"],"sp")
, (["lindóia","do","sul"],"sc")
, (["lindolfo","collor"],"rs")
, (["linha","nova"],"rs")
, (["linhares"],"es")
, (["lins"],"sp")
, (["livramento"],"pb")
, (["livramento","de","nossa","senhora"],"ba")
, (["lizarda"],"to")
, (["loanda"],"pr")
, (["lobato"],"pr")
, (["logradouro"],"pb")
, (["londrina"],"pr")
, (["lontra"],"mg")
, (["lontras"],"sc")
, (["lorena"],"sp")
, (["loreto"],"ma")
, (["lourdes"],"sp")
, (["louveira"],"sp")
, (["lucas","do","rio","verde"],"mt")
, (["lucélia"],"sp")
, (["lucena"],"pb")
, (["lucianópolis"],"sp")
, (["luciara"],"mt")
, (["lucrécia"],"rn")
, (["luís","antônio"],"sp")
, (["luisburgo"],"mg")
, (["luís","alves"],"sc")
, (["luís","correia"],"pi")
, (["luís","domingues"],"ma")
, (["luís","eduardo","magalhães"],"ba")
, (["luís","gomes"],"rn")
, (["luisiana"],"pr")
, (["luisiânia"],"sp")
, (["luislândia"],"mg")
, (["luminárias"],"mg")
, (["lunardelli"],"pr")
, (["lupércio"],"sp")
, (["lupionópolis"],"pr")
, (["lutécia"],"sp")
, (["luz"],"mg")
, (["luzerna"],"sc")
, (["luziânia"],"go")
, (["luzilândia"],"pi")
, (["luzinópolis"],"to")
, (["macaé"],"rj")
, (["macaíba"],"rn")
, (["macajuba"],"ba")
, (["maçambara"],"rs")
, (["macambira"],"se")
, (["macapá"],"ap")
, (["macaparana"],"pe")
, (["macarani"],"ba")
, (["macatuba"],"sp")
, (["macau"],"rn")
, (["macaubal"],"sp")
, (["macaúbas"],"ba")
, (["macedônia"],"sp")
, (["maceió"],"al")
, (["machacalis"],"mg")
, (["machadinho"],"rs")
, (["machadinho","d'oeste"],"ro")
, (["machado"],"mg")
, (["machados"],"pe")
, (["macieira"],"sc")
, (["macuco"],"rj")
, (["macururé"],"ba")
, (["madalena"],"ce")
, (["madeiro"],"pi")
, (["madre","de","deus"],"ba")
, (["madre","de","deus","de","minas"],"mg")
, (["mãe","d'água"],"pb")
, (["mãe","do","rio"],"pa")
, (["maetinga"],"ba")
, (["mafra"],"sc")
, (["magalhães","barata"],"pa")
, (["magalhães","de","almeida"],"ma")
, (["magda"],"sp")
, (["magé"],"rj")
, (["maiquinique"],"ba")
, (["mairi"],"ba")
, (["mairinque"],"sp")
, (["mairiporã"],"sp")
, (["mairipotaba"],"go")
, (["major","gercino"],"sc")
, (["major","isidoro"],"al")
, (["major","sales"],"rn")
, (["major","vieira"],"sc")
, (["malacacheta"],"mg")
, (["malhada"],"ba")
, (["malhada","de","pedras"],"ba")
, (["malhada","dos","bois"],"se")
, (["malhador"],"se")
, (["mallet"],"pr")
, (["malta"],"pb")
, (["mamanguape"],"pb")
, (["mambaí"],"go")
, (["mamborê"],"pr")
, (["mamonas"],"mg")
, (["mampituba"],"rs")
, (["manacapuru"],"am")
, (["manaíra"],"pb")
, (["manaquiri"],"am")
, (["manari"],"pe")
, (["manaus"],"am")
, (["mâncio","lima"],"ac")
, (["mandaguaçu"],"pr")
, (["mandaguari"],"pr")
, (["mandirituba"],"pr")
, (["manduri"],"sp")
, (["manfrinópolis"],"pr")
, (["manga"],"mg")
, (["mangaratiba"],"rj")
, (["mangueirinha"],"pr")
, (["manhuaçu"],"mg")
, (["manhumirim"],"mg")
, (["manicoré"],"am")
, (["manoel","emídio"],"pi")
, (["manoel","ribas"],"pr")
, (["manoel","urbano"],"ac")
, (["manoel","viana"],"rs")
, (["manoel","vitorino"],"ba")
, (["mansidão"],"ba")
, (["mantena"],"mg")
, (["mantenópolis"],"es")
, (["maquiné"],"rs")
, (["maraã"],"am")
, (["marabá"],"pa")
, (["marabá","paulista"],"sp")
, (["maracaçumé"],"ma")
, (["maracaí"],"sp")
, (["maracajá"],"sc")
, (["maracaju"],"ms")
, (["maracanã"],"pa")
, (["maracanaú"],"ce")
, (["maracás"],"ba")
, (["maragogi"],"al")
, (["maragogipe"],"ba")
, (["maraial"],"pe")
, (["marajá","do","sena"],"ma")
, (["maranguape"],"ce")
, (["maranhãozinho"],"ma")
, (["marapanim"],"pa")
, (["marapoama"],"sp")
, (["mara","rosa"],"go")
, (["marataízes"],"es")
, (["maratá"],"rs")
, (["maraú"],"ba")
, (["marau"],"rs")
, (["maravilha"],"al")
, (["maravilha"],"sc")
, (["maravilhas"],"mg")
, (["marcação"],"pb")
, (["marcelândia"],"mt")
, (["marcelino","ramos"],"rs")
, (["marcelino","vieira"],"rn")
, (["marcionílio","souza"],"ba")
, (["marco"],"ce")
, (["marcolândia"],"pi")
, (["marcos","parente"],"pi")
, (["mar","de","espanha"],"mg")
, (["marechal","cândido","rondon"],"pr")
, (["marechal","deodoro"],"al")
, (["marechal","floriano"],"es")
, (["marechal","thaumaturgo"],"ac")
, (["marema"],"sc")
, (["mari"],"pb")
, (["maria","da","fé"],"mg")
, (["maria","helena"],"pr")
, (["marialva"],"pr")
, (["mariana"],"mg")
, (["mariana","pimentel"],"rs")
, (["mariano","moro"],"rs")
, (["marianópolis","do","tocantins"],"to")
, (["mariápolis"],"sp")
, (["maribondo"],"al")
, (["maricá"],"rj")
, (["marilac"],"mg")
, (["marilândia"],"es")
, (["marilândia","do","sul"],"pr")
, (["marilena"],"pr")
, (["marília"],"sp")
, (["mariluz"],"pr")
, (["maringá"],"pr")
, (["marinópolis"],"sp")
, (["mário","campos"],"mg")
, (["mariópolis"],"pr")
, (["maripá"],"pr")
, (["maripá","de","minas"],"mg")
, (["marituba"],"pa")
, (["marizópolis"],"pb")
, (["marliéria"],"mg")
, (["marmeleiro"],"pr")
, (["marmelópolis"],"mg")
, (["marques","de","souza"],"rs")
, (["marquinho"],"pr")
, (["martinho","campos"],"mg")
, (["martinópole"],"ce")
, (["martinópolis"],"sp")
, (["martins"],"rn")
, (["martins","soares"],"mg")
, (["maruim"],"se")
, (["marumbi"],"pr")
, (["mar","vermelho"],"al")
, (["marzagão"],"go")
, (["mascote"],"ba")
, (["massapê"],"ce")
, (["massapê","do","piauí"],"pi")
, (["massaranduba"],"pb")
, (["massaranduba"],"sc")
, (["mata"],"rs")
, (["mata","de","são","joão"],"ba")
, (["mata","grande"],"al")
, (["matão"],"sp")
, (["mataraca"],"pb")
, (["mata","roma"],"ma")
, (["mata","verde"],"mg")
, (["mateiros"],"to")
, (["matelândia"],"pr")
, (["materlândia"],"mg")
, (["mateus","leme"],"mg")
, (["matias","barbosa"],"mg")
, (["matias","cardoso"],"mg")
, (["matias","lobato"],"mg")
, (["matias","olímpio"],"pi")
, (["matina"],"ba")
, (["matinha"],"ma")
, (["matinhas"],"pb")
, (["matinhos"],"pr")
, (["matipó"],"mg")
, (["mato","castelhano"],"rs")
, (["matões"],"ma")
, (["matões","do","norte"],"ma")
, (["mato","grosso"],"pb")
, (["mato","leitão"],"rs")
, (["mato","queimado"],"rs")
, (["mato","rico"],"pr")
, (["matos","costa"],"sc")
, (["mato","verde"],"mg")
, (["matozinhos"],"mg")
, (["matrinchã"],"go")
, (["matriz","de","camaragibe"],"al")
, (["matupá"],"mt")
, (["maturéia"],"pb")
, (["matutina"],"mg")
, (["mauá"],"sp")
, (["mauá","da","serra"],"pr")
, (["maués"],"am")
, (["maurilândia"],"go")
, (["maurilândia","do","tocantins"],"to")
, (["mauriti"],"ce")
, (["maxaranguape"],"rn")
, (["maximiliano","de","almeida"],"rs")
, (["mazagão"],"ap")
, (["medeiros"],"mg")
, (["medeiros","neto"],"ba")
, (["medianeira"],"pr")
, (["medicilândia"],"pa")
, (["medina"],"mg")
, (["meleiro"],"sc")
, (["melgaço"],"pa")
, (["mendes"],"rj")
, (["mendes","pimentel"],"mg")
, (["mendonça"],"sp")
, (["mercedes"],"pr")
, (["mercês"],"mg")
, (["meridiano"],"sp")
, (["meruoca"],"ce")
, (["mesópolis"],"sp")
, (["mesquita"],"mg")
, (["mesquita"],"rj")
, (["messias"],"al")
, (["messias","targino"],"rn")
, (["miguel","alves"],"pi")
, (["miguel","calmon"],"ba")
, (["miguel","leão"],"pi")
, (["miguelópolis"],"sp")
, (["miguel","pereira"],"rj")
, (["milagres"],"ba")
, (["milagres","(ceará)"],"ce")
, (["milagres","do","maranhão"],"ma")
, (["milhã"],"ce")
, (["milton","brandão"],"pi")
, (["mimoso","de","goiás"],"go")
, (["mimoso","do","sul"],"es")
, (["minaçu"],"go")
, (["minador","do","negrão"],"al")
, (["minas","do","leão"],"rs")
, (["minas","novas"],"mg")
, (["minduri"],"mg")
, (["mineiros"],"go")
, (["mineiros","do","tietê"],"sp")
, (["ministro","andreazza"],"ro")
, (["mirabela"],"mg")
, (["miracatu"],"sp")
, (["miracema"],"rj")
, (["miracema","do","tocantins"],"to")
, (["mirador"],"ma")
, (["mirador"],"pr")
, (["miradouro"],"mg")
, (["mira","estrela"],"sp")
, (["miraguaí"],"rs")
, (["miraí"],"mg")
, (["miraíma"],"ce")
, (["miranda"],"ms")
, (["miranda","do","norte"],"ma")
, (["mirandiba"],"pe")
, (["mirandópolis"],"sp")
, (["mirangaba"],"ba")
, (["miranorte"],"to")
, (["mirante"],"ba")
, (["mirante","da","serra"],"ro")
, (["mirante","do","paranapanema"],"sp")
, (["miraselva"],"pr")
, (["mirassol"],"sp")
, (["mirassolândia"],"sp")
, (["mirassol","d'oeste"],"mt")
, (["miravânia"],"mg")
, (["mirim","doce"],"sc")
, (["mirinzal"],"ma")
, (["missal"],"pr")
, (["missão","velha"],"ce")
, (["mocajuba"],"pa")
, (["mococa"],"sp")
, (["modelo"],"sc")
, (["moeda"],"mg")
, (["moema"],"mg")
, (["mogeiro"],"pb")
, (["mogi","das","cruzes"],"sp")
, (["mogi","guaçu"],"sp")
, (["mogi","mirim"],"sp")
, (["moiporá"],"go")
, (["moita","bonita"],"se")
, (["moju"],"pa")
, (["mombaça"],"ce")
, (["mombuca"],"sp")
, (["monção"],"ma")
, (["monções"],"sp")
, (["mondaí"],"sc")
, (["mongaguá"],"sp")
, (["monjolos"],"mg")
, (["monsenhor","gil"],"pi")
, (["monsenhor","hipólito"],"pi")
, (["monsenhor","paulo"],"mg")
, (["monsenhor","tabosa"],"ce")
, (["montadas"],"pb")
, (["montalvânia"],"mg")
, (["montanha"],"es")
, (["montanhas"],"rn")
, (["montauri"],"rs")
, (["monte","alegre"],"pa")
, (["monte","alegre"],"rn")
, (["monte","alegre","de","goiás"],"go")
, (["monte","alegre","de","minas"],"mg")
, (["monte","alegre","de","sergipe"],"se")
, (["monte","alegre","do","piauí"],"pi")
, (["monte","alegre","dos","campos"],"rs")
, (["monte","alegre","do","sul"],"sp")
, (["monte","alto"],"sp")
, (["monte","aprazível"],"sp")
, (["monte","azul"],"mg")
, (["monte","azul","paulista"],"sp")
, (["monte","belo"],"mg")
, (["monte","belo","do","sul"],"rs")
, (["monte","carlo"],"sc")
, (["monte","carmelo"],"mg")
, (["monte","castelo"],"sc")
, (["monte","castelo"],"sp")
, (["monte","das","gameleiras"],"rn")
, (["monte","do","carmo"],"to")
, (["monte","formoso"],"mg")
, (["monte","horebe"],"pb")
, (["monteiro"],"pb")
, (["monteiro","lobato"],"sp")
, (["monteirópolis"],"al")
, (["monte","mor"],"sp")
, (["monte","negro"],"ro")
, (["montenegro"],"rs")
, (["montes","altos"],"ma")
, (["monte","santo"],"ba")
, (["monte","santo"],"to")
, (["monte","santo","de","minas"],"mg")
, (["montes","claros"],"mg")
, (["montes","claros","de","goiás"],"go")
, (["monte","sião"],"mg")
, (["montezuma"],"mg")
, (["montividiu"],"go")
, (["montividiu","do","norte"],"go")
, (["morada","nova"],"ce")
, (["morada","nova","de","minas"],"mg")
, (["moraújo"],"ce")
, (["moreilândia"],"pe")
, (["moreira","sales"],"pr")
, (["moreno"],"pe")
, (["mormaço"],"rs")
, (["morpará"],"ba")
, (["morretes"],"pr")
, (["morrinhos"],"ce")
, (["morrinhos"],"go")
, (["morrinhos","do","sul"],"rs")
, (["morro","agudo"],"sp")
, (["morro","agudo","de","goiás"],"go")
, (["morro","cabeça","no","tempo"],"pi")
, (["morro","da","fumaça"],"sc")
, (["morro","da","garça"],"mg")
, (["morro","do","chapéu"],"ba")
, (["morro","do","chapéu","do","piauí"],"pi")
, (["morro","do","pilar"],"mg")
, (["morro","grande"],"sc")
, (["morro","redondo"],"rs")
, (["morro","reuter"],"rs")
, (["morros"],"ma")
, (["mortugaba"],"ba")
, (["morungaba"],"sp")
, (["mossâmedes"],"go")
, (["mossoró"],"rn")
, (["mostardas"],"rs")
, (["motuca"],"sp")
, (["mozarlândia"],"go")
, (["muaná"],"pa")
, (["mucajaí"],"rr")
, (["mucambo"],"ce")
, (["mucugê"],"ba")
, (["muçum"],"rs")
, (["mucuri"],"ba")
, (["mucurici"],"es")
, (["muitos","capões"],"rs")
, (["muliterno"],"rs")
, (["mulungu"],"ce")
, (["mulungu"],"pb")
, (["mulungu","do","morro"],"ba")
, (["mundo","novo"],"ba")
, (["mundo","novo"],"go")
, (["mundo","novo"],"ms")
, (["munhoz"],"mg")
, (["munhoz","de","melo"],"pr")
, (["muniz","ferreira"],"ba")
, (["muniz","freire"],"es")
, (["muquém","de","são","francisco"],"ba")
, (["muqui"],"es")
, (["muriaé"],"mg")
, (["muribeca"],"se")
, (["murici"],"al")
, (["murici","dos","portelas"],"pi")
, (["muricilândia"],"to")
, (["muritiba"],"ba")
, (["murutinga","do","sul"],"sp")
, (["mutuípe"],"ba")
, (["mutum"],"mg")
, (["mutunópolis"],"go")
, (["muzambinho"],"mg")
, (["nacip","raydan"],"mg")
, (["nantes"],"sp")
, (["nanuque"],"mg")
, (["não","-","me-toque"],"rs")
, (["naque"],"mg")
, (["narandiba"],"sp")
, (["natal"],"rn")
, (["natalândia"],"mg")
, (["natércia"],"mg")
, (["natividade"],"rj")
, (["natividade"],"to")
, (["natividade","da","serra"],"sp")
, (["natuba"],"pb")
, (["navegantes"],"sc")
, (["naviraí"],"ms")
, (["nazaré"],"ba")
, (["nazaré"],"to")
, (["nazaré","da","mata"],"pe")
, (["nazaré","do","piauí"],"pi")
, (["nazareno"],"mg")
, (["nazaré","paulista"],"sp")
, (["nazarezinho"],"pb")
, (["nazário"],"go")
, (["neópolis"],"se")
, (["nepomuceno"],"mg")
, (["nerópolis"],"go")
, (["neves","paulista"],"sp")
, (["nhamundá"],"am")
, (["nhandeara"],"sp")
, (["nicolau","vergueiro"],"rs")
, (["nilo","peçanha"],"ba")
, (["nilópolis"],"rj")
, (["nina","rodrigues"],"ma")
, (["ninheira"],"mg")
, (["nioaque"],"ms")
, (["nipoã"],"sp")
, (["niquelândia"],"go")
, (["nísia","floresta"],"rn")
, (["niterói"],"rj")
, (["nobres"],"mt")
, (["nonoai"],"rs")
, (["nordestina"],"ba")
, (["normandia"],"rr")
, (["nortelândia"],"mt")
, (["nossa","senhora","aparecida"],"se")
, (["nossa","senhora","da","glória"],"se")
, (["nossa","senhora","das","dores"],"se")
, (["nossa","senhora","das","graças"],"pr")
, (["nossa","senhora","de","lourdes"],"se")
, (["nossa","senhora","de","nazaré"],"pi")
, (["nossa","senhora","do","livramento"],"mt")
, (["nossa","senhora","do","socorro"],"se")
, (["nossa","senhora","dos","remédios"],"pi")
, (["nova","aliança"],"sp")
, (["nova","aliança","do","ivaí"],"pr")
, (["nova","alvorada"],"rs")
, (["nova","alvorada","do","sul"],"ms")
, (["nova","américa"],"go")
, (["nova","américa","da","colina"],"pr")
, (["nova","andradina"],"ms")
, (["nova","araçá"],"rs")
, (["nova","aurora"],"go")
, (["nova","aurora"],"pr")
, (["nova","bandeirantes"],"mt")
, (["nova","bassano"],"rs")
, (["nova","belém"],"mg")
, (["nova","boa","vista"],"rs")
, (["nova","brasilândia"],"mt")
, (["nova","brasilândia","d'oeste"],"ro")
, (["nova","bréscia"],"rs")
, (["nova","campina"],"sp")
, (["nova","canaã"],"ba")
, (["nova","canaã","do","norte"],"mt")
, (["nova","canaã","paulista"],"sp")
, (["nova","candelária"],"rs")
, (["nova","cantu"],"pr")
, (["nova","castilho"],"sp")
, (["nova","colinas"],"ma")
, (["nova","crixás"],"go")
, (["nova","cruz"],"rn")
, (["nova","era"],"mg")
, (["nova","erexim"],"sc")
, (["nova","esperança"],"pr")
, (["nova","esperança","do","piriá"],"pa")
, (["nova","esperança","do","sudoeste"],"pr")
, (["nova","esperança","do","sul"],"rs")
, (["nova","europa"],"sp")
, (["nova","fátima"],"ba")
, (["nova","fátima"],"pr")
, (["nova","floresta"],"pb")
, (["nova","friburgo"],"rj")
, (["nova","glória"],"go")
, (["nova","granada"],"sp")
, (["nova","guarita"],"mt")
, (["nova","guataporanga"],"sp")
, (["nova","hartz"],"rs")
, (["nova","ibiá"],"ba")
, (["nova","iguaçu"],"rj")
, (["nova","iguaçu","de","goiás"],"go")
, (["nova","independência"],"sp")
, (["nova","iorque"],"ma")
, (["nova","ipixuna"],"pa")
, (["novais"],"sp")
, (["nova","itaberaba"],"sc")
, (["nova","itarana"],"ba")
, (["nova","lacerda"],"mt")
, (["nova","laranjeiras"],"pr")
, (["nova","lima"],"mg")
, (["nova","londrina"],"pr")
, (["nova","luzitânia"],"sp")
, (["nova","mamoré"],"ro")
, (["nova","marilândia"],"mt")
, (["nova","maringá"],"mt")
, (["nova","módica"],"mg")
, (["nova","monte","verde"],"mt")
, (["nova","mutum"],"mt")
, (["nova","nazaré"],"mt")
, (["nova","odessa"],"sp")
, (["nova","olímpia"],"mt")
, (["nova","olímpia"],"pr")
, (["nova","olinda"],"ce")
, (["nova","olinda"],"pb")
, (["nova","olinda"],"to")
, (["nova","olinda","do","maranhão"],"ma")
, (["nova","olinda","do","norte"],"am")
, (["nova","pádua"],"rs")
, (["nova","palma"],"rs")
, (["nova","palmeira"],"pb")
, (["nova","petrópolis"],"rs")
, (["nova","ponte"],"mg")
, (["nova","porteirinha"],"mg")
, (["nova","prata"],"rs")
, (["nova","prata","do","iguaçu"],"pr")
, (["nova","ramada"],"rs")
, (["nova","redenção"],"ba")
, (["nova","resende"],"mg")
, (["nova","roma"],"go")
, (["nova","roma","do","sul"],"rs")
, (["nova","rosalândia"],"to")
, (["nova","russas"],"ce")
, (["nova","santa","bárbara"],"pr")
, (["nova","santa","helena"],"mt")
, (["nova","santa","rita"],"pi")
, (["nova","santa","rita"],"rs")
, (["nova","santa","rosa"],"pr")
, (["nova","serrana"],"mg")
, (["nova","soure"],"ba")
, (["nova","tebas"],"pr")
, (["nova","timboteua"],"pa")
, (["nova","trento"],"sc")
, (["nova","ubiratã"],"mt")
, (["nova","união"],"mg")
, (["nova","união"],"ro")
, (["nova","venécia"],"es")
, (["nova","veneza"],"go")
, (["nova","veneza"],"sc")
, (["nova","viçosa"],"ba")
, (["nova","xavantina"],"mt")
, (["novo","acordo"],"to")
, (["novo","alegre"],"to")
, (["novo","aripuanã"],"am")
, (["novo","airão"],"am")
, (["novo","barreiro"],"rs")
, (["novo","brasil"],"go")
, (["novo","cabrais"],"rs")
, (["novo","cruzeiro"],"mg")
, (["novo","gama"],"go")
, (["novo","hamburgo"],"rs")
, (["novo","horizonte","(bahia)"],"ba")
, (["novo","horizonte"],"sc")
, (["novo","horizonte"],"sp")
, (["novo","horizonte","do","norte"],"mt")
, (["novo","horizonte","do","oeste"],"ro")
, (["novo","horizonte","do","sul"],"ms")
, (["novo","itacolomi"],"pr")
, (["novo","jardim"],"to")
, (["novo","lino"],"al")
, (["novo","machado"],"rs")
, (["novo","mundo"],"mt")
, (["novo","oriente"],"ce")
, (["novo","oriente","de","minas"],"mg")
, (["novo","oriente","do","piauí"],"pi")
, (["novo","planalto"],"go")
, (["novo","progresso"],"pa")
, (["novo","repartimento"],"pa")
, (["novorizonte"],"mg")
, (["novo","santo","antônio"],"mt")
, (["novo","santo","antônio"],"pi")
, (["novo","são","joaquim"],"mt")
, (["novo","tiradentes"],"rs")
, (["novo","triunfo"],"ba")
, (["novo","xingu"],"rs")
, (["nuporanga"],"sp")
, (["óbidos"],"pa")
, (["ocara"],"ce")
, (["ocauçu"],"sp")
, (["oeiras"],"pi")
, (["oeiras","do","pará"],"pa")
, (["oiapoque"],"ap")
, (["olaria"],"mg")
, (["óleo"],"sp")
, (["olho","d'água"],"pb")
, (["olhos","d'água"],"mg")
, (["olho","d'água","das","cunhãs"],"ma")
, (["olho","d'água","das","flores"],"al")
, (["olho","d'água","do","borges"],"rn")
, (["olho","d'água","do","casado"],"al")
, (["olho","d'água","do","piauí"],"pi")
, (["olho","d'água","grande"],"al")
, (["olímpia"],"sp")
, (["olímpio","noronha"],"mg")
, (["olinda"],"pe")
, (["olinda","nova","do","maranhão"],"ma")
, (["olindina"],"ba")
, (["olivedos"],"pb")
, (["oliveira"],"mg")
, (["oliveira","de","fátima"],"to")
, (["oliveira","dos","brejinhos"],"ba")
, (["oliveira","fortes"],"mg")
, (["olivença"],"al")
, (["onça","de","pitangui"],"mg")
, (["onda","verde"],"sp")
, (["oratórios"],"mg")
, (["oriente"],"sp")
, (["orindiúva"],"sp")
, (["oriximiná"],"pa")
, (["orizânia"],"mg")
, (["orizona"],"go")
, (["orlândia"],"sp")
, (["orleans"],"sc")
, (["orobó"],"pe")
, (["orocó"],"pe")
, (["orós"],"ce")
, (["ortigueira"],"pr")
, (["osasco"],"sp")
, (["oscar","bressane"],"sp")
, (["osório"],"rs")
, (["osvaldo","cruz"],"sp")
, (["otacílio","costa"],"sc")
, (["ourém"],"pa")
, (["ouriçangas"],"ba")
, (["ouricuri"],"pe")
, (["ourilândia","do","norte"],"pa")
, (["ourinhos"],"sp")
, (["ourizona"],"pr")
, (["ouro"],"sc")
, (["ouro","branco"],"al")
, (["ouro","branco"],"mg")
, (["ouro","branco"],"rn")
, (["ouroeste"],"sp")
, (["ouro","fino"],"mg")
, (["ourolândia"],"ba")
, (["ouro","preto"],"mg")
, (["ouro","preto","do","oeste"],"ro")
, (["ouro","velho"],"pb")
, (["ouro","verde"],"sc")
, (["ouro","verde"],"sp")
, (["ouro","verde","de","goiás"],"go")
, (["ouro","verde","de","minas"],"mg")
, (["ouro","verde","do","oeste"],"pr")
, (["ouvidor"],"go")
, (["pacaembu"],"sp")
, (["pacajá"],"pa")
, (["pacajus"],"ce")
, (["pacaraima"],"rr")
, (["pacatuba"],"ce")
, (["pacatuba"],"se")
, (["paço","do","lumiar"],"ma")
, (["pacoti"],"ce")
, (["pacujá"],"ce")
, (["padre","bernardo"],"go")
, (["padre","carvalho"],"mg")
, (["padre","marcos"],"pi")
, (["padre","paraíso"],"mg")
, (["paes","landim"],"pi")
, (["paial"],"sc")
, (["paiçandu"],"pr")
, (["paim","filho"],"rs")
, (["paineiras"],"mg")
, (["painel"],"sc")
, (["pains"],"mg")
, (["pai","pedro"],"mg")
, (["paiva"],"mg")
, (["pajeú","do","piauí"],"pi")
, (["palestina"],"al")
, (["palestina"],"sp")
, (["palestina","de","goiás"],"go")
, (["palestina","do","pará"],"pa")
, (["palhano"],"ce")
, (["palhoça"],"sc")
, (["palma"],"mg")
, (["palmácia"],"ce")
, (["palmares"],"pe")
, (["palmares","do","sul"],"rs")
, (["palmares","paulista"],"sp")
, (["palmas","de","monte","alto"],"ba")
, (["palmas"],"pr")
, (["palmas"],"to")
, (["palma","sola"],"sc")
, (["palmeira"],"pr")
, (["palmeira"],"sc")
, (["palmeira","das","missões"],"rs")
, (["palmeira","d'oeste"],"sp")
, (["palmeira","do","piauí"],"pi")
, (["palmeira","dos","índios"],"al")
, (["palmeirais"],"pi")
, (["palmeirândia"],"ma")
, (["palmeirante"],"to")
, (["palmeiras"],"ba")
, (["palmeiras","de","goiás"],"go")
, (["palmeiras","do","tocantins"],"to")
, (["palmeirina"],"pe")
, (["palmeirópolis"],"to")
, (["palmelo"],"go")
, (["palminópolis"],"go")
, (["palmital"],"pr")
, (["palmital"],"sp")
, (["palmitinho"],"rs")
, (["palmitos"],"sc")
, (["palmópolis"],"mg")
, (["palotina"],"pr")
, (["panamá"],"go")
, (["panambi"],"rs")
, (["pancas"],"es")
, (["panelas"],"pe")
, (["panorama"],"sp")
, (["pântano","grande"],"rs")
, (["pão","de","açúcar"],"al")
, (["papagaios"],"mg")
, (["papanduva"],"sc")
, (["paquetá"],"pi")
, (["paracambi"],"rj")
, (["paracatu"],"mg")
, (["paracuru"],"ce")
, (["pará","de","minas"],"mg")
, (["paragominas"],"pa")
, (["paraguaçu"],"mg")
, (["paraguaçu","paulista"],"sp")
, (["paraí"],"rs")
, (["paraíba","do","sul"],"rj")
, (["paraibano"],"ma")
, (["paraibuna"],"sp")
, (["paraipaba"],"ce")
, (["paraíso"],"sc")
, (["paraíso"],"sp")
, (["paraíso","do","norte"],"pr")
, (["paraíso","do","sul"],"rs")
, (["paraíso","do","tocantins"],"to")
, (["paraisópolis"],"mg")
, (["parambu"],"ce")
, (["paramirim"],"ba")
, (["paramoti"],"ce")
, (["paranacity"],"pr")
, (["paranaguá"],"pr")
, (["paranaíba"],"ms")
, (["paranaiguara"],"go")
, (["paranaíta"],"mt")
, (["paranapanema"],"sp")
, (["paranapoema"],"pr")
, (["paranapuã"],"sp")
, (["paraná"],"rn")
, (["paranã"],"to")
, (["paranatama"],"pe")
, (["paranatinga"],"mt")
, (["paranavaí"],"pr")
, (["paranhos"],"ms")
, (["paraopeba"],"mg")
, (["parapuã"],"sp")
, (["parari"],"pb")
, (["parati"],"rj")
, (["paratinga"],"ba")
, (["parauapebas"],"pa")
, (["paraúna"],"go")
, (["paraú"],"rn")
, (["parazinho"],"rn")
, (["pardinho"],"sp")
, (["pareci","novo"],"rs")
, (["parecis"],"ro")
, (["parelhas"],"rn")
, (["pariconha"],"al")
, (["parintins"],"am")
, (["paripiranga"],"ba")
, (["paripueira"],"al")
, (["pariquera","-","açu"],"sp")
, (["parisi"],"sp")
, (["parnaguá"],"pi")
, (["parnaíba"],"pi")
, (["parnamirim"],"pe")
, (["parnamirim"],"rn")
, (["parnarama"],"ma")
, (["parobé"],"rs")
, (["passabém"],"mg")
, (["passa","e","fica"],"rn")
, (["passagem"],"pb")
, (["passagem"],"rn")
, (["passagem","franca"],"ma")
, (["passagem","franca","do","piauí"],"pi")
, (["passa","-","quatro"],"mg")
, (["passa","sete"],"rs")
, (["passa","tempo"],"mg")
, (["passa","-","vinte"],"mg")
, (["passira"],"pe")
, (["passo","de","camaragibe"],"al")
, (["passo","de","torres"],"sc")
, (["passo","do","sobrado"],"rs")
, (["passo","fundo"],"rs")
, (["passos"],"mg")
, (["passos","maia"],"sc")
, (["pastos","bons"],"ma")
, (["patis"],"mg")
, (["pato","bragado"],"pr")
, (["pato","branco"],"pr")
, (["patos"],"pb")
, (["patos","de","minas"],"mg")
, (["patos","do","piauí"],"pi")
, (["patrocínio"],"mg")
, (["patrocínio","do","muriaé"],"mg")
, (["patrocínio","paulista"],"sp")
, (["patu"],"rn")
, (["pati","do","alferes"],"rj")
, (["pau","brasil"],"ba")
, (["paudalho"],"pe")
, (["pau","d'arco","do","piauí"],"pi")
, (["pau","d'arco"],"pa")
, (["pau","d'arco"],"to")
, (["pau","dos","ferros"],"rn")
, (["pauini"],"am")
, (["paula","cândido"],"mg")
, (["paula","freitas"],"pr")
, (["paulicéia"],"sp")
, (["paulínia"],"sp")
, (["paulino","neves"],"ma")
, (["paulistana"],"pi")
, (["paulistânia"],"sp")
, (["paulista"],"pb")
, (["paulista"],"pe")
, (["paulistas"],"mg")
, (["paulo","afonso"],"ba")
, (["paulo","bento"],"rs")
, (["paulo","de","faria"],"sp")
, (["paulo","frontin"],"pr")
, (["paulo","jacinto"],"al")
, (["paulo","lopes"],"sc")
, (["paulo","ramos"],"ma")
, (["pavão"],"mg")
, (["paverama"],"rs")
, (["pavussu"],"pi")
, (["peabiru"],"pr")
, (["peçanha"],"mg")
, (["pederneiras"],"sp")
, (["pé","de","serra"],"ba")
, (["pedra","azul"],"mg")
, (["pedra","bela"],"sp")
, (["pedra","bonita"],"mg")
, (["pedra","branca"],"ce")
, (["pedra","branca"],"pb")
, (["pedra","do","anta"],"mg")
, (["pedra","do","indaiá"],"mg")
, (["pedra","dourada"],"mg")
, (["pedra","grande"],"rn")
, (["pedra","lavrada"],"pb")
, (["pedralva"],"mg")
, (["pedra","mole"],"se")
, (["pedranópolis"],"sp")
, (["pedrão"],"ba")
, (["pedra"],"pe")
, (["pedra","preta"],"mt")
, (["pedra","preta"],"rn")
, (["pedras","altas"],"rs")
, (["pedras","de","fogo"],"pb")
, (["pedras","de","maria","da","cruz"],"mg")
, (["pedras","grandes"],"sc")
, (["pedregulho"],"sp")
, (["pedreira"],"sp")
, (["pedreiras"],"ma")
, (["pedrinhas"],"se")
, (["pedrinhas","paulista"],"sp")
, (["pedrinópolis"],"mg")
, (["pedro","afonso"],"to")
, (["pedro","alexandre"],"ba")
, (["pedro","avelino"],"rn")
, (["pedro","canário"],"es")
, (["pedro","de","toledo"],"sp")
, (["pedro","do","rosário"],"ma")
, (["pedro","gomes"],"ms")
, (["pedro","laurentino"],"pi")
, (["pedro","leopoldo"],"mg")
, (["pedro","osório"],"rs")
, (["pedro","ii"],"pi")
, (["pedro","teixeira"],"mg")
, (["pedro","velho"],"rn")
, (["peixe"],"to")
, (["peixe","-","boi"],"pa")
, (["peixoto","de","azevedo"],"mt")
, (["pejuçara"],"rs")
, (["pelotas"],"rs")
, (["penaforte"],"ce")
, (["penalva"],"ma")
, (["penápolis"],"sp")
, (["pendências"],"rn")
, (["penedo"],"al")
, (["penha"],"sc")
, (["pentecoste"],"ce")
, (["pequeri"],"mg")
, (["pequi"],"mg")
, (["pequizeiro"],"to")
, (["perdigão"],"mg")
, (["perdizes"],"mg")
, (["perdões"],"mg")
, (["pereira","barreto"],"sp")
, (["pereiras"],"sp")
, (["pereiro"],"ce")
, (["peri","mirim"],"ma")
, (["periquito"],"mg")
, (["peritiba"],"sc")
, (["peritoró"],"ma")
, (["perobal"],"pr")
, (["pérola"],"pr")
, (["pérola","d'oeste"],"pr")
, (["perolândia"],"go")
, (["peruíbe"],"sp")
, (["pescador"],"mg")
, (["pesqueira"],"pe")
, (["petrolândia"],"pe")
, (["petrolândia"],"sc")
, (["petrolina"],"pe")
, (["petrolina","de","goiás"],"go")
, (["petrópolis"],"rj")
, (["piaçabuçu"],"al")
, (["piacatu"],"sp")
, (["piancó"],"pb")
, (["piatã"],"ba")
, (["piau"],"mg")
, (["picada","café"],"rs")
, (["piçarra"],"pa")
, (["piçarras"],"sc")
, (["picos"],"pi")
, (["picuí"],"pb")
, (["piedade"],"sp")
, (["piedade","de","caratinga"],"mg")
, (["piedade","de","ponte","nova"],"mg")
, (["piedade","do","rio","grande"],"mg")
, (["piedade","dos","gerais"],"mg")
, (["piên"],"pr")
, (["pilão","arcado"],"ba")
, (["pilar"],"al")
, (["pilar"],"pb")
, (["pilar","de","goiás"],"go")
, (["pilar","do","sul"],"sp")
, (["pilões"],"pb")
, (["pilões"],"rn")
, (["pilõezinhos"],"pb")
, (["pimenta"],"mg")
, (["pimenta","bueno"],"ro")
, (["pimenteiras"],"pi")
, (["pimenteiras","do","oeste"],"ro")
, (["pindaí"],"ba")
, (["pindamonhangaba"],"sp")
, (["pindaré","mirim"],"ma")
, (["pindoba"],"al")
, (["pindobaçu"],"ba")
, (["pindorama"],"sp")
, (["pindorama","do","tocantins"],"to")
, (["pindoretama"],"ce")
, (["pingo","d'água"],"mg")
, (["pinhais"],"pr")
, (["pinhal"],"rs")
, (["pinhalão"],"pr")
, (["pinhal","da","serra"],"rs")
, (["pinhal","de","são","bento"],"pr")
, (["pinhal","grande"],"rs")
, (["pinhalzinho"],"sc")
, (["pinhalzinho"],"sp")
, (["pinhão"],"pr")
, (["pinhão"],"se")
, (["pinheiral"],"rj")
, (["pinheirinho","do","vale"],"rs")
, (["pinheiro"],"ma")
, (["pinheiro","machado"],"rs")
, (["pinheiro","preto"],"sc")
, (["pinheiros"],"es")
, (["pintadas"],"ba")
, (["pintópolis"],"mg")
, (["pio","ix"],"pi")
, (["pio","xii"],"ma")
, (["piquerobi"],"sp")
, (["piquet","carneiro"],"ce")
, (["piquete"],"sp")
, (["piracaia"],"sp")
, (["piracanjuba"],"go")
, (["piracema"],"mg")
, (["piracicaba"],"sp")
, (["piracuruca"],"pi")
, (["piraí"],"rj")
, (["piraí","do","norte"],"ba")
, (["piraí","do","sul"],"pr")
, (["piraju"],"sp")
, (["pirajuba"],"mg")
, (["pirajuí"],"sp")
, (["pirambu"],"se")
, (["piranga"],"mg")
, (["pirangi"],"sp")
, (["piranguçu"],"mg")
, (["piranguinho"],"mg")
, (["piranhas"],"al")
, (["piranhas"],"go")
, (["pirapemas"],"ma")
, (["pirapetinga"],"mg")
, (["pirapó"],"rs")
, (["pirapora"],"mg")
, (["pirapora","do","bom","jesus"],"sp")
, (["pirapozinho"],"sp")
, (["piraquara"],"pr")
, (["piraquê"],"to")
, (["pirassununga"],"sp")
, (["piratini"],"rs")
, (["piratininga"],"sp")
, (["piratuba"],"sc")
, (["piraúba"],"mg")
, (["pirenópolis"],"go")
, (["pires","do","rio"],"go")
, (["pires","ferreira"],"ce")
, (["piripá"],"ba")
, (["piripiri"],"pi")
, (["piritiba"],"ba")
, (["pirpirituba"],"pb")
, (["pitanga"],"pr")
, (["pitangueiras"],"pr")
, (["pitangueiras"],"sp")
, (["pitangui"],"mg")
, (["pitimbu"],"pb")
, (["pium"],"to")
, (["piúma"],"es")
, (["piumhi"],"mg")
, (["placas"],"pa")
, (["plácido","de","castro"],"ac")
, (["planaltina","do","paraná"],"pr")
, (["planaltina"],"go")
, (["planaltino"],"ba")
, (["planalto"],"ba")
, (["planalto"],"pr")
, (["planalto"],"rs")
, (["planalto","(são","paulo)"],"sp")
, (["planalto","alegre"],"sc")
, (["planalto","da","serra"],"mt")
, (["planura"],"mg")
, (["platina"],"sp")
, (["poá"],"sp")
, (["poção"],"pe")
, (["poção","de","pedras"],"ma")
, (["pocinhos"],"pb")
, (["poço","branco"],"rn")
, (["poço","dantas"],"pb")
, (["poço","das","antas"],"rs")
, (["poço","das","trincheiras"],"al")
, (["poço","de","josé","de","moura"],"pb")
, (["poções"],"ba")
, (["poço","fundo"],"mg")
, (["poconé"],"mt")
, (["poço","redondo"],"se")
, (["poços","de","caldas"],"mg")
, (["poço","verde"],"se")
, (["pocrane"],"mg")
, (["pojuca"],"ba")
, (["poloni"],"sp")
, (["pombal"],"pb")
, (["pombos"],"pe")
, (["pomerode"],"sc")
, (["pompéia"],"sp")
, (["pompéu"],"mg")
, (["pongaí"],"sp")
, (["ponta","de","pedras"],"pa")
, (["ponta","grossa"],"pr")
, (["pontal"],"sp")
, (["pontal","do","araguaia"],"mt")
, (["pontal","do","paraná"],"pr")
, (["pontalina"],"go")
, (["pontalinda"],"sp")
, (["pontão"],"rs")
, (["ponta","porã"],"ms")
, (["ponte","alta","do","bom","jesus"],"to")
, (["ponte","alta","do","norte"],"sc")
, (["ponte","alta","do","tocantins"],"to")
, (["ponte","alta"],"sc")
, (["ponte","branca"],"mt")
, (["ponte","nova"],"mg")
, (["ponte","preta"],"rs")
, (["pontes","e","lacerda"],"mt")
, (["ponte","serrada"],"sc")
, (["pontes","gestal"],"sp")
, (["ponto","belo"],"es")
, (["ponto","chique"],"mg")
, (["ponto","dos","volantes"],"mg")
, (["ponto","novo"],"ba")
, (["populina"],"sp")
, (["porangaba"],"sp")
, (["poranga"],"ce")
, (["porangatu"],"go")
, (["porciúncula"],"rj")
, (["porecatu"],"pr")
, (["portalegre"],"rn")
, (["portão"],"rs")
, (["porteirão"],"go")
, (["porteiras"],"ce")
, (["porteirinha"],"mg")
, (["portelândia"],"go")
, (["portel"],"pa")
, (["porto"],"pi")
, (["porto","acre"],"ac")
, (["porto","alegre"],"rs")
, (["porto","alegre","do","norte"],"mt")
, (["porto","alegre","do","piauí"],"pi")
, (["porto","alegre","do","tocantins"],"to")
, (["porto","amazonas"],"pr")
, (["porto","barreiro"],"pr")
, (["porto","belo"],"sc")
, (["porto","calvo"],"al")
, (["porto","da","folha"],"se")
, (["porto","de","moz"],"pa")
, (["porto","de","pedras"],"al")
, (["porto","do","mangue"],"rn")
, (["porto","dos","gaúchos"],"mt")
, (["porto","esperidião"],"mt")
, (["porto","estrela"],"mt")
, (["porto","feliz"],"sp")
, (["porto","ferreira"],"sp")
, (["porto","firme"],"mg")
, (["porto","franco"],"ma")
, (["porto","grande"],"ap")
, (["porto","lucena"],"rs")
, (["porto","mauá"],"rs")
, (["porto","murtinho"],"ms")
, (["porto","nacional"],"to")
, (["porto","real"],"rj")
, (["porto","real","do","colégio"],"al")
, (["porto","rico"],"pr")
, (["porto","rico","do","maranhão"],"ma")
, (["porto","seguro"],"ba")
, (["porto","união"],"sc")
, (["porto","velho"],"ro")
, (["porto","vera","cruz"],"rs")
, (["porto","vitória"],"pr")
, (["porto","walter"],"ac")
, (["porto","xavier"],"rs")
, (["posse"],"go")
, (["poté"],"mg")
, (["potengi"],"ce")
, (["potim"],"sp")
, (["potiraguá"],"ba")
, (["potirendaba"],"sp")
, (["potiretama"],"ce")
, (["pouso","alegre"],"mg")
, (["pouso","alto"],"mg")
, (["pouso","novo"],"rs")
, (["pouso","redondo"],"sc")
, (["poxoréo"],"mt")
, (["pracinha"],"sp")
, (["pracuúba"],"ap")
, (["prado"],"ba")
, (["prado","ferreira"],"pr")
, (["pradópolis"],"sp")
, (["prados"],"mg")
, (["praia","grande"],"sc")
, (["praia","grande"],"sp")
, (["praia","norte"],"to")
, (["prainha"],"pa")
, (["pranchita"],"pr")
, (["prata"],"mg")
, (["prata"],"pb")
, (["prata","do","piauí"],"pi")
, (["pratânia"],"sp")
, (["pratápolis"],"mg")
, (["pratinha"],"mg")
, (["presidente","alves"],"sp")
, (["presidente","bernardes"],"mg")
, (["presidente","bernardes"],"sp")
, (["presidente","castelo","branco"],"pr")
, (["presidente","castelo","branco"],"sc")
, (["presidente","dutra"],"ba")
, (["presidente","dutra"],"ma")
, (["presidente","epitácio"],"sp")
, (["presidente","figueiredo"],"am")
, (["presidente","getúlio"],"sc")
, (["presidente","jânio","quadros"],"ba")
, (["presidente","juscelino"],"ma")
, (["presidente","juscelino"],"mg")
, (["presidente","kennedy"],"es")
, (["presidente","kennedy"],"to")
, (["presidente","kubitschek"],"mg")
, (["presidente","lucena"],"rs")
, (["presidente","médici"],"ma")
, (["presidente","médici"],"ro")
, (["presidente","nereu"],"sc")
, (["presidente","olegário"],"mg")
, (["presidente","prudente"],"sp")
, (["presidente","sarney"],"ma")
, (["presidente","tancredo","neves"],"ba")
, (["presidente","vargas"],"ma")
, (["presidente","venceslau"],"sp")
, (["primavera"],"pa")
, (["primavera"],"pe")
, (["primavera","de","rondônia"],"ro")
, (["primavera","do","leste"],"mt")
, (["primeira","cruz"],"ma")
, (["primeiro","de","maio"],"pr")
, (["princesa"],"sc")
, (["princesa","isabel"],"pb")
, (["professor","jamil","safady"],"go")
, (["progresso"],"rs")
, (["promissão"],"sp")
, (["propriá"],"se")
, (["protásio","alves"],"rs")
, (["prudente","de","morais"],"mg")
, (["prudentópolis"],"pr")
, (["pugmil"],"to")
, (["pureza"],"rn")
, (["putinga"],"rs")
, (["puxinanã"],"pb")
, (["quadra"],"sp")
, (["quaraí"],"rs")
, (["quartel","geral"],"mg")
, (["quarto","centenário"],"pr")
, (["quatá"],"sp")
, (["quatiguá"],"pr")
, (["quatipuru"],"pa")
, (["quatis"],"rj")
, (["quatro","barras"],"pr")
, (["quatro","irmãos"],"rs")
, (["quatro","pontes"],"pr")
, (["quebrângulo"],"al")
, (["quedas","do","iguaçu"],"pr")
, (["queimada","nova"],"pi")
, (["queimadas"],"ba")
, (["queimadas"],"pb")
, (["queimados"],"rj")
, (["queiroz"],"sp")
, (["queluzito"],"mg")
, (["queluz"],"sp")
, (["querência","do","norte"],"pr")
, (["querência"],"mt")
, (["quevedos"],"rs")
, (["quijingue"],"ba")
, (["quilombo"],"sc")
, (["quinta","do","sol"],"pr")
, (["quintana"],"sp")
, (["quinze","de","novembro"],"rs")
, (["quipapá"],"pe")
, (["quirinópolis"],"go")
, (["quissamã"],"rj")
, (["quitandinha"],"pr")
, (["quiterianópolis"],"ce")
, (["quixaba"],"pb")
, (["quixabá"],"pe")
, (["quixabeira"],"ba")
, (["quixadá"],"ce")
, (["quixelô"],"ce")
, (["quixeramobim"],"ce")
, (["quixeré"],"ce")
, (["rafael","fernandes"],"rn")
, (["rafael","godeiro"],"rn")
, (["rafael","jambeiro"],"ba")
, (["rafard"],"sp")
, (["ramilândia"],"pr")
, (["rancharia"],"sp")
, (["rancho","alegre","d'oeste"],"pr")
, (["rancho","alegre"],"pr")
, (["rancho","queimado"],"sc")
, (["raposa"],"ma")
, (["raposos"],"mg")
, (["raul","soares"],"mg")
, (["realeza"],"pr")
, (["rebouças"],"pr")
, (["recife"],"pe")
, (["recreio"],"mg")
, (["recursolândia"],"to")
, (["redenção"],"ce")
, (["redenção","da","serra"],"sp")
, (["redenção","do","gurguéia"],"pi")
, (["redenção"],"pa")
, (["redentora"],"rs")
, (["reduto"],"mg")
, (["regeneração"],"pi")
, (["regente","feijó"],"sp")
, (["reginópolis"],"sp")
, (["registro"],"sp")
, (["relvado"],"rs")
, (["remanso"],"ba")
, (["remígio"],"pb")
, (["renascença"],"pr")
, (["reriutaba"],"ce")
, (["resende","costa"],"mg")
, (["resende"],"rj")
, (["reserva","do","cabaçal"],"mt")
, (["reserva","do","iguaçu"],"pr")
, (["reserva"],"pr")
, (["resplendor"],"mg")
, (["ressaquinha"],"mg")
, (["restinga","seca"],"rs")
, (["restinga"],"sp")
, (["retirolândia"],"ba")
, (["retiro"],"pb")
, (["riachão","das","neves"],"ba")
, (["riachão","do","bacamarte"],"pb")
, (["riachão","do","dantas"],"se")
, (["riachão","do","jacuípe"],"ba")
, (["riachão","do","poço"],"pb")
, (["riachão"],"ma")
, (["riachão"],"pb")
, (["riachinho"],"mg")
, (["riachinho"],"to")
, (["riacho","da","cruz"],"rn")
, (["riacho","das","almas"],"pe")
, (["riacho","de","santana"],"ba")
, (["riacho","de","santana"],"rn")
, (["riacho","de","santo","antônio"],"pb")
, (["riacho","dos","cavalos"],"pb")
, (["riacho","dos","machados"],"mg")
, (["riacho","frio"],"pi")
, (["riachuelo"],"rn")
, (["riachuelo"],"se")
, (["rialma"],"go")
, (["rianápolis"],"go")
, (["ribamar","fiquene"],"ma")
, (["ribas","do","rio","pardo"],"ms")
, (["ribeira","do","amparo"],"ba")
, (["ribeira","do","piauí"],"pi")
, (["ribeira","do","pombal"],"ba")
, (["ribeirão","bonito"],"sp")
, (["ribeirão","branco"],"sp")
, (["ribeirão","cascalheira"],"mt")
, (["ribeirão","claro"],"pr")
, (["ribeirão","corrente"],"sp")
, (["ribeirão","das","neves"],"mg")
, (["ribeirão","do","largo"],"ba")
, (["ribeirão","do","pinhal"],"pr")
, (["ribeirão","dos","índios"],"sp")
, (["ribeirão","do","sul"],"sp")
, (["ribeirão","grande"],"sp")
, (["ribeirão"],"pe")
, (["ribeirão","pires"],"sp")
, (["ribeirão","preto"],"sp")
, (["ribeirão","vermelho"],"mg")
, (["ribeirãozinho"],"mt")
, (["ribeira"],"sp")
, (["ribeiro","gonçalves"],"pi")
, (["ribeirópolis"],"se")
, (["rifaina"],"sp")
, (["rincão"],"sp")
, (["rinópolis"],"sp")
, (["rio","acima"],"mg")
, (["rio","azul"],"pr")
, (["rio","bananal"],"es")
, (["rio","bom"],"pr")
, (["rio","bonito","do","iguaçu"],"pr")
, (["rio","bonito"],"rj")
, (["rio","branco"],"ac")
, (["rio","branco","do","ivaí"],"pr")
, (["rio","branco","do","sul"],"pr")
, (["rio","branco"],"mt")
, (["rio","brilhante"],"ms")
, (["rio","casca"],"mg")
, (["rio","claro"],"rj")
, (["rio","claro"],"sp")
, (["rio","crespo"],"ro")
, (["rio","da","conceição"],"to")
, (["rio","das","antas"],"sc")
, (["rio","das","flores"],"rj")
, (["rio","das","ostras"],"rj")
, (["rio","das","pedras"],"sp")
, (["rio","de","contas"],"ba")
, (["rio","de","janeiro"],"rj")
, (["rio","do","antônio"],"ba")
, (["rio","do","campo"],"sc")
, (["rio","doce"],"mg")
, (["rio","do","fogo"],"rn")
, (["rio","do","oeste"],"sc")
, (["rio","do","pires"],"ba")
, (["rio","do","prado"],"mg")
, (["rio","dos","bois"],"to")
, (["rio","dos","cedros"],"sc")
, (["rio","dos","índios"],"rs")
, (["rio","do","sul"],"sc")
, (["rio","espera"],"mg")
, (["rio","formoso"],"pe")
, (["rio","fortuna"],"sc")
, (["rio","grande","da","serra"],"sp")
, (["rio","grande","do","piauí"],"pi")
, (["rio","grande"],"rs")
, (["riolândia"],"sp")
, (["rio","largo"],"al")
, (["rio","manso"],"mg")
, (["rio","maria"],"pa")
, (["rio","negrinho"],"sc")
, (["rio","negro"],"ms")
, (["rio","negro"],"pr")
, (["rio","novo","do","sul"],"es")
, (["rio","novo"],"mg")
, (["rio","paranaíba"],"mg")
, (["rio","pardo","de","minas"],"mg")
, (["rio","pardo"],"rs")
, (["rio","piracicaba"],"mg")
, (["rio","pomba"],"mg")
, (["rio","preto","da","eva"],"am")
, (["rio","preto"],"mg")
, (["rio","quente"],"go")
, (["rio","real"],"ba")
, (["rio","rufino"],"sc")
, (["rio","sono"],"to")
, (["rio","tinto"],"pb")
, (["rio","verde","de","mato","grosso"],"ms")
, (["rio","verde"],"go")
, (["rio","vermelho"],"mg")
, (["riozinho"],"rs")
, (["riqueza"],"sc")
, (["ritápolis"],"mg")
, (["riversul"],"sp")
, (["roca","sales"],"rs")
, (["rochedo","de","minas"],"mg")
, (["rochedo"],"ms")
, (["rodeio"],"sc")
, (["rodeio","bonito"],"rs")
, (["rodeiro"],"mg")
, (["rodelas"],"ba")
, (["rodolfo","fernandes"],"rn")
, (["rodrigues","alves"],"ac")
, (["rolador"],"rs")
, (["rolândia"],"pr")
, (["rolante"],"rs")
, (["rolim","de","moura"],"ro")
, (["romaria"],"mg")
, (["romelândia"],"sc")
, (["roncador"],"pr")
, (["ronda","alta"],"rs")
, (["rondinha"],"rs")
, (["rondolândia"],"mt")
, (["rondon","do","pará"],"pa")
, (["rondonópolis"],"mt")
, (["rondon"],"pr")
, (["roque","gonzales"],"rs")
, (["rorainópolis"],"rr")
, (["rosana"],"sp")
, (["rosário","da","limeira"],"mg")
, (["rosário","do","catete"],"se")
, (["rosário","do","ivaí"],"pr")
, (["rosário","do","sul"],"rs")
, (["rosário"],"ma")
, (["rosário","oeste"],"mt")
, (["roseira"],"sp")
, (["roteiro"],"al")
, (["rubelita"],"mg")
, (["rubiácea"],"sp")
, (["rubiataba"],"go")
, (["rubim"],"mg")
, (["rubinéia"],"sp")
, (["rurópolis"],"pa")
, (["russas"],"ce")
, (["ruy","barbosa"],"ba")
, (["ruy","barbosa"],"rn")
, (["sabará"],"mg")
, (["sabáudia"],"pr")
, (["sabinópolis"],"mg")
, (["sabino"],"sp")
, (["saboeiro"],"ce")
, (["sacramento"],"mg")
, (["sagrada","família"],"rs")
, (["sagres"],"sp")
, (["sairé"],"pe")
, (["saldanha","marinho"],"rs")
, (["sales"],"sp")
, (["sales","oliveira"],"sp")
, (["salesópolis"],"sp")
, (["salete"],"sc")
, (["salgadinho"],"pb")
, (["salgadinho"],"pe")
, (["salgado"],"se")
, (["salgado","de","são","félix"],"pb")
, (["salgado","filho"],"pr")
, (["salgueiro"],"pe")
, (["salinas","da","margarida"],"ba")
, (["salinas"],"mg")
, (["salinópolis"],"pa")
, (["salitre"],"ce")
, (["salmourão"],"sp")
, (["saloá"],"pe")
, (["saltinho"],"sc")
, (["saltinho"],"sp")
, (["salto"],"sp")
, (["salto","da","divisa"],"mg")
, (["salto","de","pirapora"],"sp")
, (["salto","do","céu"],"mt")
, (["salto","do","itararé"],"pr")
, (["salto","do","jacuí"],"rs")
, (["salto","do","lontra"],"pr")
, (["salto","grande"],"sp")
, (["salto","veloso"],"sc")
, (["salvador"],"ba")
, (["salvador","das","missões"],"rs")
, (["salvador","do","sul"],"rs")
, (["salvaterra"],"pa")
, (["sambaíba"],"ma")
, (["sampaio"],"to")
, (["sananduva"],"rs")
, (["sanclerlândia"],"go")
, (["sandolândia"],"to")
, (["sandovalina"],"sp")
, (["sangão"],"sc")
, (["sanharó"],"pe")
, (["santa","adélia"],"sp")
, (["santa","albertina"],"sp")
, (["santa","amélia"],"pr")
, (["santa","bárbara"],"ba")
, (["santa","bárbara"],"mg")
, (["santa","bárbara","de","goiás"],"go")
, (["santa","bárbara","do","leste"],"mg")
, (["santa","bárbara","do","monte","verde"],"mg")
, (["santa","bárbara","do","pará"],"pa")
, (["santa","bárbara","do","sul"],"rs")
, (["santa","bárbara","do","tugúrio"],"mg")
, (["santa","bárbara","d'oeste"],"sp")
, (["santa","branca"],"sp")
, (["santa","brígida"],"ba")
, (["santa","carmem"],"mt")
, (["santa","cecília"],"pb")
, (["santa","cecília"],"sc")
, (["santa","cecília","do","pavão"],"pr")
, (["santa","cecília","do","sul"],"rs")
, (["santa","clara","do","sul"],"rs")
, (["santa","clara","d'oeste"],"sp")
, (["santa","cruz"],"pb")
, (["santa","cruz"],"pe")
, (["santa","cruz"],"rn")
, (["santa","cruz","cabrália"],"ba")
, (["santa","cruz","da","baixa","verde"],"pe")
, (["santa","cruz","da","conceição"],"sp")
, (["santa","cruz","da","esperança"],"sp")
, (["santa","cruz","da","vitória"],"ba")
, (["santa","cruz","das","palmeiras"],"sp")
, (["santa","cruz","de","goiás"],"go")
, (["santa","cruz","de","minas"],"mg")
, (["santa","cruz","de","monte","castelo"],"pr")
, (["santa","cruz","de","salinas"],"mg")
, (["santa","cruz","do","arari"],"pa")
, (["santa","cruz","do","capibaribe"],"pe")
, (["santa","cruz","do","escalvado"],"mg")
, (["santa","cruz","do","piauí"],"pi")
, (["santa","cruz","do","rio","pardo"],"sp")
, (["santa","cruz","do","sul"],"rs")
, (["santa","cruz","do","xingu"],"mt")
, (["santa","cruz","dos","milagres"],"pi")
, (["santa","efigênia","de","minas"],"mg")
, (["santa","ernestina"],"sp")
, (["santa","fé"],"pr")
, (["santa","fé","de","goiás"],"go")
, (["santa","fé","de","minas"],"mg")
, (["santa","fé","do","araguaia"],"to")
, (["santa","fé","do","sul"],"sp")
, (["santa","filomena"],"pe")
, (["santa","filomena"],"pi")
, (["santa","filomena","do","maranhão"],"ma")
, (["santa","gertrudes"],"sp")
, (["santa","helena"],"ma")
, (["santa","helena"],"pb")
, (["santa","helena"],"pr")
, (["santa","helena"],"sc")
, (["santa","helena","de","goiás"],"go")
, (["santa","helena","de","minas"],"mg")
, (["santa","inês"],"ba")
, (["santa","inês"],"ma")
, (["santa","inês"],"pb")
, (["santa","inês"],"pr")
, (["santa","isabel"],"go")
, (["santa","isabel"],"sp")
, (["santa","isabel","do","ivaí"],"pr")
, (["santa","isabel","do","pará"],"pa")
, (["santa","isabel","do","rio","negro"],"am")
, (["santa","izabel","do","oeste"],"pr")
, (["santa","juliana"],"mg")
, (["santa","leopoldina"],"es")
, (["santa","lúcia"],"pr")
, (["santa","lúcia"],"sp")
, (["santa","luz"],"pi")
, (["santa","luzia"],"ba")
, (["santa","luzia"],"ma")
, (["santa","luzia"],"mg")
, (["santa","luzia"],"pb")
, (["santa","luzia","d'oeste"],"ro")
, (["santa","luzia","do","itanhy"],"se")
, (["santa","luzia","do","norte"],"al")
, (["santa","luzia","do","pará"],"pa")
, (["santa","luzia","do","paruá"],"ma")
, (["santa","margarida"],"mg")
, (["santa","margarida","do","sul"],"rs")
, (["santa","maria"],"rn")
, (["santa","maria"],"rs")
, (["santa","maria","da","boa","vista"],"pe")
, (["santa","maria","das","barreiras"],"pa")
, (["santa","maria","da","serra"],"sp")
, (["santa","maria","da","vitória"],"ba")
, (["santa","maria","de","itabira"],"mg")
, (["santa","maria","do","cambucá"],"pe")
, (["santa","maria","do","herval"],"rs")
, (["santa","maria","de","jetibá"],"es")
, (["santa","maria","do","oeste"],"pr")
, (["santa","maria","do","pará"],"pa")
, (["santa","maria","do","salto"],"mg")
, (["santa","maria","do","suaçuí"],"mg")
, (["santa","maria","do","tocantins"],"to")
, (["santa","maria","madalena"],"rj")
, (["santa","mariana"],"pr")
, (["santa","mercedes"],"sp")
, (["santa","mônica"],"pr")
, (["santa","quitéria"],"ce")
, (["santa","quitéria","do","maranhão"],"ma")
, (["santa","rita","de","caldas"],"mg")
, (["santa","rita","de","cássia"],"ba")
, (["santa","rita","de","jacutinga"],"mg")
, (["santa","rita","de","minas"],"mg")
, (["santa","rita","do","araguaia"],"go")
, (["santa","rita","d'oeste"],"sp")
, (["santa","rita","de","ibitipoca"],"mg")
, (["santa","rita","do","itueto"],"mg")
, (["santa","rita","do","novo","destino"],"go")
, (["santa","rita","do","pardo"],"ms")
, (["santa","rita","do","passa","quatro"],"sp")
, (["santa","rita","do","sapucaí"],"mg")
, (["santa","rita","do","tocantins"],"to")
, (["santa","rita","do","trivelato"],"mt")
, (["santa","rita"],"ma")
, (["santa","rita"],"pb")
, (["santa","rosa"],"rs")
, (["santa","rosa","da","serra"],"mg")
, (["santa","rosa","de","goiás"],"go")
, (["santa","rosa","de","lima"],"sc")
, (["santa","rosa","de","lima"],"se")
, (["santa","rosa","de","viterbo"],"sp")
, (["santa","rosa","do","piauí"],"pi")
, (["santa","rosa","do","purus"],"ac")
, (["santa","rosa","do","sul"],"sc")
, (["santa","rosa","do","tocantins"],"to")
, (["santa","salete"],"sp")
, (["santa","teresa"],"es")
, (["santa","teresinha"],"ba")
, (["santa","teresinha"],"pb")
, (["santa","tereza"],"rs")
, (["santa","tereza","de","goiás"],"go")
, (["santa","tereza","do","oeste"],"pr")
, (["santa","tereza","do","tocantins"],"to")
, (["santa","terezinha","de","goiás"],"go")
, (["santa","terezinha","de","itaipu"],"pr")
, (["santa","terezinha","do","progresso"],"sc")
, (["santa","terezinha","do","tocantins"],"to")
, (["santa","terezinha"],"mt")
, (["santa","terezinha"],"pe")
, (["santa","terezinha"],"sc")
, (["santa","vitória"],"mg")
, (["santa","vitória","do","palmar"],"rs")
, (["santaluz"],"ba")
, (["santana"],"ap")
, (["santana"],"ba")
, (["santana","da","boa","vista"],"rs")
, (["santana","da","ponte","pensa"],"sp")
, (["santana","da","vargem"],"mg")
, (["santana","de","cataguases"],"mg")
, (["santana","do","ipanema"],"al")
, (["santana","de","mangueira"],"pb")
, (["santana","de","parnaíba"],"sp")
, (["santana","de","pirapama"],"mg")
, (["santana","do","acaraú"],"ce")
, (["santana","do","araguaia"],"pa")
, (["santana","do","cariri"],"ce")
, (["santana","do","deserto"],"mg")
, (["santana","do","garambéu"],"mg")
, (["santana","do","itararé"],"pr")
, (["santana","do","jacaré"],"mg")
, (["santana","do","livramento"],"rs")
, (["santana","do","manhuaçu"],"mg")
, (["santana","do","maranhão"],"ma")
, (["santana","do","matos"],"rn")
, (["santana","do","mundaú"],"al")
, (["santana","do","paraíso"],"mg")
, (["santana","do","piauí"],"pi")
, (["santana","do","riacho"],"mg")
, (["santana","do","são","francisco"],"se")
, (["santana","do","seridó"],"rn")
, (["santana","dos","garrotes"],"pb")
, (["santana","dos","montes"],"mg")
, (["santanópolis"],"ba")
, (["santarém"],"pa")
, (["santarém"],"pb")
, (["santarém","novo"],"pa")
, (["santiago"],"rs")
, (["santiago","do","sul"],"sc")
, (["santo","afonso"],"mt")
, (["santo","amaro"],"ba")
, (["santo","amaro","da","imperatriz"],"sc")
, (["santo","amaro","das","brotas"],"se")
, (["santo","amaro","do","maranhão"],"ma")
, (["santo","anastácio"],"sp")
, (["santo","andré"],"pb")
, (["santo","andré"],"sp")
, (["santo","ângelo"],"rs")
, (["santo","antônio"],"rn")
, (["santo","antônio","da","alegria"],"sp")
, (["santo","antônio","da","barra"],"go")
, (["santo","antônio","da","patrulha"],"rs")
, (["santo","antônio","da","platina"],"pr")
, (["santo","antônio","de","posse"],"sp")
, (["santo","antônio","das","missões"],"rs")
, (["santo","antônio","de","goiás"],"go")
, (["santo","antônio","de","jesus"],"ba")
, (["santo","antônio","de","lisboa"],"pi")
, (["santo","antônio","de","pádua"],"rj")
, (["santo","antônio","do","amparo"],"mg")
, (["santo","antônio","do","aracanguá"],"sp")
, (["santo","antônio","do","aventureiro"],"mg")
, (["santo","antônio","do","caiuá"],"pr")
, (["santo","antônio","do","descoberto"],"go")
, (["santo","antônio","do","grama"],"mg")
, (["santo","antônio","do","içá"],"am")
, (["santo","antônio","do","itambé"],"mg")
, (["santo","antônio","do","jacinto"],"mg")
, (["santo","antônio","do","jardim"],"sp")
, (["santo","antônio","do","leste"],"mt")
, (["santo","antônio","do","leverger"],"mt")
, (["santo","antônio","do","monte"],"mg")
, (["santo","antônio","do","palma"],"rs")
, (["santo","antônio","do","paraíso"],"pr")
, (["santo","antônio","do","pinhal"],"sp")
, (["santo","antônio","do","planalto"],"rs")
, (["santo","antônio","do","retiro"],"mg")
, (["santo","antônio","do","rio","abaixo"],"mg")
, (["santo","antônio","dos","lopes"],"ma")
, (["santo","antônio","dos","milagres"],"pi")
, (["santo","antônio","do","sudoeste"],"pr")
, (["santo","antônio","do","tauá"],"pa")
, (["santo","augusto"],"rs")
, (["santo","cristo"],"rs")
, (["santo","estêvão"],"ba")
, (["santo","expedito"],"sp")
, (["santo","expedito","do","sul"],"rs")
, (["santo","hipólito"],"mg")
, (["santo","inácio"],"pr")
, (["santo","inácio","do","piauí"],"pi")
, (["santópolis","do","aguapeí"],"sp")
, (["santos"],"sp")
, (["santos","dumont"],"mg")
, (["são","benedito"],"ce")
, (["são","benedito","do","rio","preto"],"ma")
, (["são","benedito","do","sul"],"pe")
, (["são","bento"],"ma")
, (["são","bento"],"pb")
, (["são","bento","abade"],"mg")
, (["são","bento","de","pombal"],"pb")
, (["são","bento","do","norte"],"rn")
, (["são","bento","do","sapucaí"],"sp")
, (["são","bento","do","sul"],"sc")
, (["são","bento","do","tocantins"],"to")
, (["são","bento","do","trairi"],"rn")
, (["são","bento","do","una"],"pe")
, (["são","bernardino"],"sc")
, (["são","bernardo"],"ma")
, (["são","bernardo","do","campo"],"sp")
, (["são","bonifácio"],"sc")
, (["são","borja"],"rs")
, (["são","brás"],"al")
, (["são","brás","do","suaçuí"],"mg")
, (["são","braz","do","piauí"],"pi")
, (["são","caetano"],"pe")
, (["são","caetano","de","odivelas"],"pa")
, (["são","caetano","do","sul"],"sp")
, (["são","carlos","do","ivaí"],"pr")
, (["são","carlos"],"sc")
, (["são","carlos"],"sp")
, (["são","cristóvão"],"se")
, (["são","cristóvão","do","sul"],"sc")
, (["são","desidério"],"ba")
, (["são","domingos"],"ba")
, (["são","domingos"],"go")
, (["são","domingos"],"sc")
, (["são","domingos"],"se")
, (["são","domingos","das","dores"],"mg")
, (["são","domingos","de","pombal"],"pb")
, (["são","domingos","do","araguaia"],"pa")
, (["são","domingos","do","azeitão"],"ma")
, (["são","domingos","do","capim"],"pa")
, (["são","domingos","do","cariri"],"pb")
, (["são","domingos","do","maranhão"],"ma")
, (["são","domingos","do","norte"],"es")
, (["são","domingos","do","prata"],"mg")
, (["são","domingos","do","sul"],"rs")
, (["são","felipe"],"ba")
, (["são","felipe","d'oeste"],"ro")
, (["são","félix"],"ba")
, (["são","félix","de","balsas"],"ma")
, (["são","félix","de","minas"],"mg")
, (["são","félix","do","araguaia"],"mt")
, (["são","félix","do","coribe"],"ba")
, (["são","félix","do","piauí"],"pi")
, (["são","félix","do","tocantins"],"to")
, (["são","félix","do","xingu"],"pa")
, (["são","fernando"],"rn")
, (["são","fidélis"],"rj")
, (["são","francisco"],"mg")
, (["são","francisco"],"pb")
, (["são","francisco"],"se")
, (["são","francisco"],"sp")
, (["são","francisco","de","assis","do","piauí"],"pi")
, (["são","francisco","de","assis"],"rs")
, (["são","francisco","de","goiás"],"go")
, (["são","francisco","de","itabapoana"],"rj")
, (["são","francisco","de","paula"],"mg")
, (["são","francisco","de","paula"],"rs")
, (["são","francisco","de","sales"],"mg")
, (["são","francisco","do","brejão"],"ma")
, (["são","francisco","do","conde"],"ba")
, (["são","francisco","do","glória"],"mg")
, (["são","francisco","do","guaporé"],"ro")
, (["são","francisco","do","maranhão"],"ma")
, (["são","francisco","do","oeste"],"rn")
, (["são","francisco","do","pará"],"pa")
, (["são","francisco","do","piauí"],"pi")
, (["são","francisco","do","sul"],"sc")
, (["são","gabriel"],"ba")
, (["são","gabriel"],"rs")
, (["são","gabriel","da","cachoeira"],"am")
, (["são","gabriel","da","palha"],"es")
, (["são","gabriel","do","oeste"],"ms")
, (["são","geraldo"],"mg")
, (["são","geraldo","da","piedade"],"mg")
, (["são","geraldo","do","araguaia"],"pa")
, (["são","geraldo","do","baixio"],"mg")
, (["são","gonçalo"],"rj")
, (["são","gonçalo","do","abaeté"],"mg")
, (["são","gonçalo","do","amarante"],"ce")
, (["são","gonçalo","do","amarante"],"rn")
, (["são","gonçalo","do","gurguéia"],"pi")
, (["são","gonçalo","do","pará"],"mg")
, (["são","gonçalo","do","piauí"],"pi")
, (["são","gonçalo","do","rio","abaixo"],"mg")
, (["são","gonçalo","do","rio","preto"],"mg")
, (["são","gonçalo","do","sapucaí"],"mg")
, (["são","gonçalo","dos","campos"],"ba")
, (["são","gotardo"],"mg")
, (["são","jerônimo"],"rs")
, (["são","jerônimo","da","serra"],"pr")
, (["são","joão"],"pe")
, (["são","joão"],"pr")
, (["são","joão","batista","do","glória"],"mg")
, (["são","joão","batista"],"ma")
, (["são","joão","batista"],"sc")
, (["são","joão","da","baliza"],"rr")
, (["são","joão","da","barra"],"rj")
, (["são","joão","da","boa","vista"],"sp")
, (["são","joão","da","canabrava"],"pi")
, (["são","joão","da","fronteira"],"pi")
, (["são","joão","da","lagoa"],"mg")
, (["são","joão","d'aliança"],"go")
, (["são","joão","da","mata"],"mg")
, (["são","joão","da","paraúna"],"go")
, (["são","joão","da","ponta"],"pa")
, (["são","joão","da","ponte"],"mg")
, (["são","joão","das","duas","pontes"],"sp")
, (["são","joão","da","serra"],"pi")
, (["são","joão","das","missões"],"mg")
, (["são","joão","da","urtiga"],"rs")
, (["são","joão","da","varjota"],"pi")
, (["são","joão","de","iracema"],"sp")
, (["são","joão","del","-","rei"],"mg")
, (["são","joão","de","meriti"],"rj")
, (["são","joão","de","pirabas"],"pa")
, (["são","joão","do","araguaia"],"pa")
, (["são","joão","do","arraial"],"pi")
, (["são","joão","do","caiuá"],"pr")
, (["são","joão","do","cariri"],"pb")
, (["são","joão","do","caru"],"ma")
, (["são","joão","do","itaperiú"],"sc")
, (["são","joão","do","ivaí"],"pr")
, (["são","joão","do","jaguaribe"],"ce")
, (["são","joão","do","manhuaçu"],"mg")
, (["são","joão","do","manteninha"],"mg")
, (["são","joão","do","oeste"],"sc")
, (["são","joão","do","oriente"],"mg")
, (["são","joão","do","pacuí"],"mg")
, (["são","joão","do","paraíso"],"ma")
, (["são","joão","do","paraíso"],"mg")
, (["são","joão","do","pau","d'alho"],"sp")
, (["são","joão","do","piauí"],"pi")
, (["são","joão","do","polêsine"],"rs")
, (["são","joão","do","rio","do","peixe"],"pb")
, (["são","joão","do","sabugi"],"rn")
, (["são","joão","do","soter"],"ma")
, (["são","joão","dos","patos"],"ma")
, (["são","joão","do","sul"],"sc")
, (["são","joão","do","tigre"],"pb")
, (["são","joão","do","triunfo"],"pr")
, (["são","joão","evangelista"],"mg")
, (["são","joão","nepomuceno"],"mg")
, (["são","joaquim","da","barra"],"sp")
, (["são","joaquim","de","bicas"],"mg")
, (["são","joaquim","do","monte"],"pe")
, (["são","joaquim"],"sc")
, (["são","jorge"],"rs")
, (["são","jorge","d'oeste"],"pr")
, (["são","jorge","do","ivaí"],"pr")
, (["são","jorge","do","patrocínio"],"pr")
, (["são","josé"],"sc")
, (["são","josé","da","barra"],"mg")
, (["são","josé","da","bela","vista"],"sp")
, (["são","josé","da","boa","vista"],"pr")
, (["são","josé","da","coroa","grande"],"pe")
, (["são","josé","da","lage"],"al")
, (["são","josé","da","lagoa","tapada"],"pb")
, (["são","josé","da","lapa"],"mg")
, (["são","josé","da","safira"],"mg")
, (["são","josé","das","missões"],"rs")
, (["são","josé","das","palmeiras"],"pr")
, (["são","josé","da","tapera"],"al")
, (["são","josé","da","varginha"],"mg")
, (["são","josé","da","vitória"],"ba")
, (["são","josé","de","caiana"],"pb")
, (["são","josé","de","espinharas"],"pb")
, (["são","josé","de","mipibu"],"rn")
, (["são","josé","de","piranhas"],"pb")
, (["são","josé","de","princesa"],"pb")
, (["são","josé","de","ribamar"],"ma")
, (["são","josé","de","ubá"],"rj")
, (["são","josé","do","alegre"],"mg")
, (["são","josé","do","barreiro"],"sp")
, (["são","josé","do","belmonte"],"pe")
, (["são","josé","do","bonfim"],"pb")
, (["são","josé","do","brejo","do","cruz"],"pb")
, (["são","josé","do","calçado"],"es")
, (["são","josé","do","campestre"],"rn")
, (["são","josé","do","cedro"],"sc")
, (["são","josé","do","cerrito"],"sc")
, (["são","josé","do","divino"],"mg")
, (["são","josé","do","divino"],"pi")
, (["são","josé","do","egito"],"pe")
, (["são","josé","do","goiabal"],"mg")
, (["são","josé","do","herval"],"rs")
, (["são","josé","do","hortêncio"],"rs")
, (["são","josé","do","inhacorá"],"rs")
, (["são","josé","do","jacuípe"],"ba")
, (["são","josé","do","jacuri"],"mg")
, (["são","josé","do","mantimento"],"mg")
, (["são","josé","do","norte"],"rs")
, (["são","josé","do","ouro"],"rs")
, (["são","josé","do","peixe"],"pi")
, (["são","josé","do","piauí"],"pi")
, (["são","josé","do","povo"],"mt")
, (["são","josé","do","rio","claro"],"mt")
, (["são","josé","do","rio","pardo"],"sp")
, (["são","josé","do","rio","preto"],"sp")
, (["são","josé","do","sabugi"],"pb")
, (["são","josé","dos","ausentes"],"rs")
, (["são","josé","dos","basílios"],"ma")
, (["são","josé","dos","campos"],"sp")
, (["são","josé","dos","cordeiros"],"pb")
, (["são","josé","do","seridó"],"rn")
, (["são","josé","dos","pinhais"],"pr")
, (["são","josé","dos","quatro","marcos"],"mt")
, (["são","josé","dos","ramos"],"pb")
, (["são","josé","do","sul"],"rs")
, (["são","josé","do","vale","do","rio","preto"],"rj")
, (["são","josé","do","xingu"],"mt")
, (["são","julião"],"pi")
, (["são","leopoldo"],"rs")
, (["são","lourenço"],"mg")
, (["são","lourenço","da","mata"],"pe")
, (["são","lourenço","da","serra"],"sp")
, (["são","lourenço","do","oeste"],"sc")
, (["são","lourenço","do","piauí"],"pi")
, (["são","lourenço","do","sul"],"rs")
, (["são","ludgero"],"sc")
, (["são","luís"],"ma")
, (["são","luís"],"rr")
, (["são","luís","de","montes","belos"],"go")
, (["são","luís","do","curu"],"ce")
, (["são","luís","do","norte"],"go")
, (["são","luís","do","paraitinga"],"sp")
, (["são","luís","do","piauí"],"pi")
, (["são","luís","do","quitunde"],"al")
, (["são","luiz","gonzaga"],"rs")
, (["são","luís","gonzaga","do","maranhão"],"ma")
, (["são","mamede"],"pb")
, (["são","manoel","do","paraná"],"pr")
, (["são","manuel"],"sp")
, (["são","marcos"],"rs")
, (["são","martinho"],"rs")
, (["são","martinho"],"sc")
, (["são","martinho","da","serra"],"rs")
, (["são","mateus"],"es")
, (["são","mateus","do","maranhão"],"ma")
, (["são","mateus","do","sul"],"pr")
, (["são","miguel"],"rn")
, (["são","miguel","arcanjo"],"sp")
, (["são","miguel","da","baixa","grande"],"pi")
, (["são","miguel","da","boa","vista"],"sc")
, (["são","miguel","das","matas"],"ba")
, (["são","miguel","das","missões"],"rs")
, (["são","miguel","de","taipu"],"pb")
, (["são","miguel","de","touros"],"rn")
, (["são","miguel","do","aleixo"],"se")
, (["são","miguel","do","anta"],"mg")
, (["são","miguel","do","araguaia"],"go")
, (["são","miguel","do","oeste"],"sc")
, (["são","miguel","do","fidalgo"],"pi")
, (["são","miguel","do","guamá"],"pa")
, (["são","miguel","do","guaporé"],"ro")
, (["são","miguel","do","iguaçu"],"pr")
, (["são","miguel","do","passa","quatro"],"go")
, (["são","miguel","do","tapuio"],"pi")
, (["são","miguel","do","tocantins"],"to")
, (["são","miguel","dos","campos"],"al")
, (["são","miguel","dos","milagres"],"al")
, (["são","nicolau"],"rs")
, (["são","patrício"],"go")
, (["são","paulo"],"sp")
, (["são","paulo","das","missões"],"rs")
, (["são","paulo","de","olivença"],"am")
, (["são","paulo","do","potengi"],"rn")
, (["são","pedro"],"rn")
, (["são","pedro"],"sp")
, (["são","pedro","da","água","branca"],"ma")
, (["são","pedro","da","aldeia"],"rj")
, (["são","pedro","da","cipa"],"mt")
, (["são","pedro","da","serra"],"rs")
, (["são","pedro","das","missões"],"rs")
, (["são","pedro","da","união"],"mg")
, (["são","pedro","de","alcântara"],"sc")
, (["são","pedro","do","butiá"],"rs")
, (["são","pedro","do","iguaçu"],"pr")
, (["são","pedro","do","ivaí"],"pr")
, (["são","pedro","do","paraná"],"pr")
, (["são","pedro","do","piauí"],"pi")
, (["são","pedro","dos","crentes"],"ma")
, (["são","pedro","dos","ferros"],"mg")
, (["são","pedro","do","suaçuí"],"mg")
, (["são","pedro","do","sul"],"rs")
, (["são","pedro","do","turvo"],"sp")
, (["são","rafael"],"rn")
, (["são","raimundo","do","doca","bezerra"],"ma")
, (["são","raimundo","das","mangabeiras"],"ma")
, (["são","raimundo","nonato"],"pi")
, (["são","roberto"],"ma")
, (["são","romão"],"mg")
, (["são","roque"],"sp")
, (["são","roque","de","minas"],"mg")
, (["são","roque","do","canaã"],"es")
, (["são","salvador","do","tocantins"],"to")
, (["são","sebastião"],"al")
, (["são","sebastião"],"sp")
, (["são","sebastião","da","amoreira"],"pr")
, (["são","sebastião","da","bela","vista"],"mg")
, (["são","sebastião","da","boa","vista"],"pa")
, (["são","sebastião","da","grama"],"sp")
, (["são","sebastião","da","vargem","alegre"],"mg")
, (["são","sebastião","de","lagoa","de","roça"],"pb")
, (["são","sebastião","do","alto"],"rj")
, (["são","sebastião","do","anta"],"mg")
, (["são","sebastião","do","caí"],"rs")
, (["são","sebastião","do","maranhão"],"mg")
, (["são","sebastião","do","oeste"],"mg")
, (["são","sebastião","do","paraíso"],"mg")
, (["são","sebastião","do","passé"],"ba")
, (["são","sebastião","do","rio","preto"],"mg")
, (["são","sebastião","do","rio","verde"],"mg")
, (["são","sebastião","do","tocantins"],"to")
, (["são","sebastião","do","uatumã"],"am")
, (["são","sebastião","do","umbuzeiro"],"pb")
, (["são","sepé"],"rs")
, (["são","simão"],"go")
, (["são","simão"],"sp")
, (["são","tiago"],"mg")
, (["são","tomás","de","aquino"],"mg")
, (["são","tomé","das","letras"],"mg")
, (["são","tomé"],"pr")
, (["são","tomé"],"rn")
, (["são","valentim","do","sul"],"rs")
, (["são","valentim"],"rs")
, (["são","valério","da","natividade"],"to")
, (["são","valério","do","sul"],"rs")
, (["são","vendelino"],"rs")
, (["são","vicente"],"rn")
, (["são","vicente"],"sp")
, (["são","vicente","de","minas"],"mg")
, (["são","vicente","do","seridó"],"pb")
, (["são","vicente","do","sul"],"rs")
, (["são","vicente","ferrer"],"ma")
, (["são","vicente","ferrer"],"pe")
, (["sapeaçu"],"ba")
, (["sapé"],"pb")
, (["sapezal"],"mt")
, (["sapiranga"],"rs")
, (["sapopema"],"pr")
, (["sapucaia","do","sul"],"rs")
, (["sapucaia"],"pa")
, (["sapucaia"],"rj")
, (["sapucaí","-","mirim"],"mg")
, (["saquarema"],"rj")
, (["sarandi"],"pr")
, (["sarandi"],"rs")
, (["sarapuí"],"sp")
, (["sardoá"],"mg")
, (["sarutaiá"],"sp")
, (["sarzedo"],"mg")
, (["sátiro","dias"],"ba")
, (["satuba"],"al")
, (["satubinha"],"ma")
, (["saubara"],"ba")
, (["saudade","do","iguaçu"],"pr")
, (["saudades"],"sc")
, (["saúde"],"ba")
, (["schroeder"],"sc")
, (["seabra"],"ba")
, (["seara"],"sc")
, (["sebastianópolis","do","sul"],"sp")
, (["sebastião","barros"],"pi")
, (["sebastião","laranjeiras"],"ba")
, (["sebastião","leal"],"pi")
, (["seberi"],"rs")
, (["sede","nova"],"rs")
, (["segredo"],"rs")
, (["selbach"],"rs")
, (["selvíria"],"ms")
, (["sem","-","peixe"],"mg")
, (["senador","alexandre","costa"],"ma")
, (["senador","amaral"],"mg")
, (["senador","canedo"],"go")
, (["senador","catunda"],"ce")
, (["senador","cortes"],"mg")
, (["senador","elói","de","souza"],"rn")
, (["senador","firmino"],"mg")
, (["senador","georgino","avelino"],"rn")
, (["senador","guiomard"],"ac")
, (["senador","josé","bento"],"mg")
, (["senador","josé","porfírio"],"pa")
, (["senador","la","rocque"],"ma")
, (["senador","modestino","gonçalves"],"mg")
, (["senador","pompeu"],"ce")
, (["senador","rui","palmeira"],"al")
, (["senador","sá"],"ce")
, (["senador","salgado","filho"],"rs")
, (["sena","madureira"],"ac")
, (["sengés"],"pr")
, (["senhor","do","bonfim"],"ba")
, (["senhora","de","oliveira"],"mg")
, (["senhora","do","porto"],"mg")
, (["senhora","dos","remédios"],"mg")
, (["sentinela","do","sul"],"rs")
, (["sento","sé"],"ba")
, (["serafina","corrêa"],"rs")
, (["sericita"],"mg")
, (["seringueiras"],"ro")
, (["sério"],"rs")
, (["seritinga"],"mg")
, (["seropédica"],"rj")
, (["serra","alta"],"sc")
, (["serra","azul"],"sp")
, (["serra","azul","de","minas"],"mg")
, (["serra","branca"],"pb")
, (["serra","caiada"],"rn")
, (["serra","da","raiz"],"pb")
, (["serra","da","saudade"],"mg")
, (["serra","de","são","bento"],"rn")
, (["serra","do","mel"],"rn")
, (["serra","do","navio"],"ap")
, (["serra","do","ramalho"],"ba")
, (["serra","do","salitre"],"mg")
, (["serra","dos","aimorés"],"mg")
, (["serra","dourada"],"ba")
, (["serra"],"es")
, (["serra","grande"],"pb")
, (["serra","negra","do","norte"],"rn")
, (["serra","negra"],"sp")
, (["serra","nova","dourada"],"mt")
, (["serra","preta"],"ba")
, (["serra","redonda"],"pb")
, (["serra","talhada"],"pe")
, (["serrana"],"sp")
, (["serrania"],"mg")
, (["serrano","do","maranhão"],"ma")
, (["serranópolis","de","minas"],"mg")
, (["serranópolis","do","iguaçu"],"pr")
, (["serranópolis"],"go")
, (["serranos"],"mg")
, (["serraria"],"pb")
, (["serrinha"],"ba")
, (["serrinha"],"rn")
, (["serrinha","dos","pintos"],"rn")
, (["serrita"],"pe")
, (["serrolândia"],"ba")
, (["serro"],"mg")
, (["sertaneja"],"pr")
, (["sertânia"],"pe")
, (["sertanópolis"],"pr")
, (["sertão"],"rs")
, (["sertão","santana"],"rs")
, (["sertãozinho"],"pb")
, (["sertãozinho"],"sp")
, (["sete","barras"],"sp")
, (["sete","de","setembro"],"rs")
, (["sete","lagoas"],"mg")
, (["sete","quedas"],"ms")
, (["setubinha"],"mg")
, (["severiano","de","almeida"],"rs")
, (["severiano","melo"],"rn")
, (["severínia"],"sp")
, (["siderópolis"],"sc")
, (["sidrolândia"],"ms")
, (["sigefredo","pacheco"],"pi")
, (["silva","jardim"],"rj")
, (["silvânia"],"go")
, (["silvanópolis"],"to")
, (["silveira","martins"],"rs")
, (["silveirânia"],"mg")
, (["silveiras"],"sp")
, (["silves"],"am")
, (["silvianópolis"],"mg")
, (["simão","dias"],"se")
, (["simão","pereira"],"mg")
, (["simões"],"pi")
, (["simões","filho"],"ba")
, (["simolândia"],"go")
, (["simonésia"],"mg")
, (["simplício","mendes"],"pi")
, (["sinimbu"],"rs")
, (["sinop"],"mt")
, (["siqueira","campos"],"pr")
, (["sirinhaém"],"pe")
, (["siriri"],"se")
, (["sítio","do","mato"],"ba")
, (["sítio","do","quinto"],"ba")
, (["sítio","d'abadia"],"go")
, (["sítio","novo","do","tocantins"],"to")
, (["sítio","novo"],"ma")
, (["sítio","novo"],"rn")
, (["sobradinho"],"ba")
, (["sobradinho"],"rs")
, (["sobrado"],"pb")
, (["sobral"],"ce")
, (["sobrália"],"mg")
, (["socorro"],"sp")
, (["socorro","do","piauí"],"pi")
, (["solânea"],"pb")
, (["soledade"],"pb")
, (["soledade"],"rs")
, (["soledade","de","minas"],"mg")
, (["solidão"],"pe")
, (["solonópole"],"ce")
, (["sombrio"],"sc")
, (["sonora"],"ms")
, (["sooretama"],"es")
, (["sorocaba"],"sp")
, (["sorriso"],"mt")
, (["sossêgo"],"pb")
, (["soure"],"pa")
, (["sousa"],"pb")
, (["souto","soares"],"ba")
, (["sucupira"],"to")
, (["sucupira","do","norte"],"ma")
, (["sucupira","do","riachão"],"ma")
, (["sud","mennucci"],"sp")
, (["sul","brasil"],"sc")
, (["sulina"],"pr")
, (["sumaré"],"sp")
, (["sumé"],"pb")
, (["sumidouro"],"rj")
, (["surubim"],"pe")
, (["sussuapara"],"pi")
, (["suzanápolis"],"sp")
, (["suzano"],"sp")
, (["tabaí"],"rs")
, (["tabaporã"],"mt")
, (["tabapuã"],"sp")
, (["tabatinga"],"am")
, (["tabatinga"],"sp")
, (["tabira"],"pe")
, (["taboão","da","serra"],"sp")
, (["tabocas","do","brejo","velho"],"ba")
, (["taboleiro","grande"],"rn")
, (["tabuleiro","do","norte"],"ce")
, (["tabuleiro"],"mg")
, (["tacaimbó"],"pe")
, (["tacaratu"],"pe")
, (["taciba"],"sp")
, (["tacima"],"pb")
, (["tacuru"],"ms")
, (["taguaí"],"sp")
, (["taguatinga"],"to")
, (["taiaçu"],"sp")
, (["tailândia"],"pa")
, (["taiobeiras"],"mg")
, (["taió"],"sc")
, (["taipas","do","tocantins"],"to")
, (["taipu"],"rn")
, (["taiúva"],"sp")
, (["talismã"],"to")
, (["tamandaré"],"pe")
, (["tamarana"],"pr")
, (["tambaú"],"sp")
, (["tamboara"],"pr")
, (["tamboril"],"ce")
, (["tamboril","do","piauí"],"pi")
, (["tanabi"],"sp")
, (["tangará","da","serra"],"mt")
, (["tangará"],"rn")
, (["tangará"],"sc")
, (["tanguá"],"rj")
, (["tanhaçu"],"ba")
, (["tanque","d'arca"],"al")
, (["tanque","do","piauí"],"pi")
, (["tanque","novo"],"ba")
, (["tanquinho"],"ba")
, (["taparuba"],"mg")
, (["tapauá"],"am")
, (["tapejara","(paraná)"],"pr")
, (["tapejara"],"rs")
, (["tapera"],"rs")
, (["taperoá"],"ba")
, (["taperoá"],"pb")
, (["tapes"],"rs")
, (["tapiraí"],"mg")
, (["tapiraí"],"sp")
, (["tapira"],"mg")
, (["tapira"],"pr")
, (["tapiramutá"],"ba")
, (["tapiratiba"],"sp")
, (["tapurah"],"mt")
, (["taquara"],"rs")
, (["taquaraçu","de","minas"],"mg")
, (["taquaral"],"sp")
, (["taquaral","de","goiás"],"go")
, (["taquarana"],"al")
, (["taquari"],"rs")
, (["taquaritinga"],"sp")
, (["taquaritinga","do","norte"],"pe")
, (["taquarituba"],"sp")
, (["taquarivaí"],"sp")
, (["taquaruçu","do","sul"],"rs")
, (["taquarussu","do","porto"],"to")
, (["taquarussu"],"ms")
, (["tarabai"],"sp")
, (["tarauacá"],"ac")
, (["tarrafas"],"ce")
, (["tartarugalzinho"],"ap")
, (["tarumã"],"sp")
, (["tarumirim"],"mg")
, (["tasso","fragoso"],"ma")
, (["tatuí"],"sp")
, (["tauá"],"ce")
, (["taubaté"],"sp")
, (["tavares"],"pb")
, (["tavares"],"rs")
, (["tefé"],"am")
, (["teixeira"],"pb")
, (["teixeira","de","freitas"],"ba")
, (["teixeira","soares"],"pr")
, (["teixeiras"],"mg")
, (["teixeirópolis"],"ro")
, (["tejuçuoca"],"ce")
, (["tejupá"],"sp")
, (["telêmaco","borba"],"pr")
, (["telha"],"se")
, (["tenente","ananias"],"rn")
, (["tenente","laurentino","cruz"],"rn")
, (["tenente","portela"],"rs")
, (["tenório"],"pb")
, (["teodoro","sampaio"],"ba")
, (["teodoro","sampaio"],"sp")
, (["teofilândia"],"ba")
, (["teófilo","otoni"],"mg")
, (["teolândia"],"ba")
, (["teotônio","vilela"],"al")
, (["terenos"],"ms")
, (["teresina"],"pi")
, (["teresópolis"],"rj")
, (["teresina","de","goiás"],"go")
, (["terezinha"],"pe")
, (["terezópolis","de","goiás"],"go")
, (["terra","alta"],"pa")
, (["terra","boa"],"pr")
, (["terra","de","areia"],"rs")
, (["terra","nova"],"ba")
, (["terra","nova"],"pe")
, (["terra","nova","do","norte"],"mt")
, (["terra","rica"],"pr")
, (["terra","roxa"],"pr")
, (["terra","roxa"],"sp")
, (["terra","santa"],"pa")
, (["tesouro"],"mt")
, (["teutônia"],"rs")
, (["theobroma"],"ro")
, (["tianguá"],"ce")
, (["tibagi"],"pr")
, (["tibau","do","sul"],"rn")
, (["tibau"],"rn")
, (["tietê"],"sp")
, (["tigrinhos"],"sc")
, (["tijucas"],"sc")
, (["tijucas","do","sul"],"pr")
, (["timbaúba"],"pe")
, (["timbaúba","dos","batistas"],"rn")
, (["timbé","do","sul"],"sc")
, (["timbiras"],"ma")
, (["timbó"],"sc")
, (["timbó","grande"],"sc")
, (["timburi"],"sp")
, (["timon"],"ma")
, (["timóteo"],"mg")
, (["tio","hugo"],"rs")
, (["tiradentes|tiradentes"],"mg")
, (["tiradentes","do","sul"],"rs")
, (["tiros"],"mg")
, (["tobias","barreto"],"se")
, (["tocantínia"],"to")
, (["tocantinópolis"],"to")
, (["tocantins"],"mg")
, (["tocos","do","moji"],"mg")
, (["toledo"],"mg")
, (["toledo"],"pr")
, (["tomar","do","geru"],"se")
, (["tomazina"],"pr")
, (["tombos"],"mg")
, (["tomé","-","açu"],"pa")
, (["tonantins"],"am")
, (["toritama"],"pe")
, (["torixoréu"],"mt")
, (["toropi"],"rs")
, (["torre","de","pedra"],"sp")
, (["torres"],"rs")
, (["torrinha"],"sp")
, (["touros"],"rn")
, (["trabiju"],"sp")
, (["tracuateua"],"pa")
, (["tracunhaém"],"pe")
, (["traipu"],"al")
, (["trairão"],"pa")
, (["trairi"],"ce")
, (["trajano","de","moraes"],"rj")
, (["tramandaí"],"rs")
, (["travesseiro"],"rs")
, (["tremedal"],"ba")
, (["tremembé"],"sp")
, (["três","arroios"],"rs")
, (["três","barras","do","paraná"],"pr")
, (["três","barras"],"sc")
, (["três","cachoeiras"],"rs")
, (["três","corações"],"mg")
, (["três","coroas"],"rs")
, (["três","de","maio"],"rs")
, (["três","forquilhas"],"rs")
, (["três","fronteiras"],"sp")
, (["três","lagoas"],"ms")
, (["três","marias"],"mg")
, (["três","palmeiras"],"rs")
, (["três","passos"],"rs")
, (["três","pontas"],"mg")
, (["três","ranchos"],"go")
, (["três","rios"],"rj")
, (["treviso"],"sc")
, (["treze","de","maio"],"sc")
, (["treze","tílias"],"sc")
, (["trizidela","do","vale"],"ma")
, (["trindade"],"go")
, (["trindade"],"pe")
, (["trindade","do","sul"],"rs")
, (["triunfo"],"pb")
, (["triunfo"],"pe")
, (["triunfo"],"rs")
, (["triunfo","potiguar"],"rn")
, (["trombas"],"go")
, (["trombudo","central"],"sc")
, (["tubarão"],"sc")
, (["tucano"],"ba")
, (["tucumã"],"pa")
, (["tucunduva"],"rs")
, (["tucuruí"],"pa")
, (["tufilândia"],"ma")
, (["tuiuti"],"sp")
, (["tumiritinga"],"mg")
, (["tunápolis"],"sc")
, (["tunas","do","paraná"],"pr")
, (["tunas"],"rs")
, (["tuneiras","do","oeste"],"pr")
, (["tuntum"],"ma")
, (["tupaciguara"],"mg")
, (["tupanatinga"],"pe")
, (["tupanci","do","sul"],"rs")
, (["tupanciretã"],"rs")
, (["tupandi"],"rs")
, (["tuparendi"],"rs")
, (["tuparetama"],"pe")
, (["tupã"],"sp")
, (["tupãssi"],"pr")
, (["tupi","paulista"],"sp")
, (["tupirama"],"to")
, (["tupiratins"],"to")
, (["turiaçu"],"ma")
, (["turilândia"],"ma")
, (["turiúba"],"sp")
, (["turmalina"],"mg")
, (["turmalina"],"sp")
, (["turuçu"],"rs")
, (["tururu"],"ce")
, (["turvânia"],"go")
, (["turvelândia"],"go")
, (["turvo"],"pr")
, (["turvo"],"sc")
, (["turvolândia"],"mg")
, (["tutóia"],"ma")
, (["uarini"],"am")
, (["uauá"],"ba")
, (["ubaí"],"mg")
, (["ubaíra"],"ba")
, (["ubaitaba"],"ba")
, (["ubajara"],"ce")
, (["ubá"],"mg")
, (["ubaporanga"],"mg")
, (["ubarana"],"sp")
, (["ubatã"],"ba")
, (["ubatuba"],"sp")
, (["uberaba"],"mg")
, (["uberlândia"],"mg")
, (["ubirajara"],"sp")
, (["ubiratã"],"pr")
, (["ubiretama"],"rs")
, (["uchoa"],"sp")
, (["uibaí"],"ba")
, (["uiramutã"],"rr")
, (["uirapuru"],"go")
, (["uiraúna"],"pb")
, (["ulianópolis"],"pa")
, (["umari"],"ce")
, (["umarizal"],"rn")
, (["umbaúba"],"se")
, (["umburanas"],"ba")
, (["umburatiba"],"mg")
, (["umbuzeiro"],"pb")
, (["umirim"],"ce")
, (["umuarama"],"pr")
, (["una"],"ba")
, (["unaí"],"mg")
, (["união"],"pi")
, (["união","da","serra"],"rs")
, (["união","da","vitória"],"pr")
, (["união","de","minas"],"mg")
, (["união","do","oeste"],"sc")
, (["união","do","sul"],"mt")
, (["união","dos","palmares"],"al")
, (["união","paulista"],"sp")
, (["uniflor"],"pr")
, (["unistalda"],"rs")
, (["upanema"],"rn")
, (["uraí"],"pr")
, (["urandi"],"ba")
, (["urânia"],"sp")
, (["urbano","santos"],"ma")
, (["uruaçu"],"go")
, (["uruana","de","minas"],"mg")
, (["uruana"],"go")
, (["uruará"],"pa")
, (["urubici"],"sc")
, (["uruburetama"],"ce")
, (["urucânia"],"mg")
, (["urucará"],"am")
, (["uruçuca"],"ba")
, (["urucuia"],"mg")
, (["uruçuí"],"pi")
, (["urucurituba"],"am")
, (["uruguaiana"],"rs")
, (["uruoca"],"ce")
, (["urupá"],"ro")
, (["urupema"],"sc")
, (["urupês"],"sp")
, (["uru"],"sp")
, (["urussanga"],"sc")
, (["urutaí"],"go")
, (["utinga"],"ba")
, (["vacaria"],"rs")
, (["vale","de","são","domingos"],"mt")
, (["vale","do","anari"],"ro")
, (["vale","do","paraíso"],"ro")
, (["vale","do","sol"],"rs")
, (["valença"],"ba")
, (["valença"],"rj")
, (["valença","do","piauí"],"pi")
, (["valente"],"ba")
, (["valentim","gentil"],"sp")
, (["vale","real"],"rs")
, (["vale","verde"],"rs")
, (["valinhos"],"sp")
, (["valparaíso"],"sp")
, (["valparaíso","de","goiás"],"go")
, (["vanini"],"rs")
, (["vargeão"],"sc")
, (["vargem"],"sc")
, (["vargem"],"sp")
, (["vargem","alegre"],"mg")
, (["vargem","alta"],"es")
, (["vargem","bonita"],"mg")
, (["vargem","bonita"],"sc")
, (["vargem","grande","do","rio","pardo"],"mg")
, (["vargem","grande","do","sul"],"sp")
, (["vargem","grande"],"ma")
, (["vargem","grande","paulista"],"sp")
, (["varginha"],"mg")
, (["varjão"],"go")
, (["varjão","de","minas"],"mg")
, (["varjota"],"ce")
, (["varre","-","sai"],"rj")
, (["várzea"],"pb")
, (["várzea"],"rn")
, (["várzea","alegre"],"ce")
, (["várzea","branca"],"pi")
, (["várzea","da","palma"],"mg")
, (["várzea","da","roça"],"ba")
, (["várzea","do","poço"],"ba")
, (["várzea","grande"],"mt")
, (["várzea","grande"],"pi")
, (["várzea","nova"],"ba")
, (["várzea","paulista"],"sp")
, (["varzedo"],"ba")
, (["varzelândia"],"mg")
, (["vassouras"],"rj")
, (["vazante"],"mg")
, (["venâncio","aires"],"rs")
, (["venda","nova","do","imigrante"],"es")
, (["venha","-","ver"],"rn")
, (["ventania"],"pr")
, (["venturosa"],"pe")
, (["vera"],"mt")
, (["vera","cruz"],"ba")
, (["vera","cruz"],"rn")
, (["vera","cruz"],"rs")
, (["vera","cruz"],"sp")
, (["vera","cruz","do","oeste"],"pr")
, (["vera","mendes"],"pi")
, (["veranópolis"],"rs")
, (["verdejante"],"pe")
, (["verdelândia"],"mg")
, (["vereda"],"ba")
, (["veredinha"],"mg")
, (["verê"],"pr")
, (["veríssimo"],"mg")
, (["vermelho","novo"],"mg")
, (["vertente","do","lério"],"pe")
, (["vertentes"],"pe")
, (["vespasiano"],"mg")
, (["vespasiano","correa"],"rs")
, (["viadutos"],"rs")
, (["viamão"],"rs")
, (["viana"],"es")
, (["viana"],"ma")
, (["vianópolis"],"go")
, (["vicência"],"pe")
, (["vicente","dutra"],"rs")
, (["vicentina"],"ms")
, (["vicentinópolis"],"go")
, (["viçosa"],"al")
, (["viçosa"],"mg")
, (["viçosa"],"rn")
, (["viçosa","do","ceará"],"ce")
, (["victor","graeff"],"rs")
, (["vidal","ramos"],"sc")
, (["videira"],"sc")
, (["vieiras"],"mg")
, (["vieirópolis"],"pb")
, (["vigia"],"pa")
, (["vila","bela","da","santíssima","trindade"],"mt")
, (["vila","boa"],"go")
, (["vila","flores"],"rs")
, (["vila","flor"],"rn")
, (["vila","lângaro"],"rs")
, (["vila","maria"],"rs")
, (["vila","nova","do","piauí"],"pi")
, (["vila","nova","dos","martírios"],"ma")
, (["vila","nova","do","sul"],"rs")
, (["vila","pavão"],"es")
, (["vila","propício"],"go")
, (["vila","rica"],"mt")
, (["vila","valério"],"es")
, (["vila","velha"],"es")
, (["vilhena"],"ro")
, (["vinhedo"],"sp")
, (["viradouro"],"sp")
, (["virgem","da","lapa"],"mg")
, (["virgínia"],"mg")
, (["virginópolis"],"mg")
, (["virgolândia"],"mg")
, (["virmond"],"pr")
, (["visconde","do","rio","branco"],"mg")
, (["vista","alegre","do","alto"],"sp")
, (["vista","alegre","do","prata"],"rs")
, (["vista","alegre"],"rs")
, (["vista","gaúcha"],"rs")
, (["vista","serrana"],"pb")
, (["vitória"],"es")
, (["vitória","brasil"],"sp")
, (["vitória","da","conquista"],"ba")
, (["vitória","das","missões"],"rs")
, (["vitória","de","santo","antão"],"pe")
, (["vitória","do","jari"],"ap")
, (["vitória","do","mearim"],"ma")
, (["vitória","do","xingu"],"pa")
, (["vitorino"],"pr")
, (["vitorino","freire"],"ma")
, (["vitor","meireles"],"sc")
, (["viseu"],"pa")
, (["volta","grande"],"mg")
, (["volta","redonda"],"rj")
, (["votorantim"],"sp")
, (["votuporanga"],"sp")
, (["wagner"],"ba")
, (["wall","ferraz"],"pi")
, (["wanderlândia"],"to")
, (["wanderley"],"ba")
, (["wenceslau","braz"],"mg")
, (["wenceslau","braz"],"pr")
, (["wenceslau","guimarães"],"ba")
, (["westfalia"],"rs")
, (["witmarsum"],"sc")
, (["xambioá"],"to")
, (["xambrê"],"pr")
, (["xangri","-","lá"],"rs")
, (["xanxerê"],"sc")
, (["xapuri"],"ac")
, (["xavantina"],"sc")
, (["xaxim"],"sc")
, (["xexéu"],"pe")
, (["xinguara"],"pa")
, (["xique","-","xique"],"ba")
, (["zabelê"],"pb")
, (["zacarias"],"sp")
, (["zé","doca"],"ma")
, (["zortéa"],"sc")
]
-- Name aliases: all three are component lists ('NomeComposto'); the
-- distinction is purely documentary (raw name vs. normalised name vs. state).
type NomeMunicipio = NomeComposto
type NomeMunicipioNormal = NomeComposto
type NomeEstadoNormal = NomeComposto

-- | Derived table: each municipality name from 'lista1' lower-cased, paired
-- with its cleaned-up (accent- and stop-word-free) form and the normalised
-- state name looked up from the two-letter abbreviation.
lista2 :: [(NomeMunicipio,(NomeMunicipioNormal,NomeEstadoNormal))]
lista2 = [let nome' = map (map toLower) nome in (nome',(limpaNome nome',lookupEstado estado)) | (nome,estado) <- lista1]

-- | Resolve a two-letter state abbreviation to its normalised name.
-- NOTE(review): uses the partial map index operator — a key missing from
-- 'abrevEstados' would crash; assumed safe because 'lista1' only uses the
-- abbreviations listed below (TODO confirm).
lookupEstado sg = abrevEstados ! sg

-- | Two-letter state abbreviation -> normalised (accent-free, stop-word-free)
-- state name, as a list of words.
abrevEstados = M.fromList $ [
    ("ac",["acre"])
  , ("al",["alagoas"])
  , ("am",["amazonas"])
  , ("ap",["amapa"])
  , ("ba",["bahia"])
  , ("ce",["ceara"])
  , ("df",["distrito","federal"])
  , ("es",["espirito","santo"])
  , ("go",["goias"])
  , ("ma",["maranhao"])
  , ("mg",["minas","gerais"])
  , ("ms",["mato","grosso","sul"])
  , ("mt",["mato","grosso"])
  , ("pa",["para"])
  , ("pb",["paraiba"])
  , ("pe",["pernambuco"])
  , ("pi",["piaui"])
  , ("pr",["parana"])
  , ("rj",["rio","janeiro"])
  , ("rn",["rio","grande","norte"])
  , ("ro",["rondonia"])
  , ("rr",["roraima"])
  , ("rs",["rio","grande","sul"])
  , ("sc",["santa","catarina"])
  , ("se",["sergipe"])
  , ("sp",["sao","paulo"])
  , ("to",["tocantins"])
  ]

-- | Normalise a multi-word name: strip accents from every character and then
-- drop connective stop-words ("de", "do", ...).
limpaNome = filter (\s -> not $ S.member s ignoredWords) . map (map limpaChar)

-- | Words ignored when comparing municipality names.
ignoredWords = S.fromList [
    "de", "do", "da", "dos", "das", "no", "na", "-", "e"
  ]
-- | Map an accented Latin vowel to its unaccented ASCII counterpart.
-- Characters without an entry in the table pass through unchanged.
limpaChar :: Char -> Char
limpaChar ch = maybe ch id (lookup ch accentTable)
  where
    accentTable =
      [ ('á','a'), ('à','a'), ('ã','a'), ('â','a')
      , ('é','e'), ('è','e'), ('ê','e')
      , ('í','i'), ('î','i')
      , ('ó','o'), ('ò','o'), ('ô','o'), ('õ','o')
      , ('ú','u'), ('ù','u'), ('û','u'), ('ü','u')
      ]
-- | A trie keyed by path components of type @a@.  Each node stores the list
-- of payloads @[b]@ whose key path ends exactly at that node, plus a map of
-- child tries keyed by the next path component.
newtype Trie a b = Trie ([b],M.Map a (Trie a b)) deriving (Eq,Ord,Show)

-- | Unwrap the 'Trie' constructor.
unTrie (Trie r) = r

-- | Trie with no payloads and no children.
emptyTrie :: Trie a b
emptyTrie = Trie ([],M.empty)
-- | Build a 'Trie' from key-path/value pairs.  Values whose key paths
-- coincide accumulate in the same node; later pairs end up at the front of
-- that node's payload list.
makeTrie :: Ord a => [([a],b)] -> Trie a b
makeTrie = foldl step emptyTrie
  where
    step acc (path, val) = insertPath path val acc
    -- Walk down the key path, creating missing children on the way, and
    -- prepend the value at the node where the path ends.
    insertPath [] val (Trie (vals, children)) =
        Trie (val : vals, children)
    insertPath (k:rest) val (Trie (vals, children)) =
        let child = M.findWithDefault emptyTrie k children
        in Trie (vals, M.insert k (insertPath rest val child) children)
-- | All municipalities of 'lista2' arranged as a trie over name components.
trieMunicipios :: Trie ComponenteNome (NomeMunicipioNormal,NomeEstadoNormal)
trieMunicipios = makeTrie lista2

-- | Consume tokens while they match a path through the trie, returning the
-- start and end positions and the payloads of the deepest matching node.
-- Fails (via 'fail') when the stopping node carries no payloads — presumably
-- the surrounding parser monad then backtracks over the consumed tokens
-- (TODO confirm against LinkerParserMonad semantics).
parseTrie :: Trie String b -> LinkerParserMonad (Pos,Pos,[b])
parseTrie (Trie (here,m)) = do
    (inicio,td) <- anyTokenData
    -- Normalise the token to the trie's key alphabet: words are lower-cased,
    -- hyphens become "-", and any other token can never match a key.
    let w = case td of
              Palavra s -> map toLower s
              Hifen -> "-"
              _ -> ""
    case M.lookup w m of
      Nothing -> result inicio inicio here
      Just trie -> do
        (_,fim,res) <- parseTrie trie
        result inicio fim res
  where
    result _ _ [] = fail "Nothing found"
    result i f l = return (i,f,l)

-- | Parse a municipality name at the current input position.
parseMunicipio :: LinkerParserMonad (Pos,Pos,[([String],[String])])
parseMunicipio = parseTrie trieMunicipios
| lexml/lexml-linker | src/main/haskell/LexML/Linker/Municipios.hs | gpl-2.0 | 180,436 | 0 | 14 | 34,060 | 82,411 | 54,817 | 27,594 | 5,659 | 5 |
module GRSynth.Utils where
import Data.Either (either)
import Data.Maybe (fromJust)
import Control.Applicative
import qualified Data.Set as S
import MuCalc.MuFormula hiding (iff, implies)
import GRSynth.States
import MuCalc.Realization
import GRSynth.Semantics
import GRSynth.GameStructure
import Test.QuickCheck
import Test.HUnit hiding (Test, State)
import Test.Framework.Providers.QuickCheck2
import Test.Framework.Providers.HUnit
import Test.Framework
-- | True exactly when the value is a 'Left'.
isLeft (Left _) = True
isLeft (Right _) = False
-- | True exactly when the value is a 'Right'.
isRight = not . isLeft
-- | Boolean implication: false only when the antecedent holds and the
-- consequent fails.
implies True q = q
implies False _ = True
-- | Boolean biconditional, defined as implication in both directions.
iff a b = implies a b && implies b a
-- | Turn labelled QuickCheck properties into test-framework tests.
zipProperties :: [(String, Property)] -> [Test]
zipProperties = uncurry (zipWith testProperty) . unzip

-- | Turn labelled HUnit assertions into test-framework tests.
zipTestCases :: [(String, Assertion)] -> [Test]
zipTestCases = uncurry (zipWith testCase) . unzip

-- | Decode each proposition state back into a domain state.
-- NOTE(review): partial — 'fromJust' errors if any state fails to decode;
-- callers are expected to pass only encodable states (TODO confirm).
interpret :: State s => [PState] -> [s]
interpret = map (fst . fromJust . decode)

-- | Assert that two lists are equal up to ordering and duplicates
-- (they are compared as sets).
(@?~) :: (Ord a, Show a) => [a] -> [a] -> Assertion
(@?~) xs ys = S.fromList xs @?= S.fromList ys

-- | A two-bit unsigned integer (valid values 0..3) used as a tiny test domain.
newtype Int2 = Int2 Int
  deriving (Show, Eq, Ord)

instance State Int2 where
  -- Big-endian two-bit encoding: [high bit, low bit]; errors outside 0..3.
  encode (Int2 x) = if x < 0 || x >= 4
                    then error "Invalid 2 bit integer"
                    else [x >= 2, x `mod` 2 == 1]
  -- Consume two booleans from the front of the state; Nothing on short input.
  decode pState = if length pState < 2
                  then Nothing
                  else let (two, rest) = splitAt 2 pState
                           x = (if head two then 2 else 0) + (if head $ tail two then 1 else 0)
                       in Just (Int2 x, rest)

-- Small fixture domains shared by the game-structure tests.
domX = map Int2 [0..3]
domY = map Int2 [0..3]
-- All (x,y) pairs over the two domains.
cartProd = (,) <$> domX <*> domY
baseGS = newGameStructure domX domY
| johnbcoughlin/mucalc | test/GRSynth/Utils.hs | gpl-2.0 | 1,605 | 0 | 15 | 396 | 618 | 346 | 272 | 42 | 1 |
{- |
Module : $Header$
Copyright : (c) Klaus Hartke, Uni Bremen 2008
License : GPLv2 or higher, see LICENSE.txt
Maintainer : Christian.Maeder@dfki.de
Stability : experimental
Portability : portable
-}
module ModalCaslToNuSmvLtl where
import Control.Monad as Monad
import Data.Maybe as Maybe
import Text.ParserCombinators.Parsec hiding (State)
import ModalCasl as Casl
import NuSmvLtl as Ltl
{------------------------------------------------------------------------------}
{- -}
{- Convert Modal CASL formulas to LTL formulas recognized by NuSMV -}
{- -}
{------------------------------------------------------------------------------}
-- | Translate a Modal CASL state formula into an LTL formula, if possible.
-- Only propositional variables and universally path-quantified formulas
-- ('Casl.A') have an LTL counterpart; everything else yields 'Nothing'.
convert :: Casl.StateFormula a -> Maybe (Ltl.Formula a)
convert (Casl.Var x) = Just (Ltl.Var x)
convert (Casl.A phi) = convert' phi
convert _ = Nothing

-- | Translate a Modal CASL path formula into NuSMV LTL.
-- NOTE: the clause for @Por (Pnot phi) psi@ must come first — it recognises
-- implication (@not phi \/ psi@) before the generic disjunction clause can
-- fire, so clause order is significant here.
convert' :: Casl.PathFormula a -> Maybe (Ltl.Formula a)
convert' (Casl.Por (Casl.Pnot phi) psi) = liftM2 Ltl.Impl (convert' phi) (convert' psi)
convert' (Casl.State (Casl.Var x)) = Just (Ltl.Var x)
-- Propositional connectives map one-to-one.
convert' (Casl.Pnot phi) = liftM Ltl.Not (convert' phi)
convert' (Casl.Pand phi psi) = liftM2 Ltl.And (convert' phi) (convert' psi)
convert' (Casl.Por phi psi) = liftM2 Ltl.Or (convert' phi) (convert' psi)
-- Future-time operators; the weak/strict until variants are rewritten in
-- terms of the primitively supported B ("before"/release-style) and U'.
convert' (Casl.X phi) = liftM Ltl.X (convert' phi)
convert' (Casl.G phi) = liftM Ltl.G (convert' phi)
convert' (Casl.F phi) = liftM Ltl.F (convert' phi)
convert' (Casl.W phi psi) = convert' ((phi `Casl.Pand` psi) `Casl.B` ((Casl.Pnot phi) `Casl.Pand` psi))
convert' (Casl.U phi psi) = convert' (psi `Casl.B` ((Casl.Pnot phi) `Casl.Pand` (Casl.Pnot psi)))
convert' (Casl.B phi psi) = liftM2 Ltl.V (convert' phi) (convert' psi)
convert' (Casl.W' phi psi) = convert' ((Casl.Pnot psi) `Casl.U'` (phi `Casl.Pand` psi))
convert' (Casl.U' phi psi) = liftM2 Ltl.U (convert' phi) (convert' psi)
convert' (Casl.B' phi psi) = convert' ((Casl.Pnot psi) `Casl.U'` (phi `Casl.Pand` (Casl.Pnot psi)))
-- Past-time operators, mirrored onto NuSMV's past-LTL connectives.
convert' (Casl.XPast phi) = liftM Ltl.Y (convert' phi)
convert' (Casl.GPast phi) = liftM Ltl.H (convert' phi)
convert' (Casl.FPast phi) = liftM Ltl.O (convert' phi)
convert' (Casl.WPast phi psi) = convert' ((phi `Casl.Pand` psi) `Casl.BPast` ((Casl.Pnot phi) `Casl.Pand` psi))
convert' (Casl.UPast phi psi) = convert' (psi `Casl.BPast` ((Casl.Pnot phi) `Casl.Pand` (Casl.Pnot psi)))
convert' (Casl.BPast phi psi) = liftM2 Ltl.T (convert' phi) (convert' psi)
convert' (Casl.WPast' phi psi) = convert' ((Casl.Pnot psi) `Casl.UPast'` (phi `Casl.Pand` psi))
convert' (Casl.UPast' phi psi) = liftM2 Ltl.S (convert' phi) (convert' psi)
convert' (Casl.BPast' phi psi) = convert' ((Casl.Pnot psi) `Casl.UPast'` (phi `Casl.Pand` (Casl.Pnot psi)))
convert' (Casl.XPast' phi) = liftM Ltl.Z (convert' phi)
convert' _ = Nothing
{------------------------------------------------------------------------------}
-- | A raw identifier/atom as it appears in the textual input.
data Expr = Expr String

instance Show Expr where show (Expr x) = x

-- | Parser for atoms: one or more lower-case letters, '=', '_', '.' or digits.
expr = liftM Expr (many1 (lower <|> char '=' <|> char '_' <|> char '.' <|> digit))

-- | Parse a Modal CASL formula and convert it to LTL in one go.
-- NOTE(review): partial — both the @Right@ and the @Just@ pattern bindings
-- crash on a parse failure or an untranslatable formula; apparently intended
-- for interactive/ghci use only (TODO confirm).
parseAndConvert :: String -> Formula Expr
parseAndConvert text = let Right x = parse (Casl.parser expr) "<<input>>" text in
                       let Just y = convert x in
                       y
{------------------------------------------------------------------------------}
| nevrenato/Hets_Fork | Temporal/ModalCaslToNuSmvLtl.hs | gpl-2.0 | 3,758 | 0 | 13 | 1,024 | 1,313 | 678 | 635 | 43 | 1 |
module MusicGraph where
import Util (foundIn, uniq)
import Data.List
-- import Music (Name(..), Accidental(..),
-- Quality(..), Number(..), Ratio(..),
-- sharp, flat, natural, pitch, int, note, relnote,
-- crotchet, minim, semibreve, quaver,
-- AbstractPitch2(..), AbstractInt2(..), AbstractDur2(..),
-- AbstractNote(..), Note)
--
--
-- x = note (pitch A sharp) minim
-- y = note (pitch G flat) quaver
--
-- z = relnote (int Dim Fourth) crotchet
-- | A graph node: a value and the list of its successor (sub)graphs.
-- Shared/cyclic structure is possible, so the traversals below thread a
-- visited list to guarantee termination.
data Graph a = a :~> [Graph a]

instance (Show a, Eq a) => Show (Graph a) where
  show g = traverseShow [] g
-- | Render a node label wrapped in braces, e.g. @{3}@.
showNode x = concat ["{", show x, "}"]
-- | Join a list of lists with a separator.  The original hand-rolled
-- recursion is exactly 'Data.List.intercalate' (already imported unqualified
-- by this module), including returning the empty list for no elements and
-- the element itself for a singleton.  The signature is the natural
-- generalisation of the previous String-only type, so all callers still work.
joinStrings :: [a] -> [[a]] -> [a]
joinStrings = intercalate
-- | Render a graph, cutting off at nodes already on the @seen@ path so that
-- cycles print as a bare node instead of recursing forever.
traverseShow :: (Eq a, Show a) => [a] -> Graph (a) -> [Char]
traverseShow seen (a :~> succ) = if a `foundIn` seen
                                 then showNode a
                                 else (showNode a) ++ ":~>[" ++ (joinStrings ", " $ map (traverseShow (a:seen)) succ) ++ "]"

instance Functor Graph where
  fmap f (a :~> succ) = (f a) :~> (map (fmap f) succ)

-- | Fold a graph with @f@, substituting @e@ whenever a node on the current
-- path is revisited.  NOTE: shadows 'Prelude.traverse'.
traverse :: (Eq a) => (a -> a -> a) -> a -> [a] -> (Graph a) -> a
traverse f e seen (n :~> succ) = if n `foundIn` seen
                                 then e
                                 else foldl f n $ map (traverse f e (n:seen)) succ

-- | Apply a node-rewriting function everywhere in the graph (no cycle guard —
-- only safe on acyclic structure; TODO confirm intended usage).
mapNodes f (n :~> s) = let (n' :~> s') = f (n :~> s)
                       in n' :~> (map (mapNodes f) s')

-- | All distinct node labels reachable from the root.
names :: Eq a => Graph a -> [a]
names g = uniq $ names' [] g
  where names' seen (n :~> succ) = if n `foundIn` seen
                                   then []
                                   else n:(concatMap (names' (n:seen)) succ)

-- | All reachable nodes (as subgraphs), pruned on revisits along a path.
nodes :: Eq a => (Graph a) -> [Graph a]
nodes g = nodes' [] g
  where nodes' seen (n :~> s) = if n `foundIn` seen
                                then []
                                else (n :~> s):(concatMap (nodes' (n:seen)) s)

-- | Label of a node.
name (n :~> _) = n
-- | Successor list of a node.  NOTE: shadows 'Prelude.succ'.
succ (_ :~> s) = s
-- successors :: (Eq a) => [a] -> a -> Graph a -> [a]
-- successors seen x (n :~> s)
-- | n `foundIn` seen = []
-- | x `foundIn` (map node s) = n:(concatMap (successors (n:seen) x) s)
-- | otherwise = concatMap (successors (n:seen) x) s
-- | Labels of the direct successors of node @x@, or @[]@ if @x@ is absent.
successors x g = case findNode x g of
                   Just (n :~> s) -> map name s
                   Nothing -> []

-- | Apply @f@ to every node whose label equals @x@, leaving others intact.
modifyNode f x = mapNodes (\(n :~> s) -> if n == x then f (n :~> s) else n :~> s)
-- | First element satisfying the predicate, if any.  The manual recursion
-- was a re-implementation of 'Data.List.find' (Data.List is imported
-- unqualified by this module), so delegate to it.
first :: (a -> Bool) -> [a] -> Maybe a
first = find
-- | Locate the (first) reachable node whose label is @x@.
findNode x g = first (\(n :~> _) -> n == x) (nodes g)

-- | Add an edge from node @x@ to node @y@; a no-op when @y@ does not exist.
-- The successor is inserted with the full subgraph currently found for @y@.
addEdge x y g = case findNode y g of
                  Just y' -> modifyNode (\(n :~> s) -> n :~> (y':s)) x g
                  Nothing -> g
-- delEdge x y = modifyNode (\(n :~> s) -> n :~> ()) x
-- Small example graph: 4 -> {5 -> 6 -> 3, 1 -> 2 -> 3}.
a = 4 :~> [b,d]
b = 5 :~> [c]
c = 6 :~> [f]
d = 1 :~> [e]
e = 2 :~> [f]
f = 3 :~> []

-- | Merge one node into graph @g@ by adding an edge to each of its direct
-- successors (one 'addEdge' per successor, consuming the list recursively).
mergeNode g (n' :~> []) = g
mergeNode g (n' :~> (x:xs)) = mergeNode (addEdge n' (name x) g) (n' :~> xs)
-- | Merge graph @b@ into graph @a@ by folding every reachable node of @b@
-- into @a@ with 'mergeNode'.
mergeGraphs a b = foldl mergeNode a (nodes b)
-- | Insert the reversal of all edges leaving node @n@ into graph @g@:
-- each successor gains an edge back to (a detached copy of) @n@.
reverseNode g (n :~> s) = let n' = n :~> []
                              rev = map (\x -> (name x) :~> [n']) s
                          in foldr mergeNode g rev
--uniqUnder =
-- transpose (n :~> s) =
| ejlilley/AbstractMusic | MusicGraph.hs | gpl-3.0 | 3,260 | 0 | 14 | 1,091 | 1,326 | 701 | 625 | 60 | 2 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
module Lib.Types
( typecheck
, TypeEnv(..)
, extendEnv
, envLookup
, defaultTypeEnv
-- , defaultClassEnv
, emptyTypeEnv
, buildTypeEnv
, normalize
, tyFun
, quantify
, removeEmptyPreds
, Class(..)
, ClassEnv(..)
, EnvTransformer(..)
, (<:>)
, showType
)
where
-- |
-- Inspiration for this code came from
-- https://brianmckenna.org/blog/type_annotation_cofree
--
-- I also liberally plagiarized this page:
-- http://dev.stephendiehl.com/fun/006_hindley_milner.html#generalization-and-instantiation
--
-- Finally, the paper "Typing Haskell in Haskell" was plagiarized once I
-- realized it's where the others were plagiarizing from.
--
-- It has been modified for a slightly different type and more flexible type
-- grammar.
import Control.Comonad (extend, extract, (=>>))
import Control.Comonad.Cofree (Cofree (..))
import Control.Monad (foldM, forM, forM_, guard, liftM2,
mapAndUnzipM, unless, zipWithM)
import Control.Monad.Except
import Control.Monad.Free
import Control.Monad.State
import Data.Maybe (fromJust, fromMaybe, isJust, isNothing)
import Data.Monoid ((<>))
import Data.Map (Map)
import qualified Data.Map.Lazy as M
import Data.Set (Set)
import qualified Data.Set as S
import Data.Graph (Graph, Vertex)
import qualified Data.Graph as G
import qualified Data.Text as T
import Data.Tree (flatten)
import Data.List (elem, intercalate, sort, sortBy)
import Data.Ord (Ordering (..))
import Text.Show.Unicode
import Lib.Syntax (AnnotatedExpr, CoreAst (..), CoreExpr,
SurfaceAst (..), SurfaceExpr, Symbol,
TopLevel (..), annotated)
import Lib.Types.Class
import Lib.Types.Constraint
import Lib.Types.Frame
import Lib.Types.Kind
import Lib.Types.Scheme
import Lib.Types.Type
import Lib.Types.TypeCheck
import Lib.Types.TypeEnv
import Lib.Compiler
import Lib.Errors
import Lib.Parser (parse_expr, parse_multi)
-- * Default type environment
-- Fresh type variables used when writing the builtin signatures below.
t_0 :: Type
t_0 = TVar (TyVar 0 Star)
t_1 :: Type
t_1 = TVar (TyVar 1 Star)

-- Symbols for the explicit entailment arrow and the builtin classes.
tyEntail :: Type
tyEntail = TSym (TyLit "=>" Star)
clsEq :: Type
clsEq = TSym (TyLit "Eq" Star)
clsOrd :: Type
clsOrd = TSym (TyLit "Ord" Star)
clsNum :: Type
clsNum = TSym (TyLit "Num" Star)

-- | @Eq a => a -> a -> Bool@, spelled with the explicit entailment symbol.
eq_binop :: Type
eq_binop = TList [tyEntail, TList [clsEq, t_0],
                  TList [ tyFun, t_0, t_0, typeBool]]

-- | @Num a => a -> a -> a@, using the qualified-type constructor ':=>'.
num_binop :: Type
num_binop = [TPred "Num" [t_0]] :=> (TList [tyFun, t_0, t_0, t_0])

-- | @Ord a => a -> a -> Bool@.
ord_binop :: Type
ord_binop = TList [tyEntail, TList [ clsOrd, t_0],
                   TList [ tyFun, t_0, t_0, typeBool]]
--ord_binop = [IsIn "Ord" [t_0]] :=> (TList [tyFun, t_0, t_0, typeBool])
-- where t_0 = TVar (TyVar 0 Star)

-- | @Bool -> Bool@.
not_fn :: Type
not_fn = TList [tyFun, typeBool, typeBool]

-- Builtin binary operators for specialized, non-typeclass math functions.
int_binop :: Type
int_binop = TList [tyFun, typeInt, typeInt, typeInt]
float_binop :: Type
float_binop = TList [tyFun, typeFloat, typeFloat, typeFloat]
byte_binop :: Type
byte_binop = TList [ tyFun, typeByte, typeByte, typeByte ]

-- | Monomorphic equality at a given type: @t -> t -> Bool@.
eq_fn :: Type -> Type
eq_fn t = TList [tyFun, t, t, typeBool]

-- | Conversion from Int: @Int -> t@.
fromint_fn :: Type -> Type
fromint_fn t = TList [tyFun, typeInt, t]

-- | @FFI a -> (a -> b) -> b@ — run an FFI action and feed its result on.
eval_fn :: Type
eval_fn = TList [ tyFun
                , TList [ TSym (TyLit "FFI" Star)
                        , t_0
                        ]
                , TList [ tyFun
                        , t_0
                        , t_1
                        ]
                , t_1
                ]
-- | Builtin operators and functions with explicit type schemes
defaultTypeEnv :: TypeEnv
defaultTypeEnv = TypeEnv $ M.fromList
    -- Polymorphic equality (Eq constraint).
    [ ("=?", generalize mempty $ [TPred "Eq" [t_0]] :=> (eq_fn t_0))
    , ("not", generalize mempty not_fn)
    -- Boolean primitives.
    , ("boolean-fromint", generalize mempty (fromint_fn typeBool))
    , ("boolean=?", generalize mempty (eq_fn typeBool))
    -- Monomorphic integer arithmetic.
    , ("int-add", generalize mempty int_binop)
    , ("int-sub", generalize mempty int_binop)
    , ("int-mul", generalize mempty int_binop)
    , ("int-div", generalize mempty int_binop)
    , ("int-modulo", generalize mempty int_binop)
    , ("int-fromint", generalize mempty (fromint_fn typeInt))
    , ("int=?", generalize mempty (eq_fn typeInt))
    -- Monomorphic float arithmetic.
    , ("float-add", generalize mempty float_binop)
    , ("float-sub", generalize mempty float_binop)
    , ("float-mul", generalize mempty float_binop)
    , ("float-div", generalize mempty float_binop)
    , ("float-modulo", generalize mempty float_binop)
    , ("float=?", generalize mempty (eq_fn typeFloat))
    , ("float-fromint", generalize mempty (fromint_fn typeFloat))
    -- Byte-level bit operations.
    , ("byte-and", generalize mempty byte_binop)
    , ("byte-or", generalize mempty byte_binop)
    , ("byte-not", generalize mempty byte_binop)
    , ("byte-xor", generalize mempty byte_binop)
    , ("byte-2c", generalize mempty byte_binop)
    , ("byte=?", generalize mempty (eq_fn typeByte))
    , ("eval", generalize mempty eval_fn)
    -- Overloaded (Num-constrained) arithmetic.
    , ("+", generalize mempty num_binop)
    , ("*", generalize mempty num_binop)
    , ("-", generalize mempty num_binop)
    , ("/", generalize mempty num_binop)
    , ("modulo", generalize mempty num_binop)
    --("=?", generalize mempty eq_binop)
    --, ("<", generalize mempty ord_binop)
    --, (">", generalize mempty ord_binop)
    ]
-- | We build a dependency graph of different definitions and topologically sort
-- them. Then typechecking, as crude as it may be, is simply folding the initial
-- type environment with the 'typecheck_pass' function over the sorted list.
typecheck :: TopLevel-> Compiler TopLevel
typecheck (TopLevel defs sigs tydefs cls mthds) = do
    -- Start from the builtin environment extended with declared signatures.
    let te = defaultTypeEnv <> (TypeEnv sigs)
    let dependency_graph = make_dep_graph defs
    -- Reverse topological order: a definition is checked before anything
    -- that depends on it.
    let defs' = reverse $ topo' dependency_graph
    -- rewrite any annotated types using the aliases we've collected
    let aliases = fmap (\ (sym, (vars, ty, _)) -> (sym, vars, ty)) $
                  M.toList $
                  M.filter (\(_,_,isAlias) -> isAlias) tydefs
    logMsg . ushow $ aliases
    -- let te' = transform_aliases te aliases
    (TypeEnv sigs', defs'') <- runTypeCheck te $ do
        foldM typecheck_pass (te, defs) defs'
    -- (TypeEnv sigs', mthds') <- typecheck_methods te' mthds
    return $ TopLevel defs'' sigs' tydefs cls mthds

-- | Rewrite every signature in the environment, expanding each collected
-- type alias in turn.  NOTE(review): its only visible call site above is
-- commented out — kept for reference.
transform_aliases :: TypeEnv -> [(Symbol, [TyVar], Sigma)] -> TypeEnv
transform_aliases (TypeEnv sigs) aliases = TypeEnv $ foldl go sigs aliases where
    go sigs (sym, vars, ty) = fmap (alias_rewrite sym vars ty) sigs

-- | Infer and solve constraints for one definition, then fold the resulting
-- scheme into the accumulated type environment and definition map.
typecheck_pass
    :: (TypeEnv, Map Symbol (AnnotatedExpr (Maybe Type)))
    -> (Symbol, AnnotatedExpr (Maybe Type))
    -> TypeCheck (TypeEnv, Map Symbol (AnnotatedExpr (Maybe Type)))
typecheck_pass (te, defns) (sym, expr) = do
    -- Annotate every sub-expression with an inferred type.
    expr' <- sequence . extend infer $ expr
    (frame, preds) <- solver `catchError` (handleTypecheckError sym te)
    -- Apply the solved substitution over the whole annotated tree.
    let sig = fmap (quantify . substitute frame) expr' :: AnnotatedExpr Type
    let vars = ftv $ extract sig
    let ps = predicatesForSignature preds vars frame
    let scheme = extract $ (extend $ toSigma frame ps) sig
    te' <- checkTypeEnv sym scheme te
    return (te', M.insert sym (fmap Just sig) defns)

-- | Stupid and slow but necessary method to support MPTCs: keep only the
-- predicates whose type variables all occur in the signature's variables,
-- substituted through the solved frame.
predicatesForSignature :: Map (Set TyVar) [Pred] -> Set TyVar -> Frame -> [Pred]
predicatesForSignature predMap tvs su = M.foldMapWithKey go predMap
    where go predTvs ps = if predTvs `S.isSubsetOf` tvs
                          then (substitute su ps)
                          else []

-- | Re-raise a type error with the offending symbol (and its declared type,
-- when one is known) prepended for context.
handleTypecheckError sym te err = case envLookup te sym of
    Nothing -> throwError $ OtherError $ "For " ++ sym ++ ", " ++ (show err)
    Just ty -> throwError $ OtherError $
        "For " ++ sym ++ " : " ++ (show ty ) ++ " " ++ (show $ ftv ty) ++ ", " ++ (show err)
-- | Check an inferred scheme against any existing signature for the symbol:
-- a new symbol simply extends the environment; otherwise both schemes are
-- skolemized and matched, raising a contextualised error on mismatch.
checkTypeEnv :: Symbol -> Sigma -> TypeEnv -> TypeCheck TypeEnv
checkTypeEnv sym t1 tyEnv = case envLookup tyEnv sym of
    Nothing -> return $ extendEnv tyEnv (sym, t1)
    Just t2 -> do
        let t1' = removeEmptyPreds t1
        let t2' = removeEmptyPreds t2
        (_, s1) <- skolemize t1'
        (_, s2) <- skolemize t2'
        (frame, _) <- match s1 s2 `catchError` (addContext sym t2)
        return tyEnv
  where
    addContext sym ty err =
        throwError $ OtherError $
        "For " ++ sym ++ " : " ++ (show $ ftv ty) ++ ",\n " ++ (show err)

-- | Close an annotated expression's type over the solved frame and the
-- surviving predicates, producing a full scheme.
toSigma :: Frame -> [Pred] -> AnnotatedExpr Type -> Sigma
toSigma frame ps expr =
    let ty = extract expr
    in closeOver frame (ps :=> ty)

-- | Compute dependencies for a given expression and return as a list of Symbols
--deps :: [(Symbol, AnnotatedExpr ())] -> AnnotatedExpr () -> [Symbol]
-- Only identifiers that are themselves top-level definitions count.
deps :: Map Symbol (AnnotatedExpr (Maybe Type)) -> AnnotatedExpr (Maybe Type) -> [Symbol]
deps xs expr = go expr where
    go (_ :< (IdC sym)) = case M.lookup sym xs of
        Nothing -> []
        Just _ -> [sym]
    go (_ :< (AppC op erands)) = (go op) ++ (concatMap go erands)
    go (_ :< (FunC _ body)) = go body
    go _ = []

-- oh what do you know I'm stealing more from Stephen Diehl:
-- http://dev.stephendiehl.com/hask/#graphs
-- | A 'Data.Graph' graph together with its vertex-decoding function.
data Grph node key = Grph
    { _graph :: Graph
    , _vertices :: Vertex -> (node, key, [key])
    }

type DepGraph = Grph (Symbol, AnnotatedExpr (Maybe Type)) Symbol

-- | Build a 'Grph' from adjacency triples.
fromList :: Ord key => [(node, key, [key])] -> Grph node key
fromList = uncurry Grph . G.graphFromEdges'

-- | Decode a container of vertices back to their node labels.
vertexLabels :: Functor f => Grph b t -> (f Vertex) -> f b
vertexLabels g = fmap (vertexLabel g)

vertexLabel :: Grph b t -> Vertex -> b
vertexLabel g = (\(vi, _, _) -> vi) . (_vertices g)

-- Topologically sort a graph
topo' :: Grph node key -> [node]
topo' g = vertexLabels g $ G.topSort (_graph g)

-- | Traverse an expression tree and create a dependency graph
make_dep_graph defns = fromList $ M.elems $ M.mapWithKey dep_list defns where
    dep_list sym expr = ((sym, expr), sym, extract ( extend (deps defns) expr))
| gatlin/psilo | src/Lib/Types.hs | gpl-3.0 | 10,477 | 0 | 16 | 2,802 | 3,042 | 1,681 | 1,361 | 201 | 5 |
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
-- | This module provides functions for printing schedules.
module Data.VPlan.Print
(-- * Printing functions
showScheduleTree
, showScheduleTable
-- * Pretty printing utilities
, enclose
, rep
, hpad
, showFromToTable
, showTree
-- * Implementation
, gshow
, gshowQ
, tshow
, showHoles
, showHolesSchedule
, safeInit
-- * Reexports
, render
, printBox
) where
import Control.Applicative
import Control.Lens
import Control.Monad.Identity
import Control.Monad.Writer hiding ((<>))
import Data.Data
import Data.Generics hiding (gshow)
import Data.List
import Data.Maybe
import Data.Proxy
import Data.Traversable (sequenceA)
import Data.Tree
import Data.VPlan.Modifier.Enum
import Data.VPlan.Schedule
import Text.PrettyPrint.Boxes
-- | Type of a function that can extend a given pretty printing function that returns a type b.
-- | Type of a function that can extend a pretty printer returning @b@ with
-- type-specific special cases (via 'extQ'-style dispatch).
type Extender b = (forall a. (Typeable a) => (a -> b) -> (a -> b))

-- | Enclose a box in the given characters.
enclose :: Char -> Char -> Box -> Box
enclose f l b = char f <> b <> char l

-- | A box that just repeats another box n times horizontally
rep :: Int -> Alignment -> Box -> Box
rep n a = hcat a . replicate n

-- | Pad a box to the given number of columns, using the supplied fill character, and cat the boxes with the given
-- alignment together.
hpad :: Int -> Char -> Alignment -> Box -> Box
hpad n c a b
  | n > cols b = b <> rep (n - cols b) a (char c)
  | otherwise = b

-- | @gshow p a@ returns a string representation of a. If p is True, the result is enclosed in round parentheses.
gshow :: (Data a) => Bool -> a -> Box
gshow = gshowQ id

-- | Like @gshow@, but allows to specify a function that modifies the recursive caller function. This allows you
-- to provide custom special-case functions.  Implemented by running 'tshow'
-- in the Identity applicative.
gshowQ :: forall a. (Data a) => Extender Box -> Bool -> a -> Box
gshowQ f p = runIdentity . tshow f' p
  where f' :: (forall t. Typeable t => (t -> Identity Box) -> t -> Identity Box)
        f' g = Identity . f (runIdentity . g)

-- | Like @gshowQ@, but allows to traverse with an applicative.
-- Strings and lists are special-cased (plain text and @[..]@ syntax);
-- everything else prints as constructor name plus recursively shown fields.
tshow :: forall m. forall a. (Data a, Applicative m) => (forall t. Typeable t => (t -> m Box) -> t -> m Box) -> Bool -> a -> m Box
tshow f p = showG `extQ` (pure . text) `ext1Q` slist
  where showG o = fmap ?? cs $ \cs' -> case cs' of
          [] -> c
          _ -> enc $ c <+> hsep 1 top cs'
          where c = text $ showConstr $ toConstr o
                cs = sequenceA $ gmapQ (f $ tshow f True) o
                enc = if p then enclose '(' ')' else id
        slist :: forall b. Data b => [b] -> m Box
        slist l = enclose '[' ']' . punctuateH top (char ',') <$> traverse (f $ tshow f True) (l `asTypeOf` [])

-- | Shows a value marking holes of a given type with "_" and returning an ordered list with the values of the holes.
showHoles :: forall p. forall a. (Data a, Typeable p) => Proxy p -> a -> (Box,[p])
showHoles _ a = runWriter $ (tshow (`extQ` f) False a :: Writer [p] Box)
  where f :: p -> Writer [p] Box
        f x = text "_" <$ tell [x]

-- | Show holes in a schedule, skipping the Enum/Schedule boiler plate.
showHolesSchedule :: forall s c i v. (Data (Schedule s c i v), EnumApply Data (s (Schedule s) c i v), Typeable3 (s (Schedule s)), Typeable c, Typeable i, Typeable v) => Schedule s c i v -> (Box, [Schedule s c i v])
showHolesSchedule s = enumApply (CFunc f :: CFunc Data (Box, [Schedule s c i v])) $ review schedule s
  where f :: forall a. (Data a) => a -> (Box, [Schedule s c i v])
        f a = showHoles (Proxy :: Proxy (Schedule s c i v)) a
-- | Show a tree
-- | Render a 'Tree' of boxes with ASCII connectors: the node label on top,
-- a @-+@ connector row, a @¦@ header row, and the children side by side.
showTree :: Tree Box -> Box
showTree (Node n ns) = this <> connect // header // content
  where
    this = n <> text " "
    cs = map showTree ns
    -- Column width reserved for each child (two columns of breathing room);
    -- the first slot is at least as wide as the label itself.
    intervals = map (succ . succ . cols) cs & _head %~ max (cols this)
    header = hcat top $ map (\x -> hpad x ' ' top $ text "¦") intervals
    connect = hcat top $ map (\x -> rep (pred x) top (char '-') <> char '+') $ safeInit $ intervals & _head %~ subtract (cols this - 1)
    content = hcat top $ zipWith (\x y -> hpad y ' ' top x) cs intervals
-- | A version of 'init' that returns an empty list when given an empty list
safeInit :: [a] -> [a]
-- Pairing each element with its successor drops exactly the final element,
-- and yields [] for both the empty and the singleton list.
safeInit xs = zipWith const xs (drop 1 xs)
-- | Render a schedule as a tree.
-- | Unfold a schedule into a tree of hole-marked boxes and render it.
showScheduleTree :: forall s c i v. (EnumApply Data (s (Schedule s) c i v), Typeable3 (s (Schedule s)), Typeable3 (Schedule s), Typeable c, Typeable v, Typeable i, Data (s (Schedule s) c i v)) => Schedule s c i v -> Box
showScheduleTree = showTree . unfoldTree showHolesSchedule

-- | Show a function from a pair to a box as a table in the given range.
-- @h@ separates columns, @v@ separates rows within a column.
showFromToTable :: (Enum e) => ((e,e) -> Box) -> (e,e) -> (e,e) -> Box -> Box -> Box
showFromToTable f (x,y) (x',y') h v = hcat top $ intersperse h $ map g [x..x']
  where g a = vcat right $ intersperse v $ map ((v <>) . f . (a,)) [y..y']

-- | Render a part of a schedule as a table; missing entries show as "-".
showScheduleTable :: (Ixed (Accessor (First (IxValue s))) s, Index s ~ (a,a), Enum a, Show (IxValue s)) => (a,a) -> (a,a) -> s -> Box
showScheduleTable b e s = showFromToTable (\x -> text $ fromMaybe "-" $ fmap show $ s ^? ix x) b e (char ' ') nullBox
| bennofs/vplan | src/Data/VPlan/Print.hs | gpl-3.0 | 5,677 | 0 | 18 | 1,484 | 1,988 | 1,063 | 925 | 91 | 3 |
module Data.Function.Decycle (decycleOn, decycle) where
import qualified Control.Lens as Lens
import Control.Lens.Operators
import qualified Data.Set as Set
-- | A fix for functions that terminates recursive cycles
-- | 'decycleOn' threads the set of already-visited keys (as produced by
-- @toOrd@) through the recursion; on revisiting a key the callback passed to
-- @f@ is 'Nothing', letting @f@ decide how to cut the cycle.
decycleOn :: Ord b => (a -> b) -> (Maybe (a -> res) -> a -> res) -> a -> res
decycleOn toOrd f =
    go Set.empty
    where
        go visited x = f (mRecurse visited (toOrd x)) x
        -- Nothing when the key was seen; otherwise a recursive entry point
        -- with the key added to the visited set.
        mRecurse visited o
            | visited ^. Lens.contains o = Nothing
            | otherwise = visited & Lens.contains o .~ True & go & Just

-- | 'decycleOn' keyed by the argument value itself.
decycle :: Ord a => (Maybe (a -> res) -> a -> res) -> a -> res
decycle = decycleOn id
| rvion/lamdu | bottlelib/Data/Function/Decycle.hs | gpl-3.0 | 644 | 0 | 13 | 170 | 240 | 126 | 114 | 13 | 1 |
-- A simple utility to transpose AVC files
--
-- Not especially fast, but it will help to develop v-binary which will
-- be fast
-- TODO: get repeats which may be non-1 and assoc with vector for instruction generation
{-# LANGUAGE OverloadedStrings #-}
import qualified Data.ByteString as BS
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Unsafe as B
import qualified Data.Array.Repa as R
import qualified Data.Array.Repa.Repr.ByteString as R
import Data.Word (Word8)
import Data.Vector.Storable.ByteString
import qualified Data.Vector as V
import Lib
-- | Extract the pin names following the FORMAT keyword, up to (but not
-- including) the word containing the terminating ';'.
getFormat :: ByteString -> [ByteString]
getFormat =
    takeWhile (B.notElem ';') .
    drop 1 .
    dropWhile (/= "FORMAT") .
    B.words .
    B.unlines .
    dropComments .
    B.lines

-- | Drop '#' comment lines (after stripping leading whitespace from each).
dropComments :: [ByteString] -> [ByteString]
dropComments = filter (not . B.isPrefixOf "#" ) . map ( B.dropWhile isSpace )

-- | Everything after the FORMAT statement, re-chunked at ';' boundaries so
-- each returned ByteString is one complete statement (terminator included).
getDataLines :: ByteString -> [ByteString]
getDataLines =
    splitSemi .
    drop 1 .
    dropWhile (B.notElem ';') .
    dropWhile (not . B.isPrefixOf "FORMAT") .
    dropComments .
    B.lines
  where
    splitSemi [] = []
    splitSemi s =
        let
          (a,b) = span (B.notElem ';') s
          (c,d) = splitAt 1 b
        in B.concat (a ++ c) : splitSemi d

-- | Re-wrap a ByteString into newline-terminated lines of at most @len@ bytes.
lineSplit :: Int -> ByteString -> ByteString
lineSplit len inp = B.unlines (splitEvery inp)
  where
    splitEvery s
        | B.length s < len = [ s ]
        | otherwise =
            let
              (a,b) = B.splitAt len s
            in
              a : splitEvery b

-- | Naive O(rows*cols) transpose of a rows-by-cols character matrix stored
-- row-major in one flat ByteString.  NOTE(review): uses 'unsafeIndex', so the
-- input must really contain at least nRows*nCols bytes — no bounds checking.
transpose :: Int -> Int -> ByteString -> ByteString
transpose nRows nCols inp =
    BS.pack [( B.unsafeIndex inp (row * nCols + col) ) | col <- [0..nCols - 1], row <- [0..nRows - 1]]
-- | Break a ByteString into consecutive chunks of at most @chunkSize@ bytes;
-- the last chunk may be shorter, and the empty input yields no chunks.
splitAtEach :: Int -> ByteString -> [ ByteString ]
splitAtEach chunkSize = go
  where
    go bs
      | B.null bs = []
      | otherwise =
          let (chunk, rest) = B.splitAt chunkSize bs
          in chunk : go rest
-- | Command-line options.
data Opts =
    Opts
    { optInput :: FilePath -- ^ path of the AVC file to transpose
    }

parseOpts :: OptionsParser Opts
parseOpts = Opts
    <$> argument str
        ( metavar "ASCII_VECTOR.avc"
       <> value ""
       <> help "An AVC file formatted for input to v2b")

opts :: ParserInfo Opts
opts = info (parseOpts <**> helper)
    ( fullDesc
   <> progDesc "An experiment to do v2b's job much faster. Uses too much memory and doesn't support scan or anything, but is fast."
   <> header "avc-transpose - Fast vertical to horizontal ASCII vectors")

-- Read the AVC file, extract the per-vector state columns, transpose them
-- with Repa into one row per pin, and print "pin states" lines to stdout
-- (row/col counts go to stderr).
main = do
    Opts avcFile <- execParser opts
    f <- getInput avcFile -- TODO: getInput to mmapFileByteString if its a regular file
    performGC
    let
      dataLines = getDataLines f
      -- Third word of each data statement is the state string for that vector.
      states =
          filter (not . B.null ) .
          concatMap (
            take 1 .
            drop 2 .
            B.words) $
          dataLines
      -- NOTE(review): 'head states' is partial — an input with no data
      -- statements crashes here (TODO confirm acceptable for a CLI tool).
      nCols = B.length . head $ states
      nRows = length states
      allStates = B.concat states -- states as one long bytestring
      pins = getFormat f
      outp' = zip pins (B.transpose states)
      r = R.fromByteString (R.Z R.:. (nRows::Int) R.:. (nCols::Int)) allStates
      tr = R.transpose r
    -- Materialise the transposed array in parallel.
    u <- R.computeP tr :: IO (R.Array R.U R.DIM2 Word8)
    let v = R.toUnboxed u
        bs = vectorToByteString . V.convert $ v
        horizStates = (splitAtEach nRows bs )
        outLines = zipWith glue pins horizStates
          where
            glue a b = B.concat [a, " ", b]
    hPutStrLn stderr $ "Rows: " ++ show nRows
    hPutStrLn stderr $ "Cols: " ++ show nCols
    performGC
    mapM_ B.putStrLn outLines
    -- B.putStr (transpose nRows nCols $! allStates )
    -- mapM_ B.putStrLn outp
    -- forM_
    --   outp'
    --   (\( pin, state ) ->
    --     do
    --       B.putStrLn pin
    --       B.putStrLn (lineSplit 80 state )
    --       B.putStrLn ""
    --   )
-- B.putStr (transpose nRows nCols $! allStates )
-- mapM_ B.putStrLn outp
-- forM_
-- outp'
-- (\( pin, state ) ->
-- do
-- B.putStrLn pin
-- B.putStrLn (lineSplit 80 state )
-- B.putStrLn ""
-- )
| gitfoxi/vcd-haskell | app/avc-transpose.hs | agpl-3.0 | 3,722 | 0 | 16 | 1,034 | 1,105 | 573 | 532 | 96 | 2 |
module Moonbase.Panel.Items.Clock
( clock, clockWith
) where
import Control.Monad
import Control.Applicative
import Control.Concurrent
import qualified Graphics.UI.Gtk as Gtk
import Data.Time.Format
import Data.Time.LocalTime
import Moonbase.Panel
import Moonbase.Theme
import Moonbase.DBus
import Moonbase.Util
import Moonbase.Util.Gtk
-- | A panel clock using the given time-format string, refreshed every second.
clock :: String -> PanelItems
clock fmt = item $ do
    label <- liftIO $ createClockWidget fmt 1 Nothing
    return $ PanelItem "date" (Gtk.toWidget label) Gtk.PackNatural

-- | Like 'clock' but with a custom poll interval (seconds) and text colour.
clockWith :: String -> Int -> Color -> PanelItems
clockWith fmt poll color = item $ do
    label <- liftIO $ createClockWidget fmt poll (Just color)
    return $ PanelItem "dateWith" (Gtk.toWidget label) Gtk.PackNatural

-- | Build the label widget.  Once the widget is realized, a worker thread
-- formats the local time every @poll@ seconds and pushes the (optionally
-- colourised) Pango markup into the label via 'postGUISync'.
createClockWidget :: String -> Int -> Maybe Color -> IO Gtk.Label
createClockWidget fmt poll color = do
    l <- Gtk.labelNew (Just "-")
    Gtk.labelSetUseMarkup l True
    _ <- Gtk.on l Gtk.realize $ void $
        forkIO $ forever $ do
            str <- formatTime defaultTimeLocale fmt <$> getZonedTime
            Gtk.postGUISync $ Gtk.labelSetMarkup l $ format str
            threadDelay (1000000 * poll)
    return l
  where
    format str = case color of
        Just x -> pangoColor x str
        Nothing -> str
| felixsch/moonbase-ng | src/Moonbase/Panel/Items/Clock.hs | lgpl-2.1 | 1,347 | 0 | 14 | 357 | 409 | 205 | 204 | 34 | 2 |
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE ExistentialQuantification #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE CPP #-}
--
-- Copyright (c) 2005-2022 Stefan Wehr - http://www.stefanwehr.de
--
-- This library is free software; you can redistribute it and/or
-- modify it under the terms of the GNU Lesser General Public
-- License as published by the Free Software Foundation; either
-- version 2.1 of the License, or (at your option) any later version.
--
-- This library is distributed in the hope that it will be useful,
-- but WITHOUT ANY WARRANTY; without even the implied warranty of
-- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-- Lesser General Public License for more details.
--
-- You should have received a copy of the GNU Lesser General Public
-- License along with this library; if not, write to the Free Software
-- Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA
--
{- |
This module integrates the 'Test.QuickCheck' library into HTF. It re-exports
all functionality of 'Test.QuickCheck' and defines some additional functions.
-}
module Test.Framework.QuickCheckWrapper (
module Test.QuickCheck,
-- * Arguments for evaluating properties
defaultArgs, getCurrentArgs, setDefaultArgs,
withQCArgs, WithQCArgs, setReplayFromString,
QCAssertion,
-- * Pending properties
qcPending,
-- * Auxiliary functions
#if !MIN_VERSION_QuickCheck(2,7,0)
ioProperty,
#endif
assertionAsProperty,
-- * Internal functions
qcAssertion
) where
#if !MIN_VERSION_base(4,6,0)
import Prelude hiding ( catch )
#endif
import Control.Exception ( SomeException, Exception, Handler(..),
throw, catch, catches, evaluate )
import Data.Typeable (Typeable)
import Data.Char
import qualified Data.List as List
import System.IO.Unsafe (unsafePerformIO)
import Data.IORef
import Test.QuickCheck
#if !MIN_VERSION_QuickCheck(2,7,0)
import Test.QuickCheck.Property (morallyDubiousIOProperty)
#endif
import Test.Framework.TestInterface
import Test.Framework.Utils
-- | Compile-time switch enabling debug tracing for this module.
_DEBUG_ :: Bool
_DEBUG_ = False

-- | Emit a tagged debug message on stdout when '_DEBUG_' is set;
-- otherwise do nothing.
debug :: String -> IO ()
debug msg
    | _DEBUG_ = putStrLn ("[DEBUG] " ++ msg)
    | otherwise = return ()
data QCState = QCState { qc_args :: !Args }
-- | Global mutable cell holding the current default 'Args'.
-- The @unsafePerformIO@ + @NOINLINE@ combination is the standard idiom for a
-- top-level 'IORef': NOINLINE ensures the cell is created exactly once.
qcState :: IORef QCState
qcState = unsafePerformIO (newIORef (QCState defaultArgs))
{-# NOINLINE qcState #-}
-- | The 'Args' used if not explicitly changed.
-- Identical to 'stdArgs' except that 'chatty' is turned off: the QuickCheck
-- output is taken from the 'Result' value in 'qcAssertion' rather than
-- printed directly by QuickCheck.
defaultArgs :: Args
defaultArgs = stdArgs { chatty = False }
-- | Change the default 'Args' used to evaluate quick check properties.
setDefaultArgs :: Args -> IO ()
setDefaultArgs args =
    do force <- atomicModifyIORef qcState $ \state ->
                    let newState = state { qc_args = args }
                    in (newState, newState)
       -- Force the returned state to WHNF so the thunk created by
       -- 'atomicModifyIORef' does not accumulate inside the IORef.
       force `seq` return ()
-- | Retrieve the 'Args' currently used per default when evaluating quick check properties.
getCurrentArgs :: IO Args
getCurrentArgs = fmap qc_args (readIORef qcState)
-- | Exception used internally to signal that a property is pending
-- (see 'qcPending'); carries the pending message.
data QCPendingException = QCPendingException String
                        deriving (Show,Read,Eq,Typeable)

instance Exception QCPendingException
-- | Report an error result for the current property via 'failHTF'.
quickCheckTestError :: Maybe String -> Assertion
quickCheckTestError = failHTF . mkFullTestResult Error

-- | Report a failure result for the current property via 'failHTF'.
quickCheckTestFail :: Maybe String -> Assertion
quickCheckTestFail = failHTF . mkFullTestResult Fail

-- | Report the current property as pending, with the given message.
quickCheckTestPending :: String -> Assertion
quickCheckTestPending = failHTF . mkFullTestResult Pending . Just

-- | Report the current property as passed, with the given message.
quickCheckTestPass :: String -> Assertion
quickCheckTestPass = failHTF . mkFullTestResult Pass . Just
-- | Turns a 'Test.QuickCheck' property into an 'Assertion'. This function
-- is used internally in the code generated by @htfpp@, do not use it directly.
qcAssertion :: (QCAssertion t) => t -> Assertion
qcAssertion qc =
    do origArgs <- getCurrentArgs
       -- Apply the property's args modifier. The modifier is user-supplied,
       -- so force it (via show) and convert any exception into a Left value
       -- instead of letting it escape.
       eitherArgs <-
           (let a = (argsModifier qc) origArgs
            in do _ <- evaluate (length (show a))
                  return (Right a))
           `catch`
           (\e -> return $ Left (show (e :: SomeException)))
       case eitherArgs of
         Left err -> quickCheckTestError
                       (Just ("Cannot evaluate custom arguments: "
                              ++ err))
         Right args ->
             do -- Run the property. A 'QCPendingException' thrown while
                -- forcing the testable value marks the property as pending.
                res <- do anyTestable <- evaluate (testable qc)
                          x <- case anyTestable of
                                 AnyTestable t' -> quickCheckWithResult args t'
                          return (Right x)
                       `catches`
                       [Handler $ \(QCPendingException msg) -> return $ Left msg]
                debug ("QuickCheck result: " ++ show res)
                case res of
                  Left err ->
                      quickCheckTestPending err
                  Right (Success { output=msg }) ->
                      quickCheckTestPass (adjustOutput msg)
                  Right (Failure { usedSize=size, usedSeed=gen, output=msg, reason=reason }) ->
                      -- A failure reason may actually encode a pending marker
                      -- or a serialized HTF failure thrown from inside the
                      -- property; recognize those by their textual prefixes.
                      case () of
                        _| pendingPrefix `List.isPrefixOf` reason ->
                             let pendingMsg = getPayload pendingPrefix pendingSuffix reason
                             in quickCheckTestPending pendingMsg
                         | failurePrefix `List.isPrefixOf` reason
                         , Just result <- readM (getPayload failurePrefix failureSuffix reason)
                         -> failHTF result
                         | otherwise ->
                             -- Genuine failure: include the replay seed/size
                             -- so the run can be reproduced (see
                             -- 'setReplayFromString').
                             let replay = "Replay argument: " ++ (show (show (Just (gen, size))))
                                 out = adjustOutput msg
                             in quickCheckTestFail (Just (out ++ "\n" ++ replay))
                  Right (GaveUp { output=msg }) ->
                      quickCheckTestFail (Just (adjustOutput msg))
                  Right (NoExpectedFailure { output=msg }) ->
                      quickCheckTestFail (Just (adjustOutput msg))
#if MIN_VERSION_QuickCheck(2,8,0) && !MIN_VERSION_QuickCheck(2,12,0)
                  Right (InsufficientCoverage { output=msg }) ->
                      quickCheckTestFail (Just (adjustOutput msg))
#endif
                return ()
    where
      -- Textual markers produced when a QCPendingException or an HTF failure
      -- escapes from inside a property.
      pendingPrefix = "Exception: 'QCPendingException \""
      pendingSuffix = "\"'"
      failurePrefix = "Exception: 'HTFFailure "
      failureSuffix = "'"
      -- Strip the given prefix and suffix from a reason string.
      getPayload pref suf reason =
          let s = drop (length pref) reason
          in take (length s - length suf) s
      -- Normalize QuickCheck's textual output: drop the "+++ OK," / "***"
      -- decorations and trailing whitespace.
      adjustOutput s = trimTrailing $
          case s of
            '+':'+':'+':' ':'O':'K':',':' ':'p':rest -> 'P':rest
            '*':'*':'*':' ':'F':'a':'i':'l':'e':'d':'!':' ':rest -> rest
            '*':'*':'*':' ':rest -> rest
            _ -> s
      trimTrailing = reverse . dropWhile isSpace . reverse
-- | Abstract type for representing quick check properties with custom 'Args'.
-- Pairs a modification function for the default 'Args' with the property itself.
-- Used only internally.
data WithQCArgs a = WithQCArgs (Args -> Args) a
-- | Existential holding a 'Testable' value, allowing 'qcAssertion' to handle
-- properties of different concrete types uniformly.
-- Used only internally.
data AnyTestable = forall a . Testable a => AnyTestable a
-- | Type class providing access to the custom 'Args' of a quick check property.
-- Used only internally.
class QCAssertion a where
    -- | Function adjusting the default 'Args' before the property is run.
    argsModifier :: a -> (Args -> Args)
    -- | The underlying property, wrapped existentially.
    testable :: a -> AnyTestable
-- | Plain 'Testable' values use the default 'Args' unmodified.
instance {-# OVERLAPPABLE #-} Testable a => QCAssertion a where
    argsModifier _ = id
    testable = AnyTestable

-- | Properties wrapped via 'withQCArgs' carry their own 'Args' modifier.
instance {-# OVERLAPPING #-} Testable a => QCAssertion (WithQCArgs a) where
    argsModifier (WithQCArgs f _) = f
    testable (WithQCArgs _ x) = AnyTestable x
-- | Run a 'Test.QuickCheck' property with modified quick check arguments 'Args'.
withQCArgs :: (Testable a) => (Args -> Args) -- ^ Modification function for the default 'Args'
           -> a                              -- ^ Property
           -> WithQCArgs a
withQCArgs = WithQCArgs
-- | Use @qcPending msg prop@ to mark the given quick check property as pending
-- without removing it from the test suite and without deleting or commenting out the property code.
--
-- NOTE: the property argument is ignored; evaluating the result throws a
-- 'QCPendingException', which 'qcAssertion' translates into a pending test.
qcPending :: Testable t => String -> t -> t
qcPending msg _ = throw (QCPendingException msg)
#if !MIN_VERSION_QuickCheck(2,7,0)
-- | Compatibility shim: QuickCheck < 2.7 does not export 'ioProperty',
-- so define it in terms of 'morallyDubiousIOProperty'.
ioProperty :: Testable prop => IO prop -> Property
ioProperty = morallyDubiousIOProperty
#endif
-- | Turn an 'IO' assertion into a QuickCheck 'Property': the property
-- succeeds iff the action returns without throwing.
assertionAsProperty :: IO () -> Property
assertionAsProperty io =
    ioProperty $ do
      io
      return True
-- | Sets the 'replay' parameter of the 'Args' datatype by parsing the given string.
-- Raises 'error' if the string cannot be parsed.
--
-- NOTE: starting with QuickCheck 2.7 the type of the 'replay' field changed
-- from @Maybe (StdGen, Int)@ to @Maybe (QCGen, Int)@. Both types are parsed
-- via their 'Read' instances through 'readM', so a single implementation
-- covers all supported QuickCheck versions. (The previous code duplicated the
-- byte-identical implementation in both branches of a CPP conditional.)
setReplayFromString :: Args -> String -> Args
setReplayFromString args str =
    case readM str of
      Just x -> args { replay = x }
      Nothing ->
          error ("Could not parse replay parameter from string " ++ show str)
| skogsbaer/HTF | Test/Framework/QuickCheckWrapper.hs | lgpl-2.1 | 9,075 | 0 | 30 | 2,439 | 1,846 | 977 | 869 | 144 | 9 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.