code
stringlengths 2
1.05M
| repo_name
stringlengths 5
101
| path
stringlengths 4
991
| language
stringclasses 3
values | license
stringclasses 5
values | size
int64 2
1.05M
|
|---|---|---|---|---|---|
{-|
Copyright : (c) Dave Laing, 2017
License : BSD3
Maintainer : dave.laing.80@gmail.com
Stability : experimental
Portability : non-portable
-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TemplateHaskell #-}
module Fragment.Tuple.Ast.Error (
ErrExpectedTyTuple(..)
, AsExpectedTyTuple(..)
, expectTyTuple
, ErrTupleOutOfBounds(..)
, AsTupleOutOfBounds(..)
, lookupTuple
) where
import Control.Monad.Except (MonadError)
import Control.Monad.Error.Lens (throwing)
import Control.Lens (preview)
import Control.Lens.Prism (Prism')
import Control.Lens.TH (makePrisms)
import Ast.Type
import Ast.Error
import Fragment.Tuple.Ast.Type
-- | Error payload for the case where a tuple type was required but the
-- given type (carried for error reporting) is something else.
data ErrExpectedTyTuple ki ty a = ErrExpectedTyTuple (Type ki ty a)
  deriving (Eq, Ord, Show)

-- Template Haskell: generates the '_ErrExpectedTyTuple' prism.
makePrisms ''ErrExpectedTyTuple
-- | Classy prism for error types that can embed an
-- \"expected a tuple type\" failure.
class AsExpectedTyTuple e ki ty a where -- | e -> ty, e -> a where
  _ExpectedTyTuple :: Prism' e (Type ki ty a)

-- The concrete error type is trivially its own prism target.
instance AsExpectedTyTuple (ErrExpectedTyTuple ki ty a) ki ty a where
  _ExpectedTyTuple = _ErrExpectedTyTuple

-- Recurse past the head of the error sum when it is some other error...
instance {-# OVERLAPPABLE #-} AsExpectedTyTuple (ErrSum xs) ki ty a => AsExpectedTyTuple (ErrSum (x ': xs)) ki ty a where
  _ExpectedTyTuple = _ErrNext . _ExpectedTyTuple

-- ...and stop when the head of the sum is exactly this error.
instance {-# OVERLAPPING #-} AsExpectedTyTuple (ErrSum (ErrExpectedTyTuple ki ty a ': xs)) ki ty a where
  _ExpectedTyTuple = _ErrNow . _ExpectedTyTuple
-- | Require that a type is a tuple type, yielding its component types.
--
-- When the type does not match the '_TyTuple' prism, throws (via
-- 'throwing') an error through the '_ExpectedTyTuple' classy prism.
expectTyTuple :: (MonadError e m, AsExpectedTyTuple e ki ty a, AsTyTuple ki ty) => Type ki ty a -> m [Type ki ty a]
expectTyTuple ty =
  maybe (throwing _ExpectedTyTuple ty) return (preview _TyTuple ty)
-- | Error payload for an out-of-range tuple index: the offending index
-- and the size of the tuple, in that order.
data ErrTupleOutOfBounds = ErrTupleOutOfBounds Int Int
  deriving (Eq, Ord, Show)

-- Template Haskell: generates the '_ErrTupleOutOfBounds' prism.
makePrisms ''ErrTupleOutOfBounds

-- | Classy prism for error types that can embed a tuple-index
-- out-of-bounds failure, as an (index, size) pair.
class AsTupleOutOfBounds e where
  _TupleOutOfBounds :: Prism' e (Int, Int)

-- The concrete error type is trivially its own prism target.
instance AsTupleOutOfBounds ErrTupleOutOfBounds where
  _TupleOutOfBounds = _ErrTupleOutOfBounds

-- Recurse past the head of the error sum when it is some other error...
instance {-# OVERLAPPABLE #-} AsTupleOutOfBounds (ErrSum xs) => AsTupleOutOfBounds (ErrSum (x ': xs)) where
  _TupleOutOfBounds = _ErrNext . _TupleOutOfBounds

-- ...and stop when the head of the sum is exactly this error.
instance {-# OVERLAPPING #-} AsTupleOutOfBounds (ErrSum (ErrTupleOutOfBounds ': xs)) where
  _TupleOutOfBounds = _ErrNow . _TupleOutOfBounds
-- | Look up the i-th component of a tuple's elements.
--
-- Throws an out-of-bounds error (carrying the offending index and the
-- tuple size) when the index is negative or not less than the length,
-- so the '(!!)' on the success path can never fail.
--
-- Fix: the original's inner helper @f x@ bounds-checked @x@ but then
-- indexed with the captured outer @i@ (@ts !! i@).  Behaviour was the
-- same only because @f@ happened to be applied to @i@; the index is now
-- used consistently.
lookupTuple :: (MonadError e m, AsTupleOutOfBounds e) => [t a] -> Int -> m (t a)
lookupTuple ts i
  | i < 0 || i >= l = throwing _TupleOutOfBounds (i, l)
  | otherwise       = return (ts !! i)
  where
    l = length ts
|
dalaing/type-systems
|
src/Fragment/Tuple/Ast/Error.hs
|
Haskell
|
bsd-3-clause
| 2,653
|
-----------------------------------------------------------------------------
-- |
-- Module : Distribution.Client.Setup
-- Copyright : (c) David Himmelstrup 2005
-- License : BSD-like
--
-- Maintainer : lemmih@gmail.com
-- Stability : provisional
-- Portability : portable
--
--
-----------------------------------------------------------------------------
module Distribution.Client.Setup
( globalCommand, GlobalFlags(..), globalRepos
, configureCommand, ConfigFlags(..), filterConfigureFlags
, configureExCommand, ConfigExFlags(..), defaultConfigExFlags
, configureExOptions
, installCommand, InstallFlags(..), installOptions, defaultInstallFlags
, listCommand, ListFlags(..)
, updateCommand
, upgradeCommand
, infoCommand, InfoFlags(..)
, fetchCommand, FetchFlags(..)
, checkCommand
, uploadCommand, UploadFlags(..)
, reportCommand, ReportFlags(..)
, unpackCommand, UnpackFlags(..)
, initCommand, IT.InitFlags(..)
, sdistCommand, SDistFlags(..), SDistExFlags(..), ArchiveFormat(..)
, parsePackageArgs
--TODO: stop exporting these:
, showRepo
, parseRepo
) where
import Distribution.Client.Types
( Username(..), Password(..), Repo(..), RemoteRepo(..), LocalRepo(..) )
import Distribution.Client.BuildReports.Types
( ReportLevel(..) )
import Distribution.Client.Dependency.Types
( Solver(..) )
import qualified Distribution.Client.Init.Types as IT
( InitFlags(..), PackageType(..) )
import Distribution.Client.Targets
( UserConstraint, readUserConstraint )
import Distribution.Simple.Program
( defaultProgramConfiguration )
import Distribution.Simple.Command hiding (boolOpt)
import qualified Distribution.Simple.Setup as Cabal
( configureCommand, sdistCommand, haddockCommand )
import Distribution.Simple.Setup
( ConfigFlags(..), SDistFlags(..), HaddockFlags(..) )
import Distribution.Simple.Setup
( Flag(..), toFlag, fromFlag, flagToList
, optionVerbosity, boolOpt, trueArg, falseArg )
import Distribution.Simple.InstallDirs
( PathTemplate, toPathTemplate, fromPathTemplate )
import Distribution.Version
( Version(Version), anyVersion, thisVersion )
import Distribution.Package
( PackageIdentifier, packageName, packageVersion, Dependency(..) )
import Distribution.Text
( Text(parse), display )
import Distribution.ReadE
( ReadE(..), readP_to_E, succeedReadE )
import qualified Distribution.Compat.ReadP as Parse
( ReadP, readP_to_S, readS_to_P, char, munch1, pfail, (+++) )
import Distribution.Verbosity
( Verbosity, normal )
import Distribution.Simple.Utils
( wrapText )
import Data.Char
( isSpace, isAlphaNum )
import Data.List
( intercalate )
import Data.Maybe
( listToMaybe, maybeToList, fromMaybe )
import Data.Monoid
( Monoid(..) )
import Control.Monad
( liftM )
import System.FilePath
( (</>) )
import Network.URI
( parseAbsoluteURI, uriToString )
-- ------------------------------------------------------------
-- * Global flags
-- ------------------------------------------------------------
-- | Flags that apply at the top level, not to any sub-command.
data GlobalFlags = GlobalFlags {
    globalVersion        :: Flag Bool,     -- ^ Print version information.
    globalNumericVersion :: Flag Bool,     -- ^ Print just the version number.
    globalConfigFile     :: Flag FilePath, -- ^ Alternate config file location.
    globalRemoteRepos    :: [RemoteRepo],  -- ^ Available Hackage servers.
    globalCacheDir       :: Flag FilePath, -- ^ Cache dir for remote-repo downloads.
    globalLocalRepos     :: [FilePath],    -- ^ Local repository directories.
    globalLogsDir        :: Flag FilePath, -- ^ Where to put log files.
    globalWorldFile      :: Flag FilePath  -- ^ Location of the world file.
  }
-- | Default global flags: the two version queries are explicitly off
-- ('Flag' 'False'); every other field is left at its monoidal identity.
defaultGlobalFlags :: GlobalFlags
defaultGlobalFlags = mempty {
    globalVersion        = Flag False,
    globalNumericVersion = Flag False
  }
-- | The top-level command: the global flags plus the blurb shown when
-- the program is run with no sub-command.
globalCommand :: CommandUI GlobalFlags
globalCommand = CommandUI {
    commandName         = "",
    commandSynopsis     = "",
    commandUsage        = \_ ->
         "This program is the command line interface "
      ++ "to the Haskell Cabal infrastructure.\n"
      ++ "See http://www.haskell.org/cabal/ for more information.\n",
    commandDescription  = Just $ \pname ->
         "For more information about a command use:\n"
      ++ " " ++ pname ++ " COMMAND --help\n\n"
      ++ "To install Cabal packages from hackage use:\n"
      ++ " " ++ pname ++ " install foo [--dry-run]\n\n"
      ++ "Occasionally you need to update the list of available packages:\n"
      ++ " " ++ pname ++ " update\n",
    commandDefaultFlags = defaultGlobalFlags,
    commandOptions      = \showOrParseArgs ->
      -- When showing help, only the first two options (--version and
      -- --numeric-version) are displayed; all are accepted when parsing.
      (case showOrParseArgs of ShowArgs -> take 2; ParseArgs -> id)
      [option ['V'] ["version"]
         "Print version information"
         globalVersion (\v flags -> flags { globalVersion = v })
         trueArg

      ,option [] ["numeric-version"]
         "Print just the version number"
         globalNumericVersion (\v flags -> flags { globalNumericVersion = v })
         trueArg

      ,option [] ["config-file"]
         "Set an alternate location for the config file"
         globalConfigFile (\v flags -> flags { globalConfigFile = v })
         (reqArgFlag "FILE")

      ,option [] ["remote-repo"]
         "The name and url for a remote repository"
         globalRemoteRepos (\v flags -> flags { globalRemoteRepos = v })
         (reqArg' "NAME:URL" (maybeToList . readRepo) (map showRepo))

      ,option [] ["remote-repo-cache"]
         "The location where downloads from all remote repos are cached"
         globalCacheDir (\v flags -> flags { globalCacheDir = v })
         (reqArgFlag "DIR")

      ,option [] ["local-repo"]
         "The location of a local repository"
         globalLocalRepos (\v flags -> flags { globalLocalRepos = v })
         (reqArg' "DIR" (\x -> [x]) id)

      ,option [] ["logs-dir"]
         "The location to put log files"
         globalLogsDir (\v flags -> flags { globalLogsDir = v })
         (reqArgFlag "DIR")

      ,option [] ["world-file"]
         "The location of the world file"
         globalWorldFile (\v flags -> flags { globalWorldFile = v })
         (reqArgFlag "FILE")
      ]
  }
instance Monoid GlobalFlags where
  -- The identity: every field empty/unset.
  mempty = GlobalFlags {
    globalVersion        = mempty,
    globalNumericVersion = mempty,
    globalConfigFile     = mempty,
    globalRemoteRepos    = mempty,
    globalCacheDir       = mempty,
    globalLocalRepos     = mempty,
    globalLogsDir        = mempty,
    globalWorldFile      = mempty
  }
  -- Combine field-wise, using each field type's own 'mappend'.
  mappend a b = GlobalFlags {
    globalVersion        = combine globalVersion,
    globalNumericVersion = combine globalNumericVersion,
    globalConfigFile     = combine globalConfigFile,
    globalRemoteRepos    = combine globalRemoteRepos,
    globalCacheDir       = combine globalCacheDir,
    globalLocalRepos     = combine globalLocalRepos,
    globalLogsDir        = combine globalLogsDir,
    globalWorldFile      = combine globalWorldFile
  }
    where combine field = field a `mappend` field b
-- | Build the full list of package repos from the global flags: every
-- remote repo (each cached in its own subdirectory of the shared cache
-- dir) followed by every local directory repo.
globalRepos :: GlobalFlags -> [Repo]
globalRepos globalFlags = map mkRemote (globalRemoteRepos globalFlags)
                       ++ map mkLocal  (globalLocalRepos  globalFlags)
  where
    mkRemote remote = Repo (Left remote) (cacheRoot </> remoteRepoName remote)
    mkLocal  dir    = Repo (Right LocalRepo) dir
    cacheRoot       = fromFlag (globalCacheDir globalFlags)
-- ------------------------------------------------------------
-- * Config flags
-- ------------------------------------------------------------
-- | The @configure@ command: Cabal's own configure command, but with
-- empty defaults in place of Cabal's.
configureCommand :: CommandUI ConfigFlags
configureCommand = (Cabal.configureCommand defaultProgramConfiguration) {
    commandDefaultFlags = mempty
  }

-- | The option fields of the @configure@ command.
configureOptions :: ShowOrParseArgs -> [OptionField ConfigFlags]
configureOptions = commandOptions configureCommand
-- | Trim the configure flags down to what the targeted Cabal library
-- version can understand.
filterConfigureFlags :: ConfigFlags -> Version -> ConfigFlags
filterConfigureFlags flags cabalLibVersion =
  if cabalLibVersion >= Version [1,3,10] []
    then flags
    -- older Cabal does not grok the constraints flag:
    else flags { configConstraints = [] }
-- ------------------------------------------------------------
-- * Config extra flags
-- ------------------------------------------------------------
-- | cabal configure takes some extra flags beyond runghc Setup configure
--
data ConfigExFlags = ConfigExFlags {
    configCabalVersion :: Flag Version,    -- ^ Cabal lib version to build with.
    configExConstraints:: [UserConstraint],-- ^ Extended --constraint values.
    configPreferences  :: [Dependency],    -- ^ Soft version preferences.
    configSolver       :: Flag Solver      -- ^ Which dependency solver to use.
  }

-- | Defaults: everything unset except the solver, which defaults to
-- 'defaultSolver'.
defaultConfigExFlags :: ConfigExFlags
defaultConfigExFlags = mempty { configSolver = Flag defaultSolver }
-- | The extended @configure@ command: Cabal's configure options (minus
-- its own @--constraint@, which 'configureExOptions' replaces) paired
-- with the extra 'ConfigExFlags' options.
configureExCommand :: CommandUI (ConfigFlags, ConfigExFlags)
configureExCommand = configureCommand {
    commandDefaultFlags = (mempty, defaultConfigExFlags),
    commandOptions      = \showOrParseArgs ->
         liftOptions fst setFst (filter ((/="constraint") . optionName) $
                                 configureOptions showOrParseArgs)
      ++ liftOptions snd setSnd (configureExOptions showOrParseArgs)
  }
  where
    -- setters into the flag pair, for liftOptions
    setFst a (_,b) = (a,b)
    setSnd b (a,_) = (a,b)
-- | The option fields that cabal's configure adds over Setup configure.
configureExOptions :: ShowOrParseArgs -> [OptionField ConfigExFlags]
configureExOptions _showOrParseArgs =
  [ option [] ["cabal-lib-version"]
      ("Select which version of the Cabal lib to use to build packages "
       ++ "(useful for testing).")
      configCabalVersion (\v flags -> flags { configCabalVersion = v })
      (reqArg "VERSION" (readP_to_E ("Cannot parse cabal lib version: "++)
                                    (fmap toFlag parse))
                        (map display . flagToList))
  , option [] ["constraint"]
      "Specify constraints on a package (version, installed/source, flags)"
      configExConstraints (\v flags -> flags { configExConstraints = v })
      (reqArg "CONSTRAINT"
              -- each occurrence contributes one constraint to the list
              (fmap (\x -> [x]) (ReadE readUserConstraint))
              (map display))
  , option [] ["preference"]
      "Specify preferences (soft constraints) on the version of a package"
      configPreferences (\v flags -> flags { configPreferences = v })
      (reqArg "CONSTRAINT"
              (readP_to_E (const "dependency expected")
                          (fmap (\x -> [x]) parse))
              (map display))
  , optionSolver configSolver (\v flags -> flags { configSolver = v })
  ]
instance Monoid ConfigExFlags where
  -- The identity: every field empty/unset.
  mempty = ConfigExFlags {
    configCabalVersion = mempty,
    configExConstraints= mempty,
    configPreferences  = mempty,
    configSolver       = mempty
  }
  -- Combine field-wise, using each field type's own 'mappend'.
  mappend a b = ConfigExFlags {
    configCabalVersion = combine configCabalVersion,
    configExConstraints= combine configExConstraints,
    configPreferences  = combine configPreferences,
    configSolver       = combine configSolver
  }
    where combine field = field a `mappend` field b
-- ------------------------------------------------------------
-- * Fetch command
-- ------------------------------------------------------------
-- | Flags for the @fetch@ command.
data FetchFlags = FetchFlags {
--  fetchOutput        :: Flag FilePath,
    fetchDeps          :: Flag Bool,     -- ^ Also fetch dependencies.
    fetchDryRun        :: Flag Bool,     -- ^ Print the plan, fetch nothing.
    fetchSolver        :: Flag Solver,   -- ^ Which dependency solver to use.
    fetchMaxBackjumps  :: Flag Int,      -- ^ Solver backjump limit.
    fetchReorderGoals  :: Flag Bool,
    fetchIndependentGoals :: Flag Bool,
    fetchVerbosity     :: Flag Verbosity
  }

-- | Defaults for @fetch@: dependencies are fetched, dry-run is off, and
-- solver settings come from the shared solver defaults.
defaultFetchFlags :: FetchFlags
defaultFetchFlags = FetchFlags {
--  fetchOutput        = mempty,
    fetchDeps          = toFlag True,
    fetchDryRun        = toFlag False,
    fetchSolver        = Flag defaultSolver,
    fetchMaxBackjumps  = Flag defaultMaxBackjumps,
    fetchReorderGoals  = Flag False,
    fetchIndependentGoals = Flag False,
    fetchVerbosity     = toFlag normal
  }
-- | The @fetch@ command: download packages (and by default their
-- dependencies) for later installation.
fetchCommand :: CommandUI FetchFlags
fetchCommand = CommandUI {
    commandName         = "fetch",
    commandSynopsis     = "Downloads packages for later installation.",
    commandDescription  = Nothing,
    commandUsage        = usagePackages "fetch",
    commandDefaultFlags = defaultFetchFlags,
    commandOptions      = \_ -> [
         optionVerbosity fetchVerbosity (\v flags -> flags { fetchVerbosity = v })

--     , option "o" ["output"]
--         "Put the package(s) somewhere specific rather than the usual cache."
--         fetchOutput (\v flags -> flags { fetchOutput = v })
--         (reqArgFlag "PATH")

       , option [] ["dependencies", "deps"]
           "Resolve and fetch dependencies (default)"
           fetchDeps (\v flags -> flags { fetchDeps = v })
           trueArg

       , option [] ["no-dependencies", "no-deps"]
           "Ignore dependencies"
           fetchDeps (\v flags -> flags { fetchDeps = v })
           falseArg

       , option [] ["dry-run"]
           "Do not install anything, only print what would be installed."
           fetchDryRun (\v flags -> flags { fetchDryRun = v })
           trueArg
       ] ++
       -- the shared dependency-solver options are appended to the list
       optionSolver fetchSolver (\v flags -> flags { fetchSolver = v }) :
       optionSolverFlags fetchMaxBackjumps (\v flags -> flags { fetchMaxBackjumps = v })
                         fetchReorderGoals (\v flags -> flags { fetchReorderGoals = v })
                         fetchIndependentGoals (\v flags -> flags { fetchIndependentGoals = v })
  }
-- ------------------------------------------------------------
-- * Other commands
-- ------------------------------------------------------------
-- | The @update@ command: refresh the list of known packages.  Its only
-- flag is the verbosity itself.
updateCommand :: CommandUI (Flag Verbosity)
updateCommand = CommandUI {
    commandName         = "update",
    commandSynopsis     = "Updates list of known packages",
    commandDescription  = Nothing,
    commandUsage        = usagePackages "update",
    commandDefaultFlags = toFlag normal,
    commandOptions      = \_ -> [optionVerbosity id const]
  }
-- | The @upgrade@ command.  As the synopsis says, the command itself is
-- disabled; it accepts the same flags as @install@ so it can print a
-- helpful refusal rather than a parse error.
upgradeCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags)
upgradeCommand = configureCommand {
    commandName         = "upgrade",
    commandSynopsis     = "(command disabled, use install instead)",
    commandDescription  = Nothing,
    commandUsage        = usagePackages "upgrade",
    commandDefaultFlags = (mempty, mempty, mempty, mempty),
    commandOptions      = commandOptions installCommand
  }
{-
cleanCommand :: CommandUI ()
cleanCommand = makeCommand name shortDesc longDesc emptyFlags options
where
name = "clean"
shortDesc = "Removes downloaded files"
longDesc = Nothing
emptyFlags = ()
options _ = []
-}
-- | The @check@ command: run package sanity checks.  Takes no options
-- beyond a verbosity value.
checkCommand :: CommandUI (Flag Verbosity)
checkCommand = CommandUI {
    commandName         = "check",
    commandSynopsis     = "Check the package for common mistakes",
    commandDescription  = Nothing,
    commandUsage        = \pname -> "Usage: " ++ pname ++ " check\n",
    commandDefaultFlags = toFlag normal,
    commandOptions      = \_ -> []
  }
-- ------------------------------------------------------------
-- * Report flags
-- ------------------------------------------------------------
-- | Flags for the @report@ command.
data ReportFlags = ReportFlags {
    reportUsername  :: Flag Username, -- ^ Hackage username.
    reportPassword  :: Flag Password, -- ^ Hackage password.
    reportVerbosity :: Flag Verbosity
  }

-- | Defaults for @report@: no credentials, normal verbosity.
defaultReportFlags :: ReportFlags
defaultReportFlags = ReportFlags {
    reportUsername  = mempty,
    reportPassword  = mempty,
    reportVerbosity = toFlag normal
  }
-- | The @report@ command: upload locally stored build reports to a
-- remote server, optionally authenticating with a Hackage username and
-- password.
--
-- Fix: the usage banner previously read \"Flags for upload:\" -- a
-- copy-paste from 'uploadCommand' -- and now names the right command.
reportCommand :: CommandUI ReportFlags
reportCommand = CommandUI {
    commandName         = "report",
    commandSynopsis     = "Upload build reports to a remote server.",
    commandDescription  = Just $ \_ ->
         "You can store your Hackage login in the ~/.cabal/config file\n",
    commandUsage        = \pname -> "Usage: " ++ pname ++ " report [FLAGS]\n\n"
                                 ++ "Flags for report:",
    commandDefaultFlags = defaultReportFlags,
    commandOptions      = \_ ->
      [optionVerbosity reportVerbosity (\v flags -> flags { reportVerbosity = v })

      ,option ['u'] ["username"]
        "Hackage username."
        reportUsername (\v flags -> flags { reportUsername = v })
        (reqArg' "USERNAME" (toFlag . Username)
          (flagToList . fmap unUsername))

      ,option ['p'] ["password"]
        "Hackage password."
        reportPassword (\v flags -> flags { reportPassword = v })
        (reqArg' "PASSWORD" (toFlag . Password)
          (flagToList . fmap unPassword))
      ]
  }
instance Monoid ReportFlags where
  -- The identity: every field empty/unset.
  mempty = ReportFlags {
    reportUsername  = mempty,
    reportPassword  = mempty,
    reportVerbosity = mempty
  }
  -- Combine field-wise, using each field type's own 'mappend'.
  mappend a b = ReportFlags {
    reportUsername  = combine reportUsername,
    reportPassword  = combine reportPassword,
    reportVerbosity = combine reportVerbosity
  }
    where combine field = field a `mappend` field b
-- ------------------------------------------------------------
-- * Unpack flags
-- ------------------------------------------------------------
-- | Flags for the @unpack@ command.
data UnpackFlags = UnpackFlags {
    unpackDestDir   :: Flag FilePath, -- ^ Where to unpack (default: cwd).
    unpackVerbosity :: Flag Verbosity
  }

-- | Defaults for @unpack@: no destination set, normal verbosity.
defaultUnpackFlags :: UnpackFlags
defaultUnpackFlags = UnpackFlags {
    unpackDestDir   = mempty,
    unpackVerbosity = toFlag normal
  }
-- | The @unpack@ command: fetch and unpack source packages for user
-- inspection.
--
-- Consistency fix: the short flag is now written as a char list
-- (@['d']@) like every other command in this module; the previous
-- @\"d\"@ denoted exactly the same list of chars.
unpackCommand :: CommandUI UnpackFlags
unpackCommand = CommandUI {
    commandName         = "unpack",
    commandSynopsis     = "Unpacks packages for user inspection.",
    commandDescription  = Nothing,
    commandUsage        = usagePackages "unpack",
    commandDefaultFlags = mempty,
    commandOptions      = \_ -> [
        optionVerbosity unpackVerbosity (\v flags -> flags { unpackVerbosity = v })

       ,option ['d'] ["destdir"]
         "where to unpack the packages, defaults to the current directory."
         unpackDestDir (\v flags -> flags { unpackDestDir = v })
         (reqArgFlag "PATH")
      ]
  }
instance Monoid UnpackFlags where
  -- NOTE: unlike the other instances here, the identity is the command's
  -- defaults ('defaultUnpackFlags'), not an all-mempty record.
  mempty = defaultUnpackFlags
  -- Combine field-wise, using each field type's own 'mappend'.
  mappend a b = UnpackFlags {
     unpackDestDir   = combine unpackDestDir
    ,unpackVerbosity = combine unpackVerbosity
  }
    where combine field = field a `mappend` field b
-- ------------------------------------------------------------
-- * List flags
-- ------------------------------------------------------------
-- | Flags for the @list@ command.
data ListFlags = ListFlags {
    listInstalled    :: Flag Bool, -- ^ Only list installed packages.
    listSimpleOutput :: Flag Bool, -- ^ Machine-friendly output format.
    listVerbosity    :: Flag Verbosity
  }

-- | Defaults for @list@: both filters off, normal verbosity.
defaultListFlags :: ListFlags
defaultListFlags = ListFlags {
    listInstalled    = Flag False,
    listSimpleOutput = Flag False,
    listVerbosity    = toFlag normal
  }
-- | The @list@ command: list packages matching a search string.
--
-- Fix: the help text for @--simple-output@ read \"a easy-to-parse\";
-- corrected to \"an easy-to-parse\".
listCommand :: CommandUI ListFlags
listCommand = CommandUI {
    commandName         = "list",
    commandSynopsis     = "List packages matching a search string.",
    commandDescription  = Nothing,
    commandUsage        = usagePackages "list",
    commandDefaultFlags = defaultListFlags,
    commandOptions      = \_ -> [
        optionVerbosity listVerbosity (\v flags -> flags { listVerbosity = v })

        , option [] ["installed"]
            "Only print installed packages"
            listInstalled (\v flags -> flags { listInstalled = v })
            trueArg

        , option [] ["simple-output"]
            "Print in an easy-to-parse format"
            listSimpleOutput (\v flags -> flags { listSimpleOutput = v })
            trueArg
        ]
  }
instance Monoid ListFlags where
  -- NOTE: the identity here is the command's defaults, not all-mempty.
  mempty = defaultListFlags
  -- Combine field-wise, using each field type's own 'mappend'.
  mappend a b = ListFlags {
    listInstalled    = combine listInstalled,
    listSimpleOutput = combine listSimpleOutput,
    listVerbosity    = combine listVerbosity
  }
    where combine field = field a `mappend` field b
-- ------------------------------------------------------------
-- * Info flags
-- ------------------------------------------------------------
-- | Flags for the @info@ command: only a verbosity.
data InfoFlags = InfoFlags {
    infoVerbosity :: Flag Verbosity
  }

-- | Defaults for @info@: normal verbosity.
defaultInfoFlags :: InfoFlags
defaultInfoFlags = InfoFlags {
    infoVerbosity = toFlag normal
  }

-- | The @info@ command: display detailed information about a package.
infoCommand :: CommandUI InfoFlags
infoCommand = CommandUI {
    commandName         = "info",
    commandSynopsis     = "Display detailed information about a particular package.",
    commandDescription  = Nothing,
    commandUsage        = usagePackages "info",
    commandDefaultFlags = defaultInfoFlags,
    commandOptions      = \_ -> [
        optionVerbosity infoVerbosity (\v flags -> flags { infoVerbosity = v })
      ]
  }

instance Monoid InfoFlags where
  -- NOTE: the identity here is the command's defaults, not all-mempty.
  mempty = defaultInfoFlags
  -- Combine field-wise, using each field type's own 'mappend'.
  mappend a b = InfoFlags {
    infoVerbosity = combine infoVerbosity
  }
    where combine field = field a `mappend` field b
-- ------------------------------------------------------------
-- * Install flags
-- ------------------------------------------------------------
-- | Install takes the same flags as configure along with a few extras.
--
data InstallFlags = InstallFlags {
    installDocumentation :: Flag Bool,          -- ^ Also build documentation.
    installHaddockIndex  :: Flag PathTemplate,  -- ^ Central haddock index template.
    installDryRun        :: Flag Bool,          -- ^ Plan only, install nothing.
    installMaxBackjumps  :: Flag Int,           -- ^ Solver backjump limit.
    installReorderGoals  :: Flag Bool,
    installIndependentGoals :: Flag Bool,
    installReinstall     :: Flag Bool,          -- ^ Allow same-version reinstall.
    installAvoidReinstalls :: Flag Bool,        -- ^ Avoid destructive overwrites.
    installOverrideReinstall :: Flag Bool,      -- ^ Force past reinstall checks.
    installUpgradeDeps   :: Flag Bool,          -- ^ Prefer latest deps over installed.
    installOnly          :: Flag Bool,          -- ^ Only the package in cwd.
    installOnlyDeps      :: Flag Bool,          -- ^ Dependencies only.
    installRootCmd       :: Flag String,        -- ^ Command to gain root (--global).
    installSummaryFile   :: [PathTemplate],     -- ^ Build summary file templates.
    installLogFile       :: Flag PathTemplate,  -- ^ Build log file template.
    installBuildReports  :: Flag ReportLevel,   -- ^ Remote build reporting level.
    installSymlinkBinDir :: Flag FilePath,      -- ^ Dir for executable symlinks.
    installOneShot       :: Flag Bool           -- ^ Don't record in the world file.
  }

-- | Defaults for @install@: documentation and all special behaviours
-- off, solver settings from the shared defaults, no build reports, and
-- the haddock index under @$datadir@.
defaultInstallFlags :: InstallFlags
defaultInstallFlags = InstallFlags {
    installDocumentation = Flag False,
    installHaddockIndex  = Flag docIndexFile,
    installDryRun        = Flag False,
    installMaxBackjumps  = Flag defaultMaxBackjumps,
    installReorderGoals  = Flag False,
    installIndependentGoals= Flag False,
    installReinstall     = Flag False,
    installAvoidReinstalls = Flag False,
    installOverrideReinstall = Flag False,
    installUpgradeDeps   = Flag False,
    installOnly          = Flag False,
    installOnlyDeps      = Flag False,
    installRootCmd       = mempty,
    installSummaryFile   = mempty,
    installLogFile       = mempty,
    installBuildReports  = Flag NoReports,
    installSymlinkBinDir = mempty,
    installOneShot       = Flag False
  }
  where
    -- default location of the generated haddock contents and index
    docIndexFile = toPathTemplate ("$datadir" </> "doc" </> "index.html")
-- | Default limit on backjumps for the dependency solver.
defaultMaxBackjumps :: Int
defaultMaxBackjumps = 200

-- | The dependency solver used when none is chosen on the command line.
defaultSolver :: Solver
defaultSolver = TopDown

-- | Comma-separated rendering of every available solver, for help text.
allSolvers :: String
allSolvers = intercalate ", " (map display ([minBound .. maxBound] :: [Solver]))
-- | The @install@ command.  Its flags are a 4-tuple: Cabal's configure
-- flags, cabal-install's extended configure flags, the install flags,
-- and a selection of haddock flags re-exposed under a @haddock-@ prefix.
installCommand :: CommandUI (ConfigFlags, ConfigExFlags, InstallFlags, HaddockFlags)
installCommand = CommandUI {
  commandName         = "install",
  commandSynopsis     = "Installs a list of packages.",
  commandUsage        = usagePackages "install",
  commandDescription  = Just $ \pname ->
    let original = case commandDescription configureCommand of
          Just desc -> desc pname ++ "\n"
          Nothing   -> ""
    in original
    ++ "Examples:\n"
    ++ " " ++ pname ++ " install "
    ++ " Package in the current directory\n"
    ++ " " ++ pname ++ " install foo "
    ++ " Package from the hackage server\n"
    ++ " " ++ pname ++ " install foo-1.0 "
    ++ " Specific version of a package\n"
    ++ " " ++ pname ++ " install 'foo < 2' "
    ++ " Constrained package version\n",
  commandDefaultFlags = (mempty, mempty, mempty, mempty),
  commandOptions      = \showOrParseArgs ->
       -- Cabal's --constraint is dropped: configureExOptions provides
       -- the extended replacement.
       liftOptions get1 set1 (filter ((/="constraint") . optionName) $
                              configureOptions   showOrParseArgs)
    ++ liftOptions get2 set2 (configureExOptions showOrParseArgs)
    ++ liftOptions get3 set3 (installOptions     showOrParseArgs)
    ++ liftOptions get4 set4 (haddockOptions     showOrParseArgs)
  }
  where
    -- accessors/updaters into the 4-tuple, for liftOptions
    get1 (a,_,_,_) = a; set1 a (_,b,c,d) = (a,b,c,d)
    get2 (_,b,_,_) = b; set2 b (a,_,c,d) = (a,b,c,d)
    get3 (_,_,c,_) = c; set3 c (a,b,_,d) = (a,b,c,d)
    get4 (_,_,_,d) = d; set4 d (a,b,c,_) = (a,b,c,d)

    -- Borrow a whitelisted subset of Cabal's haddock options, renaming
    -- each long flag with a "haddock-" prefix so they do not clash with
    -- install's own options.
    haddockOptions showOrParseArgs
      = [ opt { optionName = "haddock-" ++ name,
                optionDescr = [ fmapOptFlags (\(_, lflags) -> ([], map ("haddock-" ++) lflags)) descr
                              | descr <- optionDescr opt] }
        | opt <- commandOptions Cabal.haddockCommand showOrParseArgs
        , let name = optionName opt
        , name `elem` ["hoogle", "html", "html-location",
                       "executables", "internal", "css",
                       "hyperlink-source", "hscolour-css",
                       "contents-location"]
        ]

    -- Apply a transformation to the flag names inside each OptDescr shape.
    fmapOptFlags :: (OptFlags -> OptFlags) -> OptDescr a -> OptDescr a
    fmapOptFlags modify (ReqArg d f p r w)    = ReqArg d (modify f) p r w
    fmapOptFlags modify (OptArg d f p r i w)  = OptArg d (modify f) p r i w
    fmapOptFlags modify (ChoiceOpt xs)        = ChoiceOpt [(d, modify f, i, w) | (d, f, i, w) <- xs]
    fmapOptFlags modify (BoolOpt d f1 f2 r w) = BoolOpt d (modify f1) (modify f2) r w
-- | The option fields specific to the @install@ command.
installOptions :: ShowOrParseArgs -> [OptionField InstallFlags]
installOptions showOrParseArgs =
  -- NOTE(review): the short-flag list below is written "" (same list as
  -- []); the rest of this module uses the [] / ['c'] notation.
  [ option "" ["documentation"]
      "building of documentation"
      installDocumentation (\v flags -> flags { installDocumentation = v })
      (boolOpt [] [])

  , option [] ["doc-index-file"]
      "A central index of haddock API documentation (template cannot use $pkgid)"
      installHaddockIndex (\v flags -> flags { installHaddockIndex = v })
      (reqArg' "TEMPLATE" (toFlag.toPathTemplate)
                          (flagToList . fmap fromPathTemplate))

  , option [] ["dry-run"]
      "Do not install anything, only print what would be installed."
      installDryRun (\v flags -> flags { installDryRun = v })
      trueArg
  ] ++

  -- shared dependency-solver options, spliced into the middle
  optionSolverFlags installMaxBackjumps (\v flags -> flags { installMaxBackjumps = v })
                    installReorderGoals (\v flags -> flags { installReorderGoals = v })
                    installIndependentGoals (\v flags -> flags { installIndependentGoals = v }) ++

  [ option [] ["reinstall"]
      "Install even if it means installing the same version again."
      installReinstall (\v flags -> flags { installReinstall = v })
      trueArg

  , option [] ["avoid-reinstalls"]
      "Do not select versions that would destructively overwrite installed packages."
      installAvoidReinstalls (\v flags -> flags { installAvoidReinstalls = v })
      trueArg

  , option [] ["force-reinstalls"]
      "Use to override the check that prevents reinstalling already installed versions of package dependencies."
      installOverrideReinstall (\v flags -> flags { installOverrideReinstall = v })
      trueArg

  , option [] ["upgrade-dependencies"]
      "Pick the latest version for all dependencies, rather than trying to pick an installed version."
      installUpgradeDeps (\v flags -> flags { installUpgradeDeps = v })
      trueArg

  , option [] ["only-dependencies"]
      "Install only the dependencies necessary to build the given packages"
      installOnlyDeps (\v flags -> flags { installOnlyDeps = v })
      trueArg

  , option [] ["root-cmd"]
      "Command used to gain root privileges, when installing with --global."
      installRootCmd (\v flags -> flags { installRootCmd = v })
      (reqArg' "COMMAND" toFlag flagToList)

  , option [] ["symlink-bindir"]
      "Add symlinks to installed executables into this directory."
      installSymlinkBinDir (\v flags -> flags { installSymlinkBinDir = v })
      (reqArgFlag "DIR")

  , option [] ["build-summary"]
      "Save build summaries to file (name template can use $pkgid, $compiler, $os, $arch)"
      installSummaryFile (\v flags -> flags { installSummaryFile = v })
      (reqArg' "TEMPLATE" (\x -> [toPathTemplate x]) (map fromPathTemplate))

  , option [] ["build-log"]
      "Log all builds to file (name template can use $pkgid, $compiler, $os, $arch)"
      installLogFile (\v flags -> flags { installLogFile = v })
      (reqArg' "TEMPLATE" (toFlag.toPathTemplate)
                          (flagToList . fmap fromPathTemplate))

  , option [] ["remote-build-reporting"]
      "Generate build reports to send to a remote server (none, anonymous or detailed)."
      installBuildReports (\v flags -> flags { installBuildReports = v })
      (reqArg "LEVEL" (readP_to_E (const $ "report level must be 'none', "
                                        ++ "'anonymous' or 'detailed'")
                                  (toFlag `fmap` parse))
                      (flagToList . fmap display))

  , option [] ["one-shot"]
      "Do not record the packages in the world file."
      installOneShot (\v flags -> flags { installOneShot = v })
      trueArg
  ] ++ case showOrParseArgs of -- TODO: remove when "cabal install" avoids
    -- --only is accepted when parsing but hidden from the help output
    ParseArgs ->
      option [] ["only"]
        "Only installs the package in the current directory."
        installOnly (\v flags -> flags { installOnly = v })
        trueArg
      : []
    _ -> []
instance Monoid InstallFlags where
  -- The identity: every field empty/unset.
  mempty = InstallFlags {
    installDocumentation = mempty,
    installHaddockIndex  = mempty,
    installDryRun        = mempty,
    installReinstall     = mempty,
    installAvoidReinstalls = mempty,
    installOverrideReinstall = mempty,
    installMaxBackjumps  = mempty,
    installUpgradeDeps   = mempty,
    installReorderGoals  = mempty,
    installIndependentGoals= mempty,
    installOnly          = mempty,
    installOnlyDeps      = mempty,
    installRootCmd       = mempty,
    installSummaryFile   = mempty,
    installLogFile       = mempty,
    installBuildReports  = mempty,
    installSymlinkBinDir = mempty,
    installOneShot       = mempty
  }
  -- Combine field-wise, using each field type's own 'mappend'.
  mappend a b = InstallFlags {
    installDocumentation = combine installDocumentation,
    installHaddockIndex  = combine installHaddockIndex,
    installDryRun        = combine installDryRun,
    installReinstall     = combine installReinstall,
    installAvoidReinstalls = combine installAvoidReinstalls,
    installOverrideReinstall = combine installOverrideReinstall,
    installMaxBackjumps  = combine installMaxBackjumps,
    installUpgradeDeps   = combine installUpgradeDeps,
    installReorderGoals  = combine installReorderGoals,
    installIndependentGoals= combine installIndependentGoals,
    installOnly          = combine installOnly,
    installOnlyDeps      = combine installOnlyDeps,
    installRootCmd       = combine installRootCmd,
    installSummaryFile   = combine installSummaryFile,
    installLogFile       = combine installLogFile,
    installBuildReports  = combine installBuildReports,
    installSymlinkBinDir = combine installSymlinkBinDir,
    installOneShot       = combine installOneShot
  }
    where combine field = field a `mappend` field b
-- ------------------------------------------------------------
-- * Upload flags
-- ------------------------------------------------------------
-- | Flags for the @upload@ command.
data UploadFlags = UploadFlags {
    uploadCheck     :: Flag Bool,     -- ^ QA-check only, do not upload.
    uploadUsername  :: Flag Username, -- ^ Hackage username.
    uploadPassword  :: Flag Password, -- ^ Hackage password.
    uploadVerbosity :: Flag Verbosity
  }

-- | Defaults for @upload@: checking off, no credentials, normal verbosity.
defaultUploadFlags :: UploadFlags
defaultUploadFlags = UploadFlags {
    uploadCheck     = toFlag False,
    uploadUsername  = mempty,
    uploadPassword  = mempty,
    uploadVerbosity = toFlag normal
  }
-- | The @upload@ command: upload source package tarballs to Hackage,
-- optionally only running the QA checks.
uploadCommand :: CommandUI UploadFlags
uploadCommand = CommandUI {
    commandName         = "upload",
    commandSynopsis     = "Uploads source packages to Hackage",
    commandDescription  = Just $ \_ ->
         "You can store your Hackage login in the ~/.cabal/config file\n",
    commandUsage        = \pname ->
         "Usage: " ++ pname ++ " upload [FLAGS] [TARFILES]\n\n"
      ++ "Flags for upload:",
    commandDefaultFlags = defaultUploadFlags,
    commandOptions      = \_ ->
      [optionVerbosity uploadVerbosity (\v flags -> flags { uploadVerbosity = v })

      ,option ['c'] ["check"]
        "Do not upload, just do QA checks."
        uploadCheck (\v flags -> flags { uploadCheck = v })
        trueArg

      ,option ['u'] ["username"]
        "Hackage username."
        uploadUsername (\v flags -> flags { uploadUsername = v })
        (reqArg' "USERNAME" (toFlag . Username)
          (flagToList . fmap unUsername))

      ,option ['p'] ["password"]
        "Hackage password."
        uploadPassword (\v flags -> flags { uploadPassword = v })
        (reqArg' "PASSWORD" (toFlag . Password)
          (flagToList . fmap unPassword))
      ]
  }
instance Monoid UploadFlags where
  -- The identity: every field empty/unset.
  mempty = UploadFlags {
    uploadCheck     = mempty,
    uploadUsername  = mempty,
    uploadPassword  = mempty,
    uploadVerbosity = mempty
  }
  -- Combine field-wise, using each field type's own 'mappend'.
  mappend a b = UploadFlags {
    uploadCheck     = combine uploadCheck,
    uploadUsername  = combine uploadUsername,
    uploadPassword  = combine uploadPassword,
    uploadVerbosity = combine uploadVerbosity
  }
    where combine field = field a `mappend` field b
-- ------------------------------------------------------------
-- * Init flags
-- ------------------------------------------------------------
-- | Init flags with nothing set at all.
emptyInitFlags :: IT.InitFlags
emptyInitFlags = mempty

-- | Defaults for @init@: everything unset except normal verbosity.
defaultInitFlags :: IT.InitFlags
defaultInitFlags = emptyInitFlags { IT.initVerbosity = toFlag normal }
-- | Command-line interface for @cabal init@, which creates a .cabal
-- file (plus Setup.hs and optionally a LICENSE), interactively by
-- default.
initCommand :: CommandUI IT.InitFlags
initCommand = CommandUI {
    commandName = "init",
    commandSynopsis = "Interactively create a .cabal file.",
    commandDescription = Just $ \_ -> wrapText $
         "Cabalise a project by creating a .cabal, Setup.hs, and "
      ++ "optionally a LICENSE file.\n\n"
      ++ "Calling init with no arguments (recommended) uses an "
      ++ "interactive mode, which will try to guess as much as "
      ++ "possible and prompt you for the rest. Command-line "
      ++ "arguments are provided for scripting purposes. "
      ++ "If you don't want interactive mode, be sure to pass "
      ++ "the -n flag.\n",
    commandUsage = \pname ->
         "Usage: " ++ pname ++ " init [FLAGS]\n\n"
      ++ "Flags for init:",
    commandDefaultFlags = defaultInitFlags,
    commandOptions = \_ ->
      [ option ['n'] ["non-interactive"]
          "Non-interactive mode."
          IT.nonInteractive (\v flags -> flags { IT.nonInteractive = v })
          trueArg
      , option ['q'] ["quiet"]
          "Do not generate log messages to stdout."
          IT.quiet (\v flags -> flags { IT.quiet = v })
          trueArg
      , option [] ["no-comments"]
          "Do not generate explanatory comments in the .cabal file."
          IT.noComments (\v flags -> flags { IT.noComments = v })
          trueArg
      , option ['m'] ["minimal"]
          "Generate a minimal .cabal file, that is, do not include extra empty fields. Also implies --no-comments."
          IT.minimal (\v flags -> flags { IT.minimal = v })
          trueArg
      , option [] ["package-dir"]
          "Root directory of the package (default = current directory)."
          IT.packageDir (\v flags -> flags { IT.packageDir = v })
          (reqArgFlag "DIRECTORY")
      , option ['p'] ["package-name"]
          "Name of the Cabal package to create."
          IT.packageName (\v flags -> flags { IT.packageName = v })
          (reqArgFlag "PACKAGE")
      -- Versions, ranges, licenses and module names are parsed/printed
      -- through their Text instances ('parse' / 'display').
      , option [] ["version"]
          "Initial version of the package."
          IT.version (\v flags -> flags { IT.version = v })
          (reqArg "VERSION" (readP_to_E ("Cannot parse package version: "++)
                                        (toFlag `fmap` parse))
                            (flagToList . fmap display))
      , option [] ["cabal-version"]
          "Required version of the Cabal library."
          IT.cabalVersion (\v flags -> flags { IT.cabalVersion = v })
          (reqArg "VERSION_RANGE" (readP_to_E ("Cannot parse Cabal version range: "++)
                                              (toFlag `fmap` parse))
                                  (flagToList . fmap display))
      , option ['l'] ["license"]
          "Project license."
          IT.license (\v flags -> flags { IT.license = v })
          (reqArg "LICENSE" (readP_to_E ("Cannot parse license: "++)
                                        (toFlag `fmap` parse))
                            (flagToList . fmap display))
      , option ['a'] ["author"]
          "Name of the project's author."
          IT.author (\v flags -> flags { IT.author = v })
          (reqArgFlag "NAME")
      , option ['e'] ["email"]
          "Email address of the maintainer."
          IT.email (\v flags -> flags { IT.email = v })
          (reqArgFlag "EMAIL")
      , option ['u'] ["homepage"]
          "Project homepage and/or repository."
          IT.homepage (\v flags -> flags { IT.homepage = v })
          (reqArgFlag "URL")
      , option ['s'] ["synopsis"]
          "Short project synopsis."
          IT.synopsis (\v flags -> flags { IT.synopsis = v })
          (reqArgFlag "TEXT")
      -- Recognised categories parse to Right; free-form text is kept
      -- verbatim as Left.
      , option ['c'] ["category"]
          "Project category."
          IT.category (\v flags -> flags { IT.category = v })
          (reqArg' "CATEGORY" (\s -> toFlag $ maybe (Left s) Right (readMaybe s))
                              (flagToList . fmap (either id show)))
      , option [] ["is-library"]
          "Build a library."
          IT.packageType (\v flags -> flags { IT.packageType = v })
          (noArg (Flag IT.Library))
      , option [] ["is-executable"]
          "Build an executable."
          IT.packageType
          (\v flags -> flags { IT.packageType = v })
          (noArg (Flag IT.Executable))
      -- List-valued flags below: each occurrence of the option
      -- contributes a singleton list, accumulated by the flags' monoid.
      , option ['o'] ["expose-module"]
          "Export a module from the package."
          IT.exposedModules
          (\v flags -> flags { IT.exposedModules = v })
          (reqArg "MODULE" (readP_to_E ("Cannot parse module name: "++)
                                       ((Just . (:[])) `fmap` parse))
                           (fromMaybe [] . fmap (fmap display)))
      , option ['d'] ["dependency"]
          "Package dependency."
          IT.dependencies (\v flags -> flags { IT.dependencies = v })
          (reqArg "PACKAGE" (readP_to_E ("Cannot parse dependency: "++)
                                        ((Just . (:[])) `fmap` parse))
                            (fromMaybe [] . fmap (fmap display)))
      , option [] ["source-dir"]
          "Directory containing package source."
          IT.sourceDirs (\v flags -> flags { IT.sourceDirs = v })
          (reqArg' "DIR" (Just . (:[]))
                         (fromMaybe []))
      , option [] ["build-tool"]
          "Required external build tool."
          IT.buildTools (\v flags -> flags { IT.buildTools = v })
          (reqArg' "TOOL" (Just . (:[]))
                          (fromMaybe []))
      , optionVerbosity IT.initVerbosity (\v flags -> flags { IT.initVerbosity = v })
      ]
  }
  -- Strict 'read': succeeds only when the entire string is consumed.
  where readMaybe s = case reads s of
                        [(x,"")] -> Just x
                        _ -> Nothing
-- ------------------------------------------------------------
-- * SDist flags
-- ------------------------------------------------------------
-- | Extra flags to @sdist@ beyond runghc Setup sdist
--
data SDistExFlags = SDistExFlags {
    sDistFormat :: Flag ArchiveFormat  -- ^ which archive format @sdist@ should produce
  }
  deriving Show
-- | Supported @sdist@ archive formats. Per the option help text below,
-- only the tar.gz format is accepted for uploading to Hackage.
data ArchiveFormat = TargzFormat | ZipFormat -- | ...
  deriving (Show, Eq)
-- | Default extra sdist flags: produce a @.tar.gz@ archive.
defaultSDistExFlags :: SDistExFlags
defaultSDistExFlags = SDistExFlags {
    sDistFormat = Flag TargzFormat
  }
-- | The @sdist@ command: Cabal's own sdist command extended with the
-- extra archive-format options, operating on a pair of flag records.
sdistCommand :: CommandUI (SDistFlags, SDistExFlags)
sdistCommand = Cabal.sdistCommand {
    commandDefaultFlags = (commandDefaultFlags Cabal.sdistCommand, defaultSDistExFlags),
    commandOptions = \showOrParseArgs ->
      -- Cabal's options act on the first component of the pair, the
      -- extra options on the second.
      liftOptions fst setFst (commandOptions Cabal.sdistCommand showOrParseArgs)
      ++ liftOptions snd setSnd sdistExOptions
  }
  where
    setFst a (_,b) = (a,b)
    setSnd b (a,_) = (a,b)
    sdistExOptions =
      [option [] ["archive-format"] "archive-format"
         sDistFormat (\v flags -> flags { sDistFormat = v })
         (choiceOpt
            [ (Flag TargzFormat, ([], ["targz"]),
                 "Produce a '.tar.gz' format archive (default and required for uploading to hackage)")
            , (Flag ZipFormat, ([], ["zip"]),
                 "Produce a '.zip' format archive")
            ])
      ]
-- | Field-wise combination; the right operand's 'Flag' wins when set.
instance Monoid SDistExFlags where
  mempty = SDistExFlags
    { sDistFormat = mempty
    }
  mappend a b = SDistExFlags
    { sDistFormat = sDistFormat a `mappend` sDistFormat b
    }
-- ------------------------------------------------------------
-- * GetOpt Utils
-- ------------------------------------------------------------
-- | Build a required-argument option descriptor for a plain
-- @Flag String@ field, given the placeholder shown in @--help@ output.
reqArgFlag :: ArgPlaceHolder -> SFlags -> LFlags -> Description ->
              (b -> Flag String) -> (Flag String -> b -> b) -> OptDescr b
reqArgFlag ad = reqArg ad (succeedReadE Flag) flagToList
-- | Lift a list of option fields over a getter/setter pair, so options
-- defined for a component type @a@ can act on a composite type @b@.
liftOptions :: (b -> a) -> (a -> b -> b)
            -> [OptionField a] -> [OptionField b]
liftOptions get set opts = [ liftOption get set o | o <- opts ]
-- | The @--solver@ option, selecting the dependency solver by name.
optionSolver :: (flags -> Flag Solver)
             -> (Flag Solver -> flags -> flags)
             -> OptionField flags
optionSolver get set =
  option [] ["solver"]
    ("Select dependency solver to use (default: " ++ display defaultSolver ++ "). Choices: " ++ allSolvers ++ ".")
    get set
    (reqArg "SOLVER" (readP_to_E (const $ "solver must be one of: " ++ allSolvers)
                                 (toFlag `fmap` parse))
                     (flagToList . fmap display))
-- | The three solver-tuning options shared by several commands:
-- @--max-backjumps@, @--reorder-goals@ and @--independent-goals@.
-- Each option is described by its getter/setter pair on @flags@.
optionSolverFlags :: (flags -> Flag Int ) -> (Flag Int -> flags -> flags)
                  -> (flags -> Flag Bool ) -> (Flag Bool -> flags -> flags)
                  -> (flags -> Flag Bool ) -> (Flag Bool -> flags -> flags)
                  -> [OptionField flags]
optionSolverFlags getmbj setmbj getrg setrg getig setig =
  [ option [] ["max-backjumps"]
      ("Maximum number of backjumps allowed while solving (default: " ++ show defaultMaxBackjumps ++ "). Use a negative number to enable unlimited backtracking. Use 0 to disable backtracking completely.")
      getmbj setmbj
      (reqArg "NUM" (readP_to_E ("Cannot parse number: "++)
                                (fmap toFlag (Parse.readS_to_P reads)))
                    (map show . flagToList))
  , option [] ["reorder-goals"]
      "Try to reorder goals according to certain heuristics. Slows things down on average, but may make backtracking faster for some packages."
      getrg setrg
      trueArg
  , option [] ["independent-goals"]
      "Treat several goals on the command line as independent. If several goals depend on the same package, different versions can be chosen."
      getig setig
      trueArg
  ]
-- | Render the usage banner for commands that accept either flags or a
-- list of packages; @name@ is the command, @pname@ the program name.
usagePackages :: String -> String -> String
usagePackages name pname = concat
  [ "Usage: ", pname, " ", name, " [FLAGS]\n"
  , " or: ", pname, " ", name, " [PACKAGES]\n\n"
  , "Flags for ", name, ":"
  ]
--TODO: do we want to allow per-package flags?
-- | Parse each command-line argument as a package dependency, keeping
-- the original order and stopping at the first argument that fails to
-- parse ('mapM' over 'Either' short-circuits on the first 'Left').
parsePackageArgs :: [String] -> Either String [Dependency]
parsePackageArgs = mapM parseOne
  where
    parseOne arg =
      case readPToMaybe parseDependencyOrPackageId arg of
        Just dep -> Right dep
        Nothing  -> Left $
          show arg ++ " is not valid syntax for a package name or"
                   ++ " package dependency."
-- | Run a ReadP parser over the whole input, returning the first parse
-- whose leftover input is all whitespace (or 'Nothing').
readPToMaybe :: Parse.ReadP a a -> String -> Maybe a
readPToMaybe p str = listToMaybe [ r | (r,s) <- Parse.readP_to_S p str
                                     , all isSpace s ]
-- | Parse either a dependency (name plus version constraint) or a bare
-- package identifier, promoting the latter to a dependency.
parseDependencyOrPackageId :: Parse.ReadP r Dependency
parseDependencyOrPackageId = parse Parse.+++ liftM pkgidToDependency parse
  where
    pkgidToDependency :: PackageIdentifier -> Dependency
    pkgidToDependency p = case packageVersion p of
      -- A package id with no version component means "any version".
      Version [] _ -> Dependency (packageName p) anyVersion
      version      -> Dependency (packageName p) (thisVersion version)
-- | Render a remote repository as @name:uri@ (inverse of 'readRepo').
showRepo :: RemoteRepo -> String
showRepo repo = remoteRepoName repo ++ ":"
             ++ uriToString id (remoteRepoURI repo) []
-- | Parse a @name:uri@ remote-repository spec (inverse of 'showRepo').
readRepo :: String -> Maybe RemoteRepo
readRepo = readPToMaybe parseRepo
-- | ReadP parser for a remote repository spec of the form @name:uri@.
parseRepo :: Parse.ReadP r RemoteRepo
parseRepo = do
  -- Repo names: alphanumerics plus a few safe punctuation characters.
  name <- Parse.munch1 (\c -> isAlphaNum c || c `elem` "_-.")
  _ <- Parse.char ':'
  -- Accept the URI character set; real validity is checked below.
  uriStr <- Parse.munch1 (\c -> isAlphaNum c || c `elem` "+-=._/*()@'$:;&!?~")
  -- Fail the parse (rather than erroring) on a malformed URI.
  uri <- maybe Parse.pfail return (parseAbsoluteURI uriStr)
  return $ RemoteRepo {
    remoteRepoName = name,
    remoteRepoURI  = uri
  }
|
alphaHeavy/cabal
|
cabal-install/Distribution/Client/Setup.hs
|
Haskell
|
bsd-3-clause
| 45,137
|
-- #hide
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.GL.BlendingFactor
-- Copyright : (c) Sven Panne 2002-2005
-- License : BSD-style (see the file libraries/OpenGL/LICENSE)
--
-- Maintainer : sven.panne@aedion.de
-- Stability : provisional
-- Portability : portable
--
-- This is a purely internal module for (un-)marshaling BlendingFactor.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.GL.BlendingFactor (
BlendingFactor(..), marshalBlendingFactor, unmarshalBlendingFactor
) where
import Graphics.Rendering.OpenGL.GL.BasicTypes ( GLenum )
--------------------------------------------------------------------------------
-- | OpenGL source/destination blending factors, mirroring the GL_*
-- blend-factor enumerants (see 'marshalBlendingFactor' for the values).
data BlendingFactor =
     Zero
   | One
   | SrcColor
   | OneMinusSrcColor
   | DstColor
   | OneMinusDstColor
   | SrcAlpha
   | OneMinusSrcAlpha
   | DstAlpha
   | OneMinusDstAlpha
   | ConstantColor
   | OneMinusConstantColor
   | ConstantAlpha
   | OneMinusConstantAlpha
   | SrcAlphaSaturate
   deriving ( Eq, Ord, Show )
-- | Map a 'BlendingFactor' to its numeric GLenum value (the GL_ZERO,
-- GL_ONE, GL_SRC_COLOR, ... constants). Inverse of
-- 'unmarshalBlendingFactor'.
marshalBlendingFactor :: BlendingFactor -> GLenum
marshalBlendingFactor x = case x of
   Zero -> 0x0
   One -> 0x1
   SrcColor -> 0x300
   OneMinusSrcColor -> 0x301
   DstColor -> 0x306
   OneMinusDstColor -> 0x307
   SrcAlpha -> 0x302
   OneMinusSrcAlpha -> 0x303
   DstAlpha -> 0x304
   OneMinusDstAlpha -> 0x305
   ConstantColor -> 0x8001
   OneMinusConstantColor -> 0x8002
   ConstantAlpha -> 0x8003
   OneMinusConstantAlpha -> 0x8004
   SrcAlphaSaturate -> 0x308
-- | Convert a raw GLenum blend-factor value back to its Haskell
-- representation; calls 'error' on values outside the known set.
-- Inverse of 'marshalBlendingFactor'.
unmarshalBlendingFactor :: GLenum -> BlendingFactor
unmarshalBlendingFactor x = case x of
   0x0    -> Zero
   0x1    -> One
   0x300  -> SrcColor
   0x301  -> OneMinusSrcColor
   0x302  -> SrcAlpha
   0x303  -> OneMinusSrcAlpha
   0x304  -> DstAlpha
   0x305  -> OneMinusDstAlpha
   0x306  -> DstColor
   0x307  -> OneMinusDstColor
   0x308  -> SrcAlphaSaturate
   0x8001 -> ConstantColor
   0x8002 -> OneMinusConstantColor
   0x8003 -> ConstantAlpha
   0x8004 -> OneMinusConstantAlpha
   _      -> error ("unmarshalBlendingFactor: illegal value " ++ show x)
|
FranklinChen/hugs98-plus-Sep2006
|
packages/OpenGL/Graphics/Rendering/OpenGL/GL/BlendingFactor.hs
|
Haskell
|
bsd-3-clause
| 2,227
|
{-# LANGUAGE NamedFieldPuns, RecordWildCards #-}
module Graphics.BarChart.Parser.Progression where
import Text.CSV
import System.FilePath
import Graphics.BarChart.Types
import Graphics.BarChart.Parser
import Graphics.BarChart.Rendering
-- | Used by 'writeProgressionChart' to generate a bar chart from
-- progression's @plot.csv@ file.
--
-- | Build a bar chart from progression's parsed @plot.csv@ data; the
-- first argument transposes the bars when 'True'. (The original named
-- that parameter @flip@, shadowing 'Prelude.flip'.)
progressionChart :: Bool -> [Label] -> CSV -> BarChart Ratio
progressionChart flipBars labels csv = drawMultiBarIntervals oriented
  where
    oriented
      | flipBars  = flipMultiBarIntervals intervals
      | otherwise = intervals
    intervals = parseMultiBarIntervals blockLabels csv
    -- Default to one empty label per CSV record when none are given.
    blockLabels
      | null labels = replicate (length csv) ""
      | otherwise   = labels
-- | Reads the @plot.csv@ file generated by progression and creates a
-- corresponding bar chart.
--
-- NOTE(review): the parameter name 'flip' shadows 'Prelude.flip'.
writeProgressionChart :: Bool -> Config -> FilePath -> [Label] -> IO ()
writeProgressionChart flip config@Config{..} file block_labels =
  do csv <- readCSV file
     let chart = progressionChart flip block_labels csv
     -- Render using the supplied configuration (RecordWildCards binds
     -- the Config fields, though only the whole record is used here).
     renderWith config chart
|
sebfisch/haskell-barchart
|
src/Graphics/BarChart/Parser/Progression.hs
|
Haskell
|
bsd-3-clause
| 1,021
|
{-# LANGUAGE
ConstraintKinds
, FlexibleContexts
, FlexibleInstances
, MultiParamTypeClasses
, ScopedTypeVariables
, TypeFamilies
, TypeSynonymInstances
#-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.UI.Toy.Gtk.Diagrams
-- Copyright : (c) 2013 Michael Sloan
-- License : BSD-style (see the LICENSE file)
--
-- Maintainer : Michael Sloan <mgsloan@gmail.com>
-- Stability : experimental
-- Portability : GHC only
--
-- toy-diagrams utilities specific to the toy-gtk backend.
--
--------------------------------------------------------------------------------
module Graphics.UI.Toy.Gtk.Diagrams
(
-- * Convenient Type Synonyms
CairoDiagram, CairoDiagrammable, CairoInteractive
, CairoButton
, CairoDraggable, CairoHandle
-- , CairoSlider
-- * Displaying diagrams
, defaultDisplay
, displayDiagram
-- * Widgets
, mkDefaultButton
) where
import Control.Lens hiding ( transform, (#) )
import Diagrams.Backend.Cairo ( Cairo )
import Diagrams.Backend.Cairo.Text ( textLineBounded )
import Diagrams.Backend.Gtk ( renderToGtk )
import Diagrams.Prelude
import Diagrams.Lens
import Graphics.UI.Gtk ( DrawWindow )
import Graphics.UI.Toy.Gtk ( Gtk )
import Graphics.UI.Toy
import Graphics.UI.Toy.Button
import Graphics.UI.Toy.Diagrams
import Graphics.UI.Toy.Draggable
-- import Graphics.UI.Toy.Slider
-- | A 2D diagram rendered with the Cairo backend.
type CairoDiagram = Diagram Cairo R2
-- | Things convertible to a Cairo diagram, with query type @q@.
type CairoDiagrammable q a = Diagrammable Cairo R2 q a
-- | Diagrammable and interactive under the Gtk toy framework.
type CairoInteractive a = (Diagrammable Cairo R2 Any a, Interactive Gtk a)
-- | A button drawn via Cairo.
type CairoButton = Button Cairo R2
type CairoDraggable a = Draggable a
-- | A draggable handle carrying a Cairo diagram.
type CairoHandle = Draggable CairoDiagram
-- type CairoSlider a = Slider Cairo R2 a
-- | Convenience function for implementing the display function of 'GtkDisplay'.
-- | Render the state as a diagram onto the Gtk draw window and return
-- the state unchanged; the 'InputState' is ignored.
displayDiagram :: (a -> CairoDiagram)
               -> DrawWindow -> InputState Gtk -> a -> IO a
displayDiagram render dw _ state = do
  renderToGtk dw (render state)
  return state
-- | Simply @'displayDiagram' 'diagram'@, useful for boilerplate implementations
-- of 'GtkDisplay'.
-- | Simply @'displayDiagram' 'diagram'@, for types that are already
-- diagrammable.
defaultDisplay :: CairoDiagrammable Any a
               => DrawWindow -> InputState Gtk -> a -> IO a
defaultDisplay = displayDiagram diagram
-- Input state lives in the diagrams' 2D vector space.
type instance V (InputState Gtk) = R2
-- | Transforming the input state maps the mouse position through the
-- inverse transform, keeping it in the diagram's local coordinates.
instance Transformable (InputState Gtk) where
  transform t is = is { mousePos = (wrapped . _P %~ transform (inv t))
                                     $ mousePos is }
-- | Builds a button containing text. The outside border is a rounded
-- rectangle, and when pressed, it's drawn with a black fill and white lines.
-- | Build a text button: a rounded-rectangle border sized to the label,
-- black-on-white normally, grey fill on hover, inverted (white on
-- black) while pressed, with a red tint overlay while hit.
mkDefaultButton :: String -> CairoButton
mkDefaultButton txt = mkButton dia
  where
    -- Pick the visual variant from the button's current state.
    dia b = addTint $ case _buttonState b of
      NormalState -> blackLined $ label <> border
      HoverState  -> blackLined $ label <> fc lightgray border
      PressState  -> fc white label <> (border # fc black # lc white)
      where
        addTint
          | _buttonHit b = flip overlayScaled (square 1 # fcA redTint)
          | otherwise    = id
        redTint = red `withOpacity` 0.5
    -- Border is padded 5 units around the label; corner radius 3.
    border = centerXY . lw 2 $ roundedRect (width label + 5) (height label + 5) 3
    -- reflectY compensates for Gtk's downward-pointing y axis.
    label = centerXY . pad 1 . reflectY $ textLineBounded monoStyle txt
-- TODO: reinstate when it's figured out how to not use "UndecidableInstances"
-- TODO: GtkDisplay Button instance
-- TODO: document orphans
-- TODO: can these work for any q?
{-
instance (V a ~ R2, CairoDiagrammable Any a) => GtkDisplay (CairoDraggable a) where
display = displayDiagram diagram
instance CairoDiagrammable Any a => GtkDisplay (CairoSlider a) where
display = displayDiagram diagram
instance (V a ~ R2, CairoDiagrammable Any a) => GtkDisplay (Transformed a) where
display = displayDiagram diagram
-}
|
mgsloan/toy-gtk-diagrams
|
src/Graphics/UI/Toy/Gtk/Diagrams.hs
|
Haskell
|
bsd-3-clause
| 3,832
|
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Spring13.Week7.Scrabble where
import Data.Char
-- | A Scrabble score: a thin wrapper over 'Int', with numeric
-- operations available via GeneralizedNewtypeDeriving.
newtype Score = Score Int
  deriving (Eq, Read, Show, Ord, Num)
-- | Scores combine by addition (via the derived 'Num' instance).
-- NOTE(review): on base >= 4.11 this also needs a Semigroup instance —
-- confirm the targeted compiler version.
instance Monoid Score where
  mempty = Score 0
  mappend = (+)
-- | Scrabble score of a single letter (case-insensitive); characters
-- that are not scoring letters are worth zero.
score :: Char -> Score
score = Score . points . toLower
  where
    points ch
      | ch `elem` "aeilnorstu" = 1
      | ch `elem` "dg"         = 2
      | ch `elem` "bcmp"       = 3
      | ch `elem` "fhvwy"      = 4
      | ch `elem` "k"          = 5
      | ch `elem` "jx"         = 8
      | ch `elem` "qz"         = 10
      | otherwise              = 0
-- | Total Scrabble score of a word: the sum of its letter scores.
scoreString :: String -> Score
scoreString = foldr ((+) . score) (Score 0)
-- | Unwrap a 'Score' to its underlying 'Int'.
getScore :: Score -> Int
getScore (Score n) = n
|
bibaijin/cis194
|
src/Spring13/Week7/Scrabble.hs
|
Haskell
|
bsd-3-clause
| 657
|
{-#LANGUAGE OverloadedStrings, NoMonomorphismRestriction, TupleSections#-}
import Control.Applicative
import Control.Monad
import System.Directory
import System.FilePath
import Data.String
import Data.Conduit
import qualified Data.Conduit.List as CL
import Data.Conduit.Filesystem
import qualified Filesystem.Path as FS
import qualified Filesystem.Path.Rules as FS
import Codec.Compression.GZip
import qualified Codec.Archive.Tar as Tar
import qualified Codec.Archive.Tar.Entry as Tar
import qualified Data.ByteString.Lazy as BL
-- | Root of the local cabal-src package repository
-- (@~/.cabal/packages/cabal-src/@). The path is hard-coded relative to
-- the home directory.
base :: IO FilePath
base = getHomeDirectory >>= \home -> return $ home </> ".cabal/packages/cabal-src/"
-- | Keep only paths whose final two extensions are @.tar.gz@.
--
-- Fixed: the previous implementation used the partial functions 'last'
-- and 'init', which crashed on paths with fewer than two extensions
-- (e.g. @foo.gz@, or an extension-less file); such paths are now simply
-- filtered out instead.
gzipOnly :: Monad m => Conduit FS.FilePath m FS.FilePath
gzipOnly = CL.filter isTarGz
  where
    isTarGz path = case reverse (FS.extensions path) of
      (gz : tar : _) -> gz == "gz" && tar == "tar"
      _              -> False
-- | Extract the contents of a @.cabal@ file found inside a gzipped
-- tarball; 'Nothing' when the archive has none or fails to read
-- (read errors are swallowed via @const Nothing@).
getCabal :: FilePath -> IO (Maybe BL.ByteString)
getCabal path = BL.readFile path >>= return. Tar.foldEntries folding Nothing (const Nothing). Tar.read. decompress
  where folding e a = if FS.extension (fromString $ Tar.entryPath e) == Just "cabal"
                      then case Tar.entryContent e of
                             -- Only plain files count; anything else keeps
                             -- the previously-found candidate.
                             Tar.NormalFile s _ -> Just s
                             _ -> a
                      else a
-- | Rebuild @00-index.tar@ from every package tarball in the local
-- cabal-src repository, then drop the stale index cache so cabal
-- regenerates it.
main :: IO ()
main = do fp <- base
          -- NOTE(review): paths are encoded with FS.darwin rules here;
          -- presumably this tool targets macOS — confirm.
          es <- Tar.write <$> toEntries fp (FS.encodeString FS.darwin)
          BL.writeFile (fp </> "00-index.tar") es
          let cache = fp </> "00-index.cache"
          doesFileExist cache >>= \e -> when e (removeFile cache)
-- | For each tarball path, pair the repo-relative @.cabal@ path with
-- that file's contents; yields 'Nothing' when the tarball contains no
-- .cabal file or the path cannot be made relative to the repo root.
cabalFileNameAndContent :: String -> (FS.FilePath -> FilePath)
                        -> Conduit FS.FilePath IO (Maybe (FS.FilePath, BL.ByteString))
cabalFileNameAndContent fp encode = CL.mapM (\e -> (getCabal $ encode e) >>= \r -> case r of
                                                     Just c -> return $ (,c) <$> toCabalFN e
                                                     Nothing -> return Nothing
                                            )
  -- foo-1.0.tar.gz -> foo-1.0.cabal, made relative to the repo root.
  where toCabalFN = FS.stripPrefix (fromString fp) .
                    flip FS.replaceExtension "cabal" . FS.dropExtension
-- | Walk the repository, read every package tarball's .cabal file and
-- build the tar entries for a fresh @00-index.tar@.
toEntries :: String -> (FS.FilePath -> FilePath) -> IO [Tar.Entry]
toEntries fp encode =
    traverse False (fromString fp) $$ gzipOnly =$
    cabalFileNameAndContent fp encode =$
    CL.catMaybes =$
    -- Log each processed entry to stdout.
    CL.mapM (\a@(e,_) -> putStrLn (encode e) >> return a) =$
    -- Paths that don't fit tar's name limits are silently dropped.
    CL.map (\(p,e) -> case Tar.toTarPath False $ encode p
                        of Left _ -> Nothing
                           Right r -> Just (r, e)
           ) =$
    CL.catMaybes =$
    CL.map (uncurry Tar.fileEntry) =$
    CL.consume
|
philopon/cabal-src-recache
|
Main.hs
|
Haskell
|
bsd-3-clause
| 2,611
|
--
--
--
----------------
-- Exercise 9.2.
----------------
--
--
--
module E'9''2 where
-- | Multiplication by repeated addition: @mult n m@ adds @m@ to itself
-- @n@ times. Diverges for negative @n@ (as the exercise notes below
-- discuss), since only @n == 0@ terminates the recursion.
mult :: Integer -> Integer -> Integer
mult n m
  | n == 0    = 0
  | otherwise = m + mult (n - 1) m
{- GHCi>
mult 1 2
mult 2 2
-}
-- 2
-- 4
-- GHCi> mult 0 ( fact ( -2 ) )
-- 0
-- Explanation: the "right" argument is never evaluated because of the order
-- of arguments in the definition and Haskell's laziness.
-- GHCi> mult ( fact ( -2 ) ) 0
-- <interactive>: out of memory
-- Explanation: this time the left argument is the first one to be evaluated.
-- "fact ( -2 )" never terminates and results in a memory exception.
|
pascal-knodel/haskell-craft
|
_/links/E'9''2.hs
|
Haskell
|
mit
| 667
|
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Control.Concurrent.Utils
( Lock()
, Exclusive(..)
, Synchronised(..)
, withLock
) where
import Control.Distributed.Process
( Process
)
import qualified Control.Distributed.Process as Process (catch)
import Control.Exception (SomeException, bracket, throw)
import qualified Control.Exception as Exception (catch)
import Control.Concurrent.MVar
( MVar
, tryPutMVar
, newMVar
, takeMVar
)
import Control.Monad.IO.Class (MonadIO, liftIO)
-- | A simple mutex: the unit value is in the MVar exactly while the
-- lock is free (taken on acquire, put back on release).
newtype Lock = Lock { mvar :: MVar () }
-- | Mutual-exclusion primitives: create, acquire and release a lock
-- from any 'MonadIO' context.
class Exclusive a where
  new :: IO a
  acquire :: (MonadIO m) => a -> m ()
  release :: (MonadIO m) => a -> m ()
-- | Locks are acquired by emptying the MVar and released by refilling
-- it; releasing an unheld lock is a no-op ('tryPutMVar').
instance Exclusive Lock where
  new = fmap Lock (newMVar ())
  acquire lock = liftIO (takeMVar (mvar lock))
  release lock = do
    _ <- liftIO (tryPutMVar (mvar lock) ())
    return ()
-- | Run an action while holding an exclusive lock.
class Synchronised e m where
  synchronised :: (Exclusive e, Monad m) => e -> m b -> m b

  -- American-spelling alias for 'synchronised'.
  synchronized :: (Exclusive e, Monad m) => e -> m b -> m b
  synchronized = synchronised
-- Plain IO actions synchronise via 'withLock'.
instance Synchronised Lock IO where
  synchronised = withLock
-- distributed-process actions synchronise via 'withLockP'.
instance Synchronised Lock Process where
  synchronised = withLockP
-- | Run a 'Process' action while holding the lock; on any exception the
-- lock is released and the exception rethrown.
-- NOTE(review): acquire/release are not masked here, so an asynchronous
-- exception delivered at the wrong moment could leak the lock — confirm
-- whether that matters for callers.
withLockP :: (Exclusive e) => e -> Process a -> Process a
withLockP excl act = do
  Process.catch (do { liftIO $ acquire excl
                    ; result <- act
                    ; liftIO $ release excl
                    ; return result
                    })
                (\(e :: SomeException) -> (liftIO $ release excl) >> throw e)
-- | Run an IO action while holding the given exclusive lock.
--
-- Fixed: now uses 'bracket', so acquisition and release happen under
-- asynchronous-exception mask and the lock is released on every exit
-- path. The previous catch-based version had an unmasked window and
-- rethrew with the imprecise 'throw' instead of letting 'bracket'
-- propagate the exception.
withLock :: (Exclusive e) => e -> IO a -> IO a
withLock excl act =
  bracket (acquire excl)       -- acquire under mask
          (\_ -> release excl) -- always release, even on exception
          (\_ -> act)
|
qnikst/distributed-process-extras
|
src/Control/Concurrent/Utils.hs
|
Haskell
|
bsd-3-clause
| 1,855
|
module Main where
import Control.Monad (void)
import Graphics.Vty
-- | Regression test for vty issue #76: draws two 10x1 coloured bars
-- (blue over green) translated to (10,5), then waits for one input
-- event before shutting the terminal down.
main :: IO ()
main = do
  cfg <- standardIOConfig
  vty <- mkVty cfg
  let line1 = charFill (defAttr `withBackColor` blue) ' ' 10 1
      line2 = charFill (defAttr `withBackColor` green) ' ' 10 1
      img = translate 10 5 (line1 `vertJoin` line2)
      pic = picForImage img
  update vty pic
  -- Block until any event so the picture stays visible.
  void $ nextEvent vty
  shutdown vty
|
jtdaugherty/vty
|
test/Issue76.hs
|
Haskell
|
bsd-3-clause
| 400
|
module IRTS.CodegenC (codegenC) where
import Idris.AbsSyntax
import IRTS.Bytecode
import IRTS.Lang
import IRTS.Simplified
import IRTS.Defunctionalise
import IRTS.System
import IRTS.CodegenCommon
import Idris.Core.TT
import Util.System
import Numeric
import Data.Char
import Data.Bits
import Data.List (intercalate)
import System.Process
import System.Exit
import System.IO
import System.Directory
import System.FilePath ((</>), (<.>))
import Control.Monad
import Debug.Trace
-- | Entry point for the C code generator: compiles the simplified
-- declarations and, when interfaces are requested, also emits a header
-- via 'codegenH'.
codegenC :: CodeGenerator
codegenC ci = do codegenC' (simpleDecls ci)
                           (outputFile ci)
                           (outputType ci)
                           (includes ci)
                           (compileObjs ci)
                           -- Libraries and import dirs become -l / -I flags.
                           (map mkLib (compileLibs ci) ++
                               map incdir (importDirs ci))
                           (compilerFlags ci)
                           (exportDecls ci)
                           (interfaces ci)
                           (debugLevel ci)
                 when (interfaces ci) $
                   codegenH (exportDecls ci)
  where mkLib l = "-l" ++ l
        incdir i = "-I" ++ i
-- | Generate C source for the given declarations and drive the C
-- compiler over it (unless raw output is requested).
codegenC' :: [(Name, SDecl)] ->
             String -> -- output file name
             OutputType -> -- generate executable if True, only .o if False
             [FilePath] -> -- include files
             [String] -> -- extra object files
             [String] -> -- extra compiler flags (libraries)
             [String] -> -- extra compiler flags (anything)
             [ExportIFace] ->
             Bool -> -- interfaces too (so make a .o instead)
             DbgLevel ->
             IO ()
codegenC' defs out exec incs objs libs flags exports iface dbg
    = do -- print defs
         let bc = map toBC defs
         -- Forward declarations, then function bodies.
         let h = concatMap toDecl (map fst bc)
         let cc = concatMap (uncurry toC) bc
         let hi = concatMap ifaceC (concatMap getExp exports)
         d <- getDataDir
         mprog <- readFile (d </> "rts" </> "idris_main" <.> "c")
         -- Executables get the RTS main stub appended; otherwise the
         -- exported-interface C is appended instead.
         let cout = headers incs ++ debug dbg ++ h ++ cc ++
                    (if (exec == Executable) then mprog else hi)
         case exec of
           MavenProject -> putStrLn ("FAILURE: output type not supported")
           Raw -> writeSource out cout
           _ -> do
             -- Write generated C to a temp file and invoke the compiler.
             (tmpn, tmph) <- tempfile ".c"
             hPutStr tmph cout
             hFlush tmph
             hClose tmph
             comp <- getCC
             libFlags <- getLibFlags
             incFlags <- getIncFlags
             envFlags <- getEnvFlags
             let args = [gccDbg dbg] ++
                        gccFlags iface ++
                        -- # Any flags defined here which alter the RTS API must also be added to config.mk
                        ["-DHAS_PTHREAD", "-DIDRIS_ENABLE_STATS",
                         "-I."] ++ objs ++ envFlags ++
                        (if (exec == Executable) then [] else ["-c"]) ++
                        [tmpn] ++
                        (if not iface then concatMap words libFlags else []) ++
                        concatMap words incFlags ++
                        (if not iface then concatMap words libs else []) ++
                        concatMap words flags ++
                        ["-o", out]
             -- putStrLn (show args)
             exit <- rawSystem comp args
             when (exit /= ExitSuccess) $
               putStrLn ("FAILURE: " ++ show comp ++ " " ++ show args)
  where
    getExp (Export _ _ exp) = exp
    -- #include lines for user headers plus the RTS headers.
    headers xs =
      concatMap
        (\h -> "#include \"" ++ h ++ "\"\n")
        (xs ++ ["idris_rts.h", "idris_bitstring.h", "idris_stdfgn.h"])
    debug TRACE = "#define IDRIS_TRACE\n\n"
    debug _ = ""
    -- We're using signed integers now. Make sure we get consistent semantics
    -- out of them from gcc. See e.g. http://thiemonagel.de/2010/01/signed-integer-overflow/
    gccFlags i = if i then ["-fwrapv"]
                      else ["-fwrapv", "-fno-strict-overflow"]
    gccDbg DEBUG = "-g"
    gccDbg TRACE = "-O2"
    gccDbg _ = "-O2"
-- | Mangle an Idris name into a valid C identifier: alphanumeric
-- characters pass through; every other character becomes
-- @_<codepoint>_@.
cname :: Name -> String
cname n = "_idris_" ++ concatMap cchar (showCG n)
  where cchar x | isAlpha x || isDigit x = [x]
                | otherwise = "_" ++ show (fromEnum x) ++ "_"
-- | C-source indentation for nesting depth @n@: four spaces per level
-- (non-positive depths yield the empty string).
indent :: Int -> String
indent depth = concat (replicate depth "    ")
-- | C expression naming a virtual-machine register. (No type signature
-- in the original; the argument type is the bytecode register type.)
creg RVal = "RVAL"
creg (L i) = "LOC(" ++ show i ++ ")"
creg (T i) = "TOP(" ++ show i ++ ")"
creg Tmp = "REG1"
-- | Forward declaration for a generated C function.
toDecl :: Name -> String
toDecl f = "void " ++ cname f ++ "(VM*, VAL*);\n"
-- | Emit the C function for one definition: a standard frame prologue
-- followed by the compiled bytecode at nesting depth 1.
toC :: Name -> [BC] -> String
toC f code
    = -- "/* " ++ show code ++ "*/\n\n" ++
      "void " ++ cname f ++ "(VM* vm, VAL* oldbase) {\n" ++
      indent 1 ++ "INITFRAME;\n" ++
      concatMap (bcc 1) code ++ "}\n\n"
-- | Render a Haskell string as a C string literal.
--
-- Control characters are emitted as hex escapes (wrapped in extra
-- double-quotes, see the note below), printable ASCII passes through,
-- and everything else is emitted as hex-escaped UTF-8 bytes.
--
-- Fixed: the previous 6-bit chunking produced invalid UTF-8 — overlong
-- output for U+007F, two bytes with a three-byte leader for
-- U+0800..U+0FFF, and three bytes for supplementary-plane characters.
-- This version emits standard UTF-8 (RFC 3629); U+007F is now a single
-- escaped byte.
showCStr :: String -> String
showCStr s = '"' : foldr ((++) . showChar) "\"" s
  where
    showChar :: Char -> String
    showChar '"'  = "\\\""
    showChar '\\' = "\\\\"
    showChar c
      -- Note: we need the double quotes around the codes because otherwise
      -- "\n3" would get encoded as "\x0a3", which is incorrect.
      -- Instead, we opt for "\x0a""3" and let the C compiler deal with it.
      | ord c < 0x10 = "\"\"\\x0" ++ showHex (ord c) "\"\""
      | ord c < 0x20 = "\"\"\\x" ++ showHex (ord c) "\"\""
      | ord c < 0x7f = [c] -- 0x7f = \DEL
      | otherwise = showHexes (utf8bytes (ord c))

    -- Standard UTF-8 encoding of a code point.
    utf8bytes :: Int -> [Int]
    utf8bytes x
      | x < 0x80    = [x]
      | x < 0x800   = [0xc0 + shiftR x 6, cont x]
      | x < 0x10000 = [0xe0 + shiftR x 12, cont (shiftR x 6), cont x]
      | otherwise   = [0xf0 + shiftR x 18, cont (shiftR x 12),
                       cont (shiftR x 6), cont x]
      where
        -- Continuation byte: 10xxxxxx carrying the low six bits.
        cont y = 0x80 + (y .&. 0x3f)

    showHexes = foldr ((++) . showUTF8) ""
    showUTF8 c = "\"\"\\x" ++ showHex c "\"\""
-- | Compile a single bytecode instruction to C source, indented to
-- nesting depth @i@ (four spaces per level, via 'indent').
bcc :: Int -> BC -> String
bcc i (ASSIGN l r) = indent i ++ creg l ++ " = " ++ creg r ++ ";\n"
bcc i (ASSIGNCONST l c)
    = indent i ++ creg l ++ " = " ++ mkConst c ++ ";\n"
  where
    mkConst (I i) = "MKINT(" ++ show i ++ ")"
    -- Small big-ints fit a machine int; larger ones go via MKBIGC.
    mkConst (BI i) | i < (2^30) = "MKINT(" ++ show i ++ ")"
                   | otherwise = "MKBIGC(vm,\"" ++ show i ++ "\")"
    mkConst (Fl f) = "MKFLOAT(vm, " ++ show f ++ ")"
    mkConst (Ch c) = "MKINT(" ++ show (fromEnum c) ++ ")"
    mkConst (Str s) = "MKSTR(vm, " ++ showCStr s ++ ")"
    mkConst (B8 x) = "idris_b8const(vm, " ++ show x ++ "U)"
    mkConst (B16 x) = "idris_b16const(vm, " ++ show x ++ "U)"
    mkConst (B32 x) = "idris_b32const(vm, " ++ show x ++ "UL)"
    mkConst (B64 x) = "idris_b64const(vm, " ++ show x ++ "ULL)"
    -- if it's a type constant, we won't use it, but equally it shouldn't
    -- report an error. These might creep into generated for various reasons
    -- (especially if erasure is disabled).
    mkConst c | isTypeConst c = "MKINT(42424242)"
    mkConst c = error $ "mkConst of (" ++ show c ++ ") not implemented"
bcc i (UPDATE l r) = indent i ++ creg l ++ " = " ++ creg r ++ ";\n"
-- Nullary constructors with small tags use preallocated constants.
bcc i (MKCON l loc tag []) | tag < 256
    = indent i ++ creg l ++ " = NULL_CON(" ++ show tag ++ ");\n"
bcc i (MKCON l loc tag args)
    = indent i ++ alloc loc tag ++
      indent i ++ setArgs 0 args ++ "\n" ++
      indent i ++ creg l ++ " = " ++ creg Tmp ++ ";\n"
-- "MKCON(vm, " ++ creg l ++ ", " ++ show tag ++ ", " ++
-- show (length args) ++ concatMap showArg args ++ ");\n"
  where showArg r = ", " ++ creg r
        setArgs i [] = ""
        setArgs i (x : xs) = "SETARG(" ++ creg Tmp ++ ", " ++ show i ++ ", " ++ creg x ++
                             "); " ++ setArgs (i + 1) xs
        -- No old cell to reuse: allocate a fresh constructor.
        alloc Nothing tag
            = "allocCon(" ++ creg Tmp ++ ", vm, " ++ show tag ++ ", " ++
              show (length args) ++ ", 0);\n"
        -- Reuse the old cell's storage (update-in-place).
        alloc (Just old) tag
            = "updateCon(" ++ creg Tmp ++ ", " ++ creg old ++ ", " ++ show tag ++ ", " ++
              show (length args) ++ ");\n"
bcc i (PROJECT l loc a) = indent i ++ "PROJECT(vm, " ++ creg l ++ ", " ++ show loc ++
                          ", " ++ show a ++ ");\n"
bcc i (PROJECTINTO r t idx)
    = indent i ++ creg r ++ " = GETARG(" ++ creg t ++ ", " ++ show idx ++ ");\n"
-- Constructor-tag cases with few alternatives compile to an if/else
-- chain; when the guard fails, the clause below emits a C switch.
bcc i (CASE True r code def)
    | length code < 4 = showCase i def code
  where
    showCode :: Int -> [BC] -> String
    showCode i bc = "{\n" ++ concatMap (bcc (i + 1)) bc ++
                    indent i ++ "}\n"
    showCase :: Int -> Maybe [BC] -> [(Int, [BC])] -> String
    showCase i Nothing [(t, c)] = indent i ++ showCode i c
    showCase i (Just def) [] = indent i ++ showCode i def
    showCase i def ((t, c) : cs)
        = indent i ++ "if (CTAG(" ++ creg r ++ ") == " ++ show t ++ ") " ++ showCode i c
          ++ indent i ++ "else\n" ++ showCase i def cs
bcc i (CASE safe r code def)
    = indent i ++ "switch(" ++ ctag safe ++ "(" ++ creg r ++ ")) {\n" ++
      concatMap (showCase i) code ++
      showDef i def ++
      indent i ++ "}\n"
  where
    -- 'safe' means the scrutinee is known to be a constructor, so the
    -- cheaper CTAG macro can be used instead of the generic TAG.
    ctag True = "CTAG"
    ctag False = "TAG"
    showCase i (t, bc) = indent i ++ "case " ++ show t ++ ":\n"
                         ++ concatMap (bcc (i+1)) bc ++ indent (i + 1) ++ "break;\n"
    showDef i Nothing = ""
    showDef i (Just c) = indent i ++ "default:\n"
                         ++ concatMap (bcc (i+1)) c ++ indent (i + 1) ++ "break;\n"
-- Constant cases: dispatch on the kind of literal in the first
-- alternative (ints/chars/bit-vectors, strings, or big integers); each
-- compiles to a chain of if/else comparisons ending in the default.
bcc i (CONSTCASE r code def)
   | intConsts code
-- = indent i ++ "switch(GETINT(" ++ creg r ++ ")) {\n" ++
-- concatMap (showCase i) code ++
-- showDef i def ++
-- indent i ++ "}\n"
     = concatMap (iCase (creg r)) code ++
       indent i ++ "{\n" ++ showDefS i def ++ indent i ++ "}\n"
   | strConsts code
     = concatMap (strCase ("GETSTR(" ++ creg r ++ ")")) code ++
       indent i ++ "{\n" ++ showDefS i def ++ indent i ++ "}\n"
   | bigintConsts code
     = concatMap (biCase (creg r)) code ++
       indent i ++ "{\n" ++ showDefS i def ++ indent i ++ "}\n"
   | otherwise = error $ "Can't happen: Can't compile const case " ++ show code
  where
    intConsts ((I _, _ ) : _) = True
    intConsts ((Ch _, _ ) : _) = True
    intConsts ((B8 _, _ ) : _) = True
    intConsts ((B16 _, _ ) : _) = True
    intConsts ((B32 _, _ ) : _) = True
    intConsts ((B64 _, _ ) : _) = True
    intConsts _ = False
    bigintConsts ((BI _, _ ) : _) = True
    bigintConsts _ = False
    strConsts ((Str _, _ ) : _) = True
    strConsts _ = False
    strCase sv (s, bc) =
        indent i ++ "if (strcmp(" ++ sv ++ ", " ++ show s ++ ") == 0) {\n" ++
        concatMap (bcc (i+1)) bc ++ indent i ++ "} else\n"
    biCase bv (BI b, bc) =
        indent i ++ "if (bigEqConst(" ++ bv ++ ", " ++ show b ++ ")) {\n"
        ++ concatMap (bcc (i+1)) bc ++ indent i ++ "} else\n"
    iCase v (I b, bc) =
        indent i ++ "if (GETINT(" ++ v ++ ") == " ++ show b ++ ") {\n"
        ++ concatMap (bcc (i+1)) bc ++ indent i ++ "} else\n"
    iCase v (Ch b, bc) =
        indent i ++ "if (GETINT(" ++ v ++ ") == " ++ show (fromEnum b) ++ ") {\n"
        ++ concatMap (bcc (i+1)) bc ++ indent i ++ "} else\n"
    iCase v (B8 w, bc) =
        indent i ++ "if (GETBITS8(" ++ v ++ ") == " ++ show (fromEnum w) ++ ") {\n"
        ++ concatMap (bcc (i+1)) bc ++ indent i ++ "} else\n"
    iCase v (B16 w, bc) =
        indent i ++ "if (GETBITS16(" ++ v ++ ") == " ++ show (fromEnum w) ++ ") {\n"
        ++ concatMap (bcc (i+1)) bc ++ indent i ++ "} else\n"
    iCase v (B32 w, bc) =
        indent i ++ "if (GETBITS32(" ++ v ++ ") == " ++ show (fromEnum w) ++ ") {\n"
        ++ concatMap (bcc (i+1)) bc ++ indent i ++ "} else\n"
    iCase v (B64 w, bc) =
        indent i ++ "if (GETBITS64(" ++ v ++ ") == " ++ show (fromEnum w) ++ ") {\n"
        ++ concatMap (bcc (i+1)) bc ++ indent i ++ "} else\n"
    showCase i (t, bc) = indent i ++ "case " ++ show t ++ ":\n"
                         ++ concatMap (bcc (i+1)) bc ++
                         indent (i + 1) ++ "break;\n"
    showDef i Nothing = ""
    showDef i (Just c) = indent i ++ "default:\n"
                         ++ concatMap (bcc (i+1)) c ++
                         indent (i + 1) ++ "break;\n"
    showDefS i Nothing = ""
    showDefS i (Just c) = concatMap (bcc (i+1)) c
bcc i (CALL n) = indent i ++ "CALL(" ++ cname n ++ ");\n"
bcc i (TAILCALL n) = indent i ++ "TAILCALL(" ++ cname n ++ ");\n"
bcc i (SLIDE n) = indent i ++ "SLIDE(vm, " ++ show n ++ ");\n"
bcc i REBASE = indent i ++ "REBASE;\n"
-- Zero-sized stack adjustments emit nothing.
bcc i (RESERVE 0) = ""
bcc i (RESERVE n) = indent i ++ "RESERVE(" ++ show n ++ ");\n"
bcc i (ADDTOP 0) = ""
bcc i (ADDTOP n) = indent i ++ "ADDTOP(" ++ show n ++ ");\n"
bcc i (TOPBASE n) = indent i ++ "TOPBASE(" ++ show n ++ ");\n"
bcc i (BASETOP n) = indent i ++ "BASETOP(" ++ show n ++ ");\n"
bcc i STOREOLD = indent i ++ "STOREOLD;\n"
bcc i (OP l fn args) = indent i ++ doOp (creg l ++ " = ") fn args ++ ";\n"
-- Foreign calls marshal each argument C-wards and the result back.
bcc i (FOREIGNCALL l rty (FStr fn) args)
      = indent i ++
        c_irts (toFType rty) (creg l ++ " = ")
               (fn ++ "(" ++ showSep "," (map fcall args) ++ ")") ++ ";\n"
    where fcall (t, arg) = irts_c (toFType t) (creg arg)
bcc i (NULL r) = indent i ++ creg r ++ " = NULL;\n" -- clear, so it'll be GCed
bcc i (ERROR str) = indent i ++ "fprintf(stderr, " ++ show str ++ "); fprintf(stderr, \"\\n\"); exit(-1);\n"
-- bcc i c = error (show c) -- indent i ++ "// not done yet\n"
-- Deconstruct the Foreign type in the defunctionalised expression and build
-- a foreign type description for c_irts and irts_c
-- | Map a defunctionalised integer-type constructor (the C_Int* family)
-- to its arithmetic type descriptor; errors on anything unrecognised.
toAType (FCon i)
| i == sUN "C_IntChar" = ATInt ITChar
| i == sUN "C_IntNative" = ATInt ITNative
| i == sUN "C_IntBits8" = ATInt (ITFixed IT8)
| i == sUN "C_IntBits16" = ATInt (ITFixed IT16)
| i == sUN "C_IntBits32" = ATInt (ITFixed IT32)
| i == sUN "C_IntBits64" = ATInt (ITFixed IT64)
toAType t = error (show t ++ " not defined in toAType")
-- | Map a defunctionalised foreign-type description to an FType;
-- anything unrecognised falls through to FAny.
toFType (FCon c)
| c == sUN "C_Str" = FString
| c == sUN "C_Float" = FArith ATFloat
| c == sUN "C_Ptr" = FPtr
| c == sUN "C_MPtr" = FManagedPtr
| c == sUN "C_Unit" = FUnit
toFType (FApp c [_,ity])
| c == sUN "C_IntT" = FArith (toAType ity)
toFType (FApp c [_])
| c == sUN "C_Any" = FAny
toFType t = FAny
-- | Emit C code wrapping a raw C expression 'x' into an Idris runtime
-- value of the given foreign type; 'l' is the assignment prefix
-- (e.g. "r = ").  FUnit and FAny pass the expression through unwrapped.
c_irts (FArith (ATInt ITNative)) l x = l ++ "MKINT((i_int)(" ++ x ++ "))"
c_irts (FArith (ATInt ITChar)) l x = c_irts (FArith (ATInt ITNative)) l x
c_irts (FArith (ATInt (ITFixed ity))) l x
= l ++ "idris_b" ++ show (nativeTyWidth ity) ++ "const(vm, " ++ x ++ ")"
c_irts FString l x = l ++ "MKSTR(vm, " ++ x ++ ")"
c_irts FUnit l x = x
c_irts FPtr l x = l ++ "MKPTR(vm, " ++ x ++ ")"
c_irts FManagedPtr l x = l ++ "MKMPTR(vm, " ++ x ++ ")"
c_irts (FArith ATFloat) l x = l ++ "MKFLOAT(vm, " ++ x ++ ")"
c_irts FAny l x = l ++ x
-- | Inverse of 'c_irts': emit C code extracting the raw C value from an
-- Idris runtime value 'x' of the given foreign type.
irts_c (FArith (ATInt ITNative)) x = "GETINT(" ++ x ++ ")"
irts_c (FArith (ATInt ITChar)) x = irts_c (FArith (ATInt ITNative)) x
irts_c (FArith (ATInt (ITFixed ity))) x
= "(" ++ x ++ "->info.bits" ++ show (nativeTyWidth ity) ++ ")"
irts_c FString x = "GETSTR(" ++ x ++ ")"
irts_c FUnit x = x
irts_c FPtr x = "GETPTR(" ++ x ++ ")"
irts_c FManagedPtr x = "GETMPTR(" ++ x ++ ")"
irts_c (FArith ATFloat) x = "GETFLOAT(" ++ x ++ ")"
irts_c FAny x = x
-- | Emit a call to a width-specific fixed-int primitive,
-- e.g. bitOp v "Plus" IT32 [l,r] -> "idris_b32Plus(vm, <l>, <r>)".
bitOp v op ty args = v ++ "idris_b" ++ show (nativeTyWidth ty) ++ op ++ "(vm, " ++ intercalate ", " (map creg args) ++ ")"
-- | Emit a width-conversion primitive call,
-- e.g. bitCoerce v "Z" IT8 IT32 r -> "idris_b8Z32(vm, <r>)".
bitCoerce v op input output arg
= v ++ "idris_b" ++ show (nativeTyWidth input) ++ op ++ show (nativeTyWidth output) ++ "(vm, " ++ creg arg ++ ")"
-- | C name of the signed integer type of the given width, e.g. "int32_t".
signedTy :: NativeTy -> String
signedTy t = "int" ++ show (nativeTyWidth t) ++ "_t"
-- | Emit the C expression implementing one primitive operation.
-- 'v' is the assignment-target prefix (e.g. "R0 = "); the result text is
-- appended to it.  Equation order matters: specific integer classes are
-- handled before the generic fall-through at the bottom.
--
-- Native machine-int arithmetic and comparisons (INTOP/UINTOP/etc. macros).
doOp v (LPlus (ATInt ITNative)) [l, r] = v ++ "ADD(" ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LMinus (ATInt ITNative)) [l, r] = v ++ "INTOP(-," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LTimes (ATInt ITNative)) [l, r] = v ++ "MULT(" ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LUDiv ITNative) [l, r] = v ++ "UINTOP(/," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSDiv (ATInt ITNative)) [l, r] = v ++ "INTOP(/," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LURem ITNative) [l, r] = v ++ "UINTOP(%," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSRem (ATInt ITNative)) [l, r] = v ++ "INTOP(%," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LAnd ITNative) [l, r] = v ++ "INTOP(&," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LOr ITNative) [l, r] = v ++ "INTOP(|," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LXOr ITNative) [l, r] = v ++ "INTOP(^," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSHL ITNative) [l, r] = v ++ "INTOP(<<," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LLSHR ITNative) [l, r] = v ++ "UINTOP(>>," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LASHR ITNative) [l, r] = v ++ "INTOP(>>," ++ creg l ++ ", " ++ creg r ++ ")"
-- NOTE(review): INTOP is given a single operand here, unlike the two-operand
-- uses above — confirm the macro supports unary '~'.
doOp v (LCompl ITNative) [x] = v ++ "INTOP(~," ++ creg x ++ ")"
doOp v (LEq (ATInt ITNative)) [l, r] = v ++ "INTOP(==," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSLt (ATInt ITNative)) [l, r] = v ++ "INTOP(<," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSLe (ATInt ITNative)) [l, r] = v ++ "INTOP(<=," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSGt (ATInt ITNative)) [l, r] = v ++ "INTOP(>," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSGe (ATInt ITNative)) [l, r] = v ++ "INTOP(>=," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LLt ITNative) [l, r] = v ++ "UINTOP(<," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LLe ITNative) [l, r] = v ++ "UINTOP(<=," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LGt ITNative) [l, r] = v ++ "UINTOP(>," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LGe ITNative) [l, r] = v ++ "UINTOP(>=," ++ creg l ++ ", " ++ creg r ++ ")"
-- Char operations delegate to the native-int equivalents.
doOp v (LPlus (ATInt ITChar)) [l, r] = doOp v (LPlus (ATInt ITNative)) [l, r]
doOp v (LMinus (ATInt ITChar)) [l, r] = doOp v (LMinus (ATInt ITNative)) [l, r]
doOp v (LTimes (ATInt ITChar)) [l, r] = doOp v (LTimes (ATInt ITNative)) [l, r]
doOp v (LUDiv ITChar) [l, r] = doOp v (LUDiv ITNative) [l, r]
doOp v (LSDiv (ATInt ITChar)) [l, r] = doOp v (LSDiv (ATInt ITNative)) [l, r]
doOp v (LURem ITChar) [l, r] = doOp v (LURem ITNative) [l, r]
doOp v (LSRem (ATInt ITChar)) [l, r] = doOp v (LSRem (ATInt ITNative)) [l, r]
doOp v (LAnd ITChar) [l, r] = doOp v (LAnd ITNative) [l, r]
doOp v (LOr ITChar) [l, r] = doOp v (LOr ITNative) [l, r]
doOp v (LXOr ITChar) [l, r] = doOp v (LXOr ITNative) [l, r]
doOp v (LSHL ITChar) [l, r] = doOp v (LSHL ITNative) [l, r]
doOp v (LLSHR ITChar) [l, r] = doOp v (LLSHR ITNative) [l, r]
doOp v (LASHR ITChar) [l, r] = doOp v (LASHR ITNative) [l, r]
doOp v (LCompl ITChar) [x] = doOp v (LCompl ITNative) [x]
doOp v (LEq (ATInt ITChar)) [l, r] = doOp v (LEq (ATInt ITNative)) [l, r]
doOp v (LSLt (ATInt ITChar)) [l, r] = doOp v (LSLt (ATInt ITNative)) [l, r]
doOp v (LSLe (ATInt ITChar)) [l, r] = doOp v (LSLe (ATInt ITNative)) [l, r]
doOp v (LSGt (ATInt ITChar)) [l, r] = doOp v (LSGt (ATInt ITNative)) [l, r]
doOp v (LSGe (ATInt ITChar)) [l, r] = doOp v (LSGe (ATInt ITNative)) [l, r]
doOp v (LLt ITChar) [l, r] = doOp v (LLt ITNative) [l, r]
doOp v (LLe ITChar) [l, r] = doOp v (LLe ITNative) [l, r]
doOp v (LGt ITChar) [l, r] = doOp v (LGt ITNative) [l, r]
doOp v (LGe ITChar) [l, r] = doOp v (LGe ITNative) [l, r]
-- Floating point arithmetic and comparisons.
doOp v (LPlus ATFloat) [l, r] = v ++ "FLOATOP(+," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LMinus ATFloat) [l, r] = v ++ "FLOATOP(-," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LTimes ATFloat) [l, r] = v ++ "FLOATOP(*," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSDiv ATFloat) [l, r] = v ++ "FLOATOP(/," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LEq ATFloat) [l, r] = v ++ "FLOATBOP(==," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSLt ATFloat) [l, r] = v ++ "FLOATBOP(<," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSLe ATFloat) [l, r] = v ++ "FLOATBOP(<=," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSGt ATFloat) [l, r] = v ++ "FLOATBOP(>," ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSGe ATFloat) [l, r] = v ++ "FLOATBOP(>=," ++ creg l ++ ", " ++ creg r ++ ")"
-- Arbitrary-precision (big) integers go through the idris_big* runtime calls.
doOp v (LIntFloat ITBig) [x] = v ++ "idris_castBigFloat(vm, " ++ creg x ++ ")"
doOp v (LFloatInt ITBig) [x] = v ++ "idris_castFloatBig(vm, " ++ creg x ++ ")"
doOp v (LPlus (ATInt ITBig)) [l, r] = v ++ "idris_bigPlus(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LMinus (ATInt ITBig)) [l, r] = v ++ "idris_bigMinus(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LTimes (ATInt ITBig)) [l, r] = v ++ "idris_bigTimes(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSDiv (ATInt ITBig)) [l, r] = v ++ "idris_bigDivide(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSRem (ATInt ITBig)) [l, r] = v ++ "idris_bigMod(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LEq (ATInt ITBig)) [l, r] = v ++ "idris_bigEq(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSLt (ATInt ITBig)) [l, r] = v ++ "idris_bigLt(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSLe (ATInt ITBig)) [l, r] = v ++ "idris_bigLe(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSGt (ATInt ITBig)) [l, r] = v ++ "idris_bigGt(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v (LSGe (ATInt ITBig)) [l, r] = v ++ "idris_bigGe(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
-- Numeric/string conversions.
doOp v (LIntFloat ITNative) [x] = v ++ "idris_castIntFloat(" ++ creg x ++ ")"
doOp v (LFloatInt ITNative) [x] = v ++ "idris_castFloatInt(" ++ creg x ++ ")"
doOp v (LSExt ITNative ITBig) [x] = v ++ "idris_castIntBig(vm, " ++ creg x ++ ")"
doOp v (LTrunc ITBig ITNative) [x] = v ++ "idris_castBigInt(vm, " ++ creg x ++ ")"
doOp v (LStrInt ITBig) [x] = v ++ "idris_castStrBig(vm, " ++ creg x ++ ")"
doOp v (LIntStr ITBig) [x] = v ++ "idris_castBigStr(vm, " ++ creg x ++ ")"
doOp v (LIntStr ITNative) [x] = v ++ "idris_castIntStr(vm, " ++ creg x ++ ")"
doOp v (LStrInt ITNative) [x] = v ++ "idris_castStrInt(vm, " ++ creg x ++ ")"
doOp v (LIntStr (ITFixed _)) [x] = v ++ "idris_castBitsStr(vm, " ++ creg x ++ ")"
doOp v LFloatStr [x] = v ++ "idris_castFloatStr(vm, " ++ creg x ++ ")"
doOp v LStrFloat [x] = v ++ "idris_castStrFloat(vm, " ++ creg x ++ ")"
-- Fixed-width integers: dispatch to width-specific primitives via 'bitOp'.
doOp v (LSLt (ATInt (ITFixed ty))) [x, y] = bitOp v "SLt" ty [x, y]
doOp v (LSLe (ATInt (ITFixed ty))) [x, y] = bitOp v "SLte" ty [x, y]
doOp v (LEq (ATInt (ITFixed ty))) [x, y] = bitOp v "Eq" ty [x, y]
doOp v (LSGe (ATInt (ITFixed ty))) [x, y] = bitOp v "SGte" ty [x, y]
doOp v (LSGt (ATInt (ITFixed ty))) [x, y] = bitOp v "SGt" ty [x, y]
doOp v (LLt (ITFixed ty)) [x, y] = bitOp v "Lt" ty [x, y]
doOp v (LLe (ITFixed ty)) [x, y] = bitOp v "Lte" ty [x, y]
doOp v (LGe (ITFixed ty)) [x, y] = bitOp v "Gte" ty [x, y]
doOp v (LGt (ITFixed ty)) [x, y] = bitOp v "Gt" ty [x, y]
doOp v (LSHL (ITFixed ty)) [x, y] = bitOp v "Shl" ty [x, y]
doOp v (LLSHR (ITFixed ty)) [x, y] = bitOp v "LShr" ty [x, y]
doOp v (LASHR (ITFixed ty)) [x, y] = bitOp v "AShr" ty [x, y]
doOp v (LAnd (ITFixed ty)) [x, y] = bitOp v "And" ty [x, y]
doOp v (LOr (ITFixed ty)) [x, y] = bitOp v "Or" ty [x, y]
doOp v (LXOr (ITFixed ty)) [x, y] = bitOp v "Xor" ty [x, y]
doOp v (LCompl (ITFixed ty)) [x] = bitOp v "Compl" ty [x]
doOp v (LPlus (ATInt (ITFixed ty))) [x, y] = bitOp v "Plus" ty [x, y]
doOp v (LMinus (ATInt (ITFixed ty))) [x, y] = bitOp v "Minus" ty [x, y]
doOp v (LTimes (ATInt (ITFixed ty))) [x, y] = bitOp v "Times" ty [x, y]
doOp v (LUDiv (ITFixed ty)) [x, y] = bitOp v "UDiv" ty [x, y]
doOp v (LSDiv (ATInt (ITFixed ty))) [x, y] = bitOp v "SDiv" ty [x, y]
doOp v (LURem (ITFixed ty)) [x, y] = bitOp v "URem" ty [x, y]
doOp v (LSRem (ATInt (ITFixed ty))) [x, y] = bitOp v "SRem" ty [x, y]
-- Sign-extension between widths; widening fixed->fixed uses 'bitCoerce'.
doOp v (LSExt (ITFixed from) ITBig) [x]
= v ++ "MKBIGSI(vm, (" ++ signedTy from ++ ")" ++ creg x ++ "->info.bits" ++ show (nativeTyWidth from) ++ ")"
doOp v (LSExt ITNative (ITFixed to)) [x]
= v ++ "idris_b" ++ show (nativeTyWidth to) ++ "const(vm, GETINT(" ++ creg x ++ "))"
doOp v (LSExt ITChar (ITFixed to)) [x]
= doOp v (LSExt ITNative (ITFixed to)) [x]
doOp v (LSExt (ITFixed from) ITNative) [x]
= v ++ "MKINT((i_int)((" ++ signedTy from ++ ")" ++ creg x ++ "->info.bits" ++ show (nativeTyWidth from) ++ "))"
doOp v (LSExt (ITFixed from) ITChar) [x]
= doOp v (LSExt (ITFixed from) ITNative) [x]
doOp v (LSExt (ITFixed from) (ITFixed to)) [x]
| nativeTyWidth from < nativeTyWidth to = bitCoerce v "S" from to x
-- Zero-extension between widths.
doOp v (LZExt ITNative (ITFixed to)) [x]
= v ++ "idris_b" ++ show (nativeTyWidth to) ++ "const(vm, (uintptr_t)GETINT(" ++ creg x ++ "))"
doOp v (LZExt ITChar (ITFixed to)) [x]
= doOp v (LZExt ITNative (ITFixed to)) [x]
doOp v (LZExt (ITFixed from) ITNative) [x]
= v ++ "MKINT((i_int)" ++ creg x ++ "->info.bits" ++ show (nativeTyWidth from) ++ ")"
doOp v (LZExt (ITFixed from) ITChar) [x]
= doOp v (LZExt (ITFixed from) ITNative) [x]
doOp v (LZExt (ITFixed from) ITBig) [x]
= v ++ "MKBIGUI(vm, " ++ creg x ++ "->info.bits" ++ show (nativeTyWidth from) ++ ")"
doOp v (LZExt ITNative ITBig) [x]
= v ++ "MKBIGUI(vm, (uintptr_t)GETINT(" ++ creg x ++ "))"
doOp v (LZExt (ITFixed from) (ITFixed to)) [x]
| nativeTyWidth from < nativeTyWidth to = bitCoerce v "Z" from to x
-- Truncation to narrower widths.
doOp v (LTrunc ITNative (ITFixed to)) [x]
= v ++ "idris_b" ++ show (nativeTyWidth to) ++ "const(vm, GETINT(" ++ creg x ++ "))"
doOp v (LTrunc ITChar (ITFixed to)) [x]
= doOp v (LTrunc ITNative (ITFixed to)) [x]
doOp v (LTrunc (ITFixed from) ITNative) [x]
= v ++ "MKINT((i_int)" ++ creg x ++ "->info.bits" ++ show (nativeTyWidth from) ++ ")"
doOp v (LTrunc (ITFixed from) ITChar) [x]
= doOp v (LTrunc (ITFixed from) ITNative) [x]
doOp v (LTrunc ITBig (ITFixed to)) [x]
= v ++ "idris_b" ++ show (nativeTyWidth to) ++ "const(vm, ISINT(" ++ creg x ++ ") ? GETINT(" ++ creg x ++ ") : mpz_get_ui(GETMPZ(" ++ creg x ++ ")))"
doOp v (LTrunc (ITFixed from) (ITFixed to)) [x]
| nativeTyWidth from > nativeTyWidth to = bitCoerce v "T" from to x
-- Floating point math functions (wrap C math library via 'flUnOp').
doOp v LFExp [x] = v ++ flUnOp "exp" (creg x)
doOp v LFLog [x] = v ++ flUnOp "log" (creg x)
doOp v LFSin [x] = v ++ flUnOp "sin" (creg x)
doOp v LFCos [x] = v ++ flUnOp "cos" (creg x)
doOp v LFTan [x] = v ++ flUnOp "tan" (creg x)
doOp v LFASin [x] = v ++ flUnOp "asin" (creg x)
doOp v LFACos [x] = v ++ flUnOp "acos" (creg x)
doOp v LFATan [x] = v ++ flUnOp "atan" (creg x)
doOp v LFSqrt [x] = v ++ flUnOp "sqrt" (creg x)
doOp v LFFloor [x] = v ++ flUnOp "floor" (creg x)
doOp v LFCeil [x] = v ++ flUnOp "ceil" (creg x)
doOp v LFNegate [x] = v ++ "MKFLOAT(vm, -GETFLOAT(" ++ (creg x) ++ "))"
-- String functions which don't need to know we're UTF8
doOp v LStrConcat [l,r] = v ++ "idris_concat(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v LStrLt [l,r] = v ++ "idris_strlt(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v LStrEq [l,r] = v ++ "idris_streq(vm, " ++ creg l ++ ", " ++ creg r ++ ")"
doOp v LReadStr [_] = v ++ "idris_readStr(vm, stdin)"
doOp v LWriteStr [_,s]
= v ++ "MKINT((i_int)(idris_writeStr(stdout"
++ ",GETSTR("
++ creg s ++ "))))"
-- String functions which need to know we're UTF8
doOp v LStrHead [x] = v ++ "idris_strHead(vm, " ++ creg x ++ ")"
doOp v LStrTail [x] = v ++ "idris_strTail(vm, " ++ creg x ++ ")"
doOp v LStrCons [x, y] = v ++ "idris_strCons(vm, " ++ creg x ++ "," ++ creg y ++ ")"
doOp v LStrIndex [x, y] = v ++ "idris_strIndex(vm, " ++ creg x ++ "," ++ creg y ++ ")"
doOp v LStrRev [x] = v ++ "idris_strRev(vm, " ++ creg x ++ ")"
doOp v LStrLen [x] = v ++ "idris_strlen(vm, " ++ creg x ++ ")"
doOp v LStrSubstr [x,y,z] = v ++ "idris_substr(vm, " ++ creg x ++ "," ++ creg y ++ "," ++ creg z ++ ")"
-- Concurrency and misc: LPar currently evaluates sequentially (see comment).
doOp v LFork [x] = v ++ "MKPTR(vm, vmThread(vm, " ++ cname (sMN 0 "EVAL") ++ ", " ++ creg x ++ "))"
doOp v LPar [x] = v ++ creg x -- "MKPTR(vm, vmThread(vm, " ++ cname (MN 0 "EVAL") ++ ", " ++ creg x ++ "))"
doOp v (LChInt ITNative) args = v ++ creg (last args)
doOp v (LChInt ITChar) args = doOp v (LChInt ITNative) args
doOp v (LIntCh ITNative) args = v ++ creg (last args)
doOp v (LIntCh ITChar) args = doOp v (LIntCh ITNative) args
doOp v LSystemInfo [x] = v ++ "idris_systemInfo(vm, " ++ creg x ++ ")"
doOp v LNoOp args = v ++ creg (last args)
-- Pointer primitives (declared as %extern in Builtins.idr)
doOp v (LExternal rf) [_,x]
| rf == sUN "prim__readFile"
= v ++ "idris_readStr(vm, GETPTR(" ++ creg x ++ "))"
doOp v (LExternal wf) [_,x,s]
| wf == sUN "prim__writeFile"
= v ++ "MKINT((i_int)(idris_writeStr(GETPTR(" ++ creg x
++ "),GETSTR("
++ creg s ++ "))))"
doOp v (LExternal vm) [] | vm == sUN "prim__vm" = v ++ "MKPTR(vm, vm)"
doOp v (LExternal si) [] | si == sUN "prim__stdin" = v ++ "MKPTR(vm, stdin)"
doOp v (LExternal so) [] | so == sUN "prim__stdout" = v ++ "MKPTR(vm, stdout)"
doOp v (LExternal se) [] | se == sUN "prim__stderr" = v ++ "MKPTR(vm, stderr)"
doOp v (LExternal nul) [] | nul == sUN "prim__null" = v ++ "MKPTR(vm, NULL)"
doOp v (LExternal eqp) [x, y] | eqp == sUN "prim__eqPtr"
= v ++ "MKINT((i_int)(GETPTR(" ++ creg x ++ ") == GETPTR(" ++ creg y ++ ")))"
doOp v (LExternal eqp) [x, y] | eqp == sUN "prim__eqManagedPtr"
= v ++ "MKINT((i_int)(GETMPTR(" ++ creg x ++ ") == GETMPTR(" ++ creg y ++ ")))"
doOp v (LExternal rp) [p, i] | rp == sUN "prim__registerPtr"
= v ++ "MKMPTR(vm, GETPTR(" ++ creg p ++ "), GETINT(" ++ creg i ++ "))"
-- Fall-through: unimplemented primitive; include op and args in the error.
doOp _ op args = error $ "doOp not implemented (" ++ show (op, args) ++ ")"
-- | Wrap a unary C math function over an Idris float value:
-- produces "MKFLOAT(vm, <fn>(GETFLOAT(<val>)))".
flUnOp :: String -> String -> String
flUnOp name val = "MKFLOAT(vm, " ++ name ++ "(GETFLOAT(" ++ val ++ ")))"
-------------------- Interface file generation
-- First, the wrappers in the C file
-- | Generate the C-side wrapper for one export: a typedef for exported
-- data types, or a C function that marshals its arguments onto the Idris
-- value stack, CALLs the compiled Idris function, and converts the result
-- back to a C value.
ifaceC :: Export -> String
ifaceC (ExportData n) = "typedef VAL " ++ cdesc n ++ ";\n"
ifaceC (ExportFun n cn ret args)
= ctype ret ++ " " ++ cdesc cn ++
"(VM* vm" ++ showArgs (zip argNames args) ++ ") {\n"
++ mkBody n (zip argNames args) ret ++ "}\n\n"
-- Render ", <ctype> argN" for each wrapper parameter.
where showArgs [] = ""
showArgs ((n, t) : ts) = ", " ++ ctype t ++ " " ++ n ++
showArgs ts
-- Generated C parameter names: arg0, arg1, ...
argNames = zipWith (++) (repeat "arg") (map show [0..])
-- Wrapper body: init the frame, reserve stack (at least 3 slots, needed
-- by the IO-result call below), push arguments, call, convert result.
mkBody n as t = indent 1 ++ "INITFRAME;\n" ++
indent 1 ++ "RESERVE(" ++ show (max (length as) 3) ++ ");\n" ++
push 0 as ++ call n ++ retval t
-- Marshal each C argument into a stack slot via 'c_irts'.
where push i [] = ""
push i ((n, t) : ts) = indent 1 ++ c_irts (toFType t)
("TOP(" ++ show i ++ ") = ") n
++ ";\n" ++ push (i + 1) ts
call _ = indent 1 ++ "STOREOLD;\n" ++
indent 1 ++ "BASETOP(0);\n" ++
indent 1 ++ "ADDTOP(" ++ show (length as) ++ ");\n" ++
indent 1 ++ "CALL(" ++ cname n ++ ");\n"
-- An FIO result must first be run through call__IO before conversion.
retval (FIO t)
= indent 1 ++ "TOP(0) = NULL;\n" ++
indent 1 ++ "TOP(1) = NULL;\n" ++
indent 1 ++ "TOP(2) = RVAL;\n" ++
indent 1 ++ "STOREOLD;\n" ++
indent 1 ++ "BASETOP(0);\n" ++
indent 1 ++ "ADDTOP(3);\n" ++
indent 1 ++ "CALL(" ++ cname (sUN "call__IO") ++ ");\n" ++
retval t
retval t = indent 1 ++ "return " ++ irts_c (toFType t) "RVAL" ++ ";\n"
-- | Map an exported (defunctionalised) foreign-type description to the
-- C type name used in the generated wrappers and headers.
ctype (FCon c)
| c == sUN "C_Str" = "char*"
| c == sUN "C_Float" = "float"
| c == sUN "C_Ptr" = "void*"
| c == sUN "C_MPtr" = "void*"
| c == sUN "C_Unit" = "void"
ctype (FApp c [_,ity])
| c == sUN "C_IntT" = carith ity
ctype (FApp c [_])
| c == sUN "C_Any" = "VAL"
ctype (FStr s) = s
ctype FUnknown = "void*"
ctype (FIO t) = ctype t
-- Fixed: was `error "..." ++ show t`, which parses as
-- `(error "...") ++ show t`, so the offending type never appeared in the
-- message.  Parenthesising the argument includes it.
ctype t = error ("Can't happen: Not a valid interface type " ++ show t)
-- | C type name for an exportable arithmetic (integer) type; only char
-- and native int are supported, everything else is an error.
carith (FCon i)
| i == sUN "C_IntChar" = "char"
| i == sUN "C_IntNative" = "int"
carith t = error "Can't happen: Not an exportable arithmetic type"
-- | Extract the C-level name from an FStr descriptor; other descriptors
-- are invalid as C names.
cdesc (FStr s) = s
cdesc s = error "Can't happen: Not a valid C name"
-- Then, the header files
-- | Write a C header file for each export interface.
codegenH :: [ExportIFace] -> IO ()
codegenH es = mapM_ writeIFace es
-- | Write one header file; only the C FFI is handled, any other FFI
-- descriptor is silently ignored.
writeIFace :: ExportIFace -> IO ()
writeIFace (Export ffic hdr exps)
| ffic == sNS (sUN "FFI_C") ["FFI_C"]
= do let hfile = "#ifndef " ++ hdr_guard hdr ++ "\n" ++
"#define " ++ hdr_guard hdr ++ "\n\n" ++
"#include <idris_rts.h>\n\n" ++
concatMap hdr_export exps ++ "\n" ++
"#endif\n\n"
writeFile hdr hfile
| otherwise = return ()
-- | Include-guard macro from the header filename: "__" prefix, letters
-- upper-cased, anything non-alphanumeric replaced by '_'.
hdr_guard x = "__" ++ map hchar x
where hchar x | isAlphaNum x = toUpper x
hchar _ = '_'
-- | One header declaration per export: a typedef for data, a function
-- prototype for functions (mirroring the definitions from 'ifaceC').
hdr_export :: Export -> String
hdr_export (ExportData n) = "typedef VAL " ++ cdesc n ++ ";\n"
hdr_export (ExportFun n cn ret args)
= ctype ret ++ " " ++ cdesc cn ++
"(VM* vm" ++ showArgs (zip argNames args) ++ ");\n"
where showArgs [] = ""
showArgs ((n, t) : ts) = ", " ++ ctype t ++ " " ++ n ++
showArgs ts
argNames = zipWith (++) (repeat "arg") (map show [0..])
|
mrmonday/Idris-dev
|
src/IRTS/CodegenC.hs
|
Haskell
|
bsd-3-clause
| 32,663
|
module Case1 where
-- | Fibonacci-like recurrence: 1 for n <= 1, otherwise
-- fib (n-1) + fib (n-2) + 1 (naive exponential recursion).
fib n
  | n <= 1    = 1
  | otherwise = fib (n - 1) + fib (n - 2) + 1
|
RefactoringTools/HaRe
|
old/testing/evalMonad/Case1.hs
|
Haskell
|
bsd-3-clause
| 152
|
import Test.Cabal.Prelude
-- Backpack mixin test: installs an indefinite library, two concrete
-- Database implementations, instantiates the library with each, then
-- builds an executable against both and checks its combined output.
main = setupAndCabalTest $ do
    -- Backpack requires GHC >= 8.1 (per this version gate).
    skipUnless =<< ghcVersionIs (>= mkVersion [8,1])
    withPackageDb $ do
        -- Install the uninstantiated (indefinite) library and both backends.
        withDirectory "mylib" $ setup_install_with_docs ["--ipid", "mylib-0.1.0.0"]
        withDirectory "mysql" $ setup_install_with_docs ["--ipid", "mysql-0.1.0.0"]
        withDirectory "postgresql" $ setup_install_with_docs ["--ipid", "postgresql-0.1.0.0"]
        -- Instantiate mylib once per backend via --instantiate-with.
        withDirectory "mylib" $
            setup_install_with_docs ["--ipid", "mylib-0.1.0.0",
                                     "--instantiate-with", "Database=mysql-0.1.0.0:Database.MySQL"]
        withDirectory "mylib" $
            setup_install_with_docs ["--ipid", "mylib-0.1.0.0",
                                     "--instantiate-with", "Database=postgresql-0.1.0.0:Database.PostgreSQL"]
        withDirectory "src" $ setup_install_with_docs []
        withDirectory "exe" $ do
            setup_install_with_docs []
            -- The exe should see output from both instantiations.
            runExe' "exe" [] >>= assertOutputContains "minemysql minepostgresql"
|
mydaum/cabal
|
cabal-testsuite/PackageTests/Backpack/Includes2/setup-external.test.hs
|
Haskell
|
bsd-3-clause
| 948
|
-- | Euclidean (L2) distance between two points given as coordinate lists.
-- Coordinates are paired positionally; as with 'zipWith', any excess
-- dimensions in the longer list are ignored.
euclideanDistance :: [Double] -> [Double] -> Double
euclideanDistance p q = sqrt sumOfSquares
  where
    sumOfSquares = sum [(u - v) ^ (2 :: Int) | (u, v) <- zip p q]
|
imanmafi/Algorithm-Implementations
|
Euclidean_distance/Haskell/jcla1/euclidean_distance.hs
|
Haskell
|
mit
| 120
|
{-# LANGUAGE PolyKinds, GADTs, KindSignatures, DataKinds, FlexibleInstances #-}
module T7438a where
-- | A kind-polymorphic, doubly-indexed GADT; this minimal version has only
-- the empty case, whose type forces both indices to coincide.
-- (Regression test for GHC ticket #7438.)
data Thrist :: k -> k -> * where
  Nil :: Thrist a a
|
urbanslug/ghc
|
testsuite/tests/polykinds/T7438a.hs
|
Haskell
|
bsd-3-clause
| 157
|
{-# htermination fmToList_GE :: (Ord a, Ord k) => FiniteMap (Either a k) b -> (Either a k) -> [((Either a k),b)] #-}
import FiniteMap
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/FiniteMap_fmToList_GE_10.hs
|
Haskell
|
mit
| 135
|
module Main(main) where
import System.Directory (Permissions)
import System.IO (FilePath)
-- | Metadata gathered for one filesystem entry; the 'Maybe' fields are
-- 'Nothing' when the corresponding value is unavailable.
-- NOTE(review): 'ClockTime' is not among the visible imports of this
-- module — confirm it is in scope (System.Time).
data Info = Info {
      infoPath :: FilePath -- ^ path of the entry
    , infoPerms :: Maybe Permissions -- ^ permissions, if obtainable
    , infoSize :: Maybe Integer -- ^ size in bytes, if obtainable
    , infoModTime :: Maybe ClockTime -- ^ modification time, if obtainable
    } deriving (Eq, Ord, Show)
|
rockdragon/julia-programming
|
code/haskell/ControlVisit.hs
|
Haskell
|
mit
| 310
|
{-# htermination (/=) :: Float -> Float -> Bool #-}
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/Prelude_SLASHEQ_8.hs
|
Haskell
|
mit
| 52
|
{-# LANGUAGE OverloadedStrings, DeriveDataTypeable, NamedFieldPuns #-}
-- | Just re-exports a few of the types from
-- "Service.Twilio.Types". There are a number of helper functions
-- which are currently exposed by "Service.Twilio.Types" that aren't
-- exposed here.
module Service.Twilio (
-- * Base Twilio types
Price, -- type synonym for 'Int'
Passport (..),
PhoneNumber (..),
SendStatus (..),
APIKind (..),
SMSKind (..),
Id (..), uri,
SMSCore (..),
SMS (..),
-- * Parsing
FromFormUrlencoded (..),
-- * Request signing
requestSignature
) where
import Prelude hiding (id)
import Service.Twilio.Types
import Data.ByteString (ByteString)
import qualified Data.ByteString.Base64 as B64
import Data.Monoid
import Data.List
import Data.Ord
import Crypto.Hash.SHA1 (hash)
import Crypto.MAC.HMAC (hmac)
-- | Given a Passport, a target URL, the raw query string, and a set
-- of body parameters, this function computes the canonical request
-- signature Twilio uses to authenticate itself.
--
-- A more flexible form of 'requestSignature' could be used with the
-- API inconsistencies for HTTP call requests and HTTPS call
-- requests. See the bottom of <http://www.twilio.com/docs/security>
-- for more details.
-- | Compute Twilio's canonical request signature: the full URL, the raw
-- query string, and the canonicalized POST parameters are concatenated,
-- HMAC-SHA1'd with the account's auth token, and Base64-encoded.
requestSignature :: Passport
                 -> ByteString -- ^ The full URL
                 -> ByteString -- ^ The raw query string including the "?"
                 -> [(ByteString, ByteString)] -- ^ Post parameters in Body
                 -> ByteString
requestSignature (Passport _ token) url qs params =
  sign (url <> qs <> canonicalParams)
  where
    -- HMAC-SHA1 (block size 64) keyed by the auth token, then Base64.
    sign = B64.encode . hmac hash 64 token
    -- Twilio canonicalization: sort POST params by key, then append each
    -- key immediately followed by its value.
    canonicalParams = mconcat [k <> v | (k, v) <- sortBy (comparing fst) params]
|
reifyhealth/twill
|
src/Service/Twilio.hs
|
Haskell
|
mit
| 1,748
|
module Control.Concurrent.ForkOrDoPool where
import Control.Concurrent
import Control.Concurrent.MVar as MVar
import Data.List as List
import Data.Maybe as Maybe
import Prelude.Extensions as PreludeExt
-- | A pool of worker threads: each thread is paired with the 'MVar'
-- through which work items (IO actions) are handed to it.
type ForkOrDoPool = [(ThreadId, MVar (IO ()))]
-- | Spawn the given number of worker threads, each blocking on its own
-- (initially empty) work 'MVar', and return them paired up as a pool.
createPool :: Int -> IO ForkOrDoPool
createPool numThreads = do
    workLocks <- sequence (replicate numThreads MVar.newEmptyMVar)
    threadIds <- mapM (forkIO . workerThread) workLocks
    return (zip threadIds workLocks)
-- | Worker loop: wait for a task, run it, then empty the lock and repeat.
-- The task is fetched with 'readMVar' (leaving the MVar full), so the
-- slot reads as "busy" to 'submitTask' while the task is running; the
-- final 'takeMVar' frees the slot again.
workerThread :: (MVar (IO ())) -> IO ()
workerThread workLock = do
    task <- MVar.readMVar workLock
    task
    _ <- MVar.takeMVar workLock
    workerThread workLock
-- | Try to hand a task to some idle worker, scanning the pool left to
-- right with a non-blocking put on each worker's MVar.  Returns False
-- when the pool is empty or every worker is busy.
submitTask :: ForkOrDoPool -> IO () -> IO Bool
submitTask pool task = do
    let {offerToFirst = do
        let (_, workLock) = (List.head pool)
        (MVar.tryPutMVar workLock task)}
    let {tryPool = do
        accepted <- offerToFirst
        (ifElse accepted (return True) (submitTask (List.tail pool) task))}
    (ifElse (List.null pool) (return False) tryPool)
-- | Run the task on a pool worker when one is free; otherwise run it
-- directly in the calling thread.
forkOrDo :: ForkOrDoPool -> IO () -> IO ()
forkOrDo pool task = do
    accepted <- (submitTask pool task)
    (doIf (not accepted) task)
|
stevedonnelly/haskell
|
code/Control/Concurrent/ForkOrDoPool.hs
|
Haskell
|
mit
| 1,282
|
module Language.PCPL.Syntax
( Program(..)
, Domino(..)
, Symbol
, syms
, unsyms
) where
import Language.UTM.Syntax
-- | PCPL program: a Post-correspondence-style program over symbols.
data Program = Program
  { startDomino :: Input -> Domino -- ^ builds the starting domino from the input
  , dominos :: [Domino] -- ^ the remaining dominos available to the program
  , separator :: Symbol -- ^ symbol used to separate segments
  }
-- | A domino: top and bottom symbol strings of one correspondence tile.
data Domino = Domino [Symbol] [Symbol]
  deriving (Eq, Show)
|
davidlazar/PCPL
|
src/Language/PCPL/Syntax.hs
|
Haskell
|
mit
| 347
|
module PostgREST.Parsers where
import Protolude hiding (try, intercalate)
import Control.Monad ((>>))
import Data.Text (intercalate)
import Data.List (init, last)
import Data.Tree
import PostgREST.QueryBuilder (operators)
import PostgREST.Types
import Text.ParserCombinators.Parsec hiding (many, (<|>))
import PostgREST.RangeQuery (NonnegRange,allRange)
-- | Parse the @select=@ parameter into a 'ReadRequest' tree rooted at the
-- given table name.  Each parsed item either becomes a column of the
-- current node's query (leaf) or a child node (embedded resource).
pRequestSelect :: Text -> Parser ReadRequest
pRequestSelect rootNodeName = do
  fieldTree <- pFieldForest
  return $ foldr treeEntry (Node (readQuery, (rootNodeName, Nothing, Nothing)) []) fieldTree
  where
    -- Empty query on the root table; filters/order are attached elsewhere.
    readQuery = Select [] [rootNodeName] [] Nothing allRange
    treeEntry :: Tree SelectItem -> ReadRequest -> ReadRequest
    treeEntry (Node fld@((fn, _),_,alias) fldForest) (Node (q, i) rForest) =
      case fldForest of
        -- Leaf: plain column, prepended to the current select list.
        [] -> Node (q {select=fld:select q}, i) rForest
        -- Sub-selection: recurse to build a child request for field fn.
        _ -> Node (q, i) newForest
          where
            newForest =
              foldr treeEntry (Node (Select [] [fn] [] Nothing allRange, (fn, Nothing, alias)) []) fldForest:rForest
-- | Parse one query-string pair into a filter: the key holds the
-- (possibly nested) field path, the value holds operator and operand,
-- e.g. @("a.b", "eq.1")@.
pRequestFilter :: (Text, Text) -> Either ParseError (Path, Filter)
pRequestFilter (k, v) = (,) <$> path <*> (Filter <$> fld <*> op <*> val)
  where
    -- The second 'parse' argument is the source name shown in error
    -- messages; fixed typo "failed to parser" -> "failed to parse".
    treePath = parse pTreePath ("failed to parse tree path (" ++ toS k ++ ")") $ toS k
    opVal = parse pOpValueExp ("failed to parse filter (" ++ toS v ++ ")") $ toS v
    path = fst <$> treePath
    fld = snd <$> treePath
    op = fst <$> opVal
    val = snd <$> opVal
-- | Parse one query-string pair into ordering terms: the key holds the
-- field path, the value a comma-separated list of order terms.
pRequestOrder :: (Text, Text) -> Either ParseError (Path, [OrderTerm])
pRequestOrder (k, v) = (,) <$> path <*> ord'
  where
    -- Fixed typo in the error-message source name: "parser" -> "parse".
    treePath = parse pTreePath ("failed to parse tree path (" ++ toS k ++ ")") $ toS k
    path = fst <$> treePath
    ord' = parse pOrder ("failed to parse order (" ++ toS v ++ ")") $ toS v
-- | Attach an already-parsed range to the field path parsed from the key.
pRequestRange :: (ByteString, NonnegRange) -> Either ParseError (Path, NonnegRange)
pRequestRange (k, v) = (,) <$> path <*> pure v
  where
    -- Fixed typo in the error-message source name: "parser" -> "parse".
    treePath = parse pTreePath ("failed to parse tree path (" ++ toS k ++ ")") $ toS k
    path = fst <$> treePath
-- | Zero or more spaces/tabs.
ws :: Parser Text
ws = toS <$> many (oneOf " \t")
-- | Run a parser, skipping surrounding horizontal whitespace.
lexeme :: Parser a -> Parser a
lexeme p = ws *> p <* ws
-- | Dot-separated path of field names with an optional JSON path suffix.
-- 'init'/'last' are safe here: 'sepBy1' guarantees a non-empty list.
pTreePath :: Parser (Path,Field)
pTreePath = do
  p <- pFieldName `sepBy1` pDelimiter
  jp <- optionMaybe pJsonPath
  return (init p, (last p, jp))
-- | Comma-separated list of select trees.
pFieldForest :: Parser [Tree SelectItem]
pFieldForest = pFieldTree `sepBy1` lexeme (char ',')
-- | One select item, optionally with an embedded sub-selection in braces.
pFieldTree :: Parser (Tree SelectItem)
pFieldTree = try (Node <$> pSimpleSelect <*> between (char '{') (char '}') pFieldForest)
  <|> Node <$> pSelect <*> pure []
-- | The literal @*@ (select all columns).
pStar :: Parser Text
pStar = toS <$> (string "*" *> pure ("*"::ByteString))
-- | Field name: letters/digits/underscore runs joined by single dashes;
-- a dash followed by '>' is left for the JSON-path parser instead.
pFieldName :: Parser Text
pFieldName = do
  matches <- (many1 (letter <|> digit <|> oneOf "_") `sepBy1` dash) <?> "field name (* or [a..z0..9_])"
  return $ intercalate "-" $ map toS matches
  where
    isDash :: GenParser Char st ()
    isDash = try ( char '-' >> notFollowedBy (char '>') )
    dash :: Parser Char
    dash = isDash *> pure '-'
-- | One @->@ JSON path step.
pJsonPathStep :: Parser Text
pJsonPathStep = toS <$> try (string "->" *> pFieldName)
-- | JSON path: zero or more @->@ steps terminated by one @->>@ step.
pJsonPath :: Parser [Text]
pJsonPath = (<>) <$> many pJsonPathStep <*> ( (:[]) <$> (string "->>" *> pFieldName) )
-- | Field name with optional JSON path, surrounded by optional whitespace.
pField :: Parser Field
pField = lexeme $ (,) <$> pFieldName <*> optionMaybe pJsonPath
-- | A single ':' (alias separator); a double "::" is the cast operator.
aliasSeparator :: Parser ()
aliasSeparator = char ':' >> notFollowedBy (char ':')
-- | Select item without a cast: optional alias, then the field.
pSimpleSelect :: Parser SelectItem
pSimpleSelect = lexeme $ try ( do
  alias <- optionMaybe ( try(pFieldName <* aliasSeparator) )
  fld <- pField
  return (fld, Nothing, alias)
  )
-- | Full select item: optional alias, field, optional @::type@ cast —
-- or a bare @*@ selecting everything.
pSelect :: Parser SelectItem
pSelect = lexeme $
  try (
    do
      alias <- optionMaybe ( try(pFieldName <* aliasSeparator) )
      fld <- pField
      cast' <- optionMaybe (string "::" *> many letter)
      return (fld, toS <$> cast', alias)
  )
  <|> do
    s <- pStar
    return ((s, Nothing), Nothing, Nothing)
-- | One of the known filter operator names (from QueryBuilder's table).
pOperator :: Parser Operator
pOperator = toS <$> (pOp <?> "operator (eq, gt, ...)")
  where pOp = foldl (<|>) empty $ map (try . string . toS . fst) operators
-- | Operand value: consumes the remainder of the input as text.
pValue :: Parser FValue
pValue = VText <$> (toS <$> many anyChar)
-- | Path/segment delimiter.
pDelimiter :: Parser Char
pDelimiter = char '.' <?> "delimiter (.)"
-- | Operator with an optional @not.@ prefix.
-- NOTE(review): name is misspelled ("Operatior") but kept — callers
-- elsewhere depend on it.
pOperatiorWithNegation :: Parser Operator
pOperatiorWithNegation = try ( (<>) <$> ( toS <$> string "not." ) <*> pOperator) <|> pOperator
-- | An @operator.value@ pair, e.g. @eq.5@ or @not.eq.5@.
pOpValueExp :: Parser (Operator, FValue)
pOpValueExp = (,) <$> pOperatiorWithNegation <*> (pDelimiter *> pValue)
-- | Comma-separated list of order terms.
pOrder :: Parser [OrderTerm]
pOrder = lexeme pOrderTerm `sepBy` char ','
-- | One order term: field, optional @.asc@/@.desc@, optional
-- @.nullslast@/@.nullsfirst@; falls back to a bare field with no options.
pOrderTerm :: Parser OrderTerm
pOrderTerm =
  try ( do
    c <- pField
    d <- optionMaybe (try $ pDelimiter *> (
        try(string "asc" *> pure OrderAsc)
        <|> try(string "desc" *> pure OrderDesc)
      ))
    nls <- optionMaybe (pDelimiter *> (
        try(string "nullslast" *> pure OrderNullsLast)
        <|> try(string "nullsfirst" *> pure OrderNullsFirst)
      ))
    return $ OrderTerm c d nls
  )
  <|> OrderTerm <$> pField <*> pure Nothing <*> pure Nothing
|
NotBrianZach/postgrest
|
src/PostgREST/Parsers.hs
|
Haskell
|
mit
| 5,173
|
module System.Flannel.ParamsSpec
( spec
) where
import System.Flannel.Params
import Test.Hspec
spec :: Spec
spec = do
  -- A fresh Params value must report nothing set anywhere.
  describe "defaultParams" $ do
    it "sets every field as empty" $ do
      isSet "test" defaultParams `shouldBe` False
      getOption "test" defaultParams `shouldBe` Nothing
      getArg "test" defaultParams `shouldBe` Nothing
      getRemaining defaultParams `shouldBe` []
  -- Each setter should make its value observable via the matching getter.
  describe "setFlag" $ do
    let params = setFlag "test" defaultParams
    it "sets the specified flag" $ do
      isSet "test" params `shouldBe` True
  describe "setOption" $ do
    let params = setOption "test" "alpha" defaultParams
    it "sets the specified option" $ do
      getOption "test" params `shouldBe` Just "alpha"
  describe "setArg" $ do
    let params = setArg "test" "beta" defaultParams
    it "sets the specified argument" $ do
      getArg "test" params `shouldBe` Just "beta"
  describe "addRemaining" $ do
    let params = addRemaining ["1", "2"] defaultParams
    it "adds the arguments" $ do
      getRemaining params `shouldBe` ["1", "2"]
|
nahiluhmot/flannel
|
spec/System/Flannel/ParamsSpec.hs
|
Haskell
|
mit
| 1,178
|
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-unused-do-bind #-}
module Network.RestClientSpec (main, spec) where
import Helper
import Network.RestClient
main :: IO ()
main = hspec spec
-- | Run an action against a local stub HTTP server that always answers
-- 200 with a text/plain body of "OK"; the action receives an 'IO Req'
-- yielding the request the server observed.
withServer :: (IO Req -> IO a) -> IO a
withServer action = withHttpServer status200 [("Content-Type", "text/plain")] "OK" action
spec :: Spec
spec = do
  describe "get" $ do
    -- The stub server records the request so method/body can be inspected.
    it "performs a GET request" $ withServer $ \r -> do
      get "http://localhost:3000"
      reqMethod <$> r `shouldReturn` "GET"
    it "returns server response" $ withServer $ \_ -> do
      get "http://localhost:3000" `shouldReturn` "OK"
  describe "post" $ do
    it "performs a POST request" $ withServer $ \r -> do
      post "http://localhost:3000" ""
      reqMethod <$> r `shouldReturn` "POST"
    it "attaches a body to the request" $ withServer $ \r -> do
      post "http://localhost:3000" "foobar"
      reqBody <$> r `shouldReturn` "foobar"
    it "returns server response" $ withServer $ \_ -> do
      post "http://localhost:3000" "" `shouldReturn` "OK"
|
sol/rest-client
|
test/Network/RestClientSpec.hs
|
Haskell
|
mit
| 1,063
|
module Y2017.M04.D19.Exercise where
import Data.Aeson
import Data.List (maximumBy, minimumBy)
import Data.Ord (comparing)

-- below imports available from 1HaskellADay git repository
import Wikidata.Query.Aeson
import Wikidata.Query.Endpoint
import Y2017.M04.D18.Exercise
{--
Okay, today we're going to do the same thing as yesterday, but with a different
SPARQL query. Recall that we wanted eye-colors by frequency in wikidata.
Our original query was eyeColors in Y2017.M04.D14.Exercise
Create a value of SPARL that gives an eye color query. Query wikidata and
return the results as a list of EyeColor values
--}
-- | SPARQL query text asking wikidata for eye colours and their
-- frequencies. Left 'undefined': filling it in is the day's exercise.
eyeColorQuery :: SPARQL
eyeColorQuery = undefined
-- | An eye colour together with how many times it is mentioned.
data EyeColor = Eyes { color :: String, count :: Int }
   deriving Eq
instance FromJSON EyeColor where
   parseJSON = undefined
-- | Run the query against the wikidata endpoint and decode the rows.
-- Exercise placeholder, like 'eyeColorQuery' above.
eyeColors :: SPARQL -> IO [EyeColor]
eyeColors queryVal = undefined
-- What is the most-mentioned eye-color? What is the least-mentioned one?

-- | Pick the eye colour with the largest (respectively smallest)
-- mention count. Implemented with Prelude-only folds; both are
-- partial on the empty list (foldr1), matching the assumption that
-- the query returned at least one row. Ties keep the earlier element.
mostMentioned, leastMentioned :: [EyeColor] -> EyeColor
mostMentioned  = foldr1 (\a b -> if count a >= count b then a else b)
leastMentioned = foldr1 (\a b -> if count a <= count b then a else b)
|
geophf/1HaskellADay
|
exercises/HAD/Y2017/M04/D19/Exercise.hs
|
Haskell
|
mit
| 987
|
module Spear.Math.Vector.Vector3
(
Vector3(..)
, Right3
, Up3
, Forward3
, Position3
-- * Construction
, unitx3
, unity3
, unitz3
, zero3
, vec3
, orbit
-- * Operations
, cross
)
where
import Spear.Math.Vector.Class
import Foreign.C.Types (CFloat)
import Foreign.Storable
-- Role-documenting aliases: all four are plain 'Vector3's and are
-- freely interchangeable (they add no extra type safety).
type Right3 = Vector3
type Up3 = Vector3
type Forward3 = Vector3
type Position3 = Vector3
-- | Represents a vector in 3D.
-- Fields are the x, y and z components; strict and unpacked, so a
-- 'Vector3' is stored as three unboxed 'Float's.
data Vector3 = Vector3
    {-# UNPACK #-} !Float
    {-# UNPACK #-} !Float
    {-# UNPACK #-} !Float
    deriving (Eq, Show)
-- | Componentwise arithmetic. Note that '(*)' is the componentwise
-- (Hadamard) product, not a dot or cross product, and 'fromInteger'
-- replicates one value into all three components.
instance Num Vector3 where
    Vector3 ax ay az + Vector3 bx by bz = Vector3 (ax + bx) (ay + by) (az + bz)
    Vector3 ax ay az - Vector3 bx by bz = Vector3 (ax - bx) (ay - by) (az - bz)
    Vector3 ax ay az * Vector3 bx by bz = Vector3 (ax * bx) (ay * by) (az * bz)
    abs (Vector3 ax ay az) = Vector3 (abs ax) (abs ay) (abs az)
    signum (Vector3 ax ay az) = Vector3 (signum ax) (signum ay) (signum az)
    fromInteger i = Vector3 i' i' i' where i' = fromInteger i
-- | Componentwise division; 'fromRational' replicates one value
-- into all three components, mirroring 'fromInteger' above.
instance Fractional Vector3 where
    Vector3 ax ay az / Vector3 bx by bz = Vector3 (ax / bx) (ay / by) (az / bz)
    fromRational r = Vector3 r' r' r' where r' = fromRational r
-- | Lexicographic ordering on (x, y, z).
--
-- Fixed: the hand-written '<=' and '<' compared @az == bx@ (a typo
-- for @ax == bx@), and their first clause used a non-strict
-- comparison on x, so e.g. @Vector3 0 5 0 <= Vector3 0 1 0@ held
-- while @>=@ held too. Defining 'compare' once yields mutually
-- consistent '<', '<=', '>' and '>='.
--
-- NOTE: 'max' and 'min' are kept componentwise, exactly as before;
-- that is the usual vector-math convention, but it means they may
-- return a vector equal to neither argument (a deliberate deviation
-- from the Ord laws that existing callers rely on).
instance Ord Vector3 where
    compare (Vector3 ax ay az) (Vector3 bx by bz) =
        compare (ax, ay, az) (bx, by, bz)
    max (Vector3 ax ay az) (Vector3 bx by bz) = Vector3 (Prelude.max ax bx) (Prelude.max ay by) (Prelude.max az bz)
    min (Vector3 ax ay az) (Vector3 bx by bz) = Vector3 (Prelude.min ax bx) (Prelude.min ay by) (Prelude.min az bz)
-- | Operations from the project's 'VectorClass'.
-- Review notes:
--  * 'fromList' is partial: fewer than 3 elements is a pattern-match
--    failure; extra elements are ignored.
--  * '(!)' is total but silently returns 0 for indices outside 0..2.
--  * 'normalise' maps the zero vector to itself (divisor forced to 1).
instance VectorClass Vector3 where
    {-# INLINABLE fromList #-}
    fromList (ax:ay:az:_) = Vector3 ax ay az
    {-# INLINABLE x #-}
    x (Vector3 ax _ _ ) = ax
    {-# INLINABLE y #-}
    y (Vector3 _ ay _ ) = ay
    {-# INLINABLE z #-}
    z (Vector3 _ _ az) = az
    {-# INLINABLE (!) #-}
    (Vector3 ax _ _) ! 0 = ax
    (Vector3 _ ay _) ! 1 = ay
    (Vector3 _ _ az) ! 2 = az
    _ ! _ = 0
    {-# INLINABLE dot #-}
    Vector3 ax ay az `dot` Vector3 bx by bz = ax*bx + ay*by + az*bz
    {-# INLINABLE normSq #-}
    normSq (Vector3 ax ay az) = ax*ax + ay*ay + az*az
    {-# INLINABLE norm #-}
    norm = sqrt . normSq
    {-# INLINABLE scale #-}
    scale s (Vector3 ax ay az) = Vector3 (s*ax) (s*ay) (s*az)
    {-# INLINABLE neg #-}
    neg (Vector3 ax ay az) = Vector3 (-ax) (-ay) (-az)
    {-# INLINABLE normalise #-}
    normalise v =
        let n' = norm v
            n = if n' == 0 then 1 else n'
        in scale (1.0 / n) v
-- | Size in bytes of one 'CFloat' component; used by the 'Storable'
-- instance for byte-offset arithmetic. (Signature added: every
-- top-level binding should carry one.)
sizeFloat :: Int
sizeFloat = sizeOf (undefined :: CFloat)
-- | Marshal a 'Vector3' as three consecutive 'CFloat'-sized slots
-- in x, y, z order (offsets 0, sizeFloat, 2*sizeFloat).
instance Storable Vector3 where
    sizeOf _ = 3*sizeFloat
    alignment _ = alignment (undefined :: CFloat)
    peek ptr = do
        ax <- peekByteOff ptr 0
        ay <- peekByteOff ptr $ 1*sizeFloat
        az <- peekByteOff ptr $ 2*sizeFloat
        return (Vector3 ax ay az)
    poke ptr (Vector3 ax ay az) = do
        pokeByteOff ptr 0 ax
        pokeByteOff ptr (1*sizeFloat) ay
        pokeByteOff ptr (2*sizeFloat) az
-- | Unit vector along the X axis.
unitx3 :: Vector3
unitx3 = Vector3 1 0 0

-- | Unit vector along the Y axis.
unity3 :: Vector3
unity3 = Vector3 0 1 0

-- | Unit vector along the Z axis.
unitz3 :: Vector3
unitz3 = Vector3 0 0 1

-- | Zero vector.
zero3 :: Vector3
zero3 = Vector3 0 0 0
-- | Build a 3D vector from its x, y and z components.
vec3 :: Float -> Float -> Float -> Vector3
vec3 = Vector3
-- | Create a 3D vector as a point on a sphere.
-- Angles are given in degrees and converted to radians internally.
orbit :: Vector3 -- ^ Sphere center.
      -> Float   -- ^ Sphere radius
      -> Float   -- ^ Azimuth angle.
      -> Float   -- ^ Zenith angle.
      -> Vector3
orbit center radius azimuth zenith = vec3 px py pz
    where
        azRad = azimuth * pi / 180
        zeRad = zenith * pi / 180
        px = x center + radius * cos zeRad * sin azRad
        py = y center + radius * sin zeRad
        pz = z center + radius * cos azRad * cos zeRad
-- | Compute the given vectors' cross product.
cross :: Vector3 -> Vector3 -> Vector3
cross (Vector3 ux uy uz) (Vector3 vx vy vz) =
    Vector3 (uy * vz - uz * vy) (uz * vx - ux * vz) (ux * vy - uy * vx)
|
jeannekamikaze/Spear
|
Spear/Math/Vector/Vector3.hs
|
Haskell
|
mit
| 4,913
|
import Control.Monad (forM_)
import Data.Array
import qualified Data.Char as Char
import qualified Data.List as List
-- | State of a single cell in the light grid.
data Light = On | Off
  deriving (Eq, Show)
-- The grid is a 2D array indexed by (x, y) pairs.
type Lights = Array Coordinates Light
type Coordinates = (Int, Int)
-- | Advent of Code 2015 day 18, part 2: read the grid from stdin,
-- force the four corners on, run 100 automaton steps and print the
-- number of lit lights.
main = do
  -- transpose so the outer index becomes the x coordinate
  lightsList <- List.transpose <$> map (map parseInput) <$> lines <$> getContents
  let gridBounds = ((0, 0), (length (head lightsList) - 1, length lightsList - 1))
  -- '// cornersOn' switches the four corner cells on before step 0
  let lights = listArray gridBounds (concat lightsList) // cornersOn gridBounds
  let steps = iterate step lights
  print $ countLights (steps !! 100)
-- | Strip leading and trailing whitespace.
--
-- Fixed: the previous definition used @takeWhile (not . isSpace)@,
-- which truncated the string at the first interior space
-- (e.g. @trim "a b"@ gave @"a"@) instead of only trimming the ends.
trim :: String -> String
trim = List.dropWhileEnd Char.isSpace . dropWhile Char.isSpace
-- | Decode one puzzle character: '.' is 'Off', '#' is 'On'.
-- A catch-all clause fails loudly with the offending character
-- instead of the bare pattern-match error the incomplete original
-- produced on malformed input.
parseInput :: Char -> Light
parseInput '.' = Off
parseInput '#' = On
parseInput c = error ("parseInput: unexpected character " ++ show c)
-- | One generation of the automaton: a cell stays as-is with exactly
-- 2 lit neighbours, turns 'On' with exactly 3, and turns 'Off'
-- otherwise; the corners are forced back 'On' afterwards (part 2).
step :: Lights -> Lights
step lights = (array gridBounds $ map stepLight $ assocs lights) // cornersOn gridBounds
  where
    -- NOTE(review): the 'this@' as-pattern is unused
    stepLight this@(coordinates, state) = case switchedOnNeighbors coordinates of
      2 -> (coordinates, state)
      3 -> (coordinates, On)
      _ -> (coordinates, Off)
    switchedOnNeighbors coordinates = countSwitchedOn $ map (lights !) $ neighbours coordinates
    -- the (up to) eight surrounding cells, clipped to the grid
    neighbours (x, y) =
      filter
        (inRange gridBounds)
        [ (x - 1, y - 1),
          (x, y - 1),
          (x + 1, y - 1),
          (x - 1, y),
          (x + 1, y),
          (x - 1, y + 1),
          (x, y + 1),
          (x + 1, y + 1)
        ]
    gridBounds = bounds lights
-- | The four corner cells of the given bounds, each paired with 'On'
-- (part 2 keeps the corners permanently lit).
cornersOn :: (Coordinates, Coordinates) -> [(Coordinates, Light)]
cornersOn ((x0, y0), (x1, y1)) =
  [(corner, On) | corner <- [(x0, y0), (x1, y0), (x0, y1), (x1, y1)]]
-- | Total number of lit cells in the grid.
countLights :: Lights -> Int
countLights grid = countSwitchedOn (elems grid)
-- | Number of 'On' values in a list.
countSwitchedOn :: [Light] -> Int
countSwitchedOn lightStates = length [l | l <- lightStates, l == On]
-- | Debug helper: render the grid to stdout, one row per line,
-- '#' for lit cells and '.' for dark ones.
printLights :: Lights -> IO ()
printLights grid =
  forM_ [minY .. maxY] $ \row -> do
    forM_ [minX .. maxX] $ \col ->
      putStr (case grid ! (col, row) of On -> "#"; Off -> ".")
    putStrLn ""
  where
    ((minX, minY), (maxX, maxY)) = bounds grid
|
SamirTalwar/advent-of-code
|
2015/AOC_18_2.hs
|
Haskell
|
mit
| 2,075
|
{-# LANGUAGE MultiParamTypeClasses #-}
module Database.EventSafe.ConcSpec
( spec
) where
import Control.Concurrent
import Control.Monad
import Database.EventSafe.Conc
import Database.EventSafe.Types
import Test.Hspec
-- Minimal fixtures for exercising the concurrent event pool.
data EventExample = EventExample deriving (Show, Eq, Ord)
data ResourceRefExample = ResourceRefExample
-- | Resource that simply counts how many events were applied.
newtype EventCount = EventCount Int deriving (Show, Eq)
type ESTVarExample = ESTVar [] EventExample
-- Every event concerns the (single) resource reference.
instance ResourceRef EventExample ResourceRefExample where
  concerns _ _ = True
-- Counting semantics: an empty event list yields no resource,
-- otherwise the count equals the number of events.
instance Resource EventExample EventCount where
  firstEvent _ = Just $ EventCount 1
  applyEvent _ (EventCount c) = Just $ EventCount $ c + 1
  buildResource [] = Nothing
  buildResource es = Just $ EventCount $ length es
-- | Spec for the 'ESTVar' concurrent event pool: an empty pool holds
-- no resource, and 1000 concurrently added events are all observed.
spec :: Spec
spec = do
  describe "ESTVar" $ do
    describe "emptyPoolM" $ do
      it "returns an empty pool" $ do
        pool <- emptyPoolM :: IO ESTVarExample
        c <- getResourceM pool ResourceRefExample
        c `shouldBe` (Nothing :: Maybe EventCount)
    describe "addEventM" $ do
      it "adds events concurrently to a ESTVar" $ do
        pool <- emptyPoolM :: IO ESTVarExample
        let n = 1000
        -- one MVar per thread, used purely as a completion signal
        mvars <- replicateM n newEmptyMVar
        forM_ mvars $ \mvar -> forkIO $ do
          addEventM pool EventExample
          putMVar mvar True
        -- wait for every writer before reading the resource back
        mapM_ takeMVar mvars
        c <- getResourceM pool ResourceRefExample
        c `shouldBe` Just (EventCount n)
|
thoferon/eventsafe
|
tests/Database/EventSafe/ConcSpec.hs
|
Haskell
|
mit
| 1,492
|
{-# LANGUAGE Safe, ScopedTypeVariables #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.GroupWiths
-- Copyright : (c) Uli Köhler 2014
-- License : Apache License v2.0
-- Maintainer : ukoehler@techoverflow.net
-- Stability : provisional
-- Portability : portable
--
-- A collection of grouping utility functions.
-- For a given function that assigns a key to objects,
-- provides functions that group said objects into a multimap
-- by said key.
--
-- This can be used similarly to the SQL GROUP BY statement.
--
-- Provides a more flexible approach to GHC.Exts.groupWith
--
-- > groupWith (take 1) ["a","ab","bc"] == Map.fromList [("a",["a","ab"]), ("b",["bc"])]
--
-- In order to use monadic / applicative functions as key generators,
-- use the A- or M-postfixed variants like 'groupWithA' or 'groupWithMultipleM'
--
--
--
-----------------------------------------------------------------------------
module Control.GroupWith(
MultiMap,
groupWith,
groupWithMultiple,
groupWithUsing,
groupWithA,
groupWithM,
groupWithMultipleM,
groupWithUsingM
) where
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Control.Arrow (first, second)
import Control.Applicative (Applicative, (<$>), liftA2, pure)
import Data.Traversable (sequenceA)
type MultiMap a b = Map a [b]
-- | Group values in a list by a key, generated
-- by a given function: every input value appears in the entry for
-- the key it maps to, like an SQL GROUP BY.
groupWith :: (Ord b) =>
  (a -> b) -- ^ The function used to map a list value to its key
  -> [a] -- ^ The list to be grouped
  -> MultiMap b a -- ^ The resulting key --> value multimap
groupWith keyOf = Map.fromListWith (++) . map (\v -> (keyOf v, [v]))
-- | Like 'groupWith', but the identifier-generating function
-- may generate multiple keys for each value (or none at all).
-- The corresponding value from the original list is placed in the
-- map entry of every key generated for it.
-- Note that values are added to the front of each per-key list, so
-- the lists end up in reverse input order.
groupWithMultiple :: (Ord b) =>
  (a -> [b]) -- ^ The function used to map a list value to its keys
  -> [a] -- ^ The list to be grouped
  -> MultiMap b a -- ^ The resulting map
groupWithMultiple keysOf xs =
  Map.fromListWith (++) [(key, [v]) | v <- xs, key <- keysOf v]
-- | Like groupWith, but uses a custom combinator function
-- to merge transformed values that share a key.
groupWithUsing :: (Ord b) =>
  (a -> c) -- ^ Transformer function used to map a value to the resulting type
  -> (c -> c -> c) -- ^ The combinator used to combine an existing value
                   -- for a given key with a new value
  -> (a -> b) -- ^ The function used to map a list value to its key
  -> [a] -- ^ The list to be grouped
  -> Map b c -- ^ The resulting key --> transformed value map
groupWithUsing transform combine keyOf =
  Map.fromListWith combine . map (\v -> (keyOf v, transform v))
-- | Fuse the functor from a tuple
fuseT2 :: Applicative f => (f a, f b) -> f (a,b)
fuseT2 (fa, fb) = (,) <$> fa <*> fb
-- | Like 'fuseT2', but only requires the first element to be boxed in the functor
fuseFirst :: Applicative f => (f a, b) -> f (a,b)
fuseFirst (fa, b) = fmap (\a -> (a, b)) fa
-- | Move the applicative functor to the outmost level by traversing
-- the list with 'fuseFirst'. ('traverse f' replaces the former
-- 'sequenceA . map f', fusing the two passes into one.)
fuseFirstList :: Applicative f => [(f a, b)] -> f [(a,b)]
fuseFirstList = traverse fuseFirst
-- | Group values in a list by a key, generated by a given applicative function.
-- Applicative version of 'groupWith'. See 'groupWith' for documentation.
groupWithA :: (Ord b, Applicative f) =>
  (a -> f b) -- ^ The function used to map a list value to its key
  -> [a] -- ^ The list to be grouped
  -> f (MultiMap b a) -- ^ The resulting key --> value multimap
groupWithA keyOf xs = Map.fromListWith (++) <$> traverse pairUp xs
  where pairUp v = fmap (\key -> (key, [v])) (keyOf v)
-- | Alias for 'groupWithA', with additional monad constraint --
-- presumably kept for callers written against a Monad-only API
-- (NOTE(review): the extra constraint adds nothing; confirm before
-- deprecating).
groupWithM :: (Ord b, Monad m, Applicative m) =>
  (a -> m b) -- ^ The function used to map a list value to its key
  -> [a] -- ^ The list to be grouped
  -> m (MultiMap b a) -- ^ The resulting key --> value multimap
groupWithM = groupWithA
-- | Like 'groupWithM', but the identifier-generating function
-- may generate multiple keys for each value (or none at all).
-- See 'groupWithMultiple' for further behavioural details.
--
-- Note that it's impossible to define this for applicatives:
-- See http://stackoverflow.com/a/6032260/2597135
groupWithMultipleM :: (Ord b, Monad m, Applicative m) =>
  (a -> m [b]) -- ^ The function used to map a list value to its keys
  -> [a] -- ^ The list to be grouped
  -> m (MultiMap b a) -- ^ The resulting map
groupWithMultipleM keysOf xs = do
    pairLists <- mapM keyed xs
    return (Map.fromListWith (++) (concat pairLists))
  where keyed v = fmap (map (\key -> (key, [v]))) (keysOf v)
-- | Like 'groupWithM', but uses a custom combinator function.
-- For each value the key effect runs before the transform effect.
groupWithUsingM :: (Ord b, Monad m, Applicative m) =>
  (a -> m c) -- ^ Transformer function used to map a value to the resulting type
  -> (c -> c -> c) -- ^ The combinator used to combine an existing value
                   -- for a given key with a new value
  -> (a -> m b) -- ^ The function used to map a list value to its key
  -> [a] -- ^ The list to be grouped
  -> m (Map b c) -- ^ The resulting key --> transformed value map
groupWithUsingM transform combine keyOf xs =
    fmap (Map.fromListWith combine) (mapM pair xs)
  where pair v = (,) <$> keyOf v <*> transform v
|
kyclark/group-with
|
Control/GroupWith.hs
|
Haskell
|
apache-2.0
| 5,997
|
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GADTs #-}
module Startups.GameTypes where
import Startups.Base
import Startups.Cards
import Startups.PrettyPrint
import Control.Lens
import qualified Data.Text as T
import qualified Data.Map.Strict as M
import Control.Monad.Operational
import Control.Monad.State.Strict
import Control.Monad.Except
import Data.List.NonEmpty
import Control.Applicative
import System.Random
type PlayerId = T.Text
-- | Render a player id emphasised, for use in game messages.
showPlayerId :: PlayerId -> PrettyDoc
showPlayerId = emph . pe
-- | Whole-game state: every player's state keyed by id, the discard
-- pile, and the random generator.
data GameState = GameState { _playermap :: M.Map PlayerId PlayerState
                           , _discardpile :: [Card]
                           , _rnd :: StdGen
                           }
-- | A player's left and right neighbour ids, in that order
-- (see 'neighbor' below).
type Neighborhood = (PlayerId, PlayerId)
data PlayerState = PlayerState { _pCompany :: CompanyProfile
                               , _pCompanyStage :: CompanyStage
                               , _pCards :: [Card]
                               , _pFunds :: Funding
                               , _pNeighborhood :: Neighborhood
                               , _pPoachingResults :: [PoachingOutcome]
                               }
makeLenses ''GameState
makeLenses ''PlayerState
-- | Traversal over every effect on every card a player holds.
cardEffects :: Traversal' PlayerState Effect
cardEffects = pCards . traverse . cEffect . traverse
-- | Same as 'cardEffects', addressed by player id in the game state.
playerEffects :: PlayerId -> Traversal' GameState Effect
playerEffects pid = playermap . ix pid . cardEffects
-- | Lens to a player's left ('_1') or right ('_2') neighbour id.
neighbor :: Neighbor -> Lens' PlayerState PlayerId
neighbor NLeft = pNeighborhood . _1
neighbor NRight = pNeighborhood . _2
type Message = PrettyDoc
-- | What a player chose to do with a card on their turn.
data PlayerAction = PlayerAction ActionType Card
                  deriving Eq
data ActionType = Play | Drop | BuildCompany
                deriving Eq
-- | Match non-empty lists as 'NonEmpty' ('toList' / 'nonEmpty'
-- round-trip, so this is a lawful prism).
_NonEmpty :: Prism' [a] (NonEmpty a)
_NonEmpty = prism' toList nonEmpty
-- | This describes the capabilities needed to write the rules, when no
-- interaction with the player is required.
type NonInteractive m = (MonadState GameState m, MonadError Message m, Functor m, Applicative m)
type GameStateOnly m = (MonadState GameState m, Functor m, Applicative m)
-- | A communication addressed to a single player or to everybody.
data CommunicationType = PlayerCom PlayerId Communication
                       | BroadcastCom Communication
data Communication = RawMessage PrettyDoc
                   | ActionRecapMsg Age Turn GameState (M.Map PlayerId (PlayerAction, Exchange))
-- | Instruction set of the game DSL (operational style); @p@ is the
-- promise type used for asynchronous player answers.
data GameInstr p a where
    PlayerDecision :: Age -> Turn -> PlayerId -> NonEmpty Card -> GameInstr p (p (PlayerAction, Exchange))
    AskCard :: Age -> PlayerId -> NonEmpty Card -> Message -> GameInstr p (p Card)
    GetPromise :: p a -> GameInstr p a
    Message :: CommunicationType -> GameInstr p ()
    ThrowError :: Message -> GameInstr p a -- ^ Used for the error instance
    CatchError :: GameMonad p a -> (Message -> GameMonad p a) -> GameInstr p a
type GameMonad p = ProgramT (GameInstr p) (State GameState)
-- | Ask the player which card he would like to play.
playerDecision :: Age -> Turn -> PlayerId -> NonEmpty Card -> GameMonad p (p (PlayerAction, Exchange))
playerDecision a t p c = singleton (PlayerDecision a t p c)
-- | Tell some information to a specific player
tellPlayer :: PlayerId -> Message -> GameMonad p ()
tellPlayer p = singleton . Message . PlayerCom p . RawMessage
-- | Broadcast some information
generalMessage :: Message -> GameMonad p ()
generalMessage = singleton . Message . BroadcastCom . RawMessage
-- | Awaits a promise
getPromise :: p a -> GameMonad p a
getPromise = singleton . GetPromise
-- | Gives a quick rundown of all actions; the current 'GameState'
-- snapshot is captured into the broadcast recap message.
actionRecap :: Age -> Turn -> M.Map PlayerId (PlayerAction, Exchange) -> GameMonad p ()
actionRecap age turn mm = get >>= \s -> singleton . Message . BroadcastCom $ ActionRecapMsg age turn s mm
-- | Error handling is expressed as DSL instructions, so the actual
-- throw/catch semantics are supplied by the program's interpreter.
instance MonadError PrettyDoc (ProgramT (GameInstr p) (State GameState)) where
    throwError = singleton . ThrowError
    catchError a handler = singleton (CatchError a handler)
-- | Ask the player to chose a card, along with a descriptive message.
-- This is used for the Recycling and CopyCommunity effects.
-- We define a "safe" version of the `askCard` function, that makes sure the
-- player doesn't introduce a new card in the game: answering with a
-- card outside the proposed list raises an error.
askCardSafe :: Age -> PlayerId -> NonEmpty Card -> Message -> GameMonad p Card
askCardSafe a p cl m = do
    card <- singleton (AskCard a p cl m) >>= getPromise
    -- 'cl ^. re _NonEmpty' converts the NonEmpty back to a plain list
    when (card `notElem` (cl ^. re _NonEmpty)) (throwError (showPlayerId p <+> "tried to play a non proposed card"))
    return card
-- | Pretty-print an action as a past-tense verb plus the card name.
instance PrettyE PlayerAction where
    pe (PlayerAction a c) = a' <+> cardName c
        where
          a' = case a of
                 Play -> "played"
                 Drop -> "dropped"
                 BuildCompany -> "increase the company stage"
|
bitemyapp/7startups
|
Startups/GameTypes.hs
|
Haskell
|
bsd-3-clause
| 4,991
|
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
-----------------------------------------------------------------
-- Auto-generated by regenClassifiers
--
-- DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-- @generated
-----------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Ranking.Classifiers.EN_TT (classifiers) where
import Data.String
import Prelude
import qualified Data.HashMap.Strict as HashMap
import Duckling.Ranking.Types
classifiers :: Classifiers
classifiers
= HashMap.fromList
[("Bhai Dooj",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<integer> to|till|before <hour-of-day>",
Classifier{okData =
ClassData{prior = -2.5649493574615367, unseen = -1.791759469228055,
likelihoods =
HashMap.fromList
[("integer (numeric)noon|midnight|EOD|end of day",
-0.916290731874155),
("hour", -0.916290731874155)],
n = 1},
koData =
ClassData{prior = -8.004270767353637e-2,
unseen = -3.332204510175204,
likelihoods =
HashMap.fromList
[("hour", -0.7308875085427924),
("integer (numeric)time-of-day (latent)", -0.7308875085427924)],
n = 12}}),
("week",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<time> timezone",
Classifier{okData =
ClassData{prior = -9.237332013101517e-2,
unseen = -4.23410650459726,
likelihoods =
HashMap.fromList
[("at <time-of-day>", -2.4277482359480516),
("hhhmm", -1.65455834771457),
("<time-of-day> am|pm", -2.0222831278398874),
("hh:mm", -2.2735975561207935), ("hour", -1.821612432377736),
("minute", -1.128465251817791)],
n = 31},
koData =
ClassData{prior = -2.4277482359480516,
unseen = -2.5649493574615367,
likelihoods =
HashMap.fromList
[("hhhmm", -1.3862943611198906), ("hh:mm", -1.791759469228055),
("minute", -1.0986122886681098)],
n = 3}}),
("Thursday",
Classifier{okData =
ClassData{prior = -7.79615414697118e-2,
unseen = -3.6635616461296463,
likelihoods = HashMap.fromList [("", 0.0)], n = 37},
koData =
ClassData{prior = -2.5902671654458267,
unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3}}),
("integer (numeric)",
Classifier{okData =
ClassData{prior = -0.53208562319284, unseen = -6.186208623900494,
likelihoods = HashMap.fromList [("", 0.0)], n = 484},
koData =
ClassData{prior = -0.8852249122992647, unseen = -5.834810737062605,
likelihoods = HashMap.fromList [("", 0.0)], n = 340}}),
("<year> (bc|ad)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("integer (numeric)", 0.0)],
n = 5},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("the nth <day-of-week> of <month-or-greater>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.1972245773362196,
likelihoods =
HashMap.fromList
[("daymonth", -0.6931471805599453),
("ordinals (first..twentieth,thirtieth,...)Mondaythis|last|next <cycle>",
-0.6931471805599453)],
n = 3},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("<duration> hence|ago",
Classifier{okData =
ClassData{prior = -5.406722127027582e-2,
unseen = -3.784189633918261,
likelihoods =
HashMap.fromList
[("week", -1.563975538357343), ("day", -1.8152899666382492),
("year", -2.662587827025453),
("<integer> <unit-of-duration>", -1.0531499145913523),
("a <unit-of-duration>", -2.662587827025453),
("month", -2.662587827025453),
("fortnight", -2.662587827025453)],
n = 18},
koData =
ClassData{prior = -2.9444389791664407,
unseen = -2.3025850929940455,
likelihoods =
HashMap.fromList
[("year", -1.5040773967762742),
("<integer> <unit-of-duration>", -1.5040773967762742)],
n = 1}}),
("noon|midnight|EOD|end of day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.3978952727983707,
likelihoods = HashMap.fromList [("", 0.0)], n = 9},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("quarter to|till|before <hour-of-day>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("noon|midnight|EOD|end of day", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("Karva Chauth",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<cycle> after|before <time>",
Classifier{okData =
ClassData{prior = -0.6931471805599453, unseen = -2.833213344056216,
likelihoods =
HashMap.fromList
[("day (grain)tomorrow", -1.6739764335716716),
("dayday", -1.1631508098056809),
("day (grain)yesterday", -1.6739764335716716)],
n = 4},
koData =
ClassData{prior = -0.6931471805599453, unseen = -2.833213344056216,
likelihoods =
HashMap.fromList
[("dayhour", -1.6739764335716716),
("year (grain)Christmas", -2.0794415416798357),
("dayday", -2.0794415416798357),
("day (grain)intersect", -1.6739764335716716),
("day (grain)Easter Sunday", -2.0794415416798357),
("yearday", -2.0794415416798357)],
n = 4}}),
("Easter Monday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Navaratri",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Martin Luther King's Day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.4849066497880004,
likelihoods = HashMap.fromList [("", 0.0)], n = 10},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("integer (20..90)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [("", 0.0)], n = 7},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Shemini Atzeret",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("in <duration> at <time-of-day>",
Classifier{okData =
ClassData{prior = -0.6931471805599453,
unseen = -2.1972245773362196,
likelihoods =
HashMap.fromList
[("dayhour", -1.3862943611198906),
("yearhour", -1.3862943611198906),
("<integer> <unit-of-duration><time-of-day> am|pm",
-0.9808292530117262)],
n = 2},
koData =
ClassData{prior = -0.6931471805599453,
unseen = -2.1972245773362196,
likelihoods =
HashMap.fromList
[("dayhour", -1.3862943611198906),
("yearhour", -1.3862943611198906),
("<integer> <unit-of-duration>time-of-day (latent)",
-0.9808292530117262)],
n = 2}}),
("Maha Shivaratri",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Ramadan",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Lazarus Saturday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("intersect 2 numbers",
Classifier{okData =
ClassData{prior = -0.6931471805599453, unseen = -1.791759469228055,
likelihoods =
HashMap.fromList
[("compose by multiplicationinteger (0..19)",
-0.2231435513142097)],
n = 3},
koData =
ClassData{prior = -0.6931471805599453, unseen = -1.791759469228055,
likelihoods =
HashMap.fromList
[("powers of tensinteger (0..19)", -0.2231435513142097)],
n = 3}}),
("mm/yyyy",
Classifier{okData =
ClassData{prior = -0.6931471805599453,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -0.6931471805599453,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("from|since|after <time>",
Classifier{okData =
ClassData{prior = -1.701375407759786, unseen = -4.51085950651685,
likelihoods =
HashMap.fromList
[("<day-of-month> (ordinal or number) <named-month>",
-3.8066624897703196),
("July", -3.8066624897703196),
("intersect", -3.8066624897703196),
("year (latent)", -3.4011973816621555),
("day", -2.4203681286504293),
("the <day-of-month> (ordinal)", -3.4011973816621555),
("the <day-of-month> (number)", -3.8066624897703196),
("time-of-day (latent)", -2.5538995212749516),
("year", -3.4011973816621555),
("<time-of-day> am|pm", -2.5538995212749516),
("hh:mm", -2.890371757896165),
("<day-of-month> (ordinal)", -3.4011973816621555),
("hour", -1.9348603128687285), ("month", -3.4011973816621555),
("minute", -2.890371757896165),
("August", -3.8066624897703196)],
n = 27},
koData =
ClassData{prior = -0.201421728167374, unseen = -5.631211781821365,
likelihoods =
HashMap.fromList
[("<integer> to|till|before <hour-of-day>", -4.018183201256536),
("week", -4.9344739331306915),
("<day-of-month> (ordinal or number) <named-month>",
-4.9344739331306915),
("today", -4.9344739331306915),
("intersect", -2.9885637840753785),
("<time> for <duration>", -4.241326752570746),
("second", -4.9344739331306915), ("now", -3.3250360206965914),
("tomorrow", -4.529008825022527),
("this|last|next <cycle>", -4.9344739331306915),
("day", -1.7774735119805785),
("the <day-of-month> (ordinal)", -4.529008825022527),
("the <day-of-month> (number)", -3.548179572010801),
("time-of-day (latent)", -2.492126897761487),
("<time-of-day> am|pm", -4.241326752570746),
("hh:mm", -4.529008825022527), ("nograin", -3.3250360206965914),
("intersect by \",\", \"of\", \"from\", \"'s\"",
-4.529008825022527),
("<named-month>|<named-day> <day-of-month> (ordinal)",
-4.9344739331306915),
("<day-of-month> (ordinal)", -3.835861644462582),
("Easter Sunday", -4.9344739331306915),
("Christmas", -4.241326752570746),
("hour", -2.3317842476863078), ("month", -4.9344739331306915),
("<datetime> - <datetime> (interval)", -2.7372493557944724),
("<time-of-day> - <time-of-day> (interval)",
-2.9885637840753785),
("<named-month> <day-of-month> (non ordinal)",
-4.529008825022527),
("minute", -1.9900349539642512),
("right now", -4.9344739331306915),
("<month> dd-dd (interval)", -4.241326752570746),
("part of days", -4.9344739331306915),
("dd-dd <month> (interval)", -4.529008825022527)],
n = 121}}),
("integer after|past <hour-of-day>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.5649493574615367,
likelihoods =
HashMap.fromList
[("integer (numeric)noon|midnight|EOD|end of day",
-1.791759469228055),
("hour", -0.8754687373538999),
("integer (numeric)time-of-day (latent)", -1.3862943611198906),
("integer (20..90)time-of-day (latent)", -1.791759469228055)],
n = 4},
koData =
ClassData{prior = -infinity, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [], n = 0}}),
("<ordinal> last <cycle> of <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.4011973816621555,
likelihoods =
HashMap.fromList
[("ordinals (first..twentieth,thirtieth,...)week (grain)year (latent)",
-2.268683541318364),
("daymonth", -2.268683541318364),
("ordinal (digits)day (grain)May", -2.6741486494265287),
("ordinals (first..twentieth,thirtieth,...)week (grain)intersect",
-2.6741486494265287),
("weekmonth", -1.7578579175523736),
("ordinal (digits)week (grain)October", -2.6741486494265287),
("ordinal (digits)week (grain)intersect", -2.6741486494265287),
("ordinal (digits)week (grain)year (latent)",
-2.6741486494265287),
("weekyear", -1.9810014688665833),
("ordinals (first..twentieth,thirtieth,...)week (grain)October",
-2.6741486494265287),
("ordinals (first..twentieth,thirtieth,...)day (grain)May",
-2.6741486494265287)],
n = 9},
koData =
ClassData{prior = -infinity, unseen = -2.4849066497880004,
likelihoods = HashMap.fromList [], n = 0}}),
("Yom HaShoah",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<day-of-month> (ordinal or number) <named-month>",
Classifier{okData =
ClassData{prior = -1.1631508098056809, unseen = -3.044522437723423,
likelihoods =
HashMap.fromList
[("ordinal (digits)December", -1.6094379124341003),
("ordinal (digits)February", -2.3025850929940455),
("integer (numeric)April", -2.3025850929940455),
("month", -1.2039728043259361)],
n = 5},
koData =
ClassData{prior = -0.3746934494414107,
unseen = -3.4965075614664802,
likelihoods =
HashMap.fromList
[("ordinal (digits)October", -2.772588722239781),
("ordinal (digits)July", -2.0794415416798357),
("integer (numeric)September", -2.367123614131617),
("ordinal (digits)August", -2.772588722239781),
("ordinal (digits)April", -2.772588722239781),
("month", -0.9808292530117262),
("integer (numeric)July", -2.0794415416798357)],
n = 11}}),
("<time> <part-of-day>",
Classifier{okData =
ClassData{prior = -0.19105523676270922,
unseen = -4.6443908991413725,
likelihoods =
HashMap.fromList
[("<day-of-month> (ordinal)in|during the <part-of-day>",
-3.9415818076696905),
("dayhour", -1.499234772300486),
("Mondayearly morning", -3.536116699561526),
("time-of-day (latent)tonight", -3.536116699561526),
("hourhour", -2.236833715431265),
("<time-of-day> o'clockin|during the <part-of-day>",
-3.9415818076696905),
("todaypart of days", -3.9415818076696905),
("minutehour", -2.6888188391743224),
("at <time-of-day>in|during the <part-of-day>",
-3.536116699561526),
("time-of-day (latent)this <part-of-day>", -3.9415818076696905),
("Mondayin|during the <part-of-day>", -3.9415818076696905),
("intersectpart of days", -3.0252910757955354),
("Saturdaypart of days", -3.9415818076696905),
("intersectin|during the <part-of-day>", -3.9415818076696905),
("<day-of-month> (ordinal or number) of <named-month>in|during the <part-of-day>",
-3.9415818076696905),
("the <day-of-month> (ordinal)in|during the <part-of-day>",
-3.9415818076696905),
("tomorrowpart of days", -2.33214389523559),
("hh:mmin|during the <part-of-day>", -3.0252910757955354),
("time-of-day (latent)in|during the <part-of-day>",
-3.9415818076696905),
("hhmm (latent)in|during the <part-of-day>",
-3.9415818076696905),
("yesterdaypart of days", -3.536116699561526),
("<day-of-month> (ordinal or number) of <month>in|during the <part-of-day>",
-3.9415818076696905),
("Mondaypart of days", -3.9415818076696905)],
n = 38},
koData =
ClassData{prior = -1.749199854809259, unseen = -3.784189633918261,
likelihoods =
HashMap.fromList
[("dayhour", -3.068052935133617),
("yearhour", -3.068052935133617),
("monthhour", -3.068052935133617),
("hourhour", -1.9694406464655074),
("at <time-of-day>in|during the <part-of-day>",
-3.068052935133617),
("year (latent)in|during the <part-of-day>",
-3.068052935133617),
("Februaryin|during the <part-of-day>", -3.068052935133617),
("tomorrowpart of days", -3.068052935133617),
("time-of-day (latent)in|during the <part-of-day>",
-2.151762203259462)],
n = 8}}),
("dd/mm",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -3.4011973816621555,
likelihoods = HashMap.fromList [("", 0.0)], n = 28}}),
("today",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.3978952727983707,
likelihoods = HashMap.fromList [("", 0.0)], n = 9},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("at <time-of-day>",
Classifier{okData =
ClassData{prior = -0.22957444164450025,
unseen = -5.308267697401205,
likelihoods =
HashMap.fromList
[("<time> timezone", -3.917010546939185),
("noon|midnight|EOD|end of day", -4.204692619390966),
("integer after|past <hour-of-day>", -3.917010546939185),
("<time-of-day> o'clock", -4.61015772749913),
("half after|past <hour-of-day>", -4.61015772749913),
("hhhmm", -3.6938669956249752),
("<hour-of-day> <integer>", -3.917010546939185),
("time-of-day (latent)", -1.6397432619294292),
("hhmm (latent)", -3.917010546939185),
("<time-of-day> am|pm", -2.0074680420547466),
("hh:mm", -3.3573947590037623),
("about|exactly <time-of-day>", -4.204692619390966),
("hour", -1.11365016603265),
("<time-of-day> sharp|exactly", -4.61015772749913),
("minute", -1.9360090780726016)],
n = 93},
koData =
ClassData{prior = -1.5841201044498106,
unseen = -4.1588830833596715,
likelihoods =
HashMap.fromList
[("time-of-day (latent)", -1.0076405104623831),
("<time-of-day> am|pm", -3.044522437723423),
("hour", -0.924258901523332)],
n = 24}}),
("December",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("absorption of , after named day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.58351893845611,
likelihoods =
HashMap.fromList
[("Wednesday", -2.8622008809294686),
("Saturday", -2.8622008809294686),
("Monday", -2.456735772821304), ("Friday", -1.6094379124341003),
("day", -0.8472978603872037), ("Sunday", -2.8622008809294686),
("on <day>", -2.169053700369523)],
n = 14},
koData =
ClassData{prior = -infinity, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [], n = 0}}),
("September",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [("", 0.0)], n = 7},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("tonight",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.4849066497880004,
likelihoods = HashMap.fromList [("", 0.0)], n = 10},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("last|past|next <duration>",
Classifier{okData =
ClassData{prior = -3.17486983145803e-2,
unseen = -4.2626798770413155,
likelihoods =
HashMap.fromList
[("week", -2.456735772821304), ("second", -2.639057329615259),
("day", -2.3025850929940455), ("year", -2.639057329615259),
("<integer> <unit-of-duration>", -0.7827593392496325),
("hour", -2.639057329615259), ("month", -2.639057329615259),
("minute", -2.639057329615259)],
n = 31},
koData =
ClassData{prior = -3.4657359027997265,
unseen = -2.3978952727983707,
likelihoods =
HashMap.fromList
[("<integer> <unit-of-duration>", -1.6094379124341003),
("hour", -1.6094379124341003)],
n = 1}}),
("the ides of <named-month>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("March", -0.6931471805599453), ("month", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("the <day-of-month> (ordinal or number) of <named-month>",
Classifier{okData =
ClassData{prior = -0.7621400520468967, unseen = -3.044522437723423,
likelihoods =
HashMap.fromList
[("ordinals (first..twentieth,thirtieth,...)March",
-1.6094379124341003),
("ordinal (digits)February", -1.8971199848858813),
("month", -0.916290731874155),
("ordinal (digits)March", -1.8971199848858813)],
n = 7},
koData =
ClassData{prior = -0.6286086594223742,
unseen = -3.1354942159291497,
likelihoods =
HashMap.fromList
[("ordinal (digits)July", -1.4816045409242156),
("month", -0.8938178760220964),
("integer (numeric)July", -1.4816045409242156)],
n = 8}}),
("integer (0..19)",
Classifier{okData =
ClassData{prior = -1.227009259181436e-2,
unseen = -4.418840607796598,
likelihoods = HashMap.fromList [("", 0.0)], n = 81},
koData =
ClassData{prior = -4.406719247264253, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("in|during <named-month>|year",
Classifier{okData =
ClassData{prior = -0.2744368457017603,
unseen = -3.8066624897703196,
likelihoods =
HashMap.fromList
[("<year> (bc|ad)", -1.9924301646902063),
("October", -3.0910424533583156),
("year (latent)", -1.2992829841302609),
("year", -0.9509762898620451), ("March", -2.6855773452501515),
("month", -2.3978952727983707)],
n = 19},
koData =
ClassData{prior = -1.4271163556401458,
unseen = -2.9444389791664407,
likelihoods =
HashMap.fromList
[("October", -1.5040773967762742),
("year (latent)", -1.5040773967762742),
("year", -1.5040773967762742), ("month", -1.5040773967762742)],
n = 6}}),
("<part-of-day> at <time-of-day>",
Classifier{okData =
ClassData{prior = -8.223809823697212e-2,
unseen = -4.406719247264253,
likelihoods =
HashMap.fromList
[("this <part-of-day>hh:mm", -3.7013019741124937),
("tonighthh:mm", -3.7013019741124937),
("hourhour", -0.8979415932059586),
("hourminute", -3.0081547935525483),
("in|during the <part-of-day>time-of-day (latent)",
-3.7013019741124937),
("this <part-of-day>time-of-day (latent)", -1.9965538818740682),
("early morningtime-of-day (latent)", -3.7013019741124937),
("tonight<time-of-day> o'clock", -3.7013019741124937),
("tonighttime-of-day (latent)", -2.7850112422383386),
("part of dayshh:mm", -3.7013019741124937),
("part of daystime-of-day (latent)", -1.6218604324326575)],
n = 35},
koData =
ClassData{prior = -2.538973871058276, unseen = -2.890371757896165,
likelihoods =
HashMap.fromList
[("hourhour", -1.4469189829363254),
("this <part-of-day>time-of-day (latent)", -2.1400661634962708),
("tonighttime-of-day (latent)", -2.1400661634962708),
("part of daystime-of-day (latent)", -2.1400661634962708)],
n = 3}}),
("between <time-of-day> and <time-of-day> (interval)",
Classifier{okData =
ClassData{prior = -0.40546510810816444,
unseen = -2.833213344056216,
likelihoods =
HashMap.fromList
[("minuteminute", -1.6739764335716716),
("hh:mmhh:mm", -2.0794415416798357),
("<time-of-day> am|pmtime-of-day (latent)",
-2.0794415416798357),
("hhhmmhhhmm", -2.0794415416798357),
("minutehour", -1.6739764335716716),
("<time-of-day> am|pm<time-of-day> am|pm",
-2.0794415416798357)],
n = 4},
koData =
ClassData{prior = -1.0986122886681098,
unseen = -2.5649493574615367,
likelihoods =
HashMap.fromList
[("hh:mmtime-of-day (latent)", -1.791759469228055),
("minutehour", -1.3862943611198906),
("hhhmmtime-of-day (latent)", -1.791759469228055)],
n = 2}}),
("Halloween",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Passover",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("from <month> dd-dd (interval)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.3978952727983707,
likelihoods =
HashMap.fromList
[("Julyinteger (numeric)integer (numeric)", -1.6094379124341003),
("Augustordinal (digits)integer (numeric)",
-1.6094379124341003),
("month", -0.916290731874155),
("Augustordinal (digits)ordinal (digits)",
-1.6094379124341003)],
n = 3},
koData =
ClassData{prior = -infinity, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [], n = 0}}),
("Good Friday",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("October",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.1780538303479458,
likelihoods = HashMap.fromList [("", 0.0)], n = 22},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("month (grain)",
Classifier{okData =
ClassData{prior = -0.12136085700426748,
unseen = -3.4965075614664802,
likelihoods = HashMap.fromList [("", 0.0)], n = 31},
koData =
ClassData{prior = -2.169053700369523, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4}}),
("<integer> more <unit-of-duration>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("integer (numeric)minute (grain)", -0.6931471805599453),
("minute", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("<time-of-day> o'clock",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.833213344056216,
likelihoods =
HashMap.fromList
[("at <time-of-day>", -2.0794415416798357),
("<part-of-day> at <time-of-day>", -2.0794415416798357),
("time-of-day (latent)", -1.1631508098056809),
("hour", -0.8266785731844679)],
n = 6},
koData =
ClassData{prior = -infinity, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [], n = 0}}),
("Vesak",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Earth Hour",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("in|within|after <duration>",
Classifier{okData =
ClassData{prior = -0.12260232209233239,
unseen = -4.709530201312334,
likelihoods =
HashMap.fromList
[("week", -3.3141860046725258),
("<integer> more <unit-of-duration>", -4.007333185232471),
("three-quarters of an hour", -3.0910424533583156),
("<integer> + '\"", -3.3141860046725258),
("number.number hours", -4.007333185232471),
("second", -3.6018680771243066),
("half a <time-grain>", -3.0910424533583156),
("day", -3.3141860046725258), ("year", -4.007333185232471),
("<integer> <unit-of-duration>", -1.6094379124341003),
("a <unit-of-duration>", -3.0910424533583156),
("quarter of an hour", -3.0910424533583156),
("hour", -2.503255788456197),
("about|exactly <duration>", -4.007333185232471),
("half an hour (abbrev).", -3.6018680771243066),
("<integer> and an half hour", -4.007333185232471),
("minute", -1.2992829841302609)],
n = 46},
koData =
ClassData{prior = -2.159484249353372, unseen = -3.4339872044851463,
likelihoods =
HashMap.fromList
[("day", -2.70805020110221), ("quarter", -2.70805020110221),
("year", -1.791759469228055),
("<integer> <unit-of-duration>", -1.791759469228055),
("a <unit-of-duration>", -2.3025850929940455)],
n = 6}}),
("the closest <day> to <time>",
Classifier{okData =
ClassData{prior = -0.2876820724517809,
unseen = -2.5649493574615367,
likelihoods =
HashMap.fromList
[("dayday", -1.0986122886681098),
("Christmastoday", -1.791759469228055),
("Monday<named-month>|<named-day> <day-of-month> (ordinal)",
-1.791759469228055),
("Monday<named-month> <day-of-month> (non ordinal)",
-1.791759469228055)],
n = 3},
koData =
ClassData{prior = -1.3862943611198906,
unseen = -2.1972245773362196,
likelihoods =
HashMap.fromList
[("daymonth", -1.3862943611198906),
("MondayOctober", -1.3862943611198906)],
n = 1}}),
("January",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("three-quarters of an hour",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Mattu Pongal",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Wednesday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.3025850929940455,
likelihoods = HashMap.fromList [("", 0.0)], n = 8},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("half after|past <hour-of-day>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods =
HashMap.fromList
[("time-of-day (latent)", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 2},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("<integer> + '\"",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("integer (numeric)", 0.0)],
n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("half <integer> (UK style hour-of-day)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("time-of-day (latent)", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("Ganesh Chaturthi",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("July",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.5553480614894135,
likelihoods = HashMap.fromList [("", 0.0)], n = 33},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<time-of-day> <part-of-day>",
Classifier{okData =
ClassData{prior = -0.12516314295400605,
unseen = -3.6888794541139363,
likelihoods =
HashMap.fromList
[("time-of-day (latent)tonight", -2.5649493574615367),
("hourhour", -1.3609765531356008),
("<time-of-day> o'clockin|during the <part-of-day>",
-2.9704144655697013),
("minutehour", -1.717651497074333),
("at <time-of-day>in|during the <part-of-day>",
-2.277267285009756),
("time-of-day (latent)this <part-of-day>", -2.9704144655697013),
("hh:mmin|during the <part-of-day>", -2.0541237336955462),
("time-of-day (latent)in|during the <part-of-day>",
-2.277267285009756),
("hhmm (latent)in|during the <part-of-day>",
-2.9704144655697013)],
n = 15},
koData =
ClassData{prior = -2.1400661634962708, unseen = -2.639057329615259,
likelihoods =
HashMap.fromList
[("hourhour", -1.466337068793427),
("time-of-day (latent)in|during the <part-of-day>",
-1.466337068793427)],
n = 2}}),
("hour (grain)",
Classifier{okData =
ClassData{prior = -1.3723081191451507, unseen = -2.995732273553991,
likelihoods = HashMap.fromList [("", 0.0)], n = 18},
koData =
ClassData{prior = -0.2923879634891936, unseen = -4.007333185232471,
likelihoods = HashMap.fromList [("", 0.0)], n = 53}}),
("Parsi New Year",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Shavuot",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<day> <duration> hence|ago",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("dayyear", -0.6931471805599453),
("Diwali<integer> <unit-of-duration>", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("<ordinal> quarter",
Classifier{okData =
ClassData{prior = -0.4700036292457356, unseen = -2.639057329615259,
likelihoods =
HashMap.fromList
[("ordinal (digits)quarter (grain)", -1.1786549963416462),
("ordinals (first..twentieth,thirtieth,...)quarter (grain)",
-1.466337068793427),
("quarter", -0.7731898882334817)],
n = 5},
koData =
ClassData{prior = -0.9808292530117262,
unseen = -2.3025850929940455,
likelihoods =
HashMap.fromList
[("ordinal (digits)quarter (grain)", -0.8109302162163288),
("quarter", -0.8109302162163288)],
n = 3}}),
("Boss's Day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Orthodox Easter Sunday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("one twenty two",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("May",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("intersect",
Classifier{okData =
ClassData{prior = -0.4907042205665696, unseen = -7.174724309836376,
likelihoods =
HashMap.fromList
[("Navaratriyear (latent)", -6.480811139196849),
("Karva Chauthyear (latent)", -6.480811139196849),
("<day-of-month> (ordinal)in|during the <part-of-day>",
-6.480811139196849),
("Maha Shivaratriyear (latent)", -6.480811139196849),
("Ramadanyear (latent)", -5.228048170701481),
("<datetime> - <datetime> (interval)on <day>",
-5.094516778076958),
("Bhai Doojyear (latent)", -6.480811139196849),
("hourday", -4.465908118654584),
("dayhour", -3.4362887014734254),
("<time-of-day> - <time-of-day> (interval)on <day>",
-5.094516778076958),
("Martin Luther King's Dayyear (latent)", -6.075346031088684),
("Shemini Atzeretyear (latent)", -6.480811139196849),
("daymonth", -4.465908118654584),
("monthday", -6.075346031088684),
("monthyear", -4.689051669968793),
("Yom Ha'atzmautyear (latent)", -6.480811139196849),
("Orthodox Good Fridayyear (latent)", -6.075346031088684),
("Vijayadashamiyear (latent)", -6.480811139196849),
("Thai Pongalyear (latent)", -5.787663958636903),
("Thiru Onamyear (latent)", -5.787663958636903),
("hhhmmabsorption of , after named day", -6.480811139196849),
("Tuesdaythe <day-of-month> (ordinal)", -6.480811139196849),
("from <datetime> - <datetime> (interval)July",
-5.564520407322694),
("<day-of-month> (ordinal)Wednesday", -6.480811139196849),
("Krishna Janmashtamiyear (latent)", -6.075346031088684),
("Guru Gobind Singh Jayantiyear (latent)", -6.480811139196849),
("houryear", -5.564520407322694),
("this|next <day-of-week>hh(:mm) - <time-of-day> am|pm",
-6.480811139196849),
("Christmas<time-of-day> am|pm", -6.480811139196849),
("last <day-of-week> of <time>year (latent)",
-6.480811139196849),
("<time-of-day> am|pmintersect by \",\", \"of\", \"from\", \"'s\"",
-5.787663958636903),
("intersectin|during <named-month>|year", -6.480811139196849),
("<time-of-day> am|pmintersect", -5.228048170701481),
("Earth Houryear (latent)", -6.480811139196849),
("Ganesh Chaturthiyear (latent)", -6.480811139196849),
("Octoberyear (latent)", -4.976733742420574),
("intersect<time-of-day> am|pm", -6.480811139196849),
("Mattu Pongalyear (latent)", -6.480811139196849),
("Saturday<part-of-day> at <time-of-day>", -6.480811139196849),
("Shavuotyear (latent)", -6.480811139196849),
("Parsi New Yearyear (latent)", -5.564520407322694),
("at <time-of-day>in|within|after <duration>",
-6.480811139196849),
("Thursdayhh(:mm) - <time-of-day> am|pm", -6.480811139196849),
("todayin|within|after <duration>", -6.480811139196849),
("<named-month>|<named-day> <day-of-month> (ordinal)year (latent)",
-6.075346031088684),
("Marchyear (latent)", -6.480811139196849),
("intersect by \",\", \"of\", \"from\", \"'s\"hhhmm",
-6.075346031088684),
("Sukkotyear (latent)", -6.075346031088684),
("hhhmmintersect", -6.075346031088684),
("intersect by \",\", \"of\", \"from\", \"'s\"year (latent)",
-6.075346031088684),
("Clean Mondayyear (latent)", -6.075346031088684),
("monthhour", -6.075346031088684),
("<day-of-month> (ordinal)intersect by \",\", \"of\", \"from\", \"'s\"",
-5.564520407322694),
("todayat <time-of-day>", -6.480811139196849),
("Thursday<time> timezone", -4.082915866398478),
("tonight<time-of-day> am|pm", -6.480811139196849),
("time-of-day (latent)tonight", -6.075346031088684),
("from|since|after <time>December", -6.480811139196849),
("<time-of-day> am|pmon <day>", -4.465908118654584),
("this <time>hh(:mm) - <time-of-day> am|pm",
-6.480811139196849),
("yyyy-mm-ddhh:mm:ss", -6.075346031088684),
("dayday", -3.7082224169570672),
("<time> <part-of-day>at <time-of-day>", -6.075346031088684),
("tonightat <time-of-day>", -5.382198850528739),
("<time-of-day> am|pmabsorption of , after named day",
-5.787663958636903),
("Dayananda Saraswati Jayantiyear (latent)",
-6.480811139196849),
("today<time-of-day> am|pm", -6.480811139196849),
("Februarythe <day-of-month> (ordinal)", -6.075346031088684),
("at <time-of-day><time> <part-of-day>", -6.480811139196849),
("<day-of-month> (ordinal)intersect", -6.075346031088684),
("hourhour", -3.5103966736271475),
("Mahavir Jayantiyear (latent)", -6.075346031088684),
("Navaratriin|during <named-month>|year", -6.480811139196849),
("Wednesdaythis|last|next <cycle>", -6.480811139196849),
("Lentyear (latent)", -6.480811139196849),
("intersect<named-month> <day-of-month> (non ordinal)",
-4.609008962295257),
("Boghiyear (latent)", -6.480811139196849),
("dayyear", -1.9216848917101639),
("Karva Chauthin|during <named-month>|year",
-6.480811139196849),
("Thursdayfrom|since|after <time>", -6.075346031088684),
("<time-of-day> o'clockin|during the <part-of-day>",
-6.480811139196849),
("Thursdayat <time-of-day>", -5.787663958636903),
("Islamic New Yearyear (latent)", -6.075346031088684),
("Laylat al-Qadryear (latent)", -5.564520407322694),
("part of days<time-of-day> am|pm", -6.480811139196849),
("Shrove Tuesdayyear (latent)", -6.480811139196849),
("intersect by \",\", \"of\", \"from\" for year<time-of-day> am|pm",
-5.787663958636903),
("hourminute", -6.075346031088684),
("<time-of-day> am|pmtomorrow", -5.564520407322694),
("Yom Kippuryear (latent)", -6.480811139196849),
("<day-of-month> (ordinal)Tuesday", -5.787663958636903),
("<part-of-day> of <time>year (latent)", -6.480811139196849),
("minutehour", -5.228048170701481),
("Kaanum Pongalyear (latent)", -6.075346031088684),
("Maha Saptamiyear (latent)", -6.480811139196849),
("at <time-of-day>in|during the <part-of-day>",
-5.787663958636903),
("time-of-day (latent)tomorrow", -5.564520407322694),
("part of daysat <time-of-day>", -4.871373226762748),
("absorption of , after named day<named-month> <day-of-month> (non ordinal)",
-4.465908118654584),
("for <duration> from <time>December", -6.480811139196849),
("tomorrow<time-of-day> sharp|exactly", -6.480811139196849),
("Thursdayfrom <datetime> - <datetime> (interval)",
-5.228048170701481),
("intersect by \",\", \"of\", \"from\" for yearhhhmm",
-5.228048170701481),
("time-of-day (latent)this <part-of-day>", -6.480811139196849),
("Pentecostyear (latent)", -6.480811139196849),
("Thursdayfrom <time-of-day> - <time-of-day> (interval)",
-5.228048170701481),
("<day-of-month> (ordinal)February", -6.480811139196849),
("Eid al-Fitryear (latent)", -5.094516778076958),
("Vasant Panchamiin|during <named-month>|year",
-6.480811139196849),
("Mondayin|during the <part-of-day>", -6.480811139196849),
("Chhathin|during <named-month>|year", -6.480811139196849),
("Diwaliin|during <named-month>|year", -6.480811139196849),
("this <part-of-day><time-of-day> am|pm", -6.480811139196849),
("Vaisakhiin|during <named-month>|year", -6.480811139196849),
("Guru Ravidass Jayantiyear (latent)", -5.228048170701481),
("Raksha Bandhanyear (latent)", -6.480811139196849),
("daysecond", -6.075346031088684),
("tomorrowfrom <time-of-day> - <time-of-day> (interval)",
-6.075346031088684),
("Ratha-Yatrayear (latent)", -6.480811139196849),
("Ashurayear (latent)", -6.480811139196849),
("Tuesdayin|during <named-month>|year", -6.480811139196849),
("Chinese New Yearyear (latent)", -6.480811139196849),
("tomorrowintersect", -6.480811139196849),
("Lag BaOmeryear (latent)", -6.480811139196849),
("last weekend of <named-month>year (latent)",
-6.480811139196849),
("Eid al-Adhayear (latent)", -4.340744975700578),
("intersectin|during the <part-of-day>", -6.480811139196849),
("Palm Sundayyear (latent)", -6.480811139196849),
("Christmasat <time-of-day>", -6.480811139196849),
("Passoveryear (latent)", -6.480811139196849),
("Lazarus Saturdayyear (latent)", -6.480811139196849),
("<day-of-month> (ordinal or number) <named-month>year (latent)",
-6.480811139196849),
("hhhmmon <day>", -5.564520407322694),
("Yom HaShoahyear (latent)", -6.075346031088684),
("Thursday<datetime> - <datetime> (interval)",
-6.075346031088684),
("<day-of-month> (ordinal or number) of <named-month>in|during the <part-of-day>",
-6.480811139196849),
("Septemberyear (latent)", -6.075346031088684),
("Thursday<time-of-day> - <time-of-day> (interval)",
-4.976733742420574),
("Halloweenyear (latent)", -6.480811139196849),
("<ordinal> last <cycle> of <time>year (latent)",
-6.075346031088684),
("from <time-of-day> - <time-of-day> (interval)on <day>",
-5.787663958636903),
("intersect by \",\", \"of\", \"from\", \"'s\"<time-of-day> am|pm",
-6.480811139196849),
("at <time-of-day>intersect", -5.564520407322694),
("Rosh Hashanahyear (latent)", -5.787663958636903),
("Dhanterasyear (latent)", -6.480811139196849),
("Tu BiShvatyear (latent)", -6.480811139196849),
("<day-of-month> (ordinal)December", -5.787663958636903),
("Holiyear (latent)", -5.787663958636903),
("<time-of-day> - <time-of-day> (interval)tomorrow",
-6.480811139196849),
("Holika Dahanyear (latent)", -5.787663958636903),
("at <time-of-day>intersect by \",\", \"of\", \"from\", \"'s\"",
-6.075346031088684),
("dayminute", -3.166625134524323),
("Mawlidyear (latent)", -6.480811139196849),
("from <datetime> - <datetime> (interval)on <day>",
-6.075346031088684),
("<datetime> - <datetime> (interval)tomorrow",
-6.480811139196849),
("Jumu'atul-Widayear (latent)", -5.564520407322694),
("minuteday", -2.7313070632664775),
("absorption of , after named dayintersect",
-5.787663958636903),
("intersectyear (latent)", -6.480811139196849),
("Orthodox Easter Sundayyear (latent)", -6.480811139196849),
("time-of-day (latent)in|within|after <duration>",
-6.480811139196849),
("<ordinal> <cycle> of <time>year (latent)",
-6.480811139196849),
("intersecthhhmm", -6.075346031088684),
("the <day-of-month> (ordinal)in|during the <part-of-day>",
-6.480811139196849),
("Boss's Dayyear (latent)", -6.075346031088684),
("hhhmmintersect by \",\", \"of\", \"from\", \"'s\"",
-6.480811139196849),
("Global Youth Service Dayyear (latent)", -6.480811139196849),
("Dhanterasin|during <named-month>|year", -6.480811139196849),
("tonight<time-of-day> o'clock", -6.480811139196849),
("Tisha B'Avyear (latent)", -6.480811139196849),
("Isra and Mi'rajyear (latent)", -5.564520407322694),
("at <time-of-day>on <day>", -4.871373226762748),
("at <time-of-day>absorption of , after named day",
-6.075346031088684),
("time-of-day (latent)<time> <part-of-day>",
-5.787663958636903),
("Christmasyear (latent)", -6.075346031088684),
("Saturdayintersect", -6.480811139196849),
("Naraka Chaturdashiyear (latent)", -6.075346031088684),
("Thai Pongalin|during <named-month>|year", -6.480811139196849),
("dayweek", -6.480811139196849),
("Easter Sundayyear (latent)", -5.787663958636903),
("between <time-of-day> and <time-of-day> (interval)on <day>",
-6.075346031088684),
("weekyear", -5.382198850528739),
("King's Dayyear (latent)", -4.976733742420574),
("hh:mmin|during the <part-of-day>", -5.564520407322694),
("<cycle> after|before <time><time-of-day> am|pm",
-6.075346031088684),
("first|second|third|fourth|fifth <day-of-week> of <time>year (latent)",
-5.787663958636903),
("Hanukkahyear (latent)", -5.787663958636903),
("Rama Navamiyear (latent)", -6.480811139196849),
("February<time> <part-of-day>", -6.480811139196849),
("time-of-day (latent)in|during the <part-of-day>",
-5.787663958636903),
("Great Lentyear (latent)", -6.480811139196849),
("tomorrowat <time-of-day>", -5.787663958636903),
("hhmm (latent)in|during the <part-of-day>",
-6.480811139196849),
("tomorrow<part-of-day> at <time-of-day>", -6.075346031088684),
("Ugadiyear (latent)", -5.094516778076958),
("Vaisakhiyear (latent)", -5.787663958636903),
("absorption of , after named dayintersect by \",\", \"of\", \"from\" for year",
-5.787663958636903),
("last <cycle> of <time>year (latent)", -5.787663958636903),
("at <time-of-day>tomorrow", -6.075346031088684),
("tomorrow<time-of-day> am|pm", -6.480811139196849),
("<named-month> <day-of-month> (non ordinal)year (latent)",
-6.480811139196849),
("Diwaliyear (latent)", -6.480811139196849),
("between <time> and <time>on <day>", -6.075346031088684),
("Black Fridayyear (latent)", -6.075346031088684),
("the <ordinal> last <cycle> of <time>year (latent)",
-6.480811139196849),
("in|during the <part-of-day>at <time-of-day>",
-6.480811139196849),
("Chhathyear (latent)", -6.075346031088684),
("Vasant Panchamiyear (latent)", -6.480811139196849),
("Rabindra Jayantiyear (latent)", -5.228048170701481),
("this <part-of-day>at <time-of-day>", -5.228048170701481),
("St Patrick's Dayyear (latent)", -6.480811139196849),
("Thursday<time> (timezone)", -6.480811139196849),
("<day-of-month> (ordinal or number) of <month>in|during the <part-of-day>",
-6.480811139196849),
("Pargat Diwasyear (latent)", -5.228048170701481),
("<datetime> - <datetime> (interval)July", -6.075346031088684),
("on <day><named-month> <day-of-month> (non ordinal)",
-5.787663958636903),
("Februaryintersect", -6.480811139196849),
("Simchat Torahyear (latent)", -6.480811139196849),
("minuteyear", -6.480811139196849)],
n = 502},
koData =
ClassData{prior = -0.9472529574781219, unseen = -6.843749949006225,
likelihoods =
HashMap.fromList
[("Thursdayhhhmm", -4.645458704902203),
("hourday", -3.546846416234093),
("<hour-of-day> <integer><time-of-day> am|pm",
-6.149536101678477),
("<day-of-month> (ordinal)August", -6.149536101678477),
("dayhour", -3.5845867442169403),
("<time> timezoneyear (latent)", -5.456388921118531),
("<time-of-day> - <time-of-day> (interval)on <day>",
-5.2332453698043215),
("Tuesdayfrom|since|after <time>", -5.456388921118531),
("daymonth", -3.441485900576267),
("hourquarter", -5.2332453698043215),
("monthyear", -4.896773133183109),
("<time-of-day> am|pmyear (latent)", -5.456388921118531),
("Thai Pongalyear (latent)", -5.456388921118531),
("intersecthh:mm", -6.149536101678477),
("from <datetime> - <datetime> (interval)July",
-5.744070993570313),
("<day-of-month> (ordinal)Wednesday", -6.149536101678477),
("houryear", -4.357776632450422),
("from <time-of-day> - <time-of-day> (interval)July",
-6.149536101678477),
("<day-of-month> (ordinal)October", -6.149536101678477),
("<time-of-day> am|pmintersect by \",\", \"of\", \"from\", \"'s\"",
-5.456388921118531),
("hournograin", -4.896773133183109),
("<time-of-day> am|pmintersect", -4.896773133183109),
("Octoberyear (latent)", -6.149536101678477),
("Good Fridayyear (latent)", -5.744070993570313),
("time-of-day (latent)intersect by \",\", \"of\", \"from\", \"'s\"",
-5.744070993570313),
("early morningat <time-of-day>", -6.149536101678477),
("until <time>on <day>", -5.456388921118531),
("part of days<time-of-day> - <time-of-day> (interval)",
-6.149536101678477),
("time-of-day (latent)intersect", -5.456388921118531),
("todayin <number> (implicit minutes)", -6.149536101678477),
("<named-month>|<named-day> <day-of-month> (ordinal)year (latent)",
-5.456388921118531),
("this <part-of-day><time-of-day> - <time-of-day> (interval)",
-6.149536101678477),
("about|exactly <time-of-day>year (latent)",
-6.149536101678477),
("hh:mmon <day>", -4.896773133183109),
("hhhmmintersect", -5.744070993570313),
("absorption of , after named dayJuly", -5.2332453698043215),
("from|since|after <time>July", -5.744070993570313),
("intersect by \",\", \"of\", \"from\", \"'s\"year (latent)",
-5.2332453698043215),
("Clean Mondayyear (latent)", -6.149536101678477),
("monthhour", -6.149536101678477),
("<day-of-month> (ordinal)intersect by \",\", \"of\", \"from\", \"'s\"",
-5.744070993570313),
("hourmonth", -4.009469938182206),
("todayat <time-of-day>", -6.149536101678477),
("hhhmmyear (latent)", -4.896773133183109),
("from|since|after <time>December", -6.149536101678477),
("from|since|after <time><time-of-day> am|pm",
-6.149536101678477),
("<time-of-day> am|pmon <day>", -4.896773133183109),
("Mondayyear (latent)", -5.456388921118531),
("dayday", -4.009469938182206),
("on <day>September", -5.456388921118531),
("time-of-day (latent)September", -5.744070993570313),
("hourhour", -4.0700945599986404),
("time-of-day (latent)on <day>", -4.645458704902203),
("Thursdaydd/mm", -6.149536101678477),
("time-of-day (latent)<cycle> after|before <time>",
-5.744070993570313),
("dayyear", -3.1291112155341145),
("New Year's Dayyear (latent)", -5.2332453698043215),
("time-of-day (latent)Sunday", -5.744070993570313),
("Thursdayfrom|since|after <time>", -4.444788009440051),
("Thursdayat <time-of-day>", -4.540098189244376),
("<integer> to|till|before <hour-of-day>September",
-6.149536101678477),
("Aprilyear (latent)", -6.149536101678477),
("the <day-of-month> (ordinal)July", -6.149536101678477),
("the <day-of-month> (number)July", -6.149536101678477),
("monthminute", -6.149536101678477),
("<time-of-day> am|pmtomorrow", -5.744070993570313),
("Thursdayhh:mm", -5.2332453698043215),
("<day-of-month> (ordinal)Tuesday", -5.744070993570313),
("minutemonth", -4.009469938182206),
("time-of-day (latent)Friday", -5.744070993570313),
("minutehour", -6.149536101678477),
("part of daysat <time-of-day>", -5.0509238130103675),
("time-of-day (latent)this|last|next <cycle>",
-3.7981608445149995),
("Augustyear (latent)", -5.744070993570313),
("week-endin|during <named-month>|year", -6.149536101678477),
("time-of-day (latent)Tuesday", -5.744070993570313),
("tomorrowfrom <time-of-day> - <time-of-day> (interval)",
-6.149536101678477),
("tonight<time-of-day> - <time-of-day> (interval)",
-6.149536101678477),
("Sundayyear (latent)", -5.2332453698043215),
("hourweek", -5.2332453698043215),
("the <day-of-month> (ordinal)Monday", -5.456388921118531),
("Christmasat <time-of-day>", -6.149536101678477),
("from|since|after <time>year (latent)", -5.0509238130103675),
("hhhmmon <day>", -5.2332453698043215),
("yesterday<time-of-day> am|pm", -6.149536101678477),
("<day-of-month> (ordinal)July", -5.456388921118531),
("intersect by \",\", \"of\", \"from\", \"'s\"hh:mm",
-6.149536101678477),
("Thursday<time-of-day> - <time-of-day> (interval)",
-6.149536101678477),
("in|during <named-month>|yearyear (latent)",
-6.149536101678477),
("at <time-of-day>intersect", -5.2332453698043215),
("hh:mmyear (latent)", -4.763241740558586),
("Holiyear (latent)", -6.149536101678477),
("until <time><time-of-day> am|pm", -6.149536101678477),
("at <time-of-day>intersect by \",\", \"of\", \"from\", \"'s\"",
-5.744070993570313),
("dayminute", -3.316322757622261),
("yyyy-mm-ddhh:mm", -5.744070993570313),
("intersectfrom|since|after <time>", -5.744070993570313),
("intersectSeptember", -4.277733924776886),
("minuteday", -2.9925356805283636),
("absorption of , after named dayintersect",
-6.149536101678477),
("intersectyear (latent)", -6.149536101678477),
("Februaryin|during the <part-of-day>", -6.149536101678477),
("<duration> after|before|from|past <time>December",
-6.149536101678477),
("time-of-day (latent)July", -5.456388921118531),
("Saturdayyear (latent)", -6.149536101678477),
("hhhmmintersect by \",\", \"of\", \"from\", \"'s\"",
-6.149536101678477),
("<day-of-month> (ordinal)Monday", -4.896773133183109),
("at <time-of-day>on <day>", -5.2332453698043215),
("absorption of , after named daySeptember",
-4.896773133183109),
("Naraka Chaturdashiyear (latent)", -6.149536101678477),
("from|since|after <time>on <day>", -5.2332453698043215),
("dayweek", -6.149536101678477),
("Easter Sundayyear (latent)", -5.744070993570313),
("Thursday<time-of-day> am|pm", -4.896773133183109),
("weekyear", -5.744070993570313),
("time-of-day (latent)Thursday", -5.744070993570313),
("<named-month> <day-of-month> (non ordinal)until <time>",
-6.149536101678477),
("<day-of-month> (ordinal)April", -6.149536101678477),
("yyyy-mm-dd<time-of-day> - <time-of-day> (interval)",
-5.744070993570313),
("intersect by \",\", \"of\", \"from\" for yearhh:mm",
-5.456388921118531),
("Sundayfrom|since|after <time>", -6.149536101678477),
("absorption of , after named dayFebruary",
-5.2332453698043215),
("time-of-day (latent)in|during the <part-of-day>",
-5.744070993570313),
("July<integer> to|till|before <hour-of-day>",
-6.149536101678477),
("tomorrowat <time-of-day>", -6.149536101678477),
("daynograin", -5.744070993570313),
("Fridayin|during <named-month>|year", -6.149536101678477),
("<integer> to|till|before <hour-of-day>July",
-5.744070993570313),
("last <cycle> of <time>year (latent)", -5.744070993570313),
("tomorrow<time-of-day> am|pm", -6.149536101678477),
("<named-month> <day-of-month> (non ordinal)year (latent)",
-5.456388921118531),
("Diwaliyear (latent)", -5.744070993570313),
("<time-of-day> - <time-of-day> (interval)July",
-6.149536101678477),
("this <part-of-day>at <time-of-day>", -5.2332453698043215),
("Fridayyear (latent)", -5.2332453698043215),
("time-of-day (latent)April", -6.149536101678477),
("minuteyear", -3.9523115243422575)],
n = 318}}),
("one eleven",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4}}),
("after lunch/work/school",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("early morning",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("in <number> (implicit minutes)",
Classifier{okData =
ClassData{prior = -1.329135947279942, unseen = -2.4849066497880004,
likelihoods =
HashMap.fromList
[("integer (numeric)", -0.3184537311185346),
("integer (0..19)", -1.2992829841302609)],
n = 9},
koData =
ClassData{prior = -0.30748469974796055,
unseen = -3.332204510175204,
likelihoods =
HashMap.fromList
[("integer (numeric)", -0.20479441264601328),
("integer (0..19)", -1.6863989535702288)],
n = 25}}),
("<ordinal> <cycle> of <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.9444389791664407,
likelihoods =
HashMap.fromList
[("daymonth", -1.791759469228055),
("ordinal (digits)quarter (grain)year (latent)",
-2.1972245773362196),
("quarteryear", -2.1972245773362196),
("ordinals (first..twentieth,thirtieth,...)day (grain)October",
-2.1972245773362196),
("ordinal (digits)day (grain)this|last|next <cycle>",
-2.1972245773362196),
("ordinals (first..twentieth,thirtieth,...)week (grain)intersect",
-2.1972245773362196),
("weekmonth", -1.791759469228055),
("ordinals (first..twentieth,thirtieth,...)week (grain)October",
-2.1972245773362196)],
n = 5},
koData =
ClassData{prior = -infinity, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [], n = 0}}),
("year (grain)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.6635616461296463,
likelihoods = HashMap.fromList [("", 0.0)], n = 37},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("from <datetime> - <datetime> (interval)",
Classifier{okData =
ClassData{prior = -1.3737155789130304, unseen = -4.532599493153256,
likelihoods =
HashMap.fromList
[("hh:mmtime-of-day (latent)", -3.4231762883809305),
("minuteminute", -2.91235066461494),
("the <day-of-month> (number)the <day-of-month> (ordinal)",
-3.828641396489095),
("<day-of-month> (ordinal)<day-of-month> (ordinal)",
-3.4231762883809305),
("the <day-of-month> (ordinal)the <day-of-month> (ordinal)",
-3.4231762883809305),
("hh:mmhh:mm", -2.91235066461494),
("dayday", -1.9568392195875037),
("the <day-of-month> (ordinal)the <day-of-month> (number)",
-3.828641396489095),
("the <day-of-month> (number)the <day-of-month> (number)",
-3.4231762883809305),
("<named-month>|<named-day> <day-of-month> (ordinal)<day-of-month> (ordinal)",
-3.828641396489095),
("<time-of-day> am|pmtime-of-day (latent)", -3.828641396489095),
("hourhour", -3.4231762883809305),
("minutehour", -3.4231762883809305),
("<day-of-month> (ordinal)the <day-of-month> (ordinal)",
-3.828641396489095),
("the <day-of-month> (ordinal)<day-of-month> (ordinal)",
-3.828641396489095),
("<time-of-day> am|pm<time-of-day> am|pm", -3.828641396489095),
("<day-of-month> (ordinal)the <day-of-month> (number)",
-3.828641396489095)],
n = 20},
koData =
ClassData{prior = -0.29191040856130207, unseen = -5.14166355650266,
likelihoods =
HashMap.fromList
[("hourday", -2.570849079588725),
("dayhour", -3.056356895370426),
("<day-of-month> (ordinal)the <day-of-month> (ordinal or number) of <named-month>",
-4.037186148382152),
("time-of-day (latent)intersect by \",\", \"of\", \"from\", \"'s\"",
-4.442651256490317),
("time-of-day (latent)<day-of-month> (ordinal)",
-4.442651256490317),
("time-of-day (latent)intersect", -4.442651256490317),
("<day-of-month> (ordinal)time-of-day (latent)",
-3.7495040759303713),
("<named-month>|<named-day> <day-of-month> (ordinal)year (latent)",
-4.442651256490317),
("hh:mmtime-of-day (latent)", -3.5263605246161616),
("hh:mm<time-of-day> am|pm", -4.037186148382152),
("minuteminute", -3.5263605246161616),
("<day-of-month> (ordinal)intersect by \",\", \"of\", \"from\", \"'s\"",
-4.442651256490317),
("time-of-day (latent)time-of-day (latent)",
-3.5263605246161616),
("hh:mmhh:mm", -4.442651256490317),
("dayday", -1.916922612182061),
("the <day-of-month> (ordinal)the <day-of-month> (number)",
-4.037186148382152),
("the <day-of-month> (number)the <day-of-month> (number)",
-4.442651256490317),
("time-of-day (latent)the <day-of-month> (ordinal or number) of <named-month>",
-4.037186148382152),
("<day-of-month> (ordinal)intersect", -4.442651256490317),
("hourhour", -3.1898882879949486),
("time-of-day (latent)the <day-of-month> (ordinal)",
-4.442651256490317),
("dayyear", -4.442651256490317),
("the <day-of-month> (ordinal)time-of-day (latent)",
-4.442651256490317),
("the <day-of-month> (number)time-of-day (latent)",
-4.442651256490317),
("minutehour", -3.3440389678222067),
("the <day-of-month> (ordinal)intersect by \",\", \"of\", \"from\", \"'s\"",
-4.442651256490317),
("the <day-of-month> (number)intersect by \",\", \"of\", \"from\", \"'s\"",
-4.442651256490317),
("hh:mmintersect", -4.037186148382152),
("time-of-day (latent)<day-of-month> (ordinal or number) of <month>",
-4.037186148382152),
("<day-of-month> (ordinal)<day-of-month> (ordinal or number) <named-month>",
-4.442651256490317),
("<day-of-month> (ordinal)<day-of-month> (ordinal or number) of <month>",
-4.037186148382152),
("time-of-day (latent)<time-of-day> am|pm", -4.442651256490317),
("time-of-day (latent)<day-of-month> (ordinal or number) <named-month>",
-4.442651256490317),
("time-of-day (latent)the <day-of-month> (number)",
-4.037186148382152),
("the <day-of-month> (ordinal)<day-of-month> (ordinal or number) <named-month>",
-4.442651256490317),
("the <day-of-month> (number)<day-of-month> (ordinal or number) <named-month>",
-4.442651256490317),
("the <day-of-month> (number)the <day-of-month> (ordinal or number) of <named-month>",
-4.037186148382152),
("the <day-of-month> (ordinal)the <day-of-month> (ordinal or number) of <named-month>",
-4.037186148382152),
("<day-of-month> (ordinal)<day-of-month> (ordinal or number) of <named-month>",
-4.037186148382152),
("time-of-day (latent)<day-of-month> (ordinal or number) of <named-month>",
-4.037186148382152),
("<day-of-month> (ordinal)the <day-of-month> (number)",
-4.442651256490317),
("the <day-of-month> (ordinal)intersect", -4.037186148382152),
("<named-month> <day-of-month> (non ordinal)time-of-day (latent)",
-4.442651256490317),
("the <day-of-month> (number)intersect", -4.037186148382152)],
n = 59}}),
("Saturday",
Classifier{okData =
ClassData{prior = -0.11778303565638351,
unseen = -2.3025850929940455,
likelihoods = HashMap.fromList [("", 0.0)], n = 8},
koData =
ClassData{prior = -2.1972245773362196,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("the <cycle> of <time>",
Classifier{okData =
ClassData{prior = -0.15415067982725836,
unseen = -2.995732273553991,
likelihoods =
HashMap.fromList
[("week (grain)<named-month>|<named-day> <day-of-month> (ordinal)",
-1.845826690498331),
("weekmonth", -1.845826690498331),
("week (grain)October", -1.845826690498331),
("week (grain)<named-month> <day-of-month> (non ordinal)",
-1.845826690498331),
("weekday", -1.3350010667323402)],
n = 6},
koData =
ClassData{prior = -1.9459101490553135,
unseen = -2.3025850929940455,
likelihoods =
HashMap.fromList
[("second (grain) March", -1.5040773967762742),
("secondmonth", -1.5040773967762742)],
n = 1}}),
("number.number hours",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("hour (grain)", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("from <time-of-day> - <time-of-day> (interval)",
Classifier{okData =
ClassData{prior = -0.7308875085427924,
unseen = -3.6888794541139363,
likelihoods =
HashMap.fromList
[("hh:mmtime-of-day (latent)", -2.5649493574615367),
("minuteminute", -1.8718021769015913),
("hh:mmhh:mm", -1.8718021769015913),
("<time-of-day> am|pmtime-of-day (latent)",
-2.5649493574615367),
("hourhour", -2.5649493574615367),
("hourminute", -2.5649493574615367),
("minutehour", -2.0541237336955462),
("time-of-day (latent)<time-of-day> sharp|exactly",
-2.9704144655697013),
("time-of-day (latent)hh:mm", -2.9704144655697013),
("<time-of-day> am|pm<time-of-day> am|pm",
-2.5649493574615367)],
n = 13},
koData =
ClassData{prior = -0.6567795363890705,
unseen = -3.7376696182833684,
likelihoods =
HashMap.fromList
[("hh:mmtime-of-day (latent)", -1.9218125974762528),
("hh:mm<time-of-day> am|pm", -2.6149597780361984),
("minuteminute", -2.6149597780361984),
("time-of-day (latent)time-of-day (latent)",
-1.9218125974762528),
("hh:mmhh:mm", -3.0204248861443626),
("hourhour", -1.7676619176489945),
("minutehour", -1.7676619176489945),
("time-of-day (latent)<time-of-day> am|pm",
-3.0204248861443626)],
n = 14}}),
("integer 21..99",
Classifier{okData =
ClassData{prior = -0.916290731874155, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("integer (20..90)integer (0..19)", -0.2876820724517809)],
n = 2},
koData =
ClassData{prior = -0.5108256237659907, unseen = -1.791759469228055,
likelihoods =
HashMap.fromList
[("integer (numeric)integer (numeric)", -0.2231435513142097)],
n = 3}}),
("Global Youth Service Day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Tisha B'Av",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("yyyy-mm-dd",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("year (latent)",
Classifier{okData =
ClassData{prior = -0.14681486833704485,
unseen = -5.393627546352362,
likelihoods =
HashMap.fromList
[("integer (numeric)", -2.7779564107075706e-2),
("intersect 2 numbers", -4.00277736869661)],
n = 215},
koData =
ClassData{prior = -1.9910923718485463,
unseen = -3.6635616461296463,
likelihoods =
HashMap.fromList
[("integer (numeric)", -1.55814461804655),
("negative numbers", -0.41871033485818493),
("compose by multiplication", -2.2512917986064953)],
n = 34}}),
("<time> for <duration>",
Classifier{okData =
ClassData{prior = -0.15415067982725836,
unseen = -3.5553480614894135,
likelihoods =
HashMap.fromList
[("minuteminute", -2.833213344056216),
("<time-of-day> am|pm<integer> <unit-of-duration>",
-2.1400661634962708),
("dayday", -1.4469189829363254),
("hourminute", -1.916922612182061),
("hhhmm<integer> <unit-of-duration>", -2.833213344056216),
("intersect<integer> <unit-of-duration>", -2.1400661634962708),
("<day-of-month> (ordinal or number) <named-month><integer> <unit-of-duration>",
-2.4277482359480516),
("from|since|after <time><integer> <unit-of-duration>",
-2.1400661634962708)],
n = 12},
koData =
ClassData{prior = -1.9459101490553135, unseen = -2.70805020110221,
likelihoods =
HashMap.fromList
[("monthday", -1.540445040947149),
("December<integer> <unit-of-duration>", -1.540445040947149)],
n = 2}}),
("hhhmm",
Classifier{okData =
ClassData{prior = -2.247285585205863e-2,
unseen = -3.828641396489095,
likelihoods = HashMap.fromList [("", 0.0)], n = 44},
koData =
ClassData{prior = -3.8066624897703196,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("as soon as possible",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Mahavir Jayanti",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Monday",
Classifier{okData =
ClassData{prior = -0.21357410029805904,
unseen = -3.1354942159291497,
likelihoods = HashMap.fromList [("", 0.0)], n = 21},
koData =
ClassData{prior = -1.6486586255873816,
unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5}}),
("dd/mm/yyyy",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("yesterday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<ordinal> quarter <year>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods =
HashMap.fromList
[("ordinal (digits)quarter (grain)year (latent)",
-0.6931471805599453),
("quarteryear", -0.6931471805599453)],
n = 2},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("Dayananda Saraswati Jayanti",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("hh:mm:ss",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Hanukkah",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("first|second|third|fourth|fifth <day-of-week> of <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.6635616461296463,
likelihoods =
HashMap.fromList
[("daymonth", -0.8649974374866046),
("ordinals (first..twentieth,thirtieth,...)Tuesdaythis|last|next <cycle>",
-2.9444389791664407),
("ordinals (first..twentieth,thirtieth,...)TuesdaySeptember",
-2.9444389791664407),
("ordinals (first..twentieth,thirtieth,...)Tuesdayintersect",
-2.9444389791664407),
("ordinals (first..twentieth,thirtieth,...)WednesdayOctober",
-2.538973871058276),
("ordinals (first..twentieth,thirtieth,...)Wednesdayintersect",
-2.538973871058276),
("ordinals (first..twentieth,thirtieth,...)Mondaythis|last|next <cycle>",
-1.6916760106710724),
("ordinals (first..twentieth,thirtieth,...)TuesdayOctober",
-2.538973871058276)],
n = 15},
koData =
ClassData{prior = -infinity, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [], n = 0}}),
("<hour-of-day> <integer>",
Classifier{okData =
ClassData{prior = -9.53101798043249e-2,
unseen = -3.295836866004329,
likelihoods =
HashMap.fromList
[("at <time-of-day>integer (20..90)", -2.5649493574615367),
("time-of-day (latent)integer (numeric)", -2.159484249353372),
("time-of-day (latent)integer 21..99", -2.159484249353372),
("hour", -0.8602012652231115),
("at <time-of-day>integer (numeric)", -2.159484249353372),
("time-of-day (latent)integer (20..90)", -1.8718021769015913)],
n = 10},
koData =
ClassData{prior = -2.3978952727983707,
unseen = -2.1972245773362196,
likelihoods =
HashMap.fromList
[("hour", -1.3862943611198906),
("time-of-day (latent)integer (20..90)", -1.3862943611198906)],
n = 1}}),
("Rama Navami",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<hour-of-day> quarter",
Classifier{okData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods =
HashMap.fromList
[("time-of-day (latent)", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 2}}),
("half a <time-grain>",
Classifier{okData =
ClassData{prior = -0.1823215567939546,
unseen = -2.5649493574615367,
likelihoods =
HashMap.fromList
[("hour (grain)", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 5},
koData =
ClassData{prior = -1.791759469228055, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("hour (grain)", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 1}}),
("King's Day",
Classifier{okData =
ClassData{prior = -0.11778303565638351,
unseen = -2.3025850929940455,
likelihoods = HashMap.fromList [("", 0.0)], n = 8},
koData =
ClassData{prior = -2.1972245773362196,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("Valentine's Day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("from the <day-of-month> (ordinal or number) to the <day-of-month> (ordinal or number) of <named-month> (interval)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.4011973816621555,
likelihoods =
HashMap.fromList
[("ordinal (digits)ordinal (digits)July", -1.9810014688665833),
("integer (numeric)integer (numeric)July", -1.9810014688665833),
("integer (numeric)ordinal (digits)July", -1.9810014688665833),
("ordinal (digits)integer (numeric)July", -1.9810014688665833),
("month", -0.8023464725249373)],
n = 12},
koData =
ClassData{prior = -infinity, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [], n = 0}}),
("April",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Maha Saptami",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("end of month",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.4849066497880004,
likelihoods = HashMap.fromList [("", 0.0)], n = 10},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("week (grain)",
Classifier{okData =
ClassData{prior = -4.546237407675729e-2,
unseen = -4.477336814478207,
likelihoods = HashMap.fromList [("", 0.0)], n = 86},
koData =
ClassData{prior = -3.1135153092103742, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4}}),
("<part-of-day> of <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.2188758248682006,
likelihoods =
HashMap.fromList
[("part of daysintersect by \",\", \"of\", \"from\", \"'s\"",
-2.4849066497880004),
("part of daysintersect", -2.4849066497880004),
("hourday", -0.9808292530117262),
("part of daysthe <day-of-month> (ordinal)",
-2.4849066497880004),
("part of daysthe <day-of-month> (number)",
-2.4849066497880004),
("part of daysthis <time>", -2.4849066497880004),
("part of daysthe <day-of-month> (ordinal or number) of <named-month>",
-2.4849066497880004),
("part of daysChristmas", -2.0794415416798357)],
n = 8},
koData =
ClassData{prior = -infinity, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [], n = 0}}),
("from <time> for <duration>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.4849066497880004,
likelihoods =
HashMap.fromList
[("<time-of-day> am|pm<integer> <unit-of-duration>",
-1.7047480922384253),
("dayday", -1.2992829841302609),
("hourminute", -1.7047480922384253),
("intersect<integer> <unit-of-duration>", -1.7047480922384253),
("<day-of-month> (ordinal or number) <named-month><integer> <unit-of-duration>",
-1.7047480922384253)],
n = 3},
koData =
ClassData{prior = -infinity, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [], n = 0}}),
("<integer> <day-of-week> from <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.995732273553991,
likelihoods =
HashMap.fromList
[("integer (numeric)Fridaynow", -2.2512917986064953),
("integer (0..19)Tuesdaynow", -2.2512917986064953),
("integer (0..19)Sundaynow", -2.2512917986064953),
("integer (0..19)Fridaynow", -2.2512917986064953),
("daynograin", -0.9985288301111273),
("integer (numeric)Sundaynow", -2.2512917986064953),
("integer (numeric)Tuesdaynow", -2.2512917986064953)],
n = 6},
koData =
ClassData{prior = -infinity, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [], n = 0}}),
("Shrove Tuesday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("now",
Classifier{okData =
ClassData{prior = -6.899287148695143e-2,
unseen = -2.772588722239781,
likelihoods = HashMap.fromList [("", 0.0)], n = 14},
koData =
ClassData{prior = -2.70805020110221, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("<day-of-month> (ordinal or number) of <named-month>",
Classifier{okData =
ClassData{prior = -0.7339691750802004,
unseen = -3.4657359027997265,
likelihoods =
HashMap.fromList
[("ordinal (digits)July", -2.740840023925201),
("ordinals (first..twentieth,thirtieth,...)March",
-1.6422277352570913),
("ordinal (digits)February", -2.740840023925201),
("integer (numeric)February", -2.3353749158170367),
("month", -0.8690378470236094),
("ordinal (digits)March", -2.3353749158170367),
("integer (numeric)July", -2.740840023925201)],
n = 12},
koData =
ClassData{prior = -0.6539264674066639,
unseen = -3.5263605246161616,
likelihoods =
HashMap.fromList
[("ordinal (digits)July", -1.550597412411167),
("ordinal (digits)February", -2.803360380906535),
("month", -0.8574502318512216),
("integer (numeric)July", -1.550597412411167)],
n = 13}}),
("this <part-of-day>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.4339872044851463,
likelihoods =
HashMap.fromList
[("hour", -0.6931471805599453),
("part of days", -0.6931471805599453)],
n = 14},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("Kaanum Pongal",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Yom Kippur",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<integer> minutes to|till|before <hour-of-day>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("integer (numeric)minute (grain)time-of-day (latent)",
-0.6931471805599453),
("minutehour", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("Rabindra Jayanti",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("powers of tens",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Pargat Diwas",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Friday",
Classifier{okData =
ClassData{prior = -0.3184537311185346, unseen = -2.890371757896165,
likelihoods = HashMap.fromList [("", 0.0)], n = 16},
koData =
ClassData{prior = -1.2992829841302609,
unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6}}),
("in|during the <part-of-day>",
Classifier{okData =
ClassData{prior = -6.899287148695143e-2,
unseen = -3.4657359027997265,
likelihoods =
HashMap.fromList
[("early morning", -2.740840023925201),
("hour", -0.7259370033829361),
("part of days", -0.7949298748698876)],
n = 14},
koData =
ClassData{prior = -2.70805020110221, unseen = -1.791759469228055,
likelihoods =
HashMap.fromList
[("hour", -0.916290731874155),
("part of days", -0.916290731874155)],
n = 1}}),
("St Patrick's Day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("tomorrow",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.1780538303479458,
likelihoods = HashMap.fromList [("", 0.0)], n = 22},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("hh(:mm) - <time-of-day> am|pm",
Classifier{okData =
ClassData{prior = -0.10536051565782628,
unseen = -3.1354942159291497,
likelihoods =
HashMap.fromList
[("time-of-day (latent)", -1.0116009116784799),
("hh:mm", -1.9924301646902063), ("hour", -1.0116009116784799),
("minute", -1.9924301646902063)],
n = 9},
koData =
ClassData{prior = -2.3025850929940455,
unseen = -1.9459101490553135,
likelihoods =
HashMap.fromList
[("time-of-day (latent)", -1.0986122886681098),
("hour", -1.0986122886681098)],
n = 1}}),
("this|last|next <cycle>",
Classifier{okData =
ClassData{prior = -0.42050298547270487,
unseen = -5.241747015059643,
likelihoods =
HashMap.fromList
[("week", -1.2474579162656747),
("month (grain)", -2.3460702049337847),
("year (grain)", -2.528391761727739),
("week (grain)", -1.2474579162656747),
("quarter", -3.6270040503958487), ("year", -2.528391761727739),
("month", -2.3460702049337847),
("quarter (grain)", -3.6270040503958487)],
n = 88},
koData =
ClassData{prior = -1.0691984034618165, unseen = -4.653960350157523,
likelihoods =
HashMap.fromList
[("week", -1.8111775550851565),
("month (grain)", -2.4471663218051534),
("year (grain)", -3.0349529867072724),
("second", -3.0349529867072724),
("week (grain)", -1.8111775550851565),
("day", -2.2464956263430023), ("quarter", -3.0349529867072724),
("year", -3.0349529867072724),
("second (grain) ", -3.0349529867072724),
("month", -2.4471663218051534),
("quarter (grain)", -3.0349529867072724),
("day (grain)", -2.2464956263430023)],
n = 46}}),
("Simchat Torah",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("New Year's Eve",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("the <ordinal> <cycle> of <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("ordinal (digits)quarter (grain)year (latent)",
-0.6931471805599453),
("quarteryear", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("Raksha Bandhan",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Ashura",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Ratha-Yatra",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Palm Sunday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Eid al-Adha",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.890371757896165,
likelihoods = HashMap.fromList [("", 0.0)], n = 16},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("by <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.70805020110221,
likelihoods =
HashMap.fromList
[("noon|midnight|EOD|end of day", -1.9459101490553135),
("time-of-day (latent)", -1.9459101490553135),
("<time-of-day> am|pm", -1.9459101490553135),
("hh:mm", -1.9459101490553135), ("hour", -1.540445040947149),
("minute", -1.540445040947149)],
n = 4},
koData =
ClassData{prior = -infinity, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [], n = 0}}),
("the <day-of-month> (ordinal)",
Classifier{okData =
ClassData{prior = -0.4818380868927383,
unseen = -3.1780538303479458,
likelihoods =
HashMap.fromList
[("ordinals (first..twentieth,thirtieth,...)",
-1.5260563034950494),
("ordinal (digits)", -0.24512245803298496)],
n = 21},
koData =
ClassData{prior = -0.9614111671546247, unseen = -2.772588722239781,
likelihoods =
HashMap.fromList
[("ordinals (first..twentieth,thirtieth,...)",
-0.6286086594223742),
("ordinal (digits)", -0.7621400520468967)],
n = 13}}),
("last weekend of <named-month>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.639057329615259,
likelihoods =
HashMap.fromList
[("October", -0.9555114450274363), ("July", -1.8718021769015913),
("month", -0.7731898882334817)],
n = 5},
koData =
ClassData{prior = -infinity, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [], n = 0}}),
("the <day-of-month> (number)",
Classifier{okData =
ClassData{prior = -0.8649974374866046,
unseen = -2.3025850929940455,
likelihoods = HashMap.fromList [("integer (numeric)", 0.0)],
n = 8},
koData =
ClassData{prior = -0.5465437063680699,
unseen = -2.5649493574615367,
likelihoods = HashMap.fromList [("integer (numeric)", 0.0)],
n = 11}}),
("Lag BaOmer",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("fractional number",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -2.772588722239781,
likelihoods = HashMap.fromList [("", 0.0)], n = 14}}),
("Guru Ravidass Jayanti",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Sunday",
Classifier{okData =
ClassData{prior = -0.40546510810816444,
unseen = -2.4849066497880004,
likelihoods = HashMap.fromList [("", 0.0)], n = 10},
koData =
ClassData{prior = -1.0986122886681098,
unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5}}),
("Chinese New Year",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("February",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.70805020110221,
likelihoods = HashMap.fromList [("", 0.0)], n = 13},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("minute (grain)",
Classifier{okData =
ClassData{prior = -7.696104113612832e-2,
unseen = -3.295836866004329,
likelihoods = HashMap.fromList [("", 0.0)], n = 25},
koData =
ClassData{prior = -2.6026896854443837,
unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("last|this|next <season>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("the <ordinal> quarter",
Classifier{okData =
ClassData{prior = -0.6931471805599453,
unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("ordinal (digits)quarter (grain)", -0.6931471805599453),
("quarter", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -0.6931471805599453,
unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("ordinal (digits)quarter (grain)", -0.6931471805599453),
("quarter", -0.6931471805599453)],
n = 1}}),
("Orthodox Good Friday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("time-of-day (latent)",
Classifier{okData =
ClassData{prior = -0.6970764586998348, unseen = -4.867534450455582,
likelihoods =
HashMap.fromList
[("integer (numeric)", -9.763846956391606e-2),
("integer (0..19)", -2.374905754573672)],
n = 127},
koData =
ClassData{prior = -0.689233281238809, unseen = -4.875197323201151,
likelihoods =
HashMap.fromList
[("integer (numeric)", -0.15800424914324832),
("integer (0..19)", -1.923095471289142)],
n = 128}}),
("beginning of year",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("last <day-of-week> of <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.772588722239781,
likelihoods =
HashMap.fromList
[("daymonth", -0.916290731874155),
("SundayMarch", -2.0149030205422647),
("MondayMarch", -2.0149030205422647),
("FridayOctober", -1.6094379124341003),
("Sundayintersect", -2.0149030205422647)],
n = 5},
koData =
ClassData{prior = -infinity, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [], n = 0}}),
("<integer> <unit-of-duration>",
Classifier{okData =
ClassData{prior = -0.6164133863285559, unseen = -5.308267697401205,
likelihoods =
HashMap.fromList
[("week", -2.6642475784438173),
("integer (0..19)year (grain)", -3.22386336637924),
("integer (numeric)day (grain)", -2.6642475784438173),
("integer (0..19)second (grain) ", -3.917010546939185),
("integer (0..19)hour (grain)", -3.1060803307228566),
("second", -3.5115454388310208),
("integer (numeric)second (grain) ", -4.204692619390966),
("integer (numeric)year (grain)", -3.3573947590037623),
("day", -2.412933150162911), ("year", -2.6642475784438173),
("integer (numeric)week (grain)", -3.22386336637924),
("integer (0..19)month (grain)", -3.6938669956249752),
("integer (20..90)minute (grain)", -4.61015772749913),
("hour", -2.738355550597539), ("month", -3.3573947590037623),
("integer (numeric)minute (grain)", -2.5952547069568657),
("integer (0..19)minute (grain)", -3.22386336637924),
("integer (numeric)month (grain)", -4.204692619390966),
("minute", -2.167810692129926),
("integer (numeric)hour (grain)", -3.6938669956249752),
("integer (0..19)day (grain)", -3.6938669956249752),
("integer (0..19)week (grain)", -3.3573947590037623)],
n = 88},
koData =
ClassData{prior = -0.7762620872704519, unseen = -5.170483995038151,
likelihoods =
HashMap.fromList
[("week", -3.5553480614894135),
("integer (0..19)year (grain)", -4.0661736852554045),
("integer (numeric)day (grain)", -4.0661736852554045),
("integer (numeric)quarter (grain)", -4.0661736852554045),
("integer (numeric)year (grain)", -4.0661736852554045),
("day", -3.5553480614894135), ("quarter", -3.5553480614894135),
("year", -3.5553480614894135),
("integer (numeric)week (grain)", -4.0661736852554045),
("integer (0..19)month (grain)", -4.0661736852554045),
("hour", -1.157452788691043), ("month", -3.5553480614894135),
("integer (numeric)minute (grain)", -4.471638793363569),
("integer (numeric)month (grain)", -4.0661736852554045),
("minute", -4.471638793363569),
("integer (numeric)hour (grain)", -1.157452788691043),
("integer (0..19)day (grain)", -4.0661736852554045),
("integer (0..19)week (grain)", -4.0661736852554045),
("integer (0..19)quarter (grain)", -4.0661736852554045)],
n = 75}}),
("from the <day-of-month> (ordinal or number) to the <day-of-month> (ordinal or number) <named-month> (interval)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.772588722239781,
likelihoods =
HashMap.fromList
[("ordinal (digits)ordinal (digits)July", -1.3217558399823195),
("integer (numeric)integer (numeric)July", -1.3217558399823195),
("month", -0.7621400520468967)],
n = 6},
koData =
ClassData{prior = -infinity, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [], n = 0}}),
("Guru Gobind Singh Jayanti",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("hhmm (latent)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Krishna Janmashtami",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<time-of-day> am|pm",
Classifier{okData =
ClassData{prior = -0.2584829177595186, unseen = -5.697093486505405,
likelihoods =
HashMap.fromList
[("from|since|after <time>", -3.7478219897473863),
("integer after|past <hour-of-day>", -4.307437777682809),
("at <time-of-day>", -2.3978952727983707),
("<time-of-day> o'clock", -4.59511985013459),
("half after|past <hour-of-day>", -4.59511985013459),
("second", -5.000584958242754),
("hh:mm:ss", -5.000584958242754),
("<hour-of-day> <integer>", -4.084294226368599),
("<integer> minutes to|till|before <hour-of-day>",
-5.000584958242754),
("time-of-day (latent)", -1.6863989535702288),
("hhmm (latent)", -5.000584958242754),
("hh:mm", -2.3978952727983707),
("quarter after|past <hour-of-day>", -3.4965075614664802),
("until <time>", -4.307437777682809),
("about|exactly <time-of-day>", -5.000584958242754),
("hour", -1.2393848425491918),
("<time-of-day> sharp|exactly", -5.000584958242754),
("minute", -1.7047480922384253)],
n = 139},
koData =
ClassData{prior = -1.4793847841859027, unseen = -4.624972813284271,
likelihoods =
HashMap.fromList
[("<integer> to|till|before <hour-of-day>", -3.228826155721369),
("from|since|after <time>", -3.228826155721369),
("at <time-of-day>", -3.5165082281731497),
("time-of-day (latent)", -1.1811333123561132),
("hh:mm", -3.5165082281731497),
("until <time>", -3.9219733362813143),
("hour", -1.0316015783851495), ("minute", -2.6692103677859462)],
n = 41}}),
("Yom Ha'atzmaut",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("on <day>",
Classifier{okData =
ClassData{prior = -0.2231435513142097, unseen = -4.04305126783455,
likelihoods =
HashMap.fromList
[("Thursday", -1.8281271133989299),
("absorption of , after named day", -2.639057329615259),
("intersect", -2.639057329615259),
("Saturday", -2.639057329615259),
("Friday", -2.9267394020670396), ("day", -0.8064758658669484),
("the <day-of-month> (ordinal)", -2.9267394020670396),
("intersect by \",\", \"of\", \"from\", \"'s\"",
-2.639057329615259)],
n = 24},
koData =
ClassData{prior = -1.6094379124341003, unseen = -3.044522437723423,
likelihoods =
HashMap.fromList
[("intersect", -1.6094379124341003),
("day", -1.0498221244986778),
("intersect by \",\", \"of\", \"from\", \"'s\"",
-1.6094379124341003)],
n = 6}}),
("Thiru Onam",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Thai Pongal",
Classifier{okData =
ClassData{prior = -0.5596157879354228, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -0.8472978603872037,
unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3}}),
("Vijayadashami",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("part of <named-month>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.1972245773362196,
likelihoods =
HashMap.fromList
[("March", -0.6931471805599453), ("month", -0.6931471805599453)],
n = 3},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("a <unit-of-duration>",
Classifier{okData =
ClassData{prior = -1.0296194171811581,
unseen = -3.5553480614894135,
likelihoods =
HashMap.fromList
[("week", -2.1400661634962708),
("year (grain)", -2.4277482359480516),
("second", -2.833213344056216),
("week (grain)", -2.1400661634962708),
("day", -2.1400661634962708),
("minute (grain)", -2.833213344056216),
("year", -2.4277482359480516),
("second (grain) ", -2.833213344056216),
("minute", -2.833213344056216),
("day (grain)", -2.1400661634962708)],
n = 10},
koData =
ClassData{prior = -0.4418327522790392,
unseen = -3.9318256327243257,
likelihoods =
HashMap.fromList
[("hour (grain)", -2.3025850929940455),
("quarter", -1.3470736479666092),
("minute (grain)", -2.8134107167600364),
("hour", -2.3025850929940455),
("quarter (grain)", -1.3470736479666092),
("minute", -2.8134107167600364)],
n = 18}}),
("at the beginning|end of <year>",
Classifier{okData =
ClassData{prior = -0.13353139262452263,
unseen = -2.890371757896165,
likelihoods =
HashMap.fromList
[("year (latent)", -1.2237754316221157),
("this|last|next <cycle>", -1.4469189829363254),
("year", -0.7537718023763802)],
n = 7},
koData =
ClassData{prior = -2.0794415416798357, unseen = -1.791759469228055,
likelihoods =
HashMap.fromList
[("this|last|next <cycle>", -0.916290731874155),
("year", -0.916290731874155)],
n = 1}}),
("Dhanteras",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Tu BiShvat",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Whit Monday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("hh:mm",
Classifier{okData =
ClassData{prior = -0.12260232209233239,
unseen = -4.2626798770413155,
likelihoods = HashMap.fromList [("", 0.0)], n = 69},
koData =
ClassData{prior = -2.159484249353372, unseen = -2.3978952727983707,
likelihoods = HashMap.fromList [("", 0.0)], n = 9}}),
("Holi",
Classifier{okData =
ClassData{prior = -0.2876820724517809,
unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -1.3862943611198906,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("upcoming <integer> <cycle>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -4.02535169073515,
likelihoods =
HashMap.fromList
[("week", -2.3978952727983707),
("integer (0..19)year (grain)", -2.908720896564361),
("integer (numeric)day (grain)", -2.908720896564361),
("integer (numeric)quarter (grain)", -2.908720896564361),
("integer (numeric)year (grain)", -2.908720896564361),
("day", -2.3978952727983707), ("quarter", -2.3978952727983707),
("year", -2.3978952727983707),
("integer (numeric)week (grain)", -2.908720896564361),
("integer (0..19)month (grain)", -2.908720896564361),
("month", -2.3978952727983707),
("integer (numeric)month (grain)", -2.908720896564361),
("integer (0..19)day (grain)", -2.908720896564361),
("integer (0..19)week (grain)", -2.908720896564361),
("integer (0..19)quarter (grain)", -2.908720896564361)],
n = 20},
koData =
ClassData{prior = -infinity, unseen = -2.772588722239781,
likelihoods = HashMap.fromList [], n = 0}}),
("Rosh Hashanah",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<integer> <named-day> ago|back",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.9459101490553135,
likelihoods =
HashMap.fromList
[("integer (numeric)Thursday", -0.6931471805599453),
("day", -0.6931471805599453)],
n = 2},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("<integer> upcoming <cycle>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -4.02535169073515,
likelihoods =
HashMap.fromList
[("week", -2.3978952727983707),
("integer (0..19)year (grain)", -2.908720896564361),
("integer (numeric)day (grain)", -2.908720896564361),
("integer (numeric)quarter (grain)", -2.908720896564361),
("integer (numeric)year (grain)", -2.908720896564361),
("day", -2.3978952727983707), ("quarter", -2.3978952727983707),
("year", -2.3978952727983707),
("integer (numeric)week (grain)", -2.908720896564361),
("integer (0..19)month (grain)", -2.908720896564361),
("month", -2.3978952727983707),
("integer (numeric)month (grain)", -2.908720896564361),
("integer (0..19)day (grain)", -2.908720896564361),
("integer (0..19)week (grain)", -2.908720896564361),
("integer (0..19)quarter (grain)", -2.908720896564361)],
n = 20},
koData =
ClassData{prior = -infinity, unseen = -2.772588722239781,
likelihoods = HashMap.fromList [], n = 0}}),
("Holika Dahan",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("quarter of an hour",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("second (grain) ",
Classifier{okData =
ClassData{prior = -0.5108256237659907,
unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -0.916290731874155, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4}}),
("ordinals (first..twentieth,thirtieth,...)",
Classifier{okData =
ClassData{prior = -3.077165866675366e-2,
unseen = -3.5263605246161616,
likelihoods = HashMap.fromList [("", 0.0)], n = 32},
koData =
ClassData{prior = -3.4965075614664802,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("Mawlid",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Jumu'atul-Wida",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<duration> after|before|from|past <time>",
Classifier{okData =
ClassData{prior = -0.7691330875378672, unseen = -4.189654742026425,
likelihoods =
HashMap.fromList
[("a <unit-of-duration>now", -3.481240089335692),
("<integer> <unit-of-duration>hhhmm", -3.481240089335692),
("a <unit-of-duration>Christmas", -3.481240089335692),
("minuteminute", -3.481240089335692),
("dayday", -3.481240089335692),
("<integer> <unit-of-duration>today", -3.481240089335692),
("<integer> <unit-of-duration>time-of-day (latent)",
-2.382627800667582),
("minutehour", -1.7764919970972666),
("daysecond", -3.481240089335692),
("a <unit-of-duration>right now", -3.481240089335692),
("minutenograin", -3.481240089335692),
("<integer> <unit-of-duration>Christmas", -3.481240089335692),
("<integer> <unit-of-duration>Easter Sunday",
-3.481240089335692),
("secondnograin", -3.481240089335692),
("<integer> <unit-of-duration><time-of-day> am|pm",
-2.5649493574615367),
("yearday", -2.7880929087757464),
("<integer> <unit-of-duration>noon|midnight|EOD|end of day",
-3.481240089335692),
("daynograin", -3.481240089335692),
("<integer> <unit-of-duration>now", -3.0757749812275272)],
n = 19},
koData =
ClassData{prior = -0.6225296133459919, unseen = -4.276666119016055,
likelihoods =
HashMap.fromList
[("quarterhour", -1.318240897874875),
("dayhour", -3.56953269648137),
("<integer> <unit-of-duration>intersect", -3.56953269648137),
("<integer> <unit-of-duration><day-of-month> (ordinal)",
-3.56953269648137),
("a <unit-of-duration><time-of-day> am|pm", -2.065455299705096),
("a <unit-of-duration>time-of-day (latent)",
-2.065455299705096),
("dayday", -2.8763855159214247),
("<integer> <unit-of-duration>time-of-day (latent)",
-3.56953269648137),
("a <unit-of-duration>noon|midnight|EOD|end of day",
-3.164067588373206),
("<integer> <unit-of-duration><day-of-month> (ordinal or number) <named-month>",
-3.56953269648137)],
n = 22}}),
("nth <day-of-week> of <month-or-greater>",
Classifier{okData =
ClassData{prior = -0.2876820724517809, unseen = -3.258096538021482,
likelihoods =
HashMap.fromList
[("daymonth", -0.916290731874155),
("ordinals (first..twentieth,thirtieth,...)Tuesdayintersect",
-2.5257286443082556),
("ordinals (first..twentieth,thirtieth,...)Wednesdayintersect",
-2.120263536200091),
("ordinals (first..twentieth,thirtieth,...)Mondaythis|last|next <cycle>",
-1.6094379124341003),
("ordinals (first..twentieth,thirtieth,...)TuesdayOctober",
-2.120263536200091)],
n = 9},
koData =
ClassData{prior = -1.3862943611198906, unseen = -2.639057329615259,
likelihoods =
HashMap.fromList
[("daymonth", -1.1786549963416462),
("ordinals (first..twentieth,thirtieth,...)TuesdaySeptember",
-1.8718021769015913),
("ordinals (first..twentieth,thirtieth,...)WednesdayOctober",
-1.466337068793427)],
n = 3}}),
("quarter after|past <hour-of-day>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.1780538303479458,
likelihoods =
HashMap.fromList
[("noon|midnight|EOD|end of day", -2.03688192726104),
("time-of-day (latent)", -0.9382696385929302),
("hour", -0.7375989431307791)],
n = 10},
koData =
ClassData{prior = -infinity, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [], n = 0}}),
("yyyy-mm",
Classifier{okData =
ClassData{prior = -1.0986122886681098,
unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -0.40546510810816444,
unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4}}),
("until <time>",
Classifier{okData =
ClassData{prior = -1.252762968495368, unseen = -3.295836866004329,
likelihoods =
HashMap.fromList
[("year (latent)", -2.159484249353372),
("time-of-day (latent)", -2.5649493574615367),
("year", -2.159484249353372),
("<time-of-day> am|pm", -1.8718021769015913),
("hh:mm", -2.159484249353372), ("hour", -2.159484249353372),
("minute", -1.6486586255873816)],
n = 8},
koData =
ClassData{prior = -0.3364722366212129,
unseen = -3.9318256327243257,
likelihoods =
HashMap.fromList
[("intersect", -2.3025850929940455),
("yesterday", -2.8134107167600364),
("day", -2.8134107167600364),
("time-of-day (latent)", -1.6094379124341003),
("<time-of-day> am|pm", -3.2188758248682006),
("hh:mm", -2.3025850929940455), ("hour", -1.4271163556401458),
("minute", -1.8325814637483102)],
n = 20}}),
("the <cycle> after|before <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods =
HashMap.fromList
[("day (grain)tomorrow", -1.252762968495368),
("dayday", -0.8472978603872037),
("day (grain)yesterday", -1.252762968495368)],
n = 2},
koData =
ClassData{prior = -infinity, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [], n = 0}}),
("about|exactly <time-of-day>",
Classifier{okData =
ClassData{prior = -0.15415067982725836,
unseen = -3.6109179126442243,
likelihoods =
HashMap.fromList
[("week", -1.791759469228055),
("hh(:mm) - <time-of-day> am|pm", -2.890371757896165),
("this|last|next <cycle>", -1.791759469228055),
("day", -2.4849066497880004),
("time-of-day (latent)", -2.890371757896165),
("hhmm (latent)", -2.4849066497880004),
("<time-of-day> am|pm", -2.890371757896165),
("hour", -2.1972245773362196),
("next <time>", -2.890371757896165),
("this|next <day-of-week>", -2.890371757896165),
("minute", -2.4849066497880004)],
n = 12},
koData =
ClassData{prior = -1.9459101490553135, unseen = -2.833213344056216,
likelihoods =
HashMap.fromList
[("dd/mm", -2.0794415416798357), ("day", -2.0794415416798357),
("time-of-day (latent)", -2.0794415416798357),
("hour", -2.0794415416798357)],
n = 2}}),
("Sukkot",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<day> in <duration>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.4849066497880004,
likelihoods =
HashMap.fromList
[("monthyear", -1.2992829841302609),
("Marcha <unit-of-duration>", -1.7047480922384253),
("March<integer> <unit-of-duration>", -1.7047480922384253),
("Vijayadashami<integer> <unit-of-duration>",
-1.7047480922384253),
("dayyear", -1.7047480922384253)],
n = 3},
koData =
ClassData{prior = -infinity, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [], n = 0}}),
("World Vegan Day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("intersect by \",\", \"of\", \"from\", \"'s\"",
Classifier{okData =
ClassData{prior = -0.5596157879354228, unseen = -5.407171771460119,
likelihoods =
HashMap.fromList
[("intersectthis|last|next <cycle>", -4.709530201312334),
("Wednesday<named-month> <day-of-month> (non ordinal)",
-4.709530201312334),
("dayhour", -4.304065093204169),
("daymonth", -2.001480000210124),
("<named-month> <day-of-month> (non ordinal)Friday",
-4.709530201312334),
("Friday<named-month> <day-of-month> (non ordinal)",
-3.4567672328169663),
("Wednesdayintersect", -4.709530201312334),
("from <datetime> - <datetime> (interval)July",
-3.3232358401924436),
("Black Fridaythis|last|next <cycle>", -4.709530201312334),
("<part-of-day> of <time>February", -4.709530201312334),
("Saturday<time-of-day> am|pm", -4.709530201312334),
("Martin Luther King's Daythis|last|next <cycle>",
-4.304065093204169),
("hourmonth", -4.709530201312334),
("Fridayintersect by \",\", \"of\", \"from\" for year",
-4.0163830207523885),
("dayday", -2.458238402705839),
("the <day-of-month> (ordinal)February", -4.304065093204169),
("WednesdayOctober", -4.709530201312334),
("Wednesdaythis|last|next <cycle>", -4.304065093204169),
("intersect<named-month> <day-of-month> (non ordinal)",
-3.4567672328169663),
("dayyear", -4.0163830207523885),
("Saturday<named-month> <day-of-month> (non ordinal)",
-4.709530201312334),
("Thursdayhh:mm", -4.304065093204169),
("TuesdayOctober", -4.709530201312334),
("the <day-of-month> (ordinal)March", -3.6109179126442243),
("Mondaythis|last|next <cycle>", -3.7932394694381792),
("Fridayintersect", -4.0163830207523885),
("Thursday<datetime> - <datetime> (interval)",
-4.304065093204169),
("intersectOctober", -4.304065093204169),
("Thursday<time-of-day> - <time-of-day> (interval)",
-3.7932394694381792),
("Tuesdaythis|last|next <cycle>", -4.304065093204169),
("Sunday<named-month> <day-of-month> (non ordinal)",
-4.709530201312334),
("dayminute", -2.917770732084279),
("minuteday", -3.4567672328169663),
("this|last|next <cycle>Sunday", -4.709530201312334),
("Sundaythis|last|next <cycle>", -4.709530201312334),
("on <day><time-of-day> am|pm", -4.709530201312334),
("intersectintersect", -4.709530201312334),
("weekday", -4.709530201312334),
("dayweek", -3.6109179126442243),
("Monday<named-month> <day-of-month> (non ordinal)",
-4.304065093204169),
("<datetime> - <datetime> (interval)July", -3.7932394694381792),
("on <day><named-month> <day-of-month> (non ordinal)",
-4.0163830207523885)],
n = 76},
koData =
ClassData{prior = -0.8472978603872037, unseen = -5.220355825078324,
likelihoods =
HashMap.fromList
[("week-endJuly", -4.5217885770490405),
("week-endOctober", -3.828641396489095),
("daymonth", -1.7809485531238394),
("TuesdaySeptember", -4.5217885770490405),
("Wednesdayintersect", -4.5217885770490405),
("from <datetime> - <datetime> (interval)July",
-3.4231762883809305),
("from <time-of-day> - <time-of-day> (interval)July",
-4.5217885770490405),
("hournograin", -4.116323468940876),
("from|since|after <time>July", -4.116323468940876),
("hourmonth", -2.575878427993727),
("Fridaythis|last|next <cycle>", -4.5217885770490405),
("SundayFebruary", -4.5217885770490405),
("on <day>September", -3.828641396489095),
("WednesdayOctober", -4.5217885770490405),
("intersectnow", -4.116323468940876),
("week-endintersect", -4.5217885770490405),
("dayyear", -4.5217885770490405),
("FridayJuly", -3.6054978451748854),
("FridaySeptember", -4.116323468940876),
("the <day-of-month> (ordinal)July", -3.6054978451748854),
("WednesdayFebruary", -4.5217885770490405),
("minutemonth", -3.0177111802727663),
("Mondaythis|last|next <cycle>", -4.5217885770490405),
("SundayMarch", -4.5217885770490405),
("MondayFebruary", -4.116323468940876),
("Fridayintersect", -4.5217885770490405),
("intersectOctober", -4.5217885770490405),
("dayminute", -4.5217885770490405),
("SaturdaySeptember", -4.5217885770490405),
("intersectSeptember", -3.1354942159291497),
("Tuesdaynow", -4.116323468940876),
("MondayMarch", -4.5217885770490405),
("FridayOctober", -4.5217885770490405),
("daynograin", -4.116323468940876),
("<integer> to|till|before <hour-of-day>July",
-4.5217885770490405),
("Tuesdayintersect", -4.5217885770490405),
("<time-of-day> - <time-of-day> (interval)July",
-4.5217885770490405),
("<datetime> - <datetime> (interval)July", -4.5217885770490405),
("Sundayintersect", -4.5217885770490405)],
n = 57}}),
("last <time>",
Classifier{okData =
ClassData{prior = -0.7537718023763802, unseen = -3.295836866004329,
likelihoods =
HashMap.fromList
[("Martin Luther King's Day", -2.5649493574615367),
("day", -1.1786549963416462), ("Sunday", -2.5649493574615367),
("Chinese New Year", -1.8718021769015913),
("Easter Sunday", -2.5649493574615367),
("hour", -2.5649493574615367), ("Tuesday", -2.5649493574615367),
("week-end", -2.5649493574615367)],
n = 8},
koData =
ClassData{prior = -0.6359887667199967, unseen = -3.367295829986474,
likelihoods =
HashMap.fromList
[("Monday", -2.639057329615259), ("Friday", -2.2335922215070942),
("day", -1.7227665977411035), ("Sunday", -2.639057329615259),
("hour", -1.540445040947149), ("week-end", -1.540445040947149)],
n = 9}}),
("March",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.295836866004329,
likelihoods = HashMap.fromList [("", 0.0)], n = 25},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<named-month>|<named-day> <day-of-month> (ordinal)",
Classifier{okData =
ClassData{prior = -0.1823215567939546,
unseen = -3.4339872044851463,
likelihoods =
HashMap.fromList
[("Octoberordinal (digits)", -2.0149030205422647),
("Thursdayordinal (digits)", -2.70805020110221),
("day", -2.3025850929940455),
("Augustordinal (digits)", -2.70805020110221),
("Marchordinals (first..twentieth,thirtieth,...)",
-2.3025850929940455),
("Tuesdayordinal (digits)", -2.70805020110221),
("Octoberordinals (first..twentieth,thirtieth,...)",
-2.70805020110221),
("month", -1.2039728043259361),
("Marchordinal (digits)", -2.70805020110221)],
n = 10},
koData =
ClassData{prior = -1.791759469228055, unseen = -2.70805020110221,
likelihoods =
HashMap.fromList
[("from|since|after <time>ordinal (digits)",
-1.9459101490553135),
("Augustordinal (digits)", -1.9459101490553135),
("month", -1.540445040947149)],
n = 2}}),
("Clean Monday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<day-of-month> (ordinal)",
Classifier{okData =
ClassData{prior = -0.6325225587435105, unseen = -2.995732273553991,
likelihoods =
HashMap.fromList
[("ordinals (first..twentieth,thirtieth,...)",
-1.3350010667323402),
("ordinal (digits)", -0.3053816495511819)],
n = 17},
koData =
ClassData{prior = -0.7576857016975165, unseen = -2.890371757896165,
likelihoods =
HashMap.fromList
[("ordinals (first..twentieth,thirtieth,...)",
-0.5306282510621704),
("ordinal (digits)", -0.8873031950009028)],
n = 15}}),
("<time> (timezone)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("<time-of-day> am|pm", -0.6931471805599453),
("hour", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("Easter Sunday",
Classifier{okData =
ClassData{prior = -0.2876820724517809,
unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -1.3862943611198906,
unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("Christmas",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.70805020110221,
likelihoods = HashMap.fromList [("", 0.0)], n = 13},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<datetime> - <datetime> (interval) timezone",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.3025850929940455,
likelihoods =
HashMap.fromList
[("minuteminute", -0.8109302162163288),
("hh:mmhh:mm", -1.5040773967762742),
("hhhmmhhhmm", -1.0986122886681098)],
n = 3},
koData =
ClassData{prior = -infinity, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [], n = 0}}),
("Isra and Mi'raj",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<day-of-month> (ordinal or number) of <month>",
Classifier{okData =
ClassData{prior = -0.5679840376059393, unseen = -3.784189633918261,
likelihoods =
HashMap.fromList
[("ordinal (digits)July", -3.068052935133617),
("ordinals (first..twentieth,thirtieth,...)March",
-1.9694406464655074),
("ordinal (digits)this|last|next <cycle>", -2.374905754573672),
("integer (numeric)this|last|next <cycle>", -2.662587827025453),
("ordinal (digits)February", -3.068052935133617),
("integer (numeric)February", -2.662587827025453),
("month", -0.8708283577973976),
("ordinal (digits)March", -2.662587827025453),
("integer (numeric)July", -3.068052935133617)],
n = 17},
koData =
ClassData{prior = -0.8362480242006186, unseen = -3.58351893845611,
likelihoods =
HashMap.fromList
[("ordinal (digits)July", -1.6094379124341003),
("ordinal (digits)February", -2.8622008809294686),
("month", -0.916290731874155),
("integer (numeric)July", -1.6094379124341003)],
n = 13}}),
("decimal number",
Classifier{okData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("<day-of-month>(ordinal or number)/<named-month>/year",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("ordinal (digits)April", -0.6931471805599453),
("month", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("Naraka Chaturdashi",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("yyyyqq",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("beginning of month",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("next <time>",
Classifier{okData =
ClassData{prior = -0.1823215567939546,
unseen = -3.4339872044851463,
likelihoods =
HashMap.fromList
[("Martin Luther King's Day", -2.0149030205422647),
("Halloween", -2.70805020110221),
("Boss's Day", -2.70805020110221),
("Monday", -2.3025850929940455), ("day", -1.0986122886681098),
("March", -2.70805020110221), ("month", -2.70805020110221),
("Tuesday", -2.3025850929940455)],
n = 10},
koData =
ClassData{prior = -1.791759469228055, unseen = -2.70805020110221,
likelihoods =
HashMap.fromList
[("Wednesday", -1.9459101490553135),
("Saturday", -1.9459101490553135), ("day", -1.540445040947149)],
n = 2}}),
("<time-of-day> sharp|exactly",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.890371757896165,
likelihoods =
HashMap.fromList
[("at <time-of-day>", -2.1400661634962708),
("time-of-day (latent)", -2.1400661634962708),
("hhmm (latent)", -2.1400661634962708),
("<time-of-day> am|pm", -2.1400661634962708),
("hh:mm", -2.1400661634962708), ("hour", -1.7346010553881064),
("minute", -1.4469189829363254)],
n = 5},
koData =
ClassData{prior = -infinity, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [], n = 0}}),
("the <ordinal> closest <day> to <time>",
Classifier{okData =
ClassData{prior = -0.2876820724517809,
unseen = -2.5649493574615367,
likelihoods =
HashMap.fromList
[("ordinals (first..twentieth,thirtieth,...)Monday<named-month>|<named-day> <day-of-month> (ordinal)",
-1.791759469228055),
("ordinals (first..twentieth,thirtieth,...)Christmastoday",
-1.791759469228055),
("dayday", -1.0986122886681098),
("ordinal (digits)Christmastoday", -1.791759469228055)],
n = 3},
koData =
ClassData{prior = -1.3862943611198906,
unseen = -2.1972245773362196,
likelihoods =
HashMap.fromList
[("daymonth", -1.3862943611198906),
("ordinals (first..twentieth,thirtieth,...)MondayOctober",
-1.3862943611198906)],
n = 1}}),
("Islamic New Year",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Lent",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("negative numbers",
Classifier{okData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0},
koData =
ClassData{prior = 0.0, unseen = -3.713572066704308,
likelihoods =
HashMap.fromList
[("integer (numeric)", -5.129329438755058e-2),
("integer (0..19)", -2.995732273553991)],
n = 38}}),
("about|exactly <duration>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("half a <time-grain>", -0.6931471805599453),
("minute", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("Purim",
Classifier{okData =
ClassData{prior = -0.6931471805599453,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -0.6931471805599453,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("<time> before last|after next",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.639057329615259,
likelihoods =
HashMap.fromList
[("Wednesday", -1.8718021769015913),
("Friday", -1.466337068793427), ("day", -1.1786549963416462),
("March", -1.8718021769015913), ("month", -1.8718021769015913)],
n = 4},
koData =
ClassData{prior = -infinity, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [], n = 0}}),
("by the end of <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.3025850929940455,
likelihoods =
HashMap.fromList
[("this|last|next <cycle>", -0.8109302162163288),
("year", -1.5040773967762742), ("month", -1.0986122886681098)],
n = 3},
koData =
ClassData{prior = -infinity, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [], n = 0}}),
("half an hour (abbrev).",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("hhmm (military) am|pm",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<datetime> - <datetime> (interval)",
Classifier{okData =
ClassData{prior = -1.1631508098056809, unseen = -4.770684624465665,
likelihoods =
HashMap.fromList
[("intersecthh:mm", -3.6635616461296463),
("from|since|after <time>hh:mm", -3.152736022363656),
("minuteminute", -1.5841201044498106),
("<time> timezone<time> timezone", -3.3758795736778655),
("the <day-of-month> (ordinal)the <day-of-month> (ordinal)",
-3.6635616461296463),
("hh:mmhh:mm", -2.5649493574615367),
("dayday", -2.5649493574615367),
("hhhmmhhhmm", -3.152736022363656),
("hourhour", -3.6635616461296463),
("<named-month> <day-of-month> (non ordinal)<named-month> <day-of-month> (non ordinal)",
-3.6635616461296463),
("intersect by \",\", \"of\", \"from\", \"'s\"hh:mm",
-3.6635616461296463),
("<time-of-day> am|pmhh:mm:ss", -4.069026754237811),
("hoursecond", -3.6635616461296463),
("<time-of-day> am|pm<time-of-day> am|pm", -3.3758795736778655),
("from|since|after <time>the <day-of-month> (ordinal)",
-3.152736022363656)],
n = 35},
koData =
ClassData{prior = -0.3746934494414107, unseen = -5.308267697401205,
likelihoods =
HashMap.fromList
[("hhhmm<time> timezone", -4.204692619390966),
("<time> timezonehhhmm", -4.204692619390966),
("hourday", -3.00071981506503),
("<named-month> <day-of-month> (non ordinal)July",
-4.61015772749913),
("dayhour", -4.61015772749913),
("daymonth", -3.6938669956249752),
("from|since|after <time><day-of-month> (ordinal or number) of <month>",
-3.6938669956249752),
("<time-of-day> am|pmintersect", -4.61015772749913),
("MondayOctober", -4.204692619390966),
("from|since|after <time>hh:mm", -4.61015772749913),
("hh:mm<time-of-day> am|pm", -3.5115454388310208),
("hhhmmintersect", -4.61015772749913),
("minuteminute", -2.167810692129926),
("from|since|after <time><time-of-day> am|pm",
-3.917010546939185),
("hh:mmhh:mm", -3.917010546939185),
("dayday", -1.8067973465925955),
("from|since|after <time><day-of-month> (ordinal or number) <named-month>",
-3.6938669956249752),
("hhhmmhhhmm", -4.204692619390966),
("hourhour", -3.6938669956249752),
("hourminute", -4.61015772749913),
("minutehour", -3.6938669956249752),
("<time> timezonehh:mm", -4.61015772749913),
("hh:mm<time> timezone", -4.61015772749913),
("the <day-of-month> (ordinal)intersect by \",\", \"of\", \"from\", \"'s\"",
-4.61015772749913),
("from|since|after <time><day-of-month> (ordinal or number) of <named-month>",
-3.6938669956249752),
("hh:mmintersect", -3.6938669956249752),
("<named-month> <day-of-month> (non ordinal)August",
-4.61015772749913),
("Christmastoday", -3.917010546939185),
("about|exactly <time-of-day><time-of-day> am|pm",
-4.61015772749913),
("from|since|after <time>intersect", -3.1060803307228566),
("from|since|after <time>intersect by \",\", \"of\", \"from\", \"'s\"",
-3.6938669956249752),
("Monday<named-month>|<named-day> <day-of-month> (ordinal)",
-4.204692619390966),
("<time-of-day> am|pmhh:mm", -4.61015772749913),
("from|since|after <time>the <day-of-month> (ordinal or number) of <named-month>",
-3.1060803307228566),
("the <day-of-month> (ordinal)<day-of-month> (ordinal or number) <named-month>",
-4.61015772749913),
("from|since|after <time>the <day-of-month> (ordinal)",
-4.61015772749913),
("the <day-of-month> (ordinal)the <day-of-month> (ordinal or number) of <named-month>",
-4.204692619390966),
("Monday<named-month> <day-of-month> (non ordinal)",
-4.61015772749913),
("the <day-of-month> (ordinal)intersect", -4.204692619390966)],
n = 77}}),
("Tuesday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.772588722239781,
likelihoods = HashMap.fromList [("", 0.0)], n = 14},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("New Year's Day",
Classifier{okData =
ClassData{prior = -1.8718021769015913,
unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -0.1670540846631662,
unseen = -2.5649493574615367,
likelihoods = HashMap.fromList [("", 0.0)], n = 11}}),
("fortnight",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<integer> and an half hour",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("integer (numeric)", 0.0)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Laylat al-Qadr",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Boghi",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("at the beginning|end of <named-month>",
Classifier{okData =
ClassData{prior = -0.2231435513142097,
unseen = -2.5649493574615367,
likelihoods =
HashMap.fromList
[("January", -1.3862943611198906),
("April", -1.3862943611198906), ("month", -0.8754687373538999)],
n = 4},
koData =
ClassData{prior = -1.6094379124341003,
unseen = -1.9459101490553135,
likelihoods =
HashMap.fromList
[("October", -1.0986122886681098),
("month", -1.0986122886681098)],
n = 1}}),
("<time-of-day> - <time-of-day> (interval)",
Classifier{okData =
ClassData{prior = -1.116469906068116, unseen = -4.672828834461907,
likelihoods =
HashMap.fromList
[("from|since|after <time>hh:mm", -3.054001181677967),
("hh:mmtime-of-day (latent)", -3.2771447329921766),
("minuteminute", -1.667706820558076),
("<time> timezone<time> timezone", -3.2771447329921766),
("from|since|after <time><time-of-day> am|pm",
-3.970291913552122),
("hh:mmhh:mm", -2.466214516775848),
("<time-of-day> am|pmtime-of-day (latent)",
-3.2771447329921766),
("hhhmmhhhmm", -3.054001181677967),
("hourhour", -2.466214516775848),
("minutehour", -2.466214516775848),
("<time-of-day> am|pmhh:mm:ss", -3.970291913552122),
("hhhmmtime-of-day (latent)", -3.2771447329921766),
("hoursecond", -3.5648268054439574),
("from|since|after <time>time-of-day (latent)",
-3.2771447329921766),
("<time-of-day> am|pm<time-of-day> am|pm", -3.054001181677967)],
n = 37},
koData =
ClassData{prior = -0.3966544784260094, unseen = -5.220355825078324,
likelihoods =
HashMap.fromList
[("hhhmm<time> timezone", -4.116323468940876),
("<time> timezonehhhmm", -4.116323468940876),
("about|exactly <time-of-day>time-of-day (latent)",
-4.5217885770490405),
("until <time>time-of-day (latent)", -3.2690256085536724),
("from|since|after <time>hh:mm", -4.5217885770490405),
("hh:mmtime-of-day (latent)", -2.5068855565067754),
("hh:mm<time-of-day> am|pm", -3.2690256085536724),
("minuteminute", -2.4423470353692043),
("from|since|after <time><time-of-day> am|pm",
-3.6054978451748854),
("hh:mmhh:mm", -3.828641396489095),
("<time-of-day> am|pmtime-of-day (latent)", -4.116323468940876),
("hhhmmhhhmm", -4.116323468940876),
("hourhour", -2.03688192726104),
("from|since|after <time><integer> to|till|before <hour-of-day>",
-3.828641396489095),
("hourminute", -3.4231762883809305),
("minutehour", -1.6885752329928243),
("<time> timezonehh:mm", -4.5217885770490405),
("hh:mm<time> timezone", -4.5217885770490405),
("hhhmmtime-of-day (latent)", -3.828641396489095),
("until <time><time-of-day> am|pm", -4.116323468940876),
("about|exactly <time-of-day><time-of-day> am|pm",
-4.5217885770490405),
("<time-of-day> am|pmhh:mm", -4.5217885770490405),
("<part-of-day> at <time-of-day>time-of-day (latent)",
-3.828641396489095),
("from|since|after <time>time-of-day (latent)",
-3.0177111802727663),
("at <time-of-day>time-of-day (latent)", -3.828641396489095),
("<time> timezonetime-of-day (latent)", -3.828641396489095),
("<integer> to|till|before <hour-of-day><time-of-day> am|pm",
-4.5217885770490405),
("<integer> to|till|before <hour-of-day>time-of-day (latent)",
-3.828641396489095)],
n = 76}}),
("winter",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("nth <time> after <time>",
Classifier{okData =
ClassData{prior = -0.6931471805599453, unseen = -1.791759469228055,
likelihoods =
HashMap.fromList
[("dayday", -0.916290731874155),
("ordinals (first..twentieth,thirtieth,...)Tuesdayintersect",
-0.916290731874155)],
n = 1},
koData =
ClassData{prior = -0.6931471805599453, unseen = -1.791759469228055,
likelihoods =
HashMap.fromList
[("dayday", -0.916290731874155),
("ordinals (first..twentieth,thirtieth,...)TuesdayChristmas",
-0.916290731874155)],
n = 1}}),
("Ugadi",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [("", 0.0)], n = 7},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<named-month> <day-of-month> (non ordinal)",
Classifier{okData =
ClassData{prior = -0.3646431135879093, unseen = -4.0943445622221,
likelihoods =
HashMap.fromList
[("Augustinteger (numeric)", -2.691243082785829),
("Marchinteger (numeric)", -2.9789251552376097),
("Aprilinteger (numeric)", -3.3843902633457743),
("month", -0.8194409058842375),
("Februaryinteger (numeric)", -2.1316272948504063),
("Septemberinteger (numeric)", -2.691243082785829),
("Octoberinteger (numeric)", -2.691243082785829),
("Julyinteger (numeric)", -1.9980959022258835)],
n = 25},
koData =
ClassData{prior = -1.1856236656577395,
unseen = -3.4657359027997265,
likelihoods =
HashMap.fromList
[("Augustinteger (numeric)", -2.3353749158170367),
("Marchinteger (numeric)", -2.740840023925201),
("Aprilinteger (numeric)", -2.740840023925201),
("month", -0.9490805546971459),
("from|since|after <time>integer (numeric)",
-2.3353749158170367),
("Julyinteger (numeric)", -1.6422277352570913)],
n = 11}}),
("Diwali",
Classifier{okData =
ClassData{prior = -0.2876820724517809,
unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -1.3862943611198906,
unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1}}),
("last night",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("this|next <day-of-week>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.1354942159291497,
likelihoods =
HashMap.fromList
[("Thursday", -2.3978952727983707),
("Wednesday", -2.3978952727983707),
("Saturday", -2.3978952727983707),
("Monday", -1.7047480922384253), ("day", -0.8938178760220964),
("Tuesday", -1.9924301646902063)],
n = 8},
koData =
ClassData{prior = -infinity, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [], n = 0}}),
("ordinal (digits)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -4.143134726391533,
likelihoods = HashMap.fromList [("", 0.0)], n = 61},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("quarter (grain)",
Classifier{okData =
ClassData{prior = -0.4700036292457356,
unseen = -3.0910424533583156,
likelihoods = HashMap.fromList [("", 0.0)], n = 20},
koData =
ClassData{prior = -0.9808292530117262, unseen = -2.639057329615259,
likelihoods = HashMap.fromList [("", 0.0)], n = 12}}),
("last <cycle> of <time>",
Classifier{okData =
ClassData{prior = -0.916290731874155, unseen = -3.1354942159291497,
likelihoods =
HashMap.fromList
[("day (grain)October", -1.9924301646902063),
("daymonth", -1.4816045409242156),
("day (grain)intersect", -1.9924301646902063),
("weekmonth", -1.9924301646902063),
("week (grain)intersect", -2.3978952727983707),
("week (grain)September", -2.3978952727983707)],
n = 6},
koData =
ClassData{prior = -0.5108256237659907, unseen = -3.367295829986474,
likelihoods =
HashMap.fromList
[("daymonth", -2.2335922215070942),
("day (grain)May", -2.2335922215070942),
("week (grain)year (latent)", -1.9459101490553135),
("weekmonth", -1.7227665977411035),
("week (grain)October", -2.2335922215070942),
("weekyear", -1.9459101490553135),
("week (grain)intersect", -2.2335922215070942)],
n = 9}}),
("Chhath",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Vasant Panchami",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<day-of-month>(ordinal) <named-month> year",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList
[("ordinal (digits)April", -0.6931471805599453),
("month", -0.6931471805599453)],
n = 1},
koData =
ClassData{prior = -infinity, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [], n = 0}}),
("the <ordinal> last <cycle> of <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.9444389791664407,
likelihoods =
HashMap.fromList
[("ordinals (first..twentieth,thirtieth,...)week (grain)year (latent)",
-2.1972245773362196),
("daymonth", -2.1972245773362196),
("ordinal (digits)day (grain)May", -2.1972245773362196),
("ordinals (first..twentieth,thirtieth,...)week (grain)intersect",
-2.1972245773362196),
("weekmonth", -1.791759469228055),
("ordinal (digits)week (grain)year (latent)",
-2.1972245773362196),
("weekyear", -1.791759469228055),
("ordinals (first..twentieth,thirtieth,...)week (grain)October",
-2.1972245773362196)],
n = 5},
koData =
ClassData{prior = -infinity, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [], n = 0}}),
("Black Friday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("week-end",
Classifier{okData =
ClassData{prior = -1.252762968495368, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -0.3364722366212129,
unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [("", 0.0)], n = 5}}),
("Great Lent",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Maundy Thursday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("day (grain)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.6888794541139363,
likelihoods = HashMap.fromList [("", 0.0)], n = 38},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Vaisakhi",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.791759469228055,
likelihoods = HashMap.fromList [("", 0.0)], n = 4},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("right now",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("for <duration> from <time>",
Classifier{okData =
ClassData{prior = -0.3364722366212129, unseen = -2.995732273553991,
likelihoods =
HashMap.fromList
[("<integer> <unit-of-duration>intersect", -2.2512917986064953),
("<integer> <unit-of-duration><day-of-month> (ordinal)",
-2.2512917986064953),
("<integer> + '\"from|since|after <time>", -2.2512917986064953),
("dayday", -1.55814461804655),
("minutehour", -1.845826690498331),
("<integer> + '\"<time-of-day> am|pm", -2.2512917986064953),
("<integer> <unit-of-duration><day-of-month> (ordinal or number) <named-month>",
-2.2512917986064953)],
n = 5},
koData =
ClassData{prior = -1.252762968495368, unseen = -2.639057329615259,
likelihoods =
HashMap.fromList
[("dayhour", -1.8718021769015913),
("<integer> + '\"from|since|after <time>", -1.8718021769015913),
("<integer> <unit-of-duration>time-of-day (latent)",
-1.8718021769015913),
("minutehour", -1.8718021769015913)],
n = 2}}),
("compose by multiplication",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods =
HashMap.fromList [("integer (0..19)powers of tens", 0.0)],
n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("end of year",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.4849066497880004,
likelihoods = HashMap.fromList [("", 0.0)], n = 10},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("between <time> and <time>",
Classifier{okData =
ClassData{prior = -0.6931471805599453,
unseen = -2.9444389791664407,
likelihoods =
HashMap.fromList
[("minuteminute", -1.791759469228055),
("hh:mmhh:mm", -2.1972245773362196),
("<time-of-day> am|pmtime-of-day (latent)",
-2.1972245773362196),
("hhhmmhhhmm", -2.1972245773362196),
("minutehour", -1.791759469228055),
("<time-of-day> am|pm<time-of-day> am|pm",
-2.1972245773362196)],
n = 4},
koData =
ClassData{prior = -0.6931471805599453,
unseen = -2.9444389791664407,
likelihoods =
HashMap.fromList
[("hh:mmtime-of-day (latent)", -2.1972245773362196),
("hhhmmintersect", -2.1972245773362196),
("minuteminute", -1.791759469228055),
("minutehour", -1.791759469228055),
("hh:mmintersect", -2.1972245773362196),
("hhhmmtime-of-day (latent)", -2.1972245773362196)],
n = 4}}),
("<month> dd-dd (interval)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.4657359027997265,
likelihoods =
HashMap.fromList
[("from|since|after <time>integer (numeric)integer (numeric)",
-2.740840023925201),
("from|since|after <time>ordinal (digits)ordinal (digits)",
-2.740840023925201),
("Julyinteger (numeric)integer (numeric)", -1.6422277352570913),
("Augustordinal (digits)integer (numeric)",
-2.3353749158170367),
("from|since|after <time>ordinal (digits)integer (numeric)",
-2.740840023925201),
("month", -0.8690378470236094),
("Augustordinal (digits)ordinal (digits)",
-2.3353749158170367)],
n = 12},
koData =
ClassData{prior = -infinity, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [], n = 0}}),
("intersect by \",\", \"of\", \"from\" for year",
Classifier{okData =
ClassData{prior = -7.410797215372185e-2,
unseen = -3.5263605246161616,
likelihoods =
HashMap.fromList
[("Black Fridaythis|last|next <cycle>", -2.803360380906535),
("Martin Luther King's Daythis|last|next <cycle>",
-2.3978952727983707),
("intersect by \",\", \"of\", \"from\", \"'s\"year (latent)",
-2.1102132003465894),
("dayyear", -0.8574502318512216),
("intersectyear (latent)", -2.1102132003465894),
("<named-month> <day-of-month> (non ordinal)year (latent)",
-1.8870696490323797)],
n = 13},
koData =
ClassData{prior = -2.639057329615259, unseen = -2.3025850929940455,
likelihoods =
HashMap.fromList
[("Fridaythis|last|next <cycle>", -1.5040773967762742),
("dayyear", -1.5040773967762742)],
n = 1}}),
("part of days",
Classifier{okData =
ClassData{prior = -4.1672696400568074e-2,
unseen = -3.891820298110627,
likelihoods = HashMap.fromList [("", 0.0)], n = 47},
koData =
ClassData{prior = -3.1986731175506815,
unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2}}),
("at the beginning|end of <week>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -4.330733340286331,
likelihoods =
HashMap.fromList
[("week", -0.706570200892086),
("this|last|next <cycle>", -0.8209805520698302),
("about|exactly <time-of-day>", -2.70805020110221)],
n = 36},
koData =
ClassData{prior = -infinity, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [], n = 0}}),
("Eid al-Fitr",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.1972245773362196,
likelihoods = HashMap.fromList [("", 0.0)], n = 7},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("summer",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("Trinity Sunday",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("<datetime>/<datetime> (interval)",
Classifier{okData =
ClassData{prior = -0.6931471805599453, unseen = -1.791759469228055,
likelihoods =
HashMap.fromList
[("secondsecond", -0.916290731874155),
("intersectintersect", -0.916290731874155)],
n = 1},
koData =
ClassData{prior = -0.6931471805599453, unseen = -1.791759469228055,
likelihoods =
HashMap.fromList
[("hh:mm:ssintersect", -0.916290731874155),
("secondsecond", -0.916290731874155)],
n = 1}}),
("Mid-day",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.6094379124341003,
likelihoods = HashMap.fromList [("", 0.0)], n = 3},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("dd-dd <month> (interval)",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.1354942159291497,
likelihoods =
HashMap.fromList
[("integer (numeric)integer (numeric)September",
-1.9924301646902063),
("ordinal (digits)ordinal (digits)July", -1.9924301646902063),
("ordinal (digits)ordinal (digits)October",
-2.3978952727983707),
("integer (numeric)integer (numeric)July", -1.9924301646902063),
("month", -0.8938178760220964),
("ordinal (digits)ordinal (digits)August",
-2.3978952727983707)],
n = 8},
koData =
ClassData{prior = -infinity, unseen = -1.9459101490553135,
likelihoods = HashMap.fromList [], n = 0}}),
("Pentecost",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.3862943611198906,
likelihoods = HashMap.fromList [("", 0.0)], n = 2},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("this <time>",
Classifier{okData =
ClassData{prior = 0.0, unseen = -3.258096538021482,
likelihoods =
HashMap.fromList
[("Thursday", -2.5257286443082556),
("Martin Luther King's Day", -2.5257286443082556),
("Monday", -2.5257286443082556), ("day", -1.1394342831883648),
("Christmas", -2.5257286443082556),
("hour", -2.5257286443082556), ("winter", -2.5257286443082556),
("week-end", -2.5257286443082556),
("summer", -2.120263536200091)],
n = 8},
koData =
ClassData{prior = -infinity, unseen = -2.3025850929940455,
likelihoods = HashMap.fromList [], n = 0}}),
("Shushan Purim",
Classifier{okData =
ClassData{prior = 0.0, unseen = -1.0986122886681098,
likelihoods = HashMap.fromList [("", 0.0)], n = 1},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}}),
("August",
Classifier{okData =
ClassData{prior = 0.0, unseen = -2.0794415416798357,
likelihoods = HashMap.fromList [("", 0.0)], n = 6},
koData =
ClassData{prior = -infinity, unseen = -0.6931471805599453,
likelihoods = HashMap.fromList [], n = 0}})]
|
facebookincubator/duckling
|
Duckling/Ranking/Classifiers/EN_TT.hs
|
Haskell
|
bsd-3-clause
| 264,531
|
module Dxedrine.Hlists where
import Control.Monad (forM_, replicateM_)
import Dxedrine.Words
import Data.Binary
import Data.Binary.Get
import Data.Binary.Put
import qualified Data.ByteString.Lazy as BL
import Data.Maybe (fromMaybe)
import Data.Word (Word8(..), Word16(..))
-- | The admissible encodings for one field's value.
data Range =
    IgnoreR Int        -- ^ a run of bytes that is skipped (length in bytes)
  | OneR Word8 Word8   -- ^ one 7-bit word within an inclusive [lo, hi] bound
  | TwoR Range Range   -- ^ a 14-bit word whose two halves are checked separately
  | EnumR [Word8]      -- ^ one 7-bit word drawn from an explicit set of values
  | MultiR Range Range -- ^ a word accepted by either of two alternative ranges
  deriving (Show, Eq)

-- | A concrete field value, mirroring the shapes of 'Range'.
data Value =
    IgnoreV Int  -- ^ placeholder for a run of ignored bytes
  | OneV Word7   -- ^ a single 7-bit word
  | TwoV Word14  -- ^ a 14-bit word (two 7-bit halves)
  deriving (Show, Eq)

-- | Schema for one named field: its name, admissible range, and default value.
data Entry = Entry
  { _entryName :: String
  , _entryRange :: Range
  , _entryDefault :: Value
  } deriving (Show, Eq)

-- | An ordered association list of field names to values.
newtype Hlist = Hlist
  { unHlist :: [(String, Value)]
  } deriving (Show, Eq)
-- | Check that a 'Value' is admissible for a 'Range'.
-- Returns @Right ()@ on success, or @Left@ with a human-readable reason.
validate :: Range -> Value -> Either String ()
validate r v =
  case (r, v) of
    (OneR s e, OneV (Word7 x)) ->
      if x >= s && x <= e
        then return ()
        else Left $ show x ++ " outside range [" ++ show s ++ ", " ++ show e ++ "]"
    (TwoR e1 e2, TwoV (Word14 (x, y))) -> do
      -- validate each 7-bit half against its own sub-range
      _ <- validate e1 (OneV x)
      _ <- validate e2 (OneV y)
      return ()
    (EnumR vals, OneV (Word7 x)) ->
      if x `elem` vals
        then return ()
        else Left $ show x ++ " not an element of " ++ show vals
    (MultiR e1 e2, OneV _) ->
      -- succeed if either alternative accepts the value
      case validate e1 v of
        Right _ -> return ()
        Left r1 ->
          case validate e2 v of
            Right _ -> return ()
            Left r2 -> Left $ "both " ++ r1 ++ " and " ++ r2
    (IgnoreR i, IgnoreV j) ->
      if i == j
        then return ()
        else Left $ "Unmatched ignore lengths: expected " ++ show i ++ " but was " ++ show j
    _ -> Left "wrong byte length"
-- | Validate every non-ignored entry of an 'Hlist' against its schema,
-- failing with the name of the first missing or invalid field.
validateHlist :: [Entry] -> Hlist -> Either String ()
validateHlist entries (Hlist hs) = go entries
  where
    go [] = return ()
    go (e:rest) =
      case _entryRange e of
        -- ignored padding carries no named value to look up
        IgnoreR _ -> go rest
        _ ->
          let n = _entryName e
          in case lookup n hs of
               Nothing -> Left $ "field \"" ++ n ++ "\" missing"
               Just v ->
                 case validate (_entryRange e) v of
                   Left reason -> Left $ "field \"" ++ n ++ "\" invalid: " ++ reason
                   _ -> go rest
-- | Prepend a (name, value) pair for every non-ignored entry, using the
-- existing value when present and the entry's default otherwise. The
-- original pairs are kept at the tail, so lookups find the fresh pair first.
addDefaults :: [Entry] -> Hlist -> Hlist
addDefaults entries (Hlist hs) = Hlist (go entries)
  where
    go [] = hs
    go (e:rest) =
      case _entryRange e of
        IgnoreR _ -> go rest  -- padding entries are unnamed; skip them
        _ -> let n = _entryName e
             in (n, fromMaybe (_entryDefault e) (lookup n hs)) : go rest
-- | The 'Hlist' consisting solely of each entry's default value.
defaultHlist :: [Entry] -> Hlist
defaultHlist = flip addDefaults (Hlist [])
-- | Parse one 'Value' according to an entry's 'Range', failing the 'Get'
-- parser if the decoded value does not validate.
getValue :: Entry -> Get Value
getValue e =
  case _entryRange e of
    IgnoreR i -> do
      -- consume and discard the padding bytes
      replicateM_ i getWord8
      return (IgnoreV i)
    r@(TwoR _ _) -> checked r . TwoV =<< getWord14
    r            -> checked r . OneV =<< getWord7
  where
    -- turn a validation failure into a parser failure
    checked r v = either fail (const (return v)) (validate r v)
-- | Serialise a 'Value' in the same wire form that 'getValue' parses;
-- ignored runs are written as zero bytes.
putValue :: Value -> Put
putValue (IgnoreV i) = replicateM_ i (putWord8 0x00)
putValue (OneV w)    = putWord7 w
putValue (TwoV w)    = putWord14 w
-- | Parse an 'Hlist' by decoding each entry's value in order,
-- pairing every decoded value with its entry's name.
getHlist :: [Entry] -> Get Hlist
getHlist es = fmap Hlist (mapM decodeOne es)
  where
    decodeOne e = do
      v <- getValue e
      return (_entryName e, v)
-- | Serialise every value of an 'Hlist' in order; names are not written.
putHlist :: Hlist -> Put
putHlist (Hlist hs) = mapM_ (putValue . snd) hs
-- | Validate a value against its entry's range, then render it as 7-bit words.
packValue :: Entry -> Value -> Either String [Word7]
packValue e v =
  validate (_entryRange e) v >>
  Right (map Word7 (BL.unpack (runPut (putValue v))))
-- | Validate a whole 'Hlist' against its schema, then render it as 7-bit words.
packHlist :: [Entry] -> Hlist -> Either String [Word7]
packHlist entries hlist =
  validateHlist entries hlist >>
  Right (map Word7 (BL.unpack (runPut (putHlist hlist))))
-- | Decode an 'Hlist' from a stream of 7-bit words, returning the decoded
-- (non-ignored) fields together with any leftover, unconsumed words.
unpackHlist :: [Entry] -> [Word7] -> Either String (Hlist, [Word7])
unpackHlist es ws =
  unpack $ runGetOrFail (getHlist es) (BL.pack $ unWord7 <$> ws)
  where
    -- map the binary parser's result back into Either, re-wrapping leftovers
    unpack (Left (_, _, e)) = Left e
    unpack (Right (left, _, h)) = Right (nonIgnored h, Word7 <$> BL.unpack left)
    -- drop padding placeholders from the decoded association list
    nonIgnored (Hlist hs) = Hlist $ filter (\(_, h) -> shouldKeep h) hs
    shouldKeep (IgnoreV _) = False
    shouldKeep _ = True
-- | A padding entry of @i@ ignored bytes.
reserved :: Int -> Entry
reserved i = Entry "reserved" (IgnoreR i) (IgnoreV i)

-- | Build an 'Entry', taking the name last.
entry :: Range -> Value -> String -> Entry
entry range value name = Entry name range value

-- | Wrap a 'Word8' as a one-word 'Value'.
oneV :: Word8 -> Value
oneV w = OneV (word7FromIntegral w)

-- | Wrap a 'Word16' as a two-word 'Value'.
twoV :: Word16 -> Value
twoV w = TwoV (word14FromIntegral w)
|
ejconlon/dxedrine
|
src/Dxedrine/Hlists.hs
|
Haskell
|
bsd-3-clause
| 4,539
|
module Tct.Trs.Data.Precedence
( Order (..)
, Precedence (..)
, precedence
, empty
, insert
, eclasses
, recursionDepth
, ranks
) where
import qualified Control.Monad.State.Strict as St
import Data.List (find)
import qualified Data.Map as M
import Data.Maybe (fromMaybe)
import qualified Data.Set as S
import qualified Tct.Core.Common.Pretty as PP
import Tct.Trs.Data.Signature (Signature, Symbols, symbols)
-- | One ordering constraint between two symbols:
-- strictly greater (':>:') or equivalent (':~:').
data Order b = b :>: b | b :~: b
  deriving (Show, Eq, Ord)

-- | A precedence: a signature together with its ordering constraints.
newtype Precedence f = Precedence (Signature f, [Order f]) deriving Show

instance PP.Pretty f => PP.Pretty (Precedence f) where
  pretty (Precedence (_, [])) = PP.text "empty"
  -- render as a comma-separated list of "f > g" / "f ~ g" constraints
  pretty (Precedence (_,l)) = PP.hsep $ PP.punctuate (PP.text ",") [pp e | e <- l] where
    pp (f :>: g) = PP.pretty f PP.<+> PP.text ">" PP.<+> PP.pretty g
    pp (f :~: g) = PP.pretty f PP.<+> PP.text "~" PP.<+> PP.pretty g
-- | Build a precedence from a signature and a list of ordering constraints.
precedence :: Signature f -> [Order f] -> Precedence f
precedence sig os = Precedence (sig, os)

-- | The precedence over a signature with no ordering constraints.
empty :: Signature f -> Precedence f
empty sig = precedence sig []

-- | Add one ordering constraint to the front of a precedence.
insert :: Order f -> Precedence f -> Precedence f
insert o (Precedence (sig, os)) = Precedence (sig, o : os)
-- | The symbol classes induced by the '(:~:)' constraints. Each '(:~:)'
-- pair either extends the first existing class containing one of its
-- symbols, or starts a new class; existing classes are never merged
-- with each other.
eclasses :: Ord f => Precedence f -> [Symbols f]
eclasses (Precedence (_, l)) = foldr ins [] l
  where
    -- no matching class yet: start a fresh one
    ins (g :~: h) [] = [S.fromList [g,h]]
    ins eq@(g :~: h) (ec:ecs)
      | g `S.member` ec = h `S.insert` ec : ecs
      | h `S.member` ec = g `S.insert` ec : ecs
      | otherwise = ec : ins eq ecs
    -- strict orderings do not contribute to equivalence classes
    ins _ ecs = ecs
-- | Compute, for every symbol of the signature, its recursion depth:
-- symbols listed in @recursives@ add one level on top of the maximum
-- depth of the symbols strictly below them in the precedence; all
-- other symbols just inherit that maximum (0 when nothing is below).
recursionDepth :: Ord f => Symbols f -> Precedence f -> M.Map f Int
recursionDepth recursives prec@(Precedence (sig, l)) = St.execState (mapM_ recdepthM syms) M.empty
  where
    ecss = eclasses prec
    -- the equivalence class of f, or the singleton {f} when it is in none
    eclassOf f = S.singleton f `fromMaybe` find (\ cs -> f `S.member` cs) ecss
    syms = S.toList $ symbols sig
    -- all symbols (closed under equivalence) strictly below f
    below f = S.toList $ S.unions [ eclassOf h | f' :>: h <- l , f == f' ]
    -- memoised depth computation; the State carries the result map
    recdepthM f = do
      m <- St.get
      case M.lookup f m of
        Just rd -> return rd
        Nothing -> do
          rds <- mapM recdepthM (below f)
          let rd | f `S.member` recursives = 1 + maximum (0:rds)
                 | otherwise = maximum (0:rds)
          St.modify (M.insert f rd)
          return rd
-- | ranks of function symbols in precedence, starting at '1'
ranks :: Ord f => Precedence f -> M.Map f Int
ranks prec = recursionDepth (symbols sig) prec
  where Precedence (sig, _) = prec
|
ComputationWithBoundedResources/tct-trs
|
src/Tct/Trs/Data/Precedence.hs
|
Haskell
|
bsd-3-clause
| 2,563
|
{-# LANGUAGE GeneralizedNewtypeDeriving, DeriveDataTypeable, DeriveFunctor, OverloadedStrings, PatternGuards #-}
-- | Types used to generate the input.
module Input.Item(
Sig(..), Ctx(..), Ty(..),
Item(..), itemName,
Target(..), TargetId(..),
splitIPackage, splitIModule,
hseToSig, hseToItem
) where
import Numeric
import Control.Applicative
import Data.Tuple.Extra
import Language.Haskell.Exts
import Data.List.Extra
import Data.Maybe
import Data.Ix
import Foreign.Storable
import Data.Word
import Control.DeepSeq
import Data.Data
import General.Util
import General.IString
import Prelude
---------------------------------------------------------------------
-- TYPES
-- FIXME: Delete the Read instances
-- | A flattened type signature: constraints plus the @->@-separated parts.
data Sig n = Sig [Ctx n] [Ty n] deriving (Show,Eq,Ord,Typeable,Data,Functor,Read) -- list of -> types
-- | A single class constraint (class name, constrained variable).
data Ctx n = Ctx n n deriving (Show,Eq,Ord,Typeable,Data,Functor,Read) -- context, second will usually be a free variable
-- | A type, headed by a constructor ('TCon') or a variable ('TVar').
data Ty n = TCon n [Ty n] | TVar n [Ty n] deriving (Show,Eq,Ord,Typeable,Data,Functor,Read) -- type application, vectorised, all symbols may occur at multiple kinds

instance NFData n => NFData (Sig n) where rnf (Sig x y) = rnf x `seq` rnf y
instance NFData n => NFData (Ctx n) where rnf (Ctx x y) = rnf x `seq` rnf y
instance NFData n => NFData (Ty n) where
    rnf (TCon x y) = rnf x `seq` rnf y
    rnf (TVar x y) = rnf x `seq` rnf y
---------------------------------------------------------------------
-- ITEMS
-- | One indexed item extracted from the input.
data Item
    = IPackage String                    -- ^ a package, by name
    | IModule String                     -- ^ a module, by name
    | IName String -- class or newtype
    | ISignature String (Sig IString)    -- ^ a named value with its signature
    | IAlias String [IString] (Sig IString) -- ^ a type alias: name, binders, rhs
    | IInstance (Sig IString)            -- ^ an (anonymous) instance head
      deriving (Show,Eq,Ord,Typeable,Data)

instance NFData Item where
    rnf (IPackage x) = rnf x
    rnf (IModule x) = rnf x
    rnf (IName x) = rnf x
    rnf (ISignature a b) = rnf (a,b)
    rnf (IAlias a b c) = rnf (a,b,c)
    rnf (IInstance a) = rnf a
-- | The name an item is indexed under, if it has one (instances do not).
itemName :: Item -> Maybe String
itemName item = case item of
    IPackage s     -> Just s
    IModule s      -> Just s
    IName s        -> Just s
    ISignature s _ -> Just s
    IAlias s _ _   -> Just s
    IInstance _    -> Nothing
---------------------------------------------------------------------
-- DATABASE
-- | Identifier of a target in the database, rendered and parsed as hex.
newtype TargetId = TargetId Word32 deriving (Eq,Ord,Storable,NFData,Ix)

instance Show TargetId where
    show (TargetId x) = showHex x ""

instance Read TargetId where
    readsPrec _ = map (first TargetId) . readHex

-- | Everything needed to display one search result.
data Target = Target
    {targetURL :: URL -- URL where this thing is located
    ,targetPackage :: Maybe (String, URL) -- name and URL of the package it is in (Nothing if it is a package)
    ,targetModule :: Maybe (String, URL) -- name and URL of the module it is in (Nothing if it is a package or module)
    ,targetType :: String -- one of package, module or empty string
    ,targetItem :: String -- HTML span of the item, using <0> for the name and <1> onwards for arguments
    ,targetDocs :: String -- HTML documentation to show, a sequence of block level elements
    } deriving (Show,Eq,Ord)

instance NFData Target where
    rnf (Target a b c d e f) = rnf a `seq` rnf b `seq` rnf c `seq` rnf d `seq` rnf e `seq` rnf f
-- | Group items into runs headed by each 'IPackage' / 'IModule' marker.
splitIPackage, splitIModule :: [(a, Item)] -> [(String, [(a, Item)])]
splitIPackage = splitUsing $ \p -> case snd p of IPackage s -> Just s; _ -> Nothing
splitIModule  = splitUsing $ \p -> case snd p of IModule s -> Just s; _ -> Nothing
-- | Split a list into named groups: each group starts at an element for
-- which the selector yields a name ("" when the first element has none)
-- and extends until the next such element.
splitUsing :: (a -> Maybe String) -> [a] -> [(String, [a])]
splitUsing sel = repeatedly $ \(hd:rest) ->
    let (grp, remainder) = break (isJust . sel) rest
    in ((fromMaybe "" (sel hd), hd:grp), remainder)
---------------------------------------------------------------------
-- HSE CONVERSION
-- | Flatten a haskell-src-exts 'Type' into a 'Sig': top-level foralls
-- contribute contexts, top-level arrows become the list of parts.
hseToSig :: Type -> Sig String
hseToSig = tyForall
  where
    -- forall at the top is different
    tyForall (TyParen x) = tyForall x
    tyForall (TyForall _ c t) | Sig cs ts <- tyForall t = Sig (concatMap ctx c ++ cs) ts
    tyForall x = Sig [] $ tyFun x
    -- split a chain of top-level arrows into its parts
    tyFun (TyParen x) = tyFun x
    tyFun (TyFun a b) = ty a : tyFun b
    tyFun x = [ty x]
    -- translate a single (non-top-level) type
    ty (TyForall _ _ x) = TCon "\\/" [ty x]
    ty x@TyFun{} = TCon "->" $ tyFun x
    ty (TyTuple box ts) = TCon (fromQName $ Special $ TupleCon box $ length ts) (map ty ts)
    ty (TyList x) = TCon "[]" [ty x]
    ty (TyParArray x) = TCon "[::]" [ty x]
    -- NOTE(review): this case is only defined for TCon/TVar results;
    -- ty never returns anything else, so the match is total in practice
    ty (TyApp x y) = case ty x of
        TCon a b -> TCon a (b ++ [ty y])
        TVar a b -> TVar a (b ++ [ty y])
    ty (TyVar x) = TVar (fromName x) []
    ty (TyCon x) = TCon (fromQName x) []
    ty (TyInfix a b c) = ty $ TyCon b `TyApp` a `TyApp` c
    ty (TyKind x _) = ty x
    ty (TyBang _ x) = ty x
    ty (TyParen x) = ty x
    ty _ = TVar "_" []
    -- translate a constraint; only class constraints on a variable survive
    ctx (ParenA x) = ctx x
    ctx (InfixA a con b) = ctx $ ClassA con [a,b]
    ctx (ClassA con (TyVar var:_)) = [Ctx (fromQName con) (fromName var)]
    ctx _ = []
-- | Convert a single HSE declaration to an 'Item', if it is one we index.
hseToItem :: Decl -> Maybe Item
hseToItem (TypeSig _ [name] ty) = Just $ ISignature (fromName name) (toIString <$> hseToSig ty)
hseToItem (TypeDecl _ name bind rhs) = Just $ IAlias (fromName name) (map (toIString . fromName . fromTyVarBind) bind) (toIString <$> hseToSig rhs)
hseToItem (InstDecl _ _ _ ctx name args _) = Just $ IInstance $ fmap toIString $ hseToSig $ TyForall Nothing ctx $ tyApps (TyCon name) args
-- a declaration that introduces exactly one name becomes a plain IName
hseToItem d | [n] <- declNames d = Just $ IName n
hseToItem _ = Nothing
|
BartAdv/hoogle
|
src/Input/Item.hs
|
Haskell
|
bsd-3-clause
| 5,430
|
{-# LANGUAGE
TemplateHaskell
, QuasiQuotes
#-}
module Language.Haskell.TH.HDBI
(
deriveToRow
, deriveFromRow
) where
-- import Control.Applicative
import Control.Monad
import Control.Applicative
import Database.HDBI.SqlValue (ToRow(..),
FromRow(..),
FromSql(..),
ToSql(..),
ConvertError(..))
import Language.Haskell.TH
-- | return constructor name and fields count, or Nothing if data constructor is
-- infix
getTParams :: String -> Name -> Q (Name, Maybe Int)
getTParams exc name = do
  tcon <- reify name
  case tcon of
    (TyConI dec) -> do
      case dec of
        (DataD _ _ vars constrs _) -> do
          checkVars vars
          case constrs of
            [con] -> getTParams' con
            _ -> fl $ "data " ++ (show name) ++ " should have exactly one constructor"
        (NewtypeD _ _ vars con _) -> do
          checkVars vars
          getTParams' con
        _ -> fl $ "deriveToRow can derive just for data with one constructor or for newtypes"
    _ -> fl $ (show name) ++ " must be a type"
  where
    -- prefix every failure message with the caller-supplied context string
    fl x = fail $ exc ++ x
    -- the derived instances only make sense for non-parameterised types
    checkVars [] = return ()
    checkVars _ = fl $ "type " ++ show name ++ " should not have type variables"
    getTParams' :: Con -> Q (Name, Maybe Int)
    getTParams' (NormalC n fields) = return (n, Just $ length fields)
    getTParams' (RecC n fields) = return (n, Just $ length fields)
    -- infix constructors report Nothing; they always have two fields
    getTParams' (InfixC _ n _) = return (n, Nothing)
    getTParams' _ = fl $ "data constructors should not contain typevar boundries for " ++ show name
-- | Derive `ToRow` instance for any data with one constructor or for newtype
deriveToRow :: Name -> Q [Dec]
deriveToRow name = do
  (con, fields) <- getTParams "deriveToRow: " name
  -- infix constructors (fields == Nothing) always have exactly two fields
  names <- case fields of
    Just fl -> replicateM fl $ newName "val"
    Nothing -> replicateM 2 $ newName "val"
  -- instance ToRow <name> where toRow (<Con> v1 .. vn) = [toSql v1, ..]
  return [InstanceD [] (AppT (ConT ''ToRow) (ConT name))
          [FunD 'toRow
           [Clause [mkPattern con fields names]
            (NormalB $ ListE $ map (\nm -> AppE (VarE 'toSql) (VarE nm)) names) [] ]]]
  where
    -- infix constructors need an infix pattern; all others a plain ConP
    mkPattern con Nothing [n1, n2] = InfixP (VarP n1) con (VarP n2)
    mkPattern con (Just _) names = ConP con $ map VarP names
-- | Derive a `FromRow` instance: a list of exactly the right length is
-- converted field-by-field with 'safeFromSql'; any other length yields a
-- descriptive 'ConvertError'.
deriveFromRow :: Name -> Q [Dec]
deriveFromRow name = do
  (con, fields) <- getTParams "deriveFromRow: " name
  -- infix constructors (fields == Nothing) always have exactly two fields
  names <- case fields of
    Just fl -> replicateM fl $ newName "val"
    Nothing -> replicateM 2 $ newName "val"
  xname <- newName "x"
  return [InstanceD [] (AppT (ConT ''FromRow) (ConT name))
          [FunD 'safeFromRow
           -- matching clause: Con <$> safeFromSql v1 <*> safeFromSql v2 ...
           [Clause [ListP $ map VarP names]
            (NormalB $ UInfixE (mkCon fields con) (VarE '(<$>)) (foldedFromSql names)) []
           -- fallback clause: wrong number of values -> ConvertError
           ,Clause [VarP xname]
            (NormalB $ AppE (ConE 'Left) (AppE (ConE 'ConvertError)
                                          (UInfixE
                                           (LitE $ StringL $ "Could not construct " ++ show name
                                            ++ ": query must return exactly "
                                            ++ (show $ length names) ++ " values but not " )
                                           (VarE '(++))
                                           (AppE (VarE 'show) (AppE (VarE 'length) (VarE xname)))))) []]]]
  where
    -- chain the per-field conversions applicatively
    foldedFromSql names = foldl1 (\a b -> UInfixE a (VarE '(<*>)) b)
                          $ map (\n -> AppE (VarE 'safeFromSql) (VarE n)) names
    -- infix constructors are parenthesised so the section applies cleanly
    mkCon (Just _) con = ConE con
    mkCon Nothing con = ParensE $ ConE con
|
s9gf4ult/hdbi
|
Language/Haskell/TH/HDBI.hs
|
Haskell
|
bsd-3-clause
| 3,428
|
{-# LANGUAGE TypeFamilies #-}
{-| This is a dummy backend that doesn't offer any formalism to specify models or verify contracts.
It is only used to provide a backend for testing purporses (Or if you are too lazy to write components). -}
module Language.GTL.Backend.None where
import Language.GTL.Backend
import Data.Map as Map
-- | The none backend data type
data None = None

-- | Every operation is a stub: no aliases, identity type-checking, and an
-- empty C interface, so components using this backend do nothing.
instance GTLBackend None where
  data GTLBackendModel None = NoneData
  backendName _ = "none"
  initBackend _ _ _ = return NoneData
  backendGetAliases _ _ = Map.empty
  typeCheckInterface _ _ x = return x
  cInterface _ _ = CInterface
    { cIFaceIncludes = []
    , cIFaceStateType = []
    , cIFaceInputType = []
    , cIFaceStateInit = const ""
    , cIFaceIterate = \_ _ -> ""
    , cIFaceGetOutputVar = \_ _ _ -> Just ""
    , cIFaceGetInputVar = \_ _ _ -> Just ""
    , cIFaceTranslateType = \_ -> ("","",False)
    , cIFaceTranslateValue = \_ -> CValue ""
    }
  -- verification is a no-op: Nothing means no result is produced
  backendVerify _ _ _ _ _ _ _ _ _ = return Nothing
|
hguenther/gtl
|
lib/Language/GTL/Backend/None.hs
|
Haskell
|
bsd-3-clause
| 1,009
|
{-|
Module : Idris.Core.Evaluate
Description : Evaluate Idris expressions.
Copyright :
License : BSD3
Maintainer : The Idris Community.
-}
{-# LANGUAGE BangPatterns, DeriveGeneric, FlexibleInstances,
MultiParamTypeClasses, PatternGuards #-}
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-}
module Idris.Core.Evaluate(normalise, normaliseTrace, normaliseC,
normaliseAll, normaliseBlocking, toValue, quoteTerm,
rt_simplify, simplify, inlineSmall,
specialise, unfold, convEq, convEq',
Def(..), CaseInfo(..), CaseDefs(..),
Accessibility(..), Injectivity, Totality(..), TTDecl, PReason(..), MetaInformation(..),
Context, initContext, ctxtAlist, next_tvar,
addToCtxt, setAccess, setInjective, setTotal, setRigCount,
setMetaInformation, addCtxtDef, addTyDecl,
addDatatype, addCasedef, simplifyCasedef, addOperator,
lookupNames, lookupTyName, lookupTyNameExact, lookupTy, lookupTyExact,
lookupP, lookupP_all, lookupDef, lookupNameDef, lookupDefExact, lookupDefAcc, lookupDefAccExact, lookupVal,
mapDefCtxt, tcReducible,
lookupTotal, lookupTotalExact, lookupInjectiveExact,
lookupRigCount, lookupRigCountExact,
lookupNameTotal, lookupMetaInformation, lookupTyEnv, isTCDict,
isCanonical, isDConName, canBeDConName, isTConName, isConName, isFnName,
conGuarded,
Value(..), Quote(..), initEval, uniqueNameCtxt, uniqueBindersCtxt, definitions,
isUniverse, linearCheck, linearCheckArg) where
import Idris.Core.CaseTree
import Idris.Core.TT
import Control.Applicative hiding (Const)
import Control.Monad.State
import Data.Binary hiding (get, put)
import qualified Data.Binary as B
import Data.List
import Data.Maybe (listToMaybe)
import Debug.Trace
import GHC.Generics (Generic)
-- | State threaded through evaluation.
data EvalState = ES { limited :: [(Name, Int)], -- ^ remaining unfoldings per name
                      nexthole :: Int,          -- ^ fresh counter for frozen lets
                      blocking :: Bool }        -- ^ when True, names are not unfolded
  deriving Show

type Eval a = State EvalState a

-- | Options controlling which reductions the evaluator performs.
data EvalOpt = Spec
             | Simplify Bool -- ^ whether to expand lets or not
             | AtREPL
             | RunTT
             | Unfold
  deriving (Show, Eq)
-- | The initial evaluator state: no unfolding limits, hole counter at
-- zero, not blocking. (Type signature added; it was previously inferred.)
initEval :: EvalState
initEval = ES [] 0 False
-- VALUES (as HOAS) ---------------------------------------------------------
-- | A HOAS representation of values
data Value = VP NameType Name Value              -- ^ named reference with its type value
           | VV Int                              -- ^ de Bruijn variable
             -- True for Bool indicates safe to reduce
           | VBind Bool Name (Binder Value) (Value -> Eval Value)
             -- For frozen let bindings when simplifying
           | VBLet Int Name Value Value Value
           | VApp Value Value                    -- ^ application
           | VType UExp
           | VUType Universe
           | VErased
           | VImpossible
           | VConstant Const
           | VProj Value Int
--           | VLazy Env [Value] Term
           | VTmp Int                            -- ^ placeholder used while quoting
-- | Whether a value is in canonical form: headed by a data constructor,
-- a constant, a type, or erased. Applications are canonical when their
-- head is (the argument does not matter).
canonical :: Value -> Bool
canonical (VP (DCon _ _ _) _ _) = True
canonical (VApp f _) = canonical f
canonical (VConstant _) = True
canonical (VType _) = True
canonical (VUType _) = True
canonical VErased = True
canonical _ = False
instance Show Value where
    -- quote back to a term (starting the counter at 100) and show that
    show x = show $ evalState (quote 100 x) initEval

-- NOTE(review): orphan catch-all instance so values containing closures
-- can still be shown; every function renders as "<<fn>>".
instance Show (a -> b) where
    show x = "<<fn>>"
-- THE EVALUATOR ------------------------------------------------------------
-- The environment is assumed to be "locally named" - i.e., not de Bruijn
-- indexed.
-- i.e. it's an intermediate environment that we have while type checking or
-- while building a proof.
-- | Normalise fully type checked terms (so, assume all names/let bindings resolved)
normaliseC :: Context -> Env -> TT Name -> TT Name
normaliseC ctxt env t
   = evalState (do val <- eval False ctxt [] (map finalEntry env) t []
                   quote 0 val) initEval

-- | Normalise everything, whether abstract, private or public
normaliseAll :: Context -> Env -> TT Name -> TT Name
normaliseAll ctxt env t
   = evalState (do val <- eval False ctxt [] (map finalEntry env) t [AtREPL]
                   quote 0 val) initEval

-- | As normaliseAll, but with an explicit list of names *not* to reduce
normaliseBlocking :: Context -> Env -> [Name] -> TT Name -> TT Name
normaliseBlocking ctxt env blocked t
   -- a limit of 0 means the blocked names are never unfolded
   = evalState (do val <- eval False ctxt (map (\n -> (n, 0)) blocked)
                               (map finalEntry env) t [AtREPL]
                   quote 0 val) initEval

-- | Normalise without tracing.
normalise :: Context -> Env -> TT Name -> TT Name
normalise = normaliseTrace False

-- | Normalise, optionally tracing; the term is finalised before evaluation.
normaliseTrace :: Bool -> Context -> Env -> TT Name -> TT Name
normaliseTrace tr ctxt env t
   = evalState (do val <- eval tr ctxt [] (map finalEntry env) (finalise t) []
                   quote 0 val) initEval

-- | Evaluate a term to its HOAS 'Value' form without quoting back.
toValue :: Context -> Env -> TT Name -> Value
toValue ctxt env t
  = evalState (eval False ctxt [] (map finalEntry env) t []) initEval

-- | Quote a HOAS 'Value' back to a first-order term.
quoteTerm :: Value -> TT Name
quoteTerm val = evalState (quote 0 val) initEval
-- Return a specialised name, and an updated list of reductions available,
-- so that the caller can tell how much specialisation was achieved.
specialise :: Context -> Env -> [(Name, Int)] -> TT Name ->
              (TT Name, [(Name, Int)])
specialise ctxt env limits t
    -- seed the state with the given unfolding limits; the leftover limits
    -- are returned alongside the specialised term
    = let (tm, st) =
            runState (do val <- eval False ctxt []
                                 (map finalEntry env) (finalise t)
                                 [Spec]
                         quote 0 val) (initEval { limited = limits }) in
      (tm, limited st)
-- | Like normalise, but we only reduce functions that are marked as okay to
-- inline, and lets
simplify :: Context -> Env -> TT Name -> TT Name
simplify ctxt env t
   -- the listed primitives are given a limit of 0 so they are never unfolded
   = evalState (do val <- eval False ctxt [(sUN "lazy", 0),
                                           (sUN "force", 0),
                                           (sUN "Force", 0),
                                           (sUN "assert_smaller", 0),
                                           (sUN "assert_total", 0),
                                           (sUN "par", 0),
                                           (sUN "prim__syntactic_eq", 0),
                                           (sUN "fork", 0)]
                                 (map finalEntry env) (finalise t)
                                 [Simplify True]
                   quote 0 val) initEval
-- | Like simplify, but we only reduce functions that are marked as okay to
-- inline, and don't reduce lets
inlineSmall :: Context -> Env -> TT Name -> TT Name
inlineSmall ctxt env t
   = evalState (do val <- eval False ctxt []
                                 (map finalEntry env) (finalise t)
                                 [Simplify False]
                   quote 0 val) initEval

-- | Simplify for run-time (i.e. basic inlining)
rt_simplify :: Context -> Env -> TT Name -> TT Name
rt_simplify ctxt env t
   -- the listed primitives are given a limit of 0 so they are never unfolded
   = evalState (do val <- eval False ctxt [(sUN "lazy", 0),
                                           (sUN "force", 0),
                                           (sUN "Force", 0),
                                           (sUN "par", 0),
                                           (sUN "prim__syntactic_eq", 0),
                                           (sUN "prim_fork", 0)]
                                 (map finalEntry env) (finalise t)
                                 [RunTT]
                   quote 0 val) initEval
-- | Unfold the given names in a term, the given number of times in a stack.
-- Preserves 'let'.
-- This is primarily to support inlining of the given names, and can also
-- help with partial evaluation by allowing a rescursive definition to be
-- unfolded once only.
-- Specifically used to unfold definitions using interfaces before going to
-- the totality checker (otherwise mutually recursive definitions in
-- implementations will not work...)
unfold :: Context -> Env -> [(Name, Int)] -> TT Name -> TT Name
unfold ctxt env ns t
   = evalState (do val <- eval False ctxt ns
                                 (map finalEntry env) (finalise t)
                                 [Unfold]
                   quote 0 val) initEval
-- unbindEnv env (quote 0 (eval ctxt (bindEnv env t)))
-- | Finalise the binder of one environment entry before evaluation.
finalEntry :: (Name, RigCount, Binder (TT Name)) -> (Name, RigCount, Binder (TT Name))
finalEntry (n, r, b) = (n, r, finalise <$> b)
-- | Wrap a term in the bindings of an environment; 'Let' entries become
-- 'NLet' binders, other binders are kept as-is.
bindEnv :: EnvTT n -> TT n -> TT n
bindEnv [] tm = tm
bindEnv ((n, r, Let t v):bs) tm = Bind n (NLet t v) (bindEnv bs tm)
bindEnv ((n, r, b):bs) tm = Bind n b (bindEnv bs tm)

-- | Strip one binder per environment entry, undoing 'bindEnv'.
unbindEnv :: EnvTT n -> TT n -> TT n
unbindEnv [] tm = tm
unbindEnv (_:bs) (Bind n b sc) = unbindEnv bs sc
-- reaching here means the term has fewer binders than the environment
unbindEnv env tm = error "Impossible case occurred: couldn't unbind env."
-- | Decide whether a name may be unfolded, returning the (possibly updated)
-- per-name limits. When specialising, limits live in the evaluator state;
-- otherwise they are threaded through the returned list.
usable :: Bool -- specialising
       -> Bool -- unfolding only
       -> Int -- Reduction depth limit (when simplifying/at REPL)
       -> Name -> [(Name, Int)] -> Eval (Bool, [(Name, Int)])
-- usable _ _ ns@((MN 0 "STOP", _) : _) = return (False, ns)
usable False uf depthlimit n [] = return (True, [])
usable True uf depthlimit n ns
  = do ES ls num b <- get
       -- while blocking, nothing is unfolded
       if b then return (False, ns)
            else case lookup n ls of
                   Just 0 -> return (False, ns)
                   Just i -> return (True, ns)
                   _ -> return (False, ns)
usable False uf depthlimit n ns
  = case lookup n ns of
      Just 0 -> return (False, ns)
      -- decrement the remaining unfoldings for this name
      Just i -> return $ (True, (n, abs (i-1)) : filter (\ (n', _) -> n/=n') ns)
      -- unseen name: when only unfolding listed names, refuse; otherwise
      -- start it off at the depth limit
      _ -> return $ if uf
                    then (False, ns)
                    else (True, (n, depthlimit) : filter (\ (n', _) -> n/=n') ns)
-- | Adjust the remaining unfoldings recorded for a name in the state by
-- @inc@ (positive deducts, negative reinstates); no-op for unlimited names.
fnCount :: Int -> Name -> Eval ()
fnCount inc n
  = do ES ls num b <- get
       case lookup n ls of
         Just i -> do put $ ES ((n, (i - inc)) :
                                filter (\ (n', _) -> n/=n') ls) num b
         _ -> return ()
-- | Set whether evaluation is currently blocked from unfolding names.
setBlock :: Bool -> Eval ()
setBlock b = do ES ls num _ <- get
                put (ES ls num b)

-- | Use up, or restore, one permitted unfolding of a name.
-- (Type signature added; these were previously inferred.)
deduct, reinstate :: Name -> Eval ()
deduct = fnCount 1
reinstate = fnCount (-1)
-- | Evaluate in a context of locally named things (i.e. not de Bruijn indexed,
-- such as we might have during construction of a proof)
-- The (Name, Int) pair in the arguments is the maximum depth of unfolding of
-- a name. The corresponding pair in the state is the maximum number of
-- unfoldings overall.
eval :: Bool -> Context -> [(Name, Int)] -> Env -> TT Name ->
[EvalOpt] -> Eval Value
eval traceon ctxt ntimes genv tm opts = ev ntimes [] True [] tm where
spec = Spec `elem` opts
simpl = Simplify True `elem` opts || Simplify False `elem` opts
simpl_inline = Simplify False `elem` opts
runtime = RunTT `elem` opts
atRepl = AtREPL `elem` opts
unfold = Unfold `elem` opts
noFree = all canonical . map snd
-- returns 'True' if the function should block
-- normal evaluation should return false
blockSimplify (CaseInfo inl always dict) n stk
| runtime
= if always then False
else not (inl || dict) || elem n stk
| simpl
= (not inl || elem n stk)
|| (n == sUN "prim__syntactic_eq")
| otherwise = False
getCases cd | simpl = cases_compiletime cd
| runtime = cases_runtime cd
| otherwise = cases_compiletime cd
ev ntimes stk top env (P _ n ty)
| Just (Let t v) <- lookupBinder n genv = ev ntimes stk top env v
ev ntimes_in stk top env (P Ref n ty)
= do let limit = if simpl then 100 else 10000
(u, ntimes) <- usable spec unfold limit n ntimes_in
let red = u && (tcReducible n ctxt || spec || (atRepl && noFree env)
|| runtime || unfold
|| sUN "assert_total" `elem` stk)
if red then
do let val = lookupDefAccExact n (spec || unfold || (atRepl && noFree env) || runtime) ctxt
case val of
Just (Function _ tm, Public) ->
ev ntimes (n:stk) True env tm
Just (TyDecl nt ty, _) -> do vty <- ev ntimes stk True env ty
return $ VP nt n vty
Just (CaseOp ci _ _ _ _ cd, acc)
| (acc == Public || acc == Hidden) &&
-- || sUN "assert_total" `elem` stk) &&
null (fst (cases_compiletime cd)) -> -- unoptimised version
let (ns, tree) = getCases cd in
if blockSimplify ci n stk
then liftM (VP Ref n) (ev ntimes stk top env ty)
else -- traceWhen runtime (show (n, ns, tree)) $
do c <- evCase ntimes n (n:stk) top env ns [] tree
case c of
(Nothing, _) -> liftM (VP Ref n) (ev ntimes stk top env ty)
(Just v, _) -> return v
_ -> liftM (VP Ref n) (ev ntimes stk top env ty)
else liftM (VP Ref n) (ev ntimes stk top env ty)
ev ntimes stk top env (P nt n ty)
= liftM (VP nt n) (ev ntimes stk top env ty)
ev ntimes stk top env (V i)
| i < length env && i >= 0 = return $ snd (env !! i)
| otherwise = return $ VV i
ev ntimes stk top env (Bind n (Let t v) sc)
| (not (runtime || simpl_inline || unfold)) || occurrences n sc < 2
= do v' <- ev ntimes stk top env v --(finalise v)
sc' <- ev ntimes stk top ((n, v') : env) sc
wknV (-1) sc'
| otherwise
= do t' <- ev ntimes stk top env t
v' <- ev ntimes stk top env v --(finalise v)
-- use Tmp as a placeholder, then make it a variable reference
-- again when evaluation finished
hs <- get
let vd = nexthole hs
put (hs { nexthole = vd + 1 })
sc' <- ev ntimes stk top ((n, VP Bound (sMN vd "vlet") VErased) : env) sc
return $ VBLet vd n t' v' sc'
ev ntimes stk top env (Bind n (NLet t v) sc)
= do t' <- ev ntimes stk top env (finalise t)
v' <- ev ntimes stk top env (finalise v)
sc' <- ev ntimes stk top ((n, v') : env) sc
return $ VBind True n (Let t' v') (\x -> return sc')
ev ntimes stk top env (Bind n b sc)
= do b' <- vbind env b
let n' = uniqueName n (map fstEnv genv ++ map fst env)
return $ VBind True -- (vinstances 0 sc < 2)
n' b' (\x -> ev ntimes stk False ((n', x):env) sc)
where vbind env t
= fmapMB (\tm -> ev ntimes stk top env (finalise tm)) t
-- block reduction immediately under codata (and not forced)
ev ntimes stk top env
(App _ (App _ (App _ d@(P _ (UN dly) _) l@(P _ (UN lco) _)) t) arg)
| dly == txt "Delay" && lco == txt "Infinite" && not (unfold || simpl)
= do let (f, _) = unApply arg
let ntimes' = case f of
P _ fn _ -> (fn, 0) : ntimes
_ -> ntimes
when spec $ setBlock True
d' <- ev ntimes' stk False env d
l' <- ev ntimes' stk False env l
t' <- ev ntimes' stk False env t
arg' <- ev ntimes' stk False env arg
when spec $ setBlock False
evApply ntimes' stk top env [l',t',arg'] d'
-- Treat "assert_total" specially, as long as it's defined!
ev ntimes stk top env (App _ (App _ (P _ n@(UN at) _) _) arg)
| Just (CaseOp _ _ _ _ _ _, _) <- lookupDefAccExact n (spec || (atRepl && noFree env)|| runtime) ctxt,
at == txt "assert_total" && not (simpl || unfold)
= ev ntimes (n : stk) top env arg
ev ntimes stk top env (App _ f a)
= do f' <- ev ntimes stk False env f
a' <- ev ntimes stk False env a
evApply ntimes stk top env [a'] f'
ev ntimes stk top env (Proj t i)
= do -- evaluate dictionaries if it means the projection works
t' <- ev ntimes stk top env t
-- tfull' <- reapply ntimes stk top env t' []
return (doProj t' (getValArgs t'))
where doProj t' (VP (DCon _ _ _) _ _, args)
| i >= 0 && i < length args = args!!i
doProj t' _ = VProj t' i
ev ntimes stk top env (Constant c) = return $ VConstant c
ev ntimes stk top env Erased = return VErased
ev ntimes stk top env Impossible = return VImpossible
ev ntimes stk top env (Inferred tm) = ev ntimes stk top env tm
ev ntimes stk top env (TType i) = return $ VType i
ev ntimes stk top env (UType u) = return $ VUType u
evApply ntimes stk top env args (VApp f a)
= evApply ntimes stk top env (a:args) f
evApply ntimes stk top env args f
= apply ntimes stk top env f args
reapply ntimes stk top env f@(VP Ref n ty) args
= let val = lookupDefAccExact n (spec || (atRepl && noFree env) || runtime) ctxt in
case val of
Just (CaseOp ci _ _ _ _ cd, acc) ->
let (ns, tree) = getCases cd in
do c <- evCase ntimes n (n:stk) top env ns args tree
case c of
(Nothing, _) -> return $ unload env (VP Ref n ty) args
(Just v, rest) -> evApply ntimes stk top env rest v
_ -> case args of
(a : as) -> return $ unload env f (a : as)
[] -> return f
reapply ntimes stk top env (VApp f a) args
= reapply ntimes stk top env f (a : args)
reapply ntimes stk top env v args = return v
apply ntimes stk top env (VBind True n (Lam _ t) sc) (a:as)
= do a' <- sc a
app <- apply ntimes stk top env a' as
wknV 1 app
apply ntimes_in stk top env f@(VP Ref n ty) args
= do let limit = if simpl then 100 else 10000
(u, ntimes) <- usable spec unfold limit n ntimes_in
let red = u && (tcReducible n ctxt || spec || (atRepl && noFree env)
|| unfold || runtime
|| sUN "assert_total" `elem` stk)
if red then
do let val = lookupDefAccExact n (spec || unfold || (atRepl && noFree env) || runtime) ctxt
case val of
Just (CaseOp ci _ _ _ _ cd, acc)
| acc == Public || acc == Hidden ->
-- unoptimised version
let (ns, tree) = getCases cd in
if blockSimplify ci n stk
then return $ unload env (VP Ref n ty) args
else -- traceWhen runtime (show (n, ns, tree)) $
do c <- evCase ntimes n (n:stk) top env ns args tree
case c of
(Nothing, _) -> return $ unload env (VP Ref n ty) args
(Just v, rest) -> evApply ntimes stk top env rest v
Just (Operator _ i op, _) ->
if (i <= length args)
then case op (take i args) of
Nothing -> return $ unload env (VP Ref n ty) args
Just v -> evApply ntimes stk top env (drop i args) v
else return $ unload env (VP Ref n ty) args
_ -> case args of
[] -> return f
_ -> return $ unload env f args
else case args of
(a : as) -> return $ unload env f (a:as)
[] -> return f
apply ntimes stk top env f (a:as) = return $ unload env f (a:as)
apply ntimes stk top env f [] = return f
-- specApply stk env f@(VP Ref n ty) args
-- = case lookupCtxt n statics of
-- [as] -> if or as
-- then trace (show (n, map fst (filter (\ (_, s) -> s) (zip args as)))) $
-- return $ unload env f args
-- else return $ unload env f args
-- _ -> return $ unload env f args
-- specApply stk env f args = return $ unload env f args
unload :: [(Name, Value)] -> Value -> [Value] -> Value
unload env f [] = f
unload env f (a:as) = unload env (VApp f a) as
evCase ntimes n stk top env ns args tree
| length ns <= length args
= do let args' = take (length ns) args
let rest = drop (length ns) args
when spec $ deduct n
t <- evTree ntimes stk top env (zip ns args') tree
when spec $ case t of
Nothing -> reinstate n -- Blocked, count n again
Just _ -> return ()
-- (zipWith (\n , t) -> (n, t)) ns args') tree
return (t, rest)
| otherwise = return (Nothing, args)
evTree :: [(Name, Int)] -> [Name] -> Bool ->
[(Name, Value)] -> [(Name, Value)] -> SC -> Eval (Maybe Value)
evTree ntimes stk top env amap (UnmatchedCase str) = return Nothing
evTree ntimes stk top env amap (STerm tm)
= do let etm = pToVs (map fst amap) tm
etm' <- ev ntimes stk (not (conHeaded tm))
(amap ++ env) etm
return $ Just etm'
evTree ntimes stk top env amap (ProjCase t alts)
= do t' <- ev ntimes stk top env t
doCase ntimes stk top env amap t' alts
evTree ntimes stk top env amap (Case _ n alts)
= case lookup n amap of
Just v -> doCase ntimes stk top env amap v alts
_ -> return Nothing
evTree ntimes stk top env amap ImpossibleCase = return Nothing
doCase ntimes stk top env amap v alts =
do c <- chooseAlt env v (getValArgs v) alts amap
case c of
Just (altmap, sc) -> evTree ntimes stk top env altmap sc
_ -> do c' <- chooseAlt' ntimes stk env v (getValArgs v) alts amap
case c' of
Just (altmap, sc) -> evTree ntimes stk top env altmap sc
_ -> return Nothing
conHeaded tm@(App _ _ _)
| (P (DCon _ _ _) _ _, args) <- unApply tm = True
conHeaded t = False
chooseAlt' ntimes stk env _ (f, args) alts amap
= do f' <- apply ntimes stk True env f args
chooseAlt env f' (getValArgs f')
alts amap
chooseAlt :: [(Name, Value)] -> Value -> (Value, [Value]) -> [CaseAlt] ->
[(Name, Value)] ->
Eval (Maybe ([(Name, Value)], SC))
chooseAlt env _ (VP (DCon i a _) _ _, args) alts amap
| Just (ns, sc) <- findTag i alts = return $ Just (updateAmap (zip ns args) amap, sc)
| Just v <- findDefault alts = return $ Just (amap, v)
chooseAlt env _ (VP (TCon i a) _ _, args) alts amap
| Just (ns, sc) <- findTag i alts
= return $ Just (updateAmap (zip ns args) amap, sc)
| Just v <- findDefault alts = return $ Just (amap, v)
chooseAlt env _ (VConstant c, []) alts amap
| Just v <- findConst c alts = return $ Just (amap, v)
| Just (n', sub, sc) <- findSuc c alts
= return $ Just (updateAmap [(n',sub)] amap, sc)
| Just v <- findDefault alts = return $ Just (amap, v)
chooseAlt env _ (VP _ n _, args) alts amap
| Just (ns, sc) <- findFn n alts = return $ Just (updateAmap (zip ns args) amap, sc)
chooseAlt env _ (VBind _ _ (Pi _ i s k) t, []) alts amap
| Just (ns, sc) <- findFn (sUN "->") alts
= do t' <- t (VV 0) -- we know it's not in scope or it's not a pattern
return $ Just (updateAmap (zip ns [s, t']) amap, sc)
chooseAlt _ _ _ alts amap
| Just v <- findDefault alts
= if (any fnCase alts)
then return $ Just (amap, v)
else return Nothing
| otherwise = return Nothing
fnCase (FnCase _ _ _) = True
fnCase _ = False
-- Replace old variable names in the map with new matches
-- (This is possibly unnecessary since we make unique names and don't
-- allow repeated variables...?)
updateAmap newm amap
= newm ++ filter (\ (x, _) -> not (elem x (map fst newm))) amap
findTag i [] = Nothing
findTag i (ConCase n j ns sc : xs) | i == j = Just (ns, sc)
findTag i (_ : xs) = findTag i xs
findFn fn [] = Nothing
findFn fn (FnCase n ns sc : xs) | fn == n = Just (ns, sc)
findFn fn (_ : xs) = findFn fn xs
findDefault [] = Nothing
findDefault (DefaultCase sc : xs) = Just sc
findDefault (_ : xs) = findDefault xs
findSuc c [] = Nothing
findSuc (BI val) (SucCase n sc : _)
| val /= 0 = Just (n, VConstant (BI (val - 1)), sc)
findSuc c (_ : xs) = findSuc c xs
findConst c [] = Nothing
findConst c (ConstCase c' v : xs) | c == c' = Just v
findConst (AType (ATInt ITNative)) (ConCase n 1 [] v : xs) = Just v
findConst (AType ATFloat) (ConCase n 2 [] v : xs) = Just v
findConst (AType (ATInt ITChar)) (ConCase n 3 [] v : xs) = Just v
findConst StrType (ConCase n 4 [] v : xs) = Just v
findConst (AType (ATInt ITBig)) (ConCase n 6 [] v : xs) = Just v
findConst (AType (ATInt (ITFixed ity))) (ConCase n tag [] v : xs)
| tag == 7 + fromEnum ity = Just v
findConst c (_ : xs) = findConst c xs
getValArgs tm = getValArgs' tm []
getValArgs' (VApp f a) as = getValArgs' f (a:as)
getValArgs' f as = (f, as)
-- tmpToV i vd (VLetHole j) | vd == j = return $ VV i
-- tmpToV i vd (VP nt n v) = liftM (VP nt n) (tmpToV i vd v)
-- tmpToV i vd (VBind n b sc) = do b' <- fmapMB (tmpToV i vd) b
-- let sc' = \x -> do x' <- sc x
-- tmpToV (i + 1) vd x'
-- return (VBind n b' sc')
-- tmpToV i vd (VApp f a) = liftM2 VApp (tmpToV i vd f) (tmpToV i vd a)
-- tmpToV i vd x = return x
-- | Equality on 'Value' is by quoting both sides back to 'TT' terms and
-- comparing those.  This rebuilds both terms in full, so it can be
-- expensive for large values.
instance Eq Value where
    (==) x y = getTT x == getTT y
      where getTT v = evalState (quote 0 v) initEval
class Quote a where
quote :: Int -> a -> Eval (TT Name)
instance Quote Value where
quote i (VP nt n v) = liftM (P nt n) (quote i v)
quote i (VV x) = return $ V x
quote i (VBind _ n b sc) = do sc' <- sc (VTmp i)
b' <- quoteB b
liftM (Bind n b') (quote (i+1) sc')
where quoteB t = fmapMB (quote i) t
quote i (VBLet vd n t v sc)
= do sc' <- quote i sc
t' <- quote i t
v' <- quote i v
let sc'' = pToV (sMN vd "vlet") (addBinder sc')
return (Bind n (Let t' v') sc'')
quote i (VApp f a) = liftM2 (App MaybeHoles) (quote i f) (quote i a)
quote i (VType u) = return (TType u)
quote i (VUType u) = return (UType u)
quote i VErased = return Erased
quote i VImpossible = return Impossible
quote i (VProj v j) = do v' <- quote i v
return (Proj v' j)
quote i (VConstant c) = return $ Constant c
quote i (VTmp x) = return $ V (i - x - 1)
-- | Adjust de Bruijn indices in a 'Value' relative to a cutoff: indices
-- at or above the cutoff @i@ are shifted, and the cutoff grows by one
-- every time we move under a binder.
--
-- NOTE(review): a matched index is always decremented by exactly one
-- (@VV (x - 1)@) irrespective of the 'Int' argument, so despite the
-- general-looking parameter this only weakens by a single level; the
-- argument serves purely as the cutoff.  Confirm before reusing it for
-- arbitrary shifts.
wknV :: Int -> Value -> Eval Value
wknV i (VV x) | x >= i = return $ VV (x - 1)
wknV i (VBind red n b sc) = do b' <- fmapMB (wknV i) b
                               return $ VBind red n b' (\x -> do x' <- sc x
                                                                 wknV (i + 1) x')
wknV i (VApp f a) = liftM2 VApp (wknV i f) (wknV i a)
wknV i t = return t
-- | Is this term a universe, i.e. either a 'TType' or a 'UType'?
isUniverse :: Term -> Bool
isUniverse tm =
    case tm of
      TType _ -> True
      UType _ -> True
      _       -> False
-- | A universe usable as an ordinary type: any universe except
-- @UType NullType@.
isUsableUniverse :: Term -> Bool
isUsableUniverse tm = isUniverse tm && not (isNullUniverse tm)
  where
    isNullUniverse (UType NullType) = True
    isNullUniverse _                = False
convEq' ctxt hs x y = evalStateT (convEq ctxt hs x y) (0, [])
convEq :: Context -> [Name] -> TT Name -> TT Name -> StateT UCs TC Bool
convEq ctxt holes topx topy = ceq [] topx topy where
ceq :: [(Name, Name)] -> TT Name -> TT Name -> StateT UCs TC Bool
ceq ps (P xt x _) (P yt y _)
| x `elem` holes || y `elem` holes = return True
| x == y || (x, y) `elem` ps || (y,x) `elem` ps = return True
| otherwise = sameDefs ps x y
ceq ps x (Bind n (Lam _ t) (App _ y (V 0)))
= ceq ps x (substV (P Bound n t) y)
ceq ps (Bind n (Lam _ t) (App _ x (V 0))) y
= ceq ps (substV (P Bound n t) x) y
ceq ps x (Bind n (Lam _ t) (App _ y (P Bound n' _)))
| n == n' = ceq ps x y
ceq ps (Bind n (Lam _ t) (App _ x (P Bound n' _))) y
| n == n' = ceq ps x y
ceq ps (Bind n (PVar _ t) sc) y = ceq ps sc y
ceq ps x (Bind n (PVar _ t) sc) = ceq ps x sc
ceq ps (Bind n (PVTy t) sc) y = ceq ps sc y
ceq ps x (Bind n (PVTy t) sc) = ceq ps x sc
ceq ps (V x) (V y) = return (x == y)
ceq ps (V x) (P _ y _)
| x >= 0 && length ps > x = return (fst (ps!!x) == y)
| otherwise = return False
ceq ps (P _ x _) (V y)
| y >= 0 && length ps > y = return (x == snd (ps!!y))
| otherwise = return False
ceq ps (Bind n xb xs) (Bind n' yb ys)
= liftM2 (&&) (ceqB ps xb yb) (ceq ((n,n'):ps) xs ys)
where
ceqB ps (Let v t) (Let v' t') = liftM2 (&&) (ceq ps v v') (ceq ps t t')
ceqB ps (Guess v t) (Guess v' t') = liftM2 (&&) (ceq ps v v') (ceq ps t t')
ceqB ps (Pi r i v t) (Pi r' i' v' t') = liftM2 (&&) (ceq ps v v') (ceq ps t t')
ceqB ps b b' = ceq ps (binderTy b) (binderTy b')
-- Special case for 'case' blocks - size of scope causes complications,
-- we only want to check the blocks themselves are valid and identical
-- in the current scope. So, just check the bodies, and the additional
-- arguments the case blocks are applied to.
ceq ps x@(App _ _ _) y@(App _ _ _)
| (P _ cx _, xargs) <- unApply x,
(P _ cy _, yargs) <- unApply y,
caseName cx && caseName cy = sameCase ps cx cy xargs yargs
ceq ps (App _ fx ax) (App _ fy ay) = liftM2 (&&) (ceq ps fx fy) (ceq ps ax ay)
ceq ps (Constant x) (Constant y) = return (x == y)
ceq ps (TType x) (TType y) | x == y = return True
ceq ps (TType (UVal 0)) (TType y) = return True
ceq ps (TType x) (TType y) = do (v, cs) <- get
put (v, ULE x y : cs)
return True
ceq ps (UType AllTypes) x = return (isUsableUniverse x)
ceq ps x (UType AllTypes) = return (isUsableUniverse x)
ceq ps (UType u) (UType v) = return (u == v)
ceq ps Erased _ = return True
ceq ps _ Erased = return True
ceq ps x y = return False
caseeq ps (Case _ n cs) (Case _ n' cs') = caseeqA ((n,n'):ps) cs cs'
where
caseeqA ps (ConCase x i as sc : rest) (ConCase x' i' as' sc' : rest')
= do q1 <- caseeq (zip as as' ++ ps) sc sc'
q2 <- caseeqA ps rest rest'
return $ x == x' && i == i' && q1 && q2
caseeqA ps (ConstCase x sc : rest) (ConstCase x' sc' : rest')
= do q1 <- caseeq ps sc sc'
q2 <- caseeqA ps rest rest'
return $ x == x' && q1 && q2
caseeqA ps (DefaultCase sc : rest) (DefaultCase sc' : rest')
= liftM2 (&&) (caseeq ps sc sc') (caseeqA ps rest rest')
caseeqA ps [] [] = return True
caseeqA ps _ _ = return False
caseeq ps (STerm x) (STerm y) = ceq ps x y
caseeq ps (UnmatchedCase _) (UnmatchedCase _) = return True
caseeq ps _ _ = return False
sameDefs ps x y = case (lookupDef x ctxt, lookupDef y ctxt) of
([Function _ xdef], [Function _ ydef])
-> ceq ((x,y):ps) xdef ydef
([CaseOp _ _ _ _ _ xd],
[CaseOp _ _ _ _ _ yd])
-> let (_, xdef) = cases_compiletime xd
(_, ydef) = cases_compiletime yd in
caseeq ((x,y):ps) xdef ydef
_ -> return False
sameCase :: [(Name, Name)] -> Name -> Name -> [Term] -> [Term] ->
StateT UCs TC Bool
sameCase ps x y xargs yargs
= case (lookupDef x ctxt, lookupDef y ctxt) of
([Function _ xdef], [Function _ ydef])
-> ceq ((x,y):ps) xdef ydef
([CaseOp _ _ _ _ _ xd],
[CaseOp _ _ _ _ _ yd])
-> let (xin, xdef) = cases_compiletime xd
(yin, ydef) = cases_compiletime yd in
do liftM2 (&&)
(do ok <- zipWithM (ceq ps)
(drop (length xin) xargs)
(drop (length yin) yargs)
return (and ok))
(caseeq ((x,y):ps) xdef ydef)
_ -> return False
-- SPECIALISATION -----------------------------------------------------------
-- We need too much control to be able to do this by tweaking the main
-- evaluator
-- | Partial-evaluation / specialisation entry point (see the
-- SPECIALISATION banner above).
-- NOTE(review): deliberately unimplemented -- any call raises via 'error'.
spec :: Context -> Ctxt [Bool] -> Env -> TT Name -> Eval (TT Name)
spec ctxt statics genv tm = error "spec undefined"
-- CONTEXTS -----------------------------------------------------------------
{-| A definition is either a simple function (just an expression with a type),
a constant, which could be a data or type constructor, an axiom or as an
yet undefined function, or an Operator.
An Operator is a function which explains how to reduce.
A CaseOp is a function defined by a simple case tree -}
data Def = Function !Type !Term
| TyDecl NameType !Type
| Operator Type Int ([Value] -> Maybe Value)
| CaseOp CaseInfo
!Type
![(Type, Bool)] -- argument types, whether canonical
![Either Term (Term, Term)] -- original definition
![([Name], Term, Term)] -- simplified for totality check definition
!CaseDefs
deriving Generic
-- [Name] SC -- Compile time case definition
-- [Name] SC -- Run time case definitions
data CaseDefs = CaseDefs {
cases_compiletime :: !([Name], SC),
cases_runtime :: !([Name], SC)
}
deriving Generic
data CaseInfo = CaseInfo {
case_inlinable :: Bool, -- decided by machine
case_alwaysinline :: Bool, -- decided by %inline flag
tc_dictionary :: Bool
}
deriving Generic
{-!
deriving instance Binary Def
!-}
{-!
deriving instance Binary CaseInfo
!-}
{-!
deriving instance Binary CaseDefs
!-}
-- | Debug rendering of definitions; 'CaseOp' shows both the compile-time
-- and run-time case trees plus its inlining status.
instance Show Def where
    show (Function ty tm) = "Function: " ++ show (ty, tm)
    show (TyDecl nt ty) = "TyDecl: " ++ show nt ++ " " ++ show ty
    show (Operator ty _ _) = "Operator: " ++ show ty
    show (CaseOp (CaseInfo inlc inla inlr) ty atys ps_in ps cd)
      = let (ns, sc) = cases_compiletime cd
            (ns', sc') = cases_runtime cd in
          "Case: " ++ show ty ++ " " ++ show ps ++ "\n" ++
            "COMPILE TIME:\n\n" ++
            show ns ++ " " ++ show sc ++ "\n\n" ++
            "RUN TIME:\n\n" ++
            show ns' ++ " " ++ show sc' ++ "\n\n" ++
            -- Bug fix: an if-then-else extends as far right as possible, so
            -- without parentheses the whole " Aggressively\n"/"\n" suffix was
            -- absorbed into the else branch and the output simply ended at
            -- "Inlinable" (no newline) whenever inlc was True.
            (if inlc then "Inlinable" else "Not inlinable") ++
            (if inla then " Aggressively\n" else "\n")
-------
-- Hidden => Programs can't access the name at all
-- Public => Programs can access the name and use at will
-- Frozen => Programs can access the name, which doesn't reduce
-- Private => Programs can't access the name, doesn't reduce internally
data Accessibility = Hidden | Public | Frozen | Private
deriving (Eq, Ord, Generic)
instance Show Accessibility where
show Public = "public export"
show Frozen = "export"
show Private = "private"
show Hidden = "hidden"
type Injectivity = Bool
-- | The result of totality checking
data Totality = Total [Int] -- ^ well-founded arguments
| Productive -- ^ productive
| Partial PReason
| Unchecked
| Generated
deriving (Eq, Generic)
-- | Reasons why a function may not be total
data PReason = Other [Name] | Itself | NotCovering | NotPositive | UseUndef Name
| ExternalIO | BelieveMe | Mutual [Name] | NotProductive
deriving (Show, Eq, Generic)
instance Show Totality where
show (Total args)= "Total" -- ++ show args ++ " decreasing arguments"
show Productive = "Productive" -- ++ show args ++ " decreasing arguments"
show Unchecked = "not yet checked for totality"
show (Partial Itself) = "possibly not total as it is not well founded"
show (Partial NotCovering) = "not total as there are missing cases"
show (Partial NotPositive) = "not strictly positive"
show (Partial ExternalIO) = "an external IO primitive"
show (Partial NotProductive) = "not productive"
show (Partial BelieveMe) = "not total due to use of believe_me in proof"
show (Partial (Other ns)) = "possibly not total due to: " ++ showSep ", " (map show ns)
show (Partial (Mutual ns)) = "possibly not total due to recursive path " ++
showSep " --> " (map show ns)
show (Partial (UseUndef n)) = "possibly not total because it uses the undefined name " ++ show n
show Generated = "auto-generated"
{-!
deriving instance Binary Accessibility
!-}
{-!
deriving instance Binary Totality
!-}
{-!
deriving instance Binary PReason
!-}
-- Possible attached meta-information for a definition in context
data MetaInformation =
EmptyMI -- ^ No meta-information
| DataMI [Int] -- ^ Meta information for a data declaration with position of parameters
deriving (Eq, Show, Generic)
-- | Contexts used for global definitions and for proof state. They contain
-- universe constraints and existing definitions.
-- Also store maximum RigCount of the name (can't bind a name at multiplicity
-- 1 in a RigW, for example)
data Context = MkContext {
next_tvar :: Int,
definitions :: Ctxt TTDecl
} deriving (Show, Generic)
type TTDecl = (Def, RigCount, Injectivity, Accessibility, Totality, MetaInformation)
-- | The initial empty context
initContext = MkContext 0 emptyContext
-- | Apply a transformation to every stored definition in a context,
-- leaving the accompanying metadata (rig count, injectivity,
-- accessibility, totality, meta information) untouched.
mapDefCtxt :: (Def -> Def) -> Context -> Context
mapDefCtxt f (MkContext t !defs) = MkContext t (mapCtxt f' defs)
  -- Bug fix: the original right-hand side was @f' (f d, r, i, a, t, m)@,
  -- i.e. f' recursed on its own result unconditionally, so forcing any
  -- transformed entry diverged.  The updated tuple IS the result.
  where f' (!d, r, i, a, t, m) = (f d, r, i, a, t, m)
-- | Get the definitions from a context
-- | All (name, definition) pairs held in a context, metadata dropped.
ctxtAlist :: Context -> [(Name, Def)]
ctxtAlist ctxt =
    [ (nm, d) | (nm, (d, _, _, _, _, _)) <- toAlist (definitions ctxt) ]
veval ctxt env t = evalState (eval False ctxt [] env t []) initEval
addToCtxt :: Name -> Term -> Type -> Context -> Context
addToCtxt n tm ty uctxt
= let ctxt = definitions uctxt
!ctxt' = addDef n (Function ty tm, RigW, False, Public, Unchecked, EmptyMI) ctxt in
uctxt { definitions = ctxt' }
setAccess :: Name -> Accessibility -> Context -> Context
setAccess n a uctxt
= let ctxt = definitions uctxt
!ctxt' = updateDef n (\ (d, r, i, _, t, m) -> (d, r, i, a, t, m)) ctxt in
uctxt { definitions = ctxt' }
setInjective :: Name -> Injectivity -> Context -> Context
setInjective n i uctxt
= let ctxt = definitions uctxt
!ctxt' = updateDef n (\ (d, r, _, a, t, m) -> (d, r, i, a, t, m)) ctxt in
uctxt { definitions = ctxt' }
setTotal :: Name -> Totality -> Context -> Context
setTotal n t uctxt
= let ctxt = definitions uctxt
!ctxt' = updateDef n (\ (d, r, i, a, _, m) -> (d, r, i, a, t, m)) ctxt in
uctxt { definitions = ctxt' }
setRigCount :: Name -> RigCount -> Context -> Context
setRigCount n rc uctxt
= let ctxt = definitions uctxt
!ctxt' = updateDef n (\ (d, _, i, a, t, m) -> (d, rc, i, a, t, m)) ctxt in
uctxt { definitions = ctxt' }
setMetaInformation :: Name -> MetaInformation -> Context -> Context
setMetaInformation n m uctxt
= let ctxt = definitions uctxt
!ctxt' = updateDef n (\ (d, r, i, a, t, _) -> (d, r, i, a, t, m)) ctxt in
uctxt { definitions = ctxt' }
addCtxtDef :: Name -> Def -> Context -> Context
addCtxtDef n d c = let ctxt = definitions c
!ctxt' = addDef n (d, RigW, False, Public, Unchecked, EmptyMI) $! ctxt in
c { definitions = ctxt' }
addTyDecl :: Name -> NameType -> Type -> Context -> Context
addTyDecl n nt ty uctxt
= let ctxt = definitions uctxt
!ctxt' = addDef n (TyDecl nt ty, RigW, False, Public, Unchecked, EmptyMI) ctxt in
uctxt { definitions = ctxt' }
addDatatype :: Datatype Name -> Context -> Context
addDatatype (Data n tag ty unique cons) uctxt
= let ctxt = definitions uctxt
ty' = normalise uctxt [] ty
!ctxt' = addCons 0 cons (addDef n
(TyDecl (TCon tag (arity ty')) ty, RigW, True, Public, Unchecked, EmptyMI) ctxt) in
uctxt { definitions = ctxt' }
where
addCons tag [] ctxt = ctxt
addCons tag ((n, ty) : cons) ctxt
= let ty' = normalise uctxt [] ty in
addCons (tag+1) cons (addDef n
(TyDecl (DCon tag (arity ty') unique) ty, RigW, True, Public, Unchecked, EmptyMI) ctxt)
-- FIXME: Too many arguments! Refactor all these Bools.
--
-- Issue #1724 on the issue tracker.
-- https://github.com/idris-lang/Idris-dev/issues/1724
addCasedef :: Name -> ErasureInfo -> CaseInfo ->
Bool -> SC -> -- default case
Bool -> Bool ->
[(Type, Bool)] -> -- argument types, whether canonical
[Int] -> -- inaccessible arguments
[Either Term (Term, Term)] ->
[([Name], Term, Term)] -> -- compile time
[([Name], Term, Term)] -> -- run time
Type -> Context -> TC Context
addCasedef n ei ci@(CaseInfo inline alwaysInline tcdict)
tcase covering reflect asserted argtys inacc
ps_in ps_ct ps_rt ty uctxt
= do let ctxt = definitions uctxt
access = case lookupDefAcc n False uctxt of
[(_, acc)] -> acc
_ -> Public
compileTime <- simpleCase tcase covering reflect CompileTime emptyFC inacc argtys ps_ct ei
runtime <- simpleCase tcase covering reflect RunTime emptyFC inacc argtys ps_rt ei
ctxt' <- case (compileTime, runtime) of
( CaseDef args_ct sc_ct _,
CaseDef args_rt sc_rt _) ->
let inl = alwaysInline -- tcdict
inlc = (inl || small n args_ct sc_ct) && (not asserted)
inlr = inl || small n args_rt sc_rt
cdef = CaseDefs (args_ct, sc_ct)
(args_rt, sc_rt)
op = (CaseOp (ci { case_inlinable = inlc })
ty argtys ps_in ps_ct cdef,
RigW, False, access, Unchecked, EmptyMI)
in return $ addDef n op ctxt
-- other -> tfail (Msg $ "Error adding case def: " ++ show other)
return uctxt { definitions = ctxt' }
-- simplify a definition by unfolding interface methods
-- We need this for totality checking, because functions which use interfaces
-- in an implementation definition themselves need to have the implementation
-- inlined or it'll be treated as a higher order function that will potentially
-- loop.
simplifyCasedef :: Name -> [Name] -> [[Name]] -> ErasureInfo -> Context -> TC Context
simplifyCasedef n ufnames umethss ei uctxt
= do let ctxt = definitions uctxt
ctxt' <- case lookupCtxt n ctxt of
[(CaseOp ci ty atys [] ps _, rc, inj, acc, tot, metainf)] ->
return ctxt -- nothing to simplify (or already done...)
[(CaseOp ci ty atys ps_in ps cd, rc, inj, acc, tot, metainf)] ->
do let ps_in' = map simpl ps_in
pdef = map debind ps_in'
CaseDef args sc _ <- simpleCase False (STerm Erased) False CompileTime emptyFC [] atys pdef ei
return $ addDef n (CaseOp ci
ty atys ps_in' ps (cd { cases_compiletime = (args, sc) }),
rc, inj, acc, tot, metainf) ctxt
_ -> return ctxt
return uctxt { definitions = ctxt' }
where
depat acc (Bind n (PVar _ t) sc)
= depat (n : acc) (instantiate (P Bound n t) sc)
depat acc x = (acc, x)
debind (Right (x, y)) = let (vs, x') = depat [] x
(_, y') = depat [] y in
(vs, x', y')
debind (Left x) = let (vs, x') = depat [] x in
(vs, x', Impossible)
simpl (Right (x, y))
= if null ufnames then Right (x, y)
else Right (x, unfold uctxt [] (map (\n -> (n, 1)) (uns y)) y)
simpl t = t
-- Unfold the given name, interface methods, and any function which uses it as
-- an argument directly. This is specifically for finding applications of
-- interface dictionaries and inlining them both for totality checking and for
-- a small performance gain.
uns tm = getNamesToUnfold ufnames umethss tm
getNamesToUnfold :: [Name] -> [[Name]] -> Term -> [Name]
getNamesToUnfold inames ms tm = nub $ inames ++ getNames Nothing tm ++ concat ms
where
getNames under fn@(App _ _ _)
| (f, args) <- unApply fn
= let under' = case f of
P _ fn _ -> Just fn
_ -> Nothing
in
getNames under f ++ concatMap (getNames under') args
getNames (Just under) (P _ ref _)
= if ref `elem` inames then [under] else []
getNames under (Bind n (Let t v) sc)
= getNames Nothing t ++
getNames Nothing v ++
getNames Nothing sc
getNames under (Bind n b sc) = getNames Nothing (binderTy b) ++
getNames Nothing sc
getNames _ _ = []
-- | Register a primitive operator: a name carrying a type, an arity and
-- a Haskell-level reduction function.  It is public, unrestricted and
-- (necessarily) unchecked for totality.
addOperator :: Name -> Type -> Int -> ([Value] -> Maybe Value) ->
               Context -> Context
addOperator n ty a op uctxt =
    uctxt { definitions = defs' }
  where
    defs' = addDef n (Operator ty a op, RigW, False, Public, Unchecked, EmptyMI)
                     (definitions uctxt)
tfst (a, _, _, _, _, _) = a
-- | All fully-qualified names matching a (possibly ambiguous) name.
lookupNames :: Name -> Context -> [Name]
lookupNames n ctxt = fst <$> lookupCtxtName n (definitions ctxt)
-- | Get the list of pairs of fully-qualified names and their types that match some name
-- | Get the list of pairs of fully-qualified names and their types that
-- match some (possibly ambiguous) name.
lookupTyName :: Name -> Context -> [(Name, Type)]
lookupTyName n ctxt =
    [ (nm, typeOf (tfst decl))
    | (nm, decl) <- lookupCtxtName n (definitions ctxt) ]
  where
    -- Every flavour of definition records its type directly.
    typeOf (Function ty _)       = ty
    typeOf (TyDecl _ ty)         = ty
    typeOf (Operator ty _ _)     = ty
    typeOf (CaseOp _ ty _ _ _ _) = ty
-- | Get the pair of a fully-qualified name and its type, if there is a unique one matching the name used as a key.
-- | The (name, type) pair for a name, if it resolves uniquely and exactly.
lookupTyNameExact :: Name -> Context -> Maybe (Name, Type)
lookupTyNameExact n ctxt =
    listToMaybe (filter (\(nm, _) -> nm == n) (lookupTyName n ctxt))
-- | Get the types that match some name
-- | All types recorded for a (possibly ambiguous) name.
lookupTy :: Name -> Context -> [Type]
lookupTy n ctxt = [ ty | (_, ty) <- lookupTyName n ctxt ]
-- | Get the single type that matches some name precisely
-- | The single type for an exactly-matching name, if there is one.
lookupTyExact :: Name -> Context -> Maybe Type
lookupTyExact n ctxt = snd <$> lookupTyNameExact n ctxt
-- | Return true if the given type is a concrete type family or primitive;
-- False if it's a function to compute a type or a variable
-- | A type is canonical when its head is a constructor name or a constant.
isCanonical :: Type -> Context -> Bool
isCanonical t ctxt =
    case fst (unApply t) of
      P _ hd _   -> isConName hd ctxt
      Constant _ -> True
      _          -> False
-- | Is the name a constructor of either kind (type or data)?
isConName :: Name -> Context -> Bool
isConName n ctxt = isTConName n ctxt || isDConName n ctxt
-- | Does the name resolve exactly to a type constructor?
isTConName :: Name -> Context -> Bool
isTConName n ctxt = maybe False isTC (lookupDefExact n ctxt)
  where
    isTC (TyDecl (TCon _ _) _) = True
    isTC _                     = False
-- | Check whether a resolved name is certainly a data constructor
-- | Check whether a resolved name is certainly a data constructor.
isDConName :: Name -> Context -> Bool
isDConName n ctxt = maybe False isDC (lookupDefExact n ctxt)
  where
    isDC (TyDecl (DCon _ _ _) _) = True
    isDC _                       = False
-- | Check whether any overloading of a name is a data constructor
-- | Check whether any overloading of a name is a data constructor.
canBeDConName :: Name -> Context -> Bool
canBeDConName n ctxt =
    any isDConDecl (lookupCtxt n (definitions ctxt))
  where
    isDConDecl decl =
        case tfst decl of
          TyDecl (DCon _ _ _) _ -> True
          _                     -> False
-- | Does the name resolve exactly to something executable: a plain
-- function, a primitive operator, or a case-tree definition?
isFnName :: Name -> Context -> Bool
isFnName n ctxt = maybe False isFn (lookupDefExact n ctxt)
  where
    isFn (Function _ _)       = True
    isFn (Operator _ _ _)     = True
    isFn (CaseOp _ _ _ _ _ _) = True
    isFn _                    = False
-- | Is this name an interface dictionary, as recorded in the 'CaseInfo'
-- of its case-tree definition?  Anything else is not a dictionary.
isTCDict :: Name -> Context -> Bool
isTCDict n ctxt = maybe False dict (lookupDefExact n ctxt)
  where
    dict (CaseOp ci _ _ _ _ _) = tc_dictionary ci
    dict _                     = False
-- Is the name guarded by constructors in the term?
-- We assume the term is normalised, so no looking under 'let' for example.
conGuarded :: Context -> Name -> Term -> Bool
conGuarded ctxt n tm = go tm
  where
    -- A bare occurrence counts; under an application the head must be a
    -- constructor, and we then look for the name among the arguments.
    go (P _ n' _) = n' == n
    go ap@(App _ _ _)
      | (P _ hd _, as) <- unApply ap
      , isConName hd ctxt = any go as
    go _ = False
-- | Look up a name as a reference term, excluding Hidden/Private names
-- and allowing non-exact (qualified) matches; see 'lookupP_all'.
lookupP :: Name -> Context -> [Term]
lookupP = lookupP_all False False
-- | Look up a name as one or more 'P' reference terms, with access
-- control.  The first flag (named @all@ -- note it shadows
-- 'Prelude.all') also returns Hidden and Private names; the second
-- restricts the lookup to exact name matches.
lookupP_all :: Bool -> Bool -> Name -> Context -> [Term]
lookupP_all all exact n ctxt
   = do (n', def) <- names
        p <- case def of
                (Function ty tm, _, inj, a, _, _) -> return (P Ref n' ty, a)
                (TyDecl nt ty, _, _, a, _, _) -> return (P nt n' ty, a)
                (CaseOp _ ty _ _ _ _, _, inj, a, _, _) -> return (P Ref n' ty, a)
                (Operator ty _ _, _, inj, a, _, _) -> return (P Ref n' ty, a)
        -- Hidden/Private results are filtered out unless 'all' was set.
        case snd p of
             Hidden -> if all then return (fst p) else []
             Private -> if all then return (fst p) else []
             _ -> return (fst p)
  where
    names = let ns = lookupCtxtName n (definitions ctxt) in
                if exact
                   then filter (\ (n', d) -> n' == n) ns
                   else ns
-- | The definition for an exactly-matching name, if any.
lookupDefExact :: Name -> Context -> Maybe Def
lookupDefExact n ctxt = fmap tfst (lookupCtxtExact n (definitions ctxt))
-- | All definitions matching a (possibly ambiguous) name.
lookupDef :: Name -> Context -> [Def]
lookupDef n ctxt = map tfst (lookupCtxt n (definitions ctxt))
-- | All (fully-qualified name, definition) pairs matching a name.
lookupNameDef :: Name -> Context -> [(Name, Def)]
lookupNameDef n ctxt =
    [ (nm, tfst decl) | (nm, decl) <- lookupCtxtName n (definitions ctxt) ]
lookupDefAcc :: Name -> Bool -> Context ->
[(Def, Accessibility)]
lookupDefAcc n mkpublic ctxt
= map mkp $ lookupCtxt n (definitions ctxt)
-- io_bind a special case for REPL prettiness
where mkp (d, _, inj, a, _, _) = if mkpublic && (not (n == sUN "io_bind" || n == sUN "io_pure"))
then (d, Public) else (d, a)
lookupDefAccExact :: Name -> Bool -> Context ->
Maybe (Def, Accessibility)
lookupDefAccExact n mkpublic ctxt
= fmap mkp $ lookupCtxtExact n (definitions ctxt)
-- io_bind a special case for REPL prettiness
where mkp (d, _, inj, a, _, _) = if mkpublic && (not (n == sUN "io_bind" || n == sUN "io_pure"))
then (d, Public) else (d, a)
-- | All totality results recorded for a (possibly ambiguous) name.
lookupTotal :: Name -> Context -> [Totality]
lookupTotal n ctxt =
    [ t | (_, _, _, _, t, _) <- lookupCtxt n (definitions ctxt) ]
-- | The totality result for an exactly-matching name, if any.
lookupTotalExact :: Name -> Context -> Maybe Totality
lookupTotalExact n ctxt = fmap totOf (lookupCtxtExact n (definitions ctxt))
  where totOf (_, _, _, _, t, _) = t
-- | Look up the multiplicity entries for a name.
--
-- NOTE(review): despite its name, this returns the 'Totality' field and
-- is declared @[Totality]@ -- it appears to be a copy/paste of
-- 'lookupTotal'.  Compare 'lookupRigCountExact' below, which projects
-- the 'RigCount'.  Left as-is because correcting it changes the public
-- type; check whether any caller depends on the current behaviour.
lookupRigCount :: Name -> Context -> [Totality]
lookupRigCount n ctxt = map mkt $ lookupCtxt n (definitions ctxt)
    where mkt (d, _, inj, a, t, m) = t
-- | The multiplicity (rig count) for an exactly-matching name, if any.
lookupRigCountExact :: Name -> Context -> Maybe RigCount
lookupRigCountExact n ctxt = fmap rigOf (lookupCtxtExact n (definitions ctxt))
  where rigOf (_, rc, _, _, _, _) = rc
-- | The injectivity flag for an exactly-matching name, if any.
lookupInjectiveExact :: Name -> Context -> Maybe Injectivity
lookupInjectiveExact n ctxt = fmap injOf (lookupCtxtExact n (definitions ctxt))
  where injOf (_, _, inj, _, _, _) = inj
-- Assume type is at least in whnfArgs form
-- | Walk the Pi telescope of a type and run 'linearCheckArg' on every
-- unrestricted (RigW) argument type; other multiplicities are skipped.
linearCheck :: Context -> Type -> TC ()
linearCheck ctxt t = go t
  where
    go (Bind bn (Pi RigW _ argTy _) rest)
        = do linearCheckArg ctxt argTy
             go (substV (P Bound bn Erased) rest)
    go (Bind bn (Pi _ _ _ _) rest)
        = go (substV (P Bound bn Erased) rest)
    go _ = return ()
-- | Check that an argument type mentions no name bound at multiplicity
-- one: such names may only appear in a linear binding.
linearCheckArg :: Context -> Type -> TC ()
linearCheckArg ctxt ty = mapM_ checkNameOK (allTTNames ty)
    where
      -- Reject any reference to a Rig1-bound name; anything else
      -- (unrestricted, erased, or not recorded) is acceptable here.
      checkNameOK f
         = case lookupRigCountExact f ctxt of
                Just Rig1 ->
                   tfail $ Msg $ show f ++ " can only appear in a linear binding"
                _ -> return ()
      -- NOTE: an unused local duplicate of 'linearCheck's 'checkArgs'
      -- worker previously lived here; it was dead code and was removed.
-- Check if a name is reducible in the type checker. Partial definitions
-- are not reducible (so treated as a constant)
-- | Is the name reducible during type checking?  Anything not recorded
-- as Partial is reducible; Partial definitions behave like constants.
tcReducible :: Name -> Context -> Bool
tcReducible n ctxt =
    case lookupTotalExact n ctxt of
      Just (Partial _) -> False
      _                -> True
-- | All meta-information entries recorded for a name.
lookupMetaInformation :: Name -> Context -> [MetaInformation]
lookupMetaInformation n ctxt =
    [ m | (_, _, _, _, _, m) <- lookupCtxt n (definitions ctxt) ]
-- | All (fully-qualified name, totality) pairs matching a name.
lookupNameTotal :: Name -> Context -> [(Name, Totality)]
lookupNameTotal n ctxt =
    [ (nm, t) | (nm, (_, _, _, _, t, _)) <- lookupCtxtName n (definitions ctxt) ]
lookupVal :: Name -> Context -> [Value]
lookupVal n ctxt
= do def <- lookupCtxt n (definitions ctxt)
case tfst def of
(Function _ htm) -> return (veval ctxt [] htm)
(TyDecl nt ty) -> return (VP nt n (veval ctxt [] ty))
_ -> []
-- | Find a name in a local environment, returning its de Bruijn index,
-- multiplicity and binder type.
lookupTyEnv :: Name -> Env -> Maybe (Int, RigCount, Type)
lookupTyEnv n env = go 0 env
  where
    go _ [] = Nothing
    go i ((x, r, b) : rest)
      | x == n    = Just (i, r, binderTy b)
      | otherwise = go (i + 1) rest
-- | Create a unique name given context and other existing names
-- | Create a unique name given context and other existing names:
-- keep applying 'nextName' while the candidate clashes with the given
-- list or resolves (uniquely) in the context.
uniqueNameCtxt :: Context -> Name -> [Name] -> Name
uniqueNameCtxt ctxt n hs
    | clashes   = uniqueNameCtxt ctxt (nextName n) hs
    | otherwise = n
  where
    clashes = n `elem` hs || case lookupTy n ctxt of
                                  [_] -> True
                                  _   -> False
-- | Rename every binder in a term so it clashes neither with the given
-- names nor with anything resolvable in the context.
uniqueBindersCtxt :: Context -> [Name] -> TT Name -> TT Name
uniqueBindersCtxt ctxt = go
  where
    go ns (Bind n b sc) =
        let fresh = uniqueNameCtxt ctxt n ns
        in Bind fresh (fmap (go (fresh : ns)) b) (go ns sc)
    go ns (App s f a) = App s (go ns f) (go ns a)
    go _  t           = t
|
jmitchell/Idris-dev
|
src/Idris/Core/Evaluate.hs
|
Haskell
|
bsd-3-clause
| 56,761
|
{-# LANGUAGE TemplateHaskell #-}
module Main (
main
) where
import Test.Tasty
import Test.Tasty.QuickCheck
import System.Exit
import qualified Data.Schema.Sql.ScriptParseTest as SPT (testGroup)
-- | Test-suite entry point: run the aggregated Tasty tree.
-- (Fix: the top-level binding previously had no type signature.)
main :: IO ()
main = defaultMain tests

-- | All test groups of the suite under a single root group.
tests :: TestTree
tests =
  testGroup "All Tests" [
    SPT.testGroup
  ]
|
proegssilb/git-sql
|
test/MainTestSuite.hs
|
Haskell
|
bsd-3-clause
| 329
|
-- |
module X12.Tokens where
import Data.Text
-- | A lexical token for one X12 element; composite and repeated
-- elements carry the tokens of their sub-elements.
data ElementToken = SimpleElementToken Text
                  | ComponentElementToken Text
                  | CompositeElementToken [ElementToken]
                  | RepeatedElementToken [ElementToken]
  deriving (Eq, Show)

-- | A tokenized X12 segment: its segment identifier plus the tokens of
-- its elements.
data SegmentToken = SegmentToken { segmentTokenId :: Text
                                 , elementTokens :: [ElementToken]
                                 }
  deriving (Eq, Show)
|
alexkyllo/xtwelve
|
src/X12/Tokens.hs
|
Haskell
|
bsd-3-clause
| 490
|
{-# LANGUAGE ScopedTypeVariables #-}
module GhcUtilsSpec (main, spec) where
import Test.Hspec
import TestUtils
import qualified GHC as GHC
import qualified Data.Generics as SYB
import qualified GHC.SYB.Utils as SYB
import Language.Haskell.GHC.ExactPrint.Utils
import Language.Haskell.Refact.Utils.Binds
import Language.Haskell.Refact.Utils.GhcUtils
import Language.Haskell.Refact.Utils.GhcVersionSpecific
import Language.Haskell.Refact.Utils.Monad
import Language.Haskell.Refact.Utils.MonadFunctions
import Language.Haskell.Refact.Utils.TypeUtils
import Language.Haskell.Refact.Utils.Variables
-- import TestUtils
-- ---------------------------------------------------------------------
-- | Run the spec directly (equivalent to the original single-statement
-- do block).
main :: IO ()
main = hspec spec
-- | Specs for 'onelayerStaged', a SYB traversal that applies a query
-- to the immediate children of a node only.
spec :: Spec
spec = do
  describe "onelayerStaged" $ do
    it "only descends one layer into a structure" $ do
      -- let s = ([2,1,3,4,5],[6,7,8]) :: ([Int],[Int])
      let s' = (2,[3,4],5) :: (Int,[Int],Int)
      let -- worker (i :: Int)
          --  | i == 2 = ["f"]
          -- worker _ = []
          worker' (i::Int) = [i]
          -- worker'' (i::[Int]) = [head i]
      -- Compare the one-layer query against the raw gmapQ equivalents:
      -- the middle element is a list, so the Int query misses it.
      let g = onelayerStaged SYB.Renamer [] ([] `SYB.mkQ` worker') s'
      let g1 = SYB.gmapQ ([] `SYB.mkQ` worker') s'
      let g2 = SYB.gmapQl (++) [] ([] `SYB.mkQ` worker') s'
      (show g) `shouldBe` "[[2],[],[5]]"
      (show g1) `shouldBe` "[[2],[],[5]]"
      (show g2) `shouldBe` "[2,5]"
    -- ---------------------------------
    it "Finds a GHC.Name at top level only" $ do
      (t, _toks, tgt) <- ct $ parsedFileGhc "./DupDef/Dd1.hs"
      let
        comp = do
          -- (t, toks) <- parseSourceFileTest "./test/testdata/DupDef/Dd1.hs"
          -- putParsedModule t toks
          renamed <- getRefactRenamed
          -- Names at (4,1) and (4,10) of the renamed module.
          let mn = locToName (4,1) renamed
          let (Just (ln@(GHC.L _ n))) = mn
          let mx = locToName (4,10) renamed
          let (Just (lx@(GHC.L _ x))) = mx
          let declsr = hsBinds renamed
              duplicatedDecls = definingDeclsNames [n] declsr True False
              res = findEntity ln duplicatedDecls
              res2 = findEntity n duplicatedDecls
              resx = findEntity lx duplicatedDecls
              resx2 = findEntity x duplicatedDecls
              worker (nn::GHC.Name) = [showGhc nn]
              g = onelayerStaged SYB.Renamer ["-1"] (["-10"] `SYB.mkQ` worker) duplicatedDecls
              -- Matches only the FunBind whose bound name equals n;
              -- the fall-through equation yields [].
              worker2 ((GHC.L _ (GHC.FunBind (GHC.L _ n') _ _ _ _ _))::GHC.Located (GHC.HsBind GHC.Name))
                | n == n' = ["found"]
              worker2 _ = []
              g2 = onelayerStaged SYB.Renamer ["-1"] (["-10"] `SYB.mkQ` worker2) duplicatedDecls
          return (res,res2,resx,resx2,duplicatedDecls,g,g2,ln,lx)
      -- ((r,r2,rx,rx2,d,gg,gg2,_l,_x),_s) <- runRefactGhcState comp
      ((r,r2,rx,rx2,d,gg,gg2,_l,_x),_s) <- runRefactGhc comp tgt (initialState { rsModule = initRefactModule t }) testOptions
      -- (SYB.showData SYB.Renamer 0 d) `shouldBe` ""
      (showGhcQual d) `shouldBe` "[DupDef.Dd1.toplevel x = DupDef.Dd1.c GHC.Num.* x]"
      (showGhcQual _l) `shouldBe` "DupDef.Dd1.toplevel"
      (showGhc _x) `shouldBe` "x"
      (show gg) `shouldBe` "[[\"-10\"],[\"-10\"]]"
      (show gg2) `shouldBe` "[[\"found\"],[\"-10\"]]"
      r `shouldBe` True
      r2 `shouldBe` True
      rx `shouldBe` False
      rx2 `shouldBe` True
-- ---------------------------------------------------------------------
|
mpickering/HaRe
|
test/GhcUtilsSpec.hs
|
Haskell
|
bsd-3-clause
| 3,442
|
--------------------------------------------------------------------------------
-- |
-- Module : Graphics.Rendering.OpenGL.Raw.INTEL.ParallelArrays
-- Copyright : (c) Sven Panne 2015
-- License : BSD3
--
-- Maintainer : Sven Panne <svenpanne@gmail.com>
-- Stability : stable
-- Portability : portable
--
-- The <https://www.opengl.org/registry/specs/INTEL/parallel_arrays.txt INTEL_parallel_arrays> extension.
--
--------------------------------------------------------------------------------
module Graphics.Rendering.OpenGL.Raw.INTEL.ParallelArrays (
-- * Enums
gl_COLOR_ARRAY_PARALLEL_POINTERS_INTEL,
gl_NORMAL_ARRAY_PARALLEL_POINTERS_INTEL,
gl_PARALLEL_ARRAYS_INTEL,
gl_TEXTURE_COORD_ARRAY_PARALLEL_POINTERS_INTEL,
gl_VERTEX_ARRAY_PARALLEL_POINTERS_INTEL,
-- * Functions
glColorPointervINTEL,
glNormalPointervINTEL,
glTexCoordPointervINTEL,
glVertexPointervINTEL
) where
import Graphics.Rendering.OpenGL.Raw.Tokens
import Graphics.Rendering.OpenGL.Raw.Functions
|
phaazon/OpenGLRaw
|
src/Graphics/Rendering/OpenGL/Raw/INTEL/ParallelArrays.hs
|
Haskell
|
bsd-3-clause
| 1,012
|
module Board.MoveGen where
import Data.Bits
import Data.Char
import Data.List (foldl')
import Data.Word
import Numeric
import Text.Printf
import Utils
import qualified Data.Vector.Unboxed as V
{-
noWe nort noEa
+7 +8 +9
\ | /
west -1 <- 0 -> +1 east
/ | \
-9 -8 -7
soWe sout soEa
-}
{-
Board indexing:
y (row/rank)
8 | 56 57 58 59 60 61 62 63
7 | 48 49 50 51 52 53 54 55
6 | 40 41 42 43 44 45 46 47
5 | 32 33 34 35 36 37 38 39
4 | 24 25 26 27 28 29 30 31
3 | 16 17 18 19 20 21 22 23
2 | 8 9 10 11 12 13 14 15
1 | 0 1 2 3 4 5 6 7
----------------------------
| 1 2 3 4 5 6 7 8 -- x (col/file)
| A B C D E F G H
,
0x02824222120a0700
-}
-- | De Bruijn lookup table for 'bitScanForward' (see the quoted
-- Kim Walisch reference below): maps the 6-bit product hash to the
-- index of the least significant set bit.
index64 :: V.Vector Int
index64 = V.fromList [
    0, 47,  1, 56, 48, 27,  2, 60,
   57, 49, 41, 37, 28, 16,  3, 61,
   54, 58, 35, 52, 50, 42, 21, 44,
   38, 32, 29, 23, 17, 11,  4, 62,
   46, 55, 26, 59, 40, 36, 15, 53,
   34, 51, 20, 43, 31, 22, 10, 45,
   25, 39, 14, 33, 19, 30,  9, 24,
   13, 18,  8, 12,  7,  6,  5, 63
  ]
-- /**
-- * bitScanForward
-- * @author Kim Walisch (2012)
-- * @param bb bitboard to scan
-- * @precondition bb != 0
-- * @return index (0..63) of least significant one bit
-- */
-- int bitScanForward(U64 bb) {
-- const U64 debruijn64 = C64(0x03f79d71b4cb0a89);
-- assert (bb != 0);
-- return index64[((bb ^ (bb-1)) * debruijn64) >> 58];
-- }
-- | Index (0..63) of the least significant set bit, via the De Bruijn
-- multiplication documented in the comment block above.
-- Precondition (per that reference): @bb /= 0@.
bitScanForward :: Word64 -> Int
bitScanForward bb = V.unsafeIndex index64 (fromIntegral slot)
  where
    debruijn64 = 0x03f79d71b4cb0a89
    slot = ((bb `xor` (bb - 1)) * debruijn64) `shiftR` 58
-- | Sliding moves from square @s@ in direction @d@ given occupancy
-- @occ@: take the full precomputed ray, locate the first blocker, and
-- xor away the portion of the ray beyond (and including rays past) it.
-- NOTE(review): when the ray is unobstructed, @obstacles@ is 0 and
-- 'bitScanForward' is called outside its documented @bb /= 0@
-- precondition — confirm the table slot it then selects (a zero entry)
-- is the intended sentinel.
moves :: Word64 -> Square -> Dir -> Word64
moves occ s d =
  let aix = attackIndex s d
      attack = V.unsafeIndex attacks aix
      obstacles = occ .&. attack
      firstObstacle = bitScanForward obstacles
      aix2 = attackIndex firstObstacle d
      attack2 = V.unsafeIndex attacks aix2
  in attack `xor` attack2
-- | Sample occupancy bitboard: bits 24-31 set, i.e. rank 4 fully
-- occupied per the indexing diagram above.
occupancy :: Word64
occupancy = 0x00000000FF000000
-- | Isolate the least significant set bit using the two's-complement
-- identity @x .&. (-x)@; yields 0 for 0.
ls1b :: Word64 -> Word64
ls1b x = x .&. negate x
-- | Precomputed ray bitboards, generated from 'attackRay' by
-- 'printTable': slots 0-63 hold the positive (upward) rays per square,
-- slots 64-127 the negative (downward) rays — see 'attackIndex'.
attacks :: V.Vector Word64
attacks = V.fromList
  [0x8141211109050300,
   0x02824222120a0700,
   0x0404844424150e00,
   0x08080888492a1c00,
   0x1010101192543800,
   0x2020212224a87000,
   0x404142444850e000,
   0x8182848890a0c000,
   0x4121110905030000,
   0x824222120a070000,
   0x04844424150e0000,
   0x080888492a1c0000,
   0x1010119254380000,
   0x20212224a8700000,
   0x4142444850e00000,
   0x82848890a0c00000,
   0x2111090503000000,
   0x4222120a07000000,
   0x844424150e000000,
   0x0888492a1c000000,
   0x1011925438000000,
   0x212224a870000000,
   0x42444850e0000000,
   0x848890a0c0000000,
   0x1109050300000000,
   0x22120a0700000000,
   0x4424150e00000000,
   0x88492a1c00000000,
   0x1192543800000000,
   0x2224a87000000000,
   0x444850e000000000,
   0x8890a0c000000000,
   0x0905030000000000,
   0x120a070000000000,
   0x24150e0000000000,
   0x492a1c0000000000,
   0x9254380000000000,
   0x24a8700000000000,
   0x4850e00000000000,
   0x90a0c00000000000,
   0x0503000000000000,
   0x0a07000000000000,
   0x150e000000000000,
   0x2a1c000000000000,
   0x5438000000000000,
   0xa870000000000000,
   0x50e0000000000000,
   0xa0c0000000000000,
   0x0300000000000000,
   0x0700000000000000,
   0x0e00000000000000,
   0x1c00000000000000,
   0x3800000000000000,
   0x7000000000000000,
   0xe000000000000000,
   0xc000000000000000,
   -- Rank 8 has no upward rays: slots 56-63 are zero; likewise the
   -- first rank of the negative half (slots 64-71) below.
   0x0000000000000000,
   0x0000000000000000,
   0x0000000000000000,
   0x0000000000000000,
   0x0000000000000000,
   0x0000000000000000,
   0x0000000000000000,
   0x0000000000000000,
   0x0000000000000000,
   0x0000000000000000,
   0x0000000000000000,
   0x0000000000000000,
   0x0000000000000000,
   0x0000000000000000,
   0x0000000000000000,
   0x0000000000000000,
   0x0000000000000003,
   0x0000000000000007,
   0x000000000000000e,
   0x000000000000001c,
   0x0000000000000038,
   0x0000000000000070,
   0x00000000000000e0,
   0x00000000000000c0,
   0x0000000000000305,
   0x000000000000070a,
   0x0000000000000e15,
   0x0000000000001c2a,
   0x0000000000003854,
   0x00000000000070a8,
   0x000000000000e050,
   0x000000000000c0a0,
   0x0000000000030509,
   0x0000000000070a12,
   0x00000000000e1524,
   0x00000000001c2a49,
   0x0000000000385492,
   0x000000000070a824,
   0x0000000000e05048,
   0x0000000000c0a090,
   0x0000000003050911,
   0x00000000070a1222,
   0x000000000e152444,
   0x000000001c2a4988,
   0x0000000038549211,
   0x0000000070a82422,
   0x00000000e0504844,
   0x00000000c0a09088,
   0x0000000305091121,
   0x000000070a122242,
   0x0000000e15244484,
   0x0000001c2a498808,
   0x0000003854921110,
   0x00000070a8242221,
   0x000000e050484442,
   0x000000c0a0908884,
   0x0000030509112141,
   0x0000070a12224282,
   0x00000e1524448404,
   0x00001c2a49880808,
   0x0000385492111010,
   0x000070a824222120,
   0x0000e05048444241,
   0x0000c0a090888482,
   0x0003050911214181,
   0x00070a1222428202,
   0x000e152444840404,
   0x001c2a4988080808,
   0x0038549211101010,
   0x0070a82422212020,
   0x00e0504844424140,
   0x00c0a09088848281]
-- | Print a bitboard as an 8x8 grid of binary digits.  The value is
-- zero-padded to 64 digits, chunked into rows of 8 (via the project
-- helper @groupIn@ from Utils — presumably a fixed-size chunker), and
-- each row reversed so file A appears on the left with rank 8 on top.
display :: Word64 -> IO ()
display x =
  mapM_ (putStrLn . reverse) $
  groupIn 8 $
  printf "%064s" $
  showIntAtBase 2 intToDigit x ""
-- | Index into 'attacks': positive rays occupy slots 0-63, negative
-- rays slots 64-127.
attackIndex :: Square -> Dir -> Int
attackIndex s d
  | d == Pos  = s
  | otherwise = 64 + s
-- | Emit the source of the 'attacks' table: one @(index, ray)@ line
-- per square in each direction, positive rays first.
-- (Fixes: adds the missing type signature, and renames the local
-- binding that shadowed the top-level 'ix'.)
printTable :: IO ()
printTable = mapM_ putStrLn
  [ line
  | d <- [Pos, Neg]
  , s <- [0..63]
  , let w64 = t s d
  , let slot = attackIndex s d
  , let line = printf "(%d, 0x%016x)," slot w64
  ]
-- | Convert 1-based (file, rank) coordinates to a 0-63 square index
-- (see the indexing diagram above).
ix :: (Int, Int) -> Int
ix (col, row) = 8 * (row - 1) + (col - 1)
-- | Inverse of 'ix': convert a 0-63 square index back to 1-based
-- (file, rank) coordinates.
cix :: Int -> (Int, Int)
cix i = (r + 1, q + 1)
  where
    (q, r) = i `quotRem` 8
-- | Ray direction: 'Pos' steps up the board (towards rank 8),
-- 'Neg' steps down (towards rank 1); see 'attackRay'.
data Dir = Pos | Neg deriving (Eq)
-- | A 0-63 board square index (see the indexing diagram above).
type Square = Int
-- | Bitboard of the full attack ray from a square in a direction.
t :: Square -> Dir -> Word64
t s d = ray2word64 (attackRay s d)
-- | Pack a list of square indices into a bitboard.
-- (Fix: strict 'foldl'' instead of lazy 'foldl' so the accumulator is
-- not built up as a chain of thunks.)
ray2word64 :: [Square] -> Word64
ray2word64 = foldl' setBit 0
-- True = up
-- | All squares reachable from @z@ in direction @p@ along the three
-- rays of that half (diagonal left, straight, diagonal right), each
-- clipped at the board edge by the @min@/range bounds.
attackRay :: Square -> Dir -> [Square]
attackRay z p =
  let (x,y) = cix z
  in map ix $ case p of
       Pos ->
         [ (x-i,y+i) | i <- [1 .. min (x-1) (8-y)] ] -- left up
         ++ [ (x, y+i) | i <- [1 .. 8-y] ] -- straight up
         ++ [ (x+i,y+i) | i <- [1 .. min (8-x) (8-y)] ] -- right up
       Neg ->
         [ (x-i,y-i) | i <- [1 .. min (x-1) (y-1)] ] -- left down
         ++ [ (x, y-i) | i <- [1 .. y-1] ] -- straight down
         ++ [ (x+i,y-i) | i <- [1 .. min (8-x) (y-1)] ] -- right down
-- | Squares visited by repeatedly applying @step@ from @sq@ until the
-- index leaves the 0-63 range, excluding the starting square itself.
-- NOTE(review): only the numeric range is checked, so diagonal steps
-- (e.g. +7/+9) can wrap across board edges — same as the original.
ray :: (Int -> Int) -> Bool -> Int -> [Int]
ray step increasing sq = drop 1 (takeWhile onBoard (iterate step sq))
  where
    onBoard = if increasing then (< 64) else (>= 0)
-- | Compass rays built from 'ray': positive steps move up the board,
-- negative steps down.  They inherit 'ray''s range-only bounds check.
-- (Fix: adds the previously missing type signatures.)
north, nw, ne, south, sw, se :: Int -> [Int]
north = ray (+8) True
nw    = ray (+7) True
ne    = ray (+9) True
south = ray (\x -> x - 8) False
sw    = ray (\x -> x - 9) False
se    = ray (\x -> x - 7) False
|
sphynx/hamisado
|
Board/MoveGen.hs
|
Haskell
|
bsd-3-clause
| 6,543
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell #-}
{- |
Module : Kiosk.Backend.Data.ReportTemplate
Description : Render a Report Template from a Form and a list of DataTemplates
Copyright : Plow Technologies LLC
License : MIT License
Maintainer : Scott Murphy
Stability : experimental
Portability : portable
Data Templates and Form Helpers for making ReportTemplates
-}
module Kiosk.Backend.Data.ReportTemplate where
import Codec.Xlsx (Cell(..), CellMap, CellValue(..), def, cellValue, wsCells, Worksheet(..))
import Control.Applicative ((<$>), (<*>))
import Control.Lens
import Data.Map (Map)
import qualified Data.Map.Lazy as M
import Data.Maybe
import Data.Monoid
import Data.Text (Text)
import qualified Data.Text as T
import Data.Time
import Kiosk.Backend.Data.DataTemplate
-- import Kiosk.Backend.Data.DataTemplateEntry
import Kiosk.Backend.Form
import ReportTemplate.Report
type KioskReportTemplate context preOut rowOut= ReportTemplate context Form preOut DataTemplate rowOut
makeLenses ''Company
makeLenses ''Report
makePrisms ''ReportTable
makeLenses ''ReportTableRowStyle
makeLenses ''ReportPreamble
makePrisms ''InputType
makeLenses ''InputText
makeLenses ''InputDouble
makeLenses ''InputDate
-- | Kiosk Specific
type KioskPreambleTemplateList context preOut= [(ReportPreambleLabel, context -> Form -> preOut)]
type KioskRowTemplateList context rowOut = [(ReportRowLabel, context -> DataTemplate -> rowOut)]
type KioskPreambleTemplate context preOut= ReportPreambleTemplate context Form preOut
type KioskRowTemplate context rowOut = ReportRowTemplate context DataTemplate rowOut
-- | Spreadsheet specific
data XlsxContext = XlsxContext {
_xlsxCurrentTime :: String}
type XlsxReportTemplate = KioskReportTemplate XlsxContext CellMap Cell
type XlsxPreambleTemplateList = KioskPreambleTemplateList XlsxContext CellMap
type XlsxRowTemplateList = KioskRowTemplateList XlsxContext Cell
type XlsxReport = Report CellMap Cell
type XlsxPreamble = ReportPreamble CellMap
type XlsxTable = ReportTable Cell
-- | Excel Form Rendering Helper Functions
-- Because the excel preamble is a full cell map
-- | Render the form's company name as a one-cell 'CellMap' at @key@.
getCompanyName :: (Int,Int) -> Form -> CellMap
getCompanyName key form = makeCellMapFromText key companyName
  where
    companyName = form ^. getCompany.getCompanyText
-- | A single-entry 'CellMap' holding the given text at @key@.
makeCellMapFromText :: (Int,Int) -> Text -> CellMap
makeCellMapFromText key txt =
    M.singleton key (def & cellValue .~ Just (CellText txt))
-- | Format a 'UTCTime' with the given 'formatTime' format string
-- (under 'defaultTimeLocale') and place it as a one-cell text map.
makeCellMapFromUTCTime :: String -> (Int, Int) -> UTCTime -> CellMap
makeCellMapFromUTCTime timeFormatString key = makeCellMapFromText key .
                                              T.pack .
                                              formatTime defaultTimeLocale
                                                         timeFormatString
-- | Row Rendering Helper Functions
-- | Retrieve Cell Data
-- | Extract a labelled double input and render it as a 'CellDouble'.
makeCellDoubleFromInputDouble :: Text -> DataTemplate -> Cell
makeCellDoubleFromInputDouble = makeCellValueFromDataTemplate CellDouble inputDoubleLens
  where
    inputDoubleLens = _InputTypeDouble.getInputDouble

-- | Look up each label in @txts@ (a missing label becomes @""@, via
-- the list applicative over @txts@) and combine the texts with
-- @templateFcn@ into a single text cell.
makeCellTextWithCellTemplate :: ([Text] -> Text )
                             -> [Text] -> DataTemplate -> Cell
makeCellTextWithCellTemplate templateFcn txts dte = def & cellValue ?~ cellVal
  where
    cellVal = CellText . templateFcn $ targetTextList
    inputTextLens = _InputTypeText.getInputText
    targetTextList :: [Text]
    targetTextList = fromMaybe "" <$> (getInputTypeByLabel inputTextLens
                                       <$> txts
                                       <*> [dte])

-- | Like 'makeCellTextWithCellTemplate' but the combiner may fail:
-- a 'Left' renders as a text cell, a 'Right' as a double cell.
makeCellDoubleWithCellTemplate :: ([Text] -> Either Text Double )
                               -> [Text] -> DataTemplate -> Cell
makeCellDoubleWithCellTemplate templateFcn txts dte = def & cellValue ?~ cellVal
  where
    cellVal = either CellText CellDouble $ templateFcn $ targetTextList
    inputTextLens = _InputTypeText.getInputText
    targetTextList :: [Text]
    targetTextList = fromMaybe "" <$> (getInputTypeByLabel inputTextLens
                                       <$> txts
                                       <*> [dte])

-- | Extract a labelled text input and render it as a 'CellText'.
makeCellTextFromInputText :: Text -> DataTemplate -> Cell
makeCellTextFromInputText = makeCellValueFromDataTemplate CellText inputTextLens
  where
    inputTextLens = _InputTypeText.getInputText

-- | Extract a labelled date input and render it as a 'CellText';
-- the cell carries no value when the label is absent.
makeCellTextFromInputDate :: Text -> DataTemplate -> Cell
makeCellTextFromInputDate l dte = def & cellValue .~ maybeCellValue
  where
    maybeInputDate = getInputTypeByLabel inputLens l$ dte
    maybeCellValue = CellText <$> maybeInputDate
    inputLens = _InputTypeDate . getInputDate
-- | Shared extraction helper: pull the value selected by the fold out
-- of the template item labelled @l@ (if any), wrap it with
-- @cellConstructor@, and build a cell; an absent label yields a cell
-- with no value.
makeCellValueFromDataTemplate ::
     (s -> CellValue)
  -> Getting (First s) InputType s -> Text -> DataTemplate -> Cell
makeCellValueFromDataTemplate cellConstructor lensDt l dt = outputCell
  where
    maybeCellValue :: Maybe CellValue
    maybeCellValue = cellConstructor <$> (getInputTypeByLabel lensDt l $ dt)
    outputCell :: Cell
    outputCell = def & cellValue .~ maybeCellValue
-- | Project the first template item whose label matches @lbl@ through
-- the supplied fold; 'Nothing' when no item matches.
getInputTypeByLabel ::
  Getting (First a) InputType a -> Text -> DataTemplate -> Maybe a
getInputTypeByLabel lensDt lbl dt = listToMaybe matches
  where
    matches = mapMaybe (getItemMatchingLabel lbl lensDt) (templateItems dt)
-- | Project a template item through the fold when its label equals
-- @l@; 'Nothing' on a label mismatch or when the fold has no target.
getItemMatchingLabel
  :: Text
  -> Getting (First a) InputType a
  -> TemplateItem
  -> Maybe a
getItemMatchingLabel l dtLens (TemplateItem lbl inVal)
  | l == lbl  = inVal ^? dtLens
  | otherwise = Nothing
-- | Build 'Report' from 'ReportTemplate'
-- | Render an Xlsx report from a template, a context, the form and
-- the data-template rows (thin wrapper over 'renderReport').
buildXlsxReport :: XlsxReportTemplate -> XlsxContext ->
                   Form ->
                   [DataTemplate] -> XlsxReport
buildXlsxReport template context form rows =
    renderReport template context form rows
-- | Create Excel Spreadsheet
-- | Render Spreadsheet from report
-- | Lay a rendered report out as a worksheet: preamble cell maps on
-- top, a row of column labels at row @preambleOffset@, and one sheet
-- row per report row below it.
renderSpreadsheet :: XlsxReport -> Worksheet
renderSpreadsheet report = def & wsCells .~ combinedMap
  where
    combinedMap :: CellMap
    combinedMap = M.unions (preambleMapList ++ [labelCellMap] ++ rowMapList)
    preambleOffset = 10
    preambleMapList :: [CellMap]
    preambleMapList = toListOf (reportPreamble.preambleValue.folded._2) report
    -- Column number for every row label, in first-seen order.
    labelToIntMap :: Map ReportRowLabel Int
    labelToIntMap = M.fromList . zip (report ^. (reportRows . _ReportTableRowIndex . _1 ) ) $ [1..]
    rowMapList :: [CellMap]
    rowMapList = foldrTableByRowWithIndex transformPositionAndMap M.empty <$>
                 (toListOf (reportRows._ReportTableRowIndex._2) report)
    -- Place a cell at (row + offset, label column); cells whose label
    -- is unknown are dropped.
    transformPositionAndMap :: (Int,String) -> Cell -> CellMap -> CellMap
    transformPositionAndMap (rowInt,label') rowVal rowMap' = case M.lookup label' labelToIntMap of
      Nothing -> rowMap'
      (Just i) -> M.insert (rowInt + preambleOffset , i) rowVal rowMap'
    labelCellMap = M.foldrWithKey (\label' idx m -> M.insert (preambleOffset,idx) (convertText label') m )
                                  M.empty
                                  labelToIntMap
    convertText label' = def & cellValue .~ (Just . CellText . T.pack $ label')
|
plow-technologies/cobalt-kiosk-data-template
|
src/Kiosk/Backend/Data/ReportTemplate.hs
|
Haskell
|
bsd-3-clause
| 7,767
|
{-# LANGUAGE OverloadedStrings #-}
module Main where
import CNC.FanucMacro
import CNC.HCode
import CNC.GInterpreter
import CNC.GParser
import CNC.AwePrelude
--import Prelude(Num(..), Fractional(..), Floating(..), Int, ($), id, putStrLn, (++), Just)
import System.Environment
-- | Parse an ISO G-code file, simulate its moves, and return the
-- program statistics; a parse failure is reported and the action
-- fails.
evaluateIsoFile :: FilePath -> IO ProgramStatistics
evaluateIsoFile file = do
  parsed <- parseIsoFile file
  -- print parsed
  case parsed of
    Right iso -> do
      prog_trace <- gcodeToMoves iso
      return $ gcode_stats prog_trace
    Left err -> do putStrLn $ "Error parsing: " ++ show err
                   -- NOTE(review): @show err@ above suggests @err@ is
                   -- not a String, in which case @fail err@ would not
                   -- typecheck — confirm parseIsoFile's error type
                   -- (likely should be @fail (show err)@).
                   fail err
-- | Evaluate the ISO file named by the single command-line argument
-- and print its statistics.  The pattern match deliberately aborts
-- with a pattern-match failure unless exactly one argument is given.
-- (Fix: adds the previously missing type signature.)
main :: IO ()
main = do
  [file] <- getArgs
  stats <- evaluateIsoFile file
  print stats
|
akamaus/gcodec
|
src/CNC/GEmulator.hs
|
Haskell
|
bsd-3-clause
| 680
|
{-# LANGUAGE OverloadedStrings, DoAndIfThenElse #-}
-- |
-- Command line utility, this is not meant to be used as a library.
--
-- To use as a library see the README or use this as an example of how
-- to combine the caching backend, request system, and parser/rule
-- checker.
module Main where
import Control.Applicative
import Control.Monad.Error
import Control.Exception.Base (bracket)
import Data.Grob.Types
import Data.Grob.Acid
import Data.Grob.Attoparsec
import Data.ConfigFile as C
import Data.Acid (AcidState, openLocalStateFrom)
import Data.Acid.Local (createCheckpointAndClose)
import Data.Acid.Advanced (update', query')
import Data.ByteString.Char8 as CB hiding (filter, null, any)
import Data.Time (UTCTime, getCurrentTime)
import Data.Maybe
import Data.List (stripPrefix)
import Data.List.Utils (endswith)
import Network.HTTP.Robots
import Network.URI as U hiding (path)
import System.Log.Handler.Color
import System.Log.Logger
import System.Console.CmdArgs
import System.Exit
import System.Posix.Env (getEnv)
import OpenSSL
-- | cmdargs specification: three positional arguments (robots.txt
-- location, user agent, resource URI), two flags (-n no-cache,
-- -a explicit allow check), and a config path defaulting to
-- @~/.grobrc@.
rargs :: Grob
rargs = Args
  {
    argrobot = def &= argPos 0 &= typ "ROBOT.TXT",
    argagent = def &= argPos 1 &= typ "USERAGENT",
    argresource = def &= argPos 2 &= typ "URI",
    argnocache = def &= explicit &= name "n" &= help "Override HTTP cache headers to not cache robots.txt",
    argallowed = def &= explicit &= name "a" &= help "Explicitly check if allowed",
    config = "~/.grobrc" &= typFile &= groupname "Options" &= help "Specify config file to use"
  } &=
  verbosity &=
  help "Parser and rule checker for robots.txt's" &=
  helpArg [name "h"] &=
  summary "grob v0.1.0" &=
  noAtExpand &=
  details ["grob is a robots.txt request, parser, and rule checker library and binary.",
           "",
           "Default configuration file is in \"${HOME}/.grobrc\", use --config=[File] to specify a custom one."]
-- | @main@ begin every invocation in a `withOpenSSL` computation in
-- the event we try to request a robots.txt resource behind
-- HTTPS. Parse the cmdargs and pass into `parseConfig` to generate
-- initial application state inside of `grob/1`.
main :: IO Bool
main = withOpenSSL $ do
    argv <- cmdArgs rargs
    grob (parseConfig argv)
-- | @grob@ builds and executes a given cmdarg/config session.
--
-- This function is responsible for resolving (if we can) the user's
-- HOME for the datadir (unless it's been set otherwise).
grob :: IO Settings -> IO Bool
grob settings = do
  sets <- settings
  home <- getEnv "HOME"
  debugM "Console" "Initializing AcidState"
  -- Resolve a leading ~ in the data directory against $HOME.
  let ddir = homePath (datadir sets) home
  -- Targets that don't end in "robots.txt" short-circuit to success.
  returnCode <- if endswith "robots.txt" (robot sets) then runWithCache (nocache sets) sets ddir else return ExitSuccess
  -- proper exit code termination
  exitWith returnCode
-- | @runRobot@ run with cache backend (False) or raw (True).
runWithCache :: Bool -> Settings -> String -> IO ExitCode
runWithCache True sets _ = do
  -- NOTE(review): the fetched status/headers/body are bound but never
  -- used and the result is unconditionally ExitSuccess — confirm the
  -- no-cache path is intentionally fetch-and-discard.
  (status, (_hcache, _hexp), body) <- open (CB.pack $ robot sets)
  return ExitSuccess
runWithCache False sets ddir =
  -- Cached path: open the acid-state store, always checkpoint and
  -- close it, and run the real work in 'middleState'.
  bracket (openLocalStateFrom ddir initialBotState)
          createCheckpointAndClose
          (middleState sets)
-- | Core pipeline of the cached path: look the robot up in acid-state,
-- fetch the robots.txt, obtain (or reuse) the parsed rule tree, and
-- turn the allow/disallow verdict into an exit code (ExitFailure 1
-- when the verdict goes against the requested directive).
middleState :: Settings -> AcidState Bots -> IO ExitCode
middleState sets db = do
  curt <- liftIO getCurrentTime
  let uri = robot sets
      robotUri = pack uri
      directive = if allowed sets then "allow" else "disallow"
  qBot <- query' db (RobotById $ RobotId (sha1 robotUri))
  -- need to send Last-Modified so server can either tell us if it's
  -- modified or not
  resp <- open robotUri
  tree <- dbOrParse db qBot curt resp robotUri
  quiet <- isNormal
  print tree
  -- With -a the check is inverted: failure means "not allowed".
  let f = directiveUA directive (agent sets) (resource sets) tree
      v = if allowed sets then not f else f
  if v
  then return (ExitFailure 1)
  else do
    formatUri quiet (fromJust $ U.parseURI uri) uri (resource sets)
    return ExitSuccess
-- | Print @scheme//host:port path@ for the checked resource when the
-- first argument is True (it is fed from 'isNormal' at the call site);
-- otherwise print nothing.
formatUri :: Bool -> U.URI -> String -> String -> IO ()
formatUri False _ r _ = return ()
formatUri True puri _ pth = Prelude.putStrLn $ Prelude.concat [s, "//", n, p, pth]
  where s = uriScheme puri
        -- partial: assumes the URI carries an authority component
        a = fromJust $ uriAuthority puri
        n = uriRegName a
        p = uriPort a
-- | @filterUA@ find the user agent
-- | Pick the rule set for a user agent, falling back to the wildcard
-- @*@ agent when no exact match exists.
filterUA :: String -> [RuleSet] -> Maybe RuleSet
filterUA ua rulesets = match ua <|> match "*"
  where
    match u = listToMaybe [ rs | rs <- rulesets, unUA (userAgent rs) == pack u ]
-- | @directiveUA@ if no user agent then False otherwise get their
-- rule set and check against that
directiveUA :: ByteString -> String -> String -> [RuleSet] -> Bool
directiveUA dr ua path ruleset = maybe False (nany . rules) (filterUA ua ruleset)
  -- Keep only the rules carrying the requested directive, then test
  -- the path against each rule's pattern.
  where nany ps = any (\(_,y) -> checkPrefix dr path y) (filter (\(x,_) -> x==dr) ps)
-- | @checkPrefix@
-- "disallow" matches the root pattern "/", an exact path, or any
-- prefix of the path.  NOTE(review): "allow" matches only on exact
-- equality (no prefix test) — confirm that asymmetry is intended.
-- Any other directive matches everything.
checkPrefix :: ByteString -> String -> ByteString -> Bool
checkPrefix "disallow" path res = ("/"==y) || (path == y) || isJust (stripPrefix y path)
  where y = CB.unpack res
checkPrefix "allow" path res = path == y
  where y = CB.unpack res
checkPrefix _ _ _ = True
-- | If it's a 404 we can't cache it and we should just assume it's a
-- free for all; if it's a 304 then we just want to return the parse
-- tree.
-- NOTE(review): no type signature — presumably
-- @AcidState Bots -> Maybe Robot -> UTCTime -> response -> ByteString
--   -> IO [RuleSet]@; confirm and pin it down.
dbOrParse _ _ _ (404,_,_) _ = return []
dbOrParse _ qBot _ (304,_,_) _ = return . parseTree $ fromJust qBot
dbOrParse db qBot curt (stat, (hcache, hexp), body) uri =
  case qBot of
    Nothing -> do
      -- do initial parsing in here
      let tree = doParse body
      -- Persist the freshly parsed robot before returning its tree.
      nBot <- update' db (NewRobot Robot {
                robotId = RobotId $ sha1 uri,
                url = unpack uri,
                ttl = curt,
                date = curt,
                parseTree = tree
              })
      return tree
    Just p ->
      return (parseTree p)
-- | @homePath@ given a path and a path from getEnv/1, determine if we
-- want the users home directory and if so replace the leading ~ with
-- the user's HOME environment variable. If HOME is Nothing OR no
-- leading ~ in path then simply return path.
-- | Expand a leading @~@ in @path@ to the supplied HOME value; when
-- HOME is absent the literal @~@ is kept.  Paths without a leading
-- @~@ pass through unchanged.
homePath :: String -> Maybe String -> String
homePath path home =
    case path of
      '~' : rest -> fromMaybe "~" home ++ rest
      _          -> path
-- | @parseConfig@ given an cmdarg record, try to parse a config file
-- defined either by default or by the user into a settings record.
parseConfig :: Grob -> IO Settings
parseConfig argvs = do
  -- these are being "overridden" by the verbosity/quiet arg
  whenNormal $ updateGlobalLogger "Console" (setLevel ERROR)
  whenLoud $ updateGlobalLogger "Console" (setLevel DEBUG)
  -- activate color logging
  updateGlobalLogger rootLoggerName (addHandler colorHandler)
  debugM "Console" $ "Running Grob with " ++ config argvs
  home <- getEnv "HOME"
  -- parse the config file (path resolved against $HOME first)
  cnf <- runErrorT $ do
    cp <- join $ liftIO $ readfile emptyCP (homePath (config argvs) home)
    datadirv <- C.get cp "CACHING" "datadir"
    loglevelv <- C.get cp "LOGGING" "loglevel"
    -- build and return a settings record: positional args from the
    -- command line, caching/logging values from the config file
    return Settings {
      robot = argrobot argvs,
      agent = argagent argvs,
      resource = argresource argvs,
      nocache = argnocache argvs,
      allowed = argallowed argvs,
      datadir = datadirv,
      loglevel = loglevelv
    }
  handleConfig cnf
-- | @handleConfig@ log and exit with any parsing errors or return the
-- new settings record.
handleConfig :: Either (CPErrorData, String) Settings -> IO Settings
handleConfig (Left err) = do
  -- log the error and exit the program
  criticalM "Console" $ show err
  criticalM "Console" "exiting..."
  exitWith (ExitFailure 1)
handleConfig (Right conf) = do
  -- Apply the configured log level (see 'setLL' for the flag meaning)
  -- and hand the settings back.
  quiet <- isNormal
  setLL quiet conf
  debugM "Console" "Configuration parsed"
  debugM "Console" $ "Setting loglevel to " ++ show (loglevel conf)
  return conf
-- | Apply the configured log level to the Console logger when the
-- flag is True; otherwise leave the logger untouched.  The flag comes
-- from 'isNormal' at the call site in 'handleConfig'.
setLL :: Bool -> Settings -> IO ()
setLL False _ = return ()
setLL True conf = updateGlobalLogger "Console" (setLevel $ loglevel conf)
|
ixmatus/grob
|
src/Grob.hs
|
Haskell
|
bsd-3-clause
| 8,593
|
{-# LANGUAGE DeriveDataTypeable #-}
{- |
Module : $Header$
Description : Shrimp error types and pretty-printing
Copyright : (c) Galois, Inc.
Shrimp error types and pretty-printing
-}
module SCD.M4.Errors(Error(..), nubError, occurrences, flatten, distribute,
errorsByType) where
import SCD.M4.Syntax(IfdefId, M4Id, LayerModule, ModuleId)
import SCD.SELinux.Syntax(Identifier, ClassId, CommonId, PermissionId,
                          SignedId, CondExpr, pos)
import SCD.M4.PShow(PShow, pShow, pShowLayerModule, showPos)
import SCD.M4.Kind(Kind, ParameterMap, ParameterInfo, ParameterKind,
                   ppParameters)
import Text.PrettyPrint.HughesPJ(text, (<+>), (<>), colon, ($+$),
                                 quotes)
import Text.PrettyPrint.Pp(Pp, pp, pnest, above)
import Data.Containers.ListUtils (nubOrd)
import Data.Set(Set)
import Data.Map(assocs, empty, insertWith, Map)
import Data.Foldable(toList)
import Data.List (intersperse, nub)
import Data.List.GroupSort (groupSort)
import Data.Generics(Data, Typeable, toConstr, constrIndex, ConIndex)
import Prelude hiding(FilePath)
import qualified Prelude
-- | A value paired with its constructor index; ordered and compared by
-- the index ONLY, so a Map keyed on 'Occ' buckets values per
-- constructor (used by 'occurrences').
data Occ a = Occ ConIndex a
instance Eq (Occ a) where
  a == b = compare a b == EQ
instance Ord (Occ a) where
  Occ a _ `compare` Occ b _ = a `compare` b
-- | Count how many list elements share each constructor, pairing the
-- count with one representative value per constructor ('Occ' compares
-- by constructor index only, so all values of a constructor share one
-- Map entry).
occurrences :: Data a => [a] -> [(Integer,a)]
occurrences as =
  [(i,a) | (Occ _ a, i) <- assocs (foldr occ empty [Occ (constrIndex (toConstr a)) a | a <- as])]
  where occ :: (Eq a, Ord a) => a -> Map a Integer -> Map a Integer
        occ a = insertWith (+) a 1
-- | Every diagnostic the M4 checker can produce.  Constructors whose
-- trailing identifier is an earlier definition site are rendered with
-- a "defined at" note by the 'Pp' instance below; 'ErrorsIn' nests a
-- group of errors under a common context error.
data Error =
    DuplicateAccessVectorDefinition ClassId ClassId
  | DuplicateClassPermission PermissionId ClassId
  | DuplicateCommonDef CommonId CommonId
  | DuplicateCommonPermission PermissionId CommonId
  | DuplicateDefinitions [M4Id]
  | DuplicateIdentifier Identifier Identifier
  | DuplicateMacroDef Identifier Identifier
  | DuplicateOccurrences M4Id Identifier
  | DuplicateSymbolDeclaration Identifier Kind Identifier
  | ErrorsIn Error [Error]
  | FragmentKindError [SignedId Identifier] (Set ParameterKind)
  | IllegalFragment String Identifier
  | IllegalMacroUse Identifier Identifier
  | IllegalParameterUse ParameterMap
  | IllegalSymbolDeclarations (Set Identifier)
  | IllegalSymbolReference Identifier [Identifier]
  | InconsistentMacroDefinitions [Identifier]
  | InconsistentSymbolDefinitions [(Identifier, Kind)]
  | InDefinition M4Id
  | InImplementation LayerModule
  | MissingAccessVectorDefinition ClassId
  | MissingModuleConfig ModuleId
  | ModuleIdMismatch ModuleId Prelude.FilePath
  | MutuallyRecursive [[M4Id]]
  | RefPolicyWarnCall Identifier [String]
  | KindMismatch Kind (Set Kind) Identifier
  | UndefinedCall M4Id
  | UndefinedCommonId CommonId
  | UndefinedIdentifier Identifier
  | UndefinedIds [(Identifier, Kind)]
  | UndocumentedParameters M4Id [ParameterInfo]
  | UnknownIfdefId IfdefId
  | UnknownModuleConfigNames (Set ModuleId)
  | UnusedArguments M4Id [ParameterInfo]
  | WhenTunableTrue CondExpr
  | WhenTunableFalse CondExpr
  | WrongNumberOfArguments M4Id [ParameterInfo] Identifier
  deriving (Eq, Ord, Show, Typeable, Data)
-- | Flatten nested 'ErrorsIn' groups into a single list, dropping the
-- context errors themselves and keeping leaf errors in order.
flatten :: [Error] -> [Error]
flatten errs = go errs []
  where
    go (ErrorsIn _ es : rest) acc = go es (go rest acc)
    go (e : rest) acc = e : go rest acc
    go [] acc = acc
-- | Remove duplicate errors, first deduplicating inside nested
-- 'ErrorsIn' groups so structurally-equal groups collapse too.
-- (Fix: 'nubOrd' replaces the O(n^2) 'nub'; 'Error' derives 'Ord' and
-- 'nubOrd' keeps first occurrences in order exactly like 'nub'.)
nubError :: [Error] -> [Error]
nubError es = nubOrd $ map nubErrorsIn es

-- | Deduplicate the children of an 'ErrorsIn' node; other errors are
-- returned unchanged.
nubErrorsIn :: Error -> Error
nubErrorsIn (ErrorsIn x es) = ErrorsIn x $ nubError es
nubErrorsIn x = x
-- | Push each 'ErrorsIn' context down so every nested group carries
-- exactly one child; leaf errors pass through unchanged.
distribute :: [Error] -> [Error]
distribute = concatMap expand
  where
    expand (ErrorsIn e es) = [ ErrorsIn e [e'] | e' <- distribute es ]
    expand e = [e]
-- | Inverse of 'distribute': merge ADJACENT 'ErrorsIn' nodes sharing
-- the same context error, concatenating their children and recursing
-- into merged groups.  Only neighbouring nodes merge, so input order
-- matters.
unite :: [Error] -> [Error]
unite (ErrorsIn e es:ErrorsIn e' es':es'') | e == e' = unite (ErrorsIn e (es++es'):es'')
unite (ErrorsIn e es:es') = ErrorsIn e (unite es):unite es'
unite (e:es) = e:unite es
unite [] = []
-- | Constructor index of the innermost error of a singleton-chain of
-- 'ErrorsIn' nodes (the shape 'distribute' produces).  Partial: calls
-- 'error' on an 'ErrorsIn' with zero or several children.
deepIndex :: Error -> ConIndex
deepIndex (ErrorsIn _ [e]) = deepIndex e
deepIndex (ErrorsIn _ _) = error "deepIndex"
deepIndex e = constrIndex (toConstr e)
-- | Group errors by the constructor of their innermost error, then
-- re-unite each group's distributed structure.
errorsByType :: [Error] -> [[Error]]
errorsByType = map (unite . snd) . groupSort deepIndex . distribute
-- | Human-readable rendering for every error constructor; duplicates
-- nest the earlier definition's location under a "defined at" note,
-- and 'ErrorsIn' renders its context followed by its children.
instance Pp Error where
  pp (DuplicateAccessVectorDefinition c oc) = text "Duplicate access-vector definition:" <+> pShow c
                                              $+$ pnest (text "defined at" <+> pShow oc)
  pp (DuplicateClassPermission p _c) = text "Duplicate permission:" <+> pShow p
  pp (DuplicateCommonDef c oc) = text "Duplicate common definition:" <+> pShow c
                                 $+$ pnest (text "defined at" <+> pShow oc)
  pp (DuplicateCommonPermission p _c) = text "Duplicate permission:" <+> pShow p
  pp (DuplicateDefinitions mds) = text "Duplicate definitions:" <+> pShow mds
  pp (DuplicateIdentifier i oi) = text "Duplicate identifier:" <+> pShow i
                                  $+$ pnest (text "defined at" <+> pShow oi)
  pp (DuplicateMacroDef i oi) = text "Duplicate definition of macro:" <+> pShow i
                                $+$ pnest (text "defined at" <+> pShow oi)
  pp (DuplicateOccurrences _i p) = text "Duplicate occurrences of identifier: " <+> pShow p
  pp (DuplicateSymbolDeclaration i k oi) = text "Duplicate symbol declaration:" <+> pShow (i,k)
                                           $+$ pnest (text "defined at" <+> pShow oi)
  pp (ErrorsIn e es) = pp e <> colon $+$ pnest (above (intersperse (text "") (map pp es)))
  pp (FragmentKindError a fk) = text "Kind error: expected fragment parameter of kind" <+> pShow fk <> text "but saw complex parameter:" <+> pShow a
  pp (IllegalFragment s i) = text "Fragment" <+> quotes (text s) <+> text "is defined as a macro:" <+> pShow i
  pp (IllegalMacroUse i oi) = text "Illegal use of macro:" <+> pShow i $+$ pnest (text "defined at" <+> pShow oi)
  pp (IllegalParameterUse ps) = text "Illegal use of parameters in implementation:" <+> text (show ps)
  pp (IllegalSymbolDeclarations is) = text "Illegal symbol declarations in interface:" <+> pShow (toList is)
  pp (IllegalSymbolReference i is) = text "Illegal symbol reference across modules:" <+> pShow i
                                     $+$ pnest (if null is then text "is undefined." else text "is defined at" <+> pShow is)
  pp (InconsistentMacroDefinitions ms) = text "Inconsistent macro definitions (not defined in both branches of an ifdef):" <+> pShow ms
  pp (InconsistentSymbolDefinitions is) = text "Inconsistent symbol declarations (not defined in both branches of an ifdef):" <+> pShow is
  pp (InDefinition i) = text "In definition of" <+> pShow i
  pp (InImplementation lm) = text "In implementation of" <+> pShowLayerModule lm
  pp (MissingAccessVectorDefinition c) = text "Missing access-vector definition for" <+> pShow c
  pp (MissingModuleConfig mi) = text "Missing module configuration for" <+> pShow mi
  pp (ModuleIdMismatch mi m) = text "File base name" <+> text m <+> text "doesn't match module name:" <+> pShow mi
  pp (MutuallyRecursive mcs) = text "Mutually recursive definitions:" <+> pShow mcs
  pp (RefPolicyWarnCall i ws) = text "Call to macro with refpolicywarnings:" <+> pShow i $+$ pnest (above (map text ws))
  pp (KindMismatch k ks i) = text "Kind mismatch: expected" <+> pShow k <> text ", got" <+> pShow ks <> colon <+> pShow i
  pp (UndefinedCall i) = text "Call to undefined macro:" <+> pShow i
  pp (UndefinedCommonId c) = text "Undefined commonId:" <+> pShow c
  pp (UndefinedIdentifier p) = text "Undefined identifier:" <+> pShow p
  pp (UndefinedIds is) = text "Undefined identifiers (need to be declared or put in require block):" <+> pShow is
  pp (UndocumentedParameters _i p) = text "Undocumented parameters (missing parameter names) in" <+> ppParameters p
  pp (UnknownIfdefId i) = text "Unknown identifier in ifdef:" <+> pShow i
  pp (UnknownModuleConfigNames is) = text "Unknown module identifiers:" <+> pShow (toList is)
  pp (UnusedArguments _i u) = text "Unused parameters (with kind {any}) in" <+> ppParameters u
  pp (WhenTunableTrue c) = text "When" <+> pp c
  pp (WhenTunableFalse c) = text "When not(" <> pp c <> text ")"
  pp (WrongNumberOfArguments i pks oi) = text "Wrong number of arguments:" <+> pp i <> ppParameters pks <+> showPos (pos i)
                                         $+$ pnest (text "defined at" <+> pShow oi)
|
GaloisInc/sk-dev-platform
|
libs/SCD/src/SCD/M4/Errors.hs
|
Haskell
|
bsd-3-clause
| 8,929
|
import Control.Concurrent
import Control.Concurrent.STM.TVar
import Control.Monad
import Control.Monad.STM
import Data.IORef
import Data.Map
import Foreign.C.Types
import Foreign.Ptr
import Foreign.Storable
import Graphics.Rendering.OpenGL
import Graphics.UI.GLFW as GLFW
import System.Exit (exitWith, ExitCode (..))
import System.FlyCap
import System.IO.Unsafe
import Prelude hiding (lookup)
-- | Shared render-layout state: the cached projection matrix plus one
-- model-view matrix per camera (keyed by camera index) for the thumbnails.
data LayoutState = LayoutState {projectMatrix :: GLmatrix GLdouble, dictionary :: Map Int (GLmatrix GLdouble)}
-- | Entry point: connect every attached FlyCapture camera, start
-- synchronized capture, open a GLFW window sized for one enlarged view plus
-- a thumbnail strip, then loop forever uploading frames and redrawing.
main :: IO ()
main = do
  cs <- createContexts
  let n = length cs -- n is the number of cameras we have
  print n
  zipWithM_ cameraInit cs [0..]
  hStartSCapture n cs
  -- Index of the camera currently shown enlarged; written by mouseClick.
  ind <- newMVar 0
  _ <- GLFW.init
  GLFW.defaultWindowHints
  mWin <- GLFW.createWindow 640 (480 + 480 `div` n) "testing images" Nothing Nothing
  -- Fail with a message instead of an opaque pattern-match error.
  win <- maybe (ioError (userError "GLFW: createWindow failed")) return mWin
  GLFW.makeContextCurrent (Just win)
  (texs, pMatrix) <- initGL n
  layout <- newTVarIO LayoutState {projectMatrix = pMatrix, dictionary = empty}
  GLFW.setKeyCallback win (Just (keyPressed cs))
  GLFW.setFramebufferSizeCallback win (Just (resize layout n))
  GLFW.setWindowCloseCallback win (Just (shutdown cs))
  GLFW.setMouseButtonCallback win (Just (mouseClick layout n ind))
  resize layout n win 640 (480 + 480 `div` n)
  forever $ do
    GLFW.pollEvents
    zipWithM_ loadT cs texs
    -- readMVar is atomic; the original takeMVar/putMVar pair left a window
    -- in which the mouse callback could block or interleave.
    i <- readMVar ind
    display layout texs i
    GLFW.swapBuffers win
-- | Fetch the next frame from a camera context, upload it into the given
-- texture, then release the image buffer.
loadT :: Context -> TextureObject -> IO()
loadT ctx tex = do
  img <- hRetBuff ctx
  _ <- loadTex img tex
  destroyImage img
-- | Upload a frame's luminance pixel data into the currently bound 2D
-- texture.  Only the row count, column count and pixel pointer are used;
-- the remaining CImage fields are irrelevant here and deliberately ignored
-- (wildcards silence unused-binding warnings).
getT :: CImage -> IO ()
getT (CImage rows cols _stride pData _dataSize _fmt _bayerFmt _imgInfo) =
  texImage2D Nothing NoProxy 0 Luminance8
             (TextureSize2D (fromIntegral cols) (fromIntegral rows))
             0 (PixelData Luminance UnsignedByte pData)
-- | Redraw the whole composite: every camera as a thumbnail (using the
-- matrices cached in the layout dictionary) plus camera @i@ enlarged above.
display :: TVar LayoutState -> [TextureObject] -> Int -> IO ()
display tvar texs i = do
  let num = length texs
      n = fromIntegral num
  clear [ColorBuffer]
  loadIdentity
  -- pMatrix is bound but unused here; the projection is already installed.
  LayoutState pMatrix dictionary <- readTVarIO tvar
  zipWithM_ (displayCam num dictionary ) texs [0..]
  loadIdentity
  -- Shift the enlarged view up above the thumbnail strip.
  translate (Vector3 0 (240/n) (0:: GLfloat))
  -- NOTE(review): (!!) is partial; i comes from the MVar written by
  -- setCurrent with dictionary keys 0..n-1, so it should stay in range —
  -- confirm if the camera count can change at runtime.
  drawTex (-320, 320, -240, 240::GLfloat) (texs !! i)
  flush
-- | Draw one camera thumbnail using its cached model-view matrix.
-- The first argument (camera count) is unused; kept for interface
-- compatibility with existing callers.
displayCam :: Int -> Map Int (GLmatrix GLdouble) -> TextureObject -> Int -> IO ()
displayCam _n dictionary tex i =
  case lookup i dictionary of
    -- The original used an irrefutable `let (Just m) = …`, which crashed
    -- with an anonymous pattern-match error; fail with context instead.
    Nothing -> error ("displayCam: no model-view matrix cached for camera " ++ show i)
    Just mMatrix -> do
      matrix (Just $ Modelview 0) $= mMatrix
      drawTex (-320, 320, -240, 240::GLfloat) tex
-- | Draw a textured axis-aligned quad covering (x-low, x-high, y-low,
-- y-high), with the full texture mapped onto it.
drawTex :: (GLfloat, GLfloat, GLfloat, GLfloat) -> TextureObject -> IO ()
drawTex (xl, xh, yl, yh) tex = do
  textureBinding Texture2D $= Just tex
  renderPrimitive Quads $ mapM_ emit corners
  where
    -- Texture corner paired with quad corner, in the original winding order.
    corners :: [((GLfloat, GLfloat), (GLfloat, GLfloat))]
    corners =
      [ ((0, 0), (xl, yh))
      , ((1, 0), (xh, yh))
      , ((1, 1), (xh, yl))
      , ((0, 1), (xl, yl))
      ]
    emit ((s, t), (x, y)) = do
      texCoord (TexCoord2 s t)
      vertex (Vertex3 x y (0 :: GLfloat))
-- | Bind the texture, select nearest-neighbour filtering, upload the image
-- via 'getT', and hand the texture object back for convenience.
loadTex :: CImage -> TextureObject -> IO TextureObject
loadTex img tex = do
  textureBinding Texture2D $= Just tex
  textureFilter Texture2D $= ((Nearest, Nothing), Nearest)
  getT img
  return tex
-- | Connect to the i-th camera on the bus and configure it for
-- 800x600 8-bit mono at 30 fps.
cameraInit ::Context -> Int -> IO()
cameraInit c i = do
  pgr <- hGetCamIndex c i
  hConnect c pgr
  hSetVMandFR c VM800x600_Y8 Fr_30
-- | Stop capture on a camera, disconnect it, and tear down its context.
-- Order matters: capture must stop before disconnecting.
cameraStop ::Context -> IO()
cameraStop ctx = hStopCapture ctx >> hDisconnect ctx >> hDestroyContext ctx
-- | Window-resize callback: recompute the letterboxed orthographic
-- projection and the per-camera thumbnail matrices, then publish both
-- through the shared TVar.  The @win@ parameter is unused.
-- NOTE(review): makeDictionary mutates the *current* matrix stack before
-- the mode is explicitly set below — this assumes the mode is Modelview on
-- entry (as left by initGL); confirm.
resize :: TVar LayoutState -> Int -> GLFW.WindowSizeCallback
resize tvar n win width height =
  let (compositeWidth, compositeHeight) = (640, 480 + 480/(realToFrac n))
      w = (fromIntegral width :: GLdouble)
      h = (fromIntegral height :: GLdouble)
      compositeAspect = compositeWidth / compositeHeight
      winAspect = w/h
      -- Pick coordinate extents that preserve the composite aspect ratio.
      (coordMinX, coordMaxX, coordMinY, coordMaxY)
        | winAspect > compositeAspect = (-compositeHeight/2 *winAspect, compositeHeight/2 *winAspect,(-compositeHeight)/2, compositeHeight/2) --wide
        | winAspect < compositeAspect = (-compositeWidth/2,compositeWidth/2, (-compositeWidth)/2 /winAspect, compositeWidth/winAspect/2) --tall
        | otherwise = ((-compositeWidth)/2,compositeWidth/2,(-compositeHeight)/2, compositeHeight/2)
  in do
    loadIdentity
    -- Rebuild the per-camera thumbnail matrices for the new size.
    dictionary' <- foldM (makeDictionary n) empty [0..n-1]
    viewport $= ((Position 0 0), (Size (fromIntegral width)((fromIntegral height) :: GLsizei)))
    matrixMode $= Projection
    loadIdentity
    ortho coordMinX coordMaxX coordMinY coordMaxY (-1) (1 :: GLdouble)
    -- Snapshot the new projection so mouseClick can unProject with it.
    pMatrix <- (get $ matrix (Just Projection) :: IO (GLmatrix GLdouble))
    matrixMode $= Modelview 0
    loadIdentity
    atomically $ writeTVar tvar (LayoutState {projectMatrix = pMatrix, dictionary = dictionary'})
    flush
-- | Accumulate the model-view matrix for thumbnail @i@: translate into the
-- i-th slot of the bottom strip, scale down by the camera count, snapshot
-- the matrix, and reset the stack for the next iteration.
makeDictionary :: Int -> Map Int (GLmatrix GLdouble) -> Int -> IO (Map Int (GLmatrix GLdouble))
makeDictionary num dictionary i = do
  let n = realToFrac num
  translate (Vector3 (-320 + 320/n + 640/n*(fromIntegral i)) (-240) (0::GLfloat))
  scale (1/n) (1/n) (1::GLfloat)
  -- Capture the composed transform; shadows the library's 'matrix' locally.
  matrix <- (get $ matrix (Just $ Modelview 0) :: IO (GLmatrix GLdouble))
  loadIdentity
  return $ insert i matrix dictionary
-- | One-time GL setup: enable texturing, reset both matrix stacks, allocate
-- one texture object per camera, and return them with the (identity)
-- projection matrix snapshot.
-- NOTE(review): the windowHint calls here run *after* the window was
-- created in main, so they only affect windows created later — confirm
-- they are needed at all.
initGL :: Int -> IO ([TextureObject], GLmatrix GLdouble)
initGL num = do
  GLFW.windowHint $ WindowHint'RedBits 8
  GLFW.windowHint $ WindowHint'GreenBits 8
  GLFW.windowHint $ WindowHint'BlueBits 8
  texture Texture2D $= Enabled
  matrixMode $= Projection
  loadIdentity
  matrixMode $= Modelview 0
  texs <- genObjectNames (num)
  pMatrix <- (get $ matrix (Just Projection) :: IO (GLmatrix GLdouble))
  flush
  return (texs, pMatrix)
-- | Mouse handler: on a plain left-button release, un-project the cursor
-- position through every camera's thumbnail matrix and let 'setCurrent'
-- decide which camera (if any) was clicked.  All other events are ignored.
-- NOTE(review): the cursor Y is passed to unProject without flipping from
-- window to GL coordinates — see the "fix y values" TODO at end of file.
mouseClick :: (TVar LayoutState) -> Int -> MVar Int -> GLFW.MouseButtonCallback
mouseClick tvar num ind win (MouseButton'1) (MouseButtonState'Released) (ModifierKeys False False False False) = do
  -- (x,y) window size is bound but unused.
  (x,y) <- getWindowSize win
  (mX, mY) <- getCursorPos win
  LayoutState pMatrix dictionary' <- readTVarIO tvar
  vport <- get viewport
  -- Map each camera's matrix to a deferred unProject of the click point.
  let newDictionary = (Data.Map.map (\mV -> unProject (Vertex3 (realToFrac mX) (realToFrac mY) (0.0::GLdouble)) mV pMatrix vport) dictionary' :: Map Int (IO (Vertex3 GLdouble)))
  mapM_ (\(k,v) -> setCurrent k ind v) (assocs newDictionary)
  flush
mouseClick _ _ _ _ _ _ _ = return ()
-- | If the un-projected click coordinates fall inside the (hard-coded) hit
-- box, make camera @i@ the enlarged one by overwriting the shared index.
-- NOTE(review): the y bounds 725..1200 look layout-specific; see the
-- "fix y values" TODO at the end of the file — confirm against the layout.
setCurrent :: Int -> MVar Int -> IO (Vertex3 GLdouble) -> IO ()
setCurrent i mvar v3 = do
  Vertex3 x y z <- v3
  print $ unwords [show x, show y, show z]
  -- swapMVar replaces the original takeMVar/putMVar pair atomically;
  -- `when` replaces the if/else-return-() scaffolding.
  when (x <= 320 && x >= -320 && y <= 1200 && y >= 725) $
    void $ swapMVar mvar i
-- | Key handler: Escape tears the whole application down; every other key
-- event is ignored.
keyPressed :: [Context] -> GLFW.KeyCallback
keyPressed cs win GLFW.Key'Escape _ GLFW.KeyState'Pressed _ = shutdown cs win
keyPressed _ _ _ _ _ _ = return ()
-- | Orderly teardown: stop and release every camera, destroy the window,
-- shut GLFW down, then exit the process successfully.
shutdown :: [Context] -> GLFW.WindowCloseCallback
shutdown cameras win = do
  forM_ cameras cameraStop
  GLFW.destroyWindow win
  GLFW.terminate
  exitWith ExitSuccess
--fix y values from mouseclick
|
imalsogreg/flycap
|
tests/multCameras.hs
|
Haskell
|
bsd-3-clause
| 6,866
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE TypeSynonymInstances #-}
module Service.User where
import Base
import Model
import qualified Data.Text as T
import Database.Persist
import Database.Persist.Sql
import Servant
-- | Request body for account registration.
data RegisterRequest = RegisterRequest
  { username :: Text
  , password :: Text
  } deriving (Show, Generic, ToSchema, FromJSON, ToJSON)
-- | Request body for logging in.  'signature' carries "user:password"
-- style credentials (split in 'login'); 'tokenType' optionally selects the
-- kind of token to issue (defaults to TokenUser in 'login').
data LoginRequest = LoginRequest
  { signature :: Text
  , tokenType :: Maybe TokenType
  } deriving (Show, Generic, ToSchema, FromJSON, ToJSON)
-- | Public projection of a stored user row.
data UserResponse = UserResponse
  { userId :: ID
  , createTime :: UTCTime
  , username :: Text
  , roleType :: RoleType
  } deriving (Show, Generic, ToSchema, FromJSON, ToJSON)
-- Normal API
-- Routes, in order: get own user; register; login (issue token); refresh
-- token; revoke token; checkout ticket tokens.
type UserApi = CheckUserToken :> Get '[JSON] UserResponse
  :<|> ReqBody '[JSON] RegisterRequest :> PostCreated '[JSON] NoContent
  :<|> "tokens" :> ReqBody '[JSON] LoginRequest :> Post '[JSON] TokenResponse
  :<|> CheckUserToken :> "tokens" :> QueryParam "key" Text :> Put '[JSON] TokenResponse
  :<|> CheckUserToken :> "tokens" :> DeleteNoContent '[JSON] NoContent
  :<|> CheckUserToken :> "tickets" :> QueryParams "type" TicketType :> Post '[JSON] TokenResponse
-- Handler order must match the route order in 'UserApi' exactly.
userService :: ServerT UserApi App
userService = getUser
  :<|> register
  :<|> login
  :<|> refresh
  :<|> revokeToken
  :<|> checkoutTicket
-- Administration API
-- Admin-only: fetch any user by id, and a paged listing of all users.
type UserAdminApi = CheckAdminToken :> Capture "userId" ID :> Get '[JSON] UserResponse
  :<|> CheckAdminToken :> GetPage '[JSON] UserResponse
userAdminService :: ServerT UserAdminApi App
userAdminService = getUserById
  :<|> getUsers
-- Public API
-- Ticket-authenticated read-only access to the caller's own user info.
type UserOpenApi = CheckTicket :> "user-info" :> Get '[JSON] UserResponse
userOpenService :: ServerT UserOpenApi App
userOpenService = getUser
-- | Resolve the authenticated caller's own user record.
getUser :: Token -> App UserResponse
getUser token = getUserById token (tokenUser token)
-- | Load a user row by id and project it to the response shape; a missing
-- row becomes a User_NotFound error.  The token argument is unused (the
-- route layer has already authenticated the caller).
getUserById :: Token -> ID -> App UserResponse
getUserById _ uid =
  withLogName "user" $
    runTrans (get $ toSqlKey uid)
      >>= maybe (throwM $ User_NotFound uid) (return . toUserResponse uid)
-- | Project a database row to the public response shape.
toUserResponse :: ID -> EntityUser -> UserResponse
toUserResponse id u = UserResponse id (entityUserCreateTime u) (entityUserName u) (entityUserType u)
-- | Admin: paged listing of all users; the token argument is unused.
getUsers :: Token -> PageableApp UserResponse
getUsers _ p s = fmap to <$> runTrans (selectPageList [] p s)
  where to entity = toUserResponse (fromSqlKey $ entityKey entity) (entityVal entity)
-- | Create a new account.  The password is salted and HMAC'd before
-- storage; the very first account ever inserted (key 1) is promoted to
-- admin inside the same transaction.  Emits a UserRegister event on success.
register :: RegisterRequest -> App NoContent
register (RegisterRequest username password) = do
  checkName username
  checkPassword password
  salt <- liftIO $ randomHex 16
  id <- runTrans $ do -- Create User
    now <- getNow
    let user = EntityUser username salt (hmacSha256 salt password) RoleUser now Nothing
    -- Re-throw any insert failure (e.g. duplicate name) into the App monad.
    entityId <- insert user `catchAll` throwM
    when (fromSqlKey entityId == 1) $
      update entityId [EntityUserType =. RoleAdmin, EntityUserUpdateTime =. Just now]
    return $ fromSqlKey entityId
  return NoContent `thenNotify` UserEvent UserRegister id (Just username)
-- | Authenticate "user:password" credentials and issue a token of the
-- requested (or default TokenUser) type.  Emits a UserLogin event.
-- NOTE(review): T.breakOn leaves the ':' on the front of @password@ and
-- T.tail strips it — this assumes checkBasicAuth guarantees a ':' is
-- present, otherwise T.tail would fail on an empty Text; confirm.
login :: LoginRequest -> App TokenResponse
login (LoginRequest s t) = do
  checkBasicAuth s
  let (username,password) = T.breakOn ":" s
      getUser = runTrans $ getBy (EntityUserUniqueName username) >>= convert
      convert Nothing = throwM AuthenticationException
      convert (Just user) = return (fromSqlKey $ entityKey user, entityVal user)
      tokenType = fromMaybe TokenUser t
      -- Both the password check and the role's permission to sign this
      -- token type must pass, otherwise authentication fails.
      toToken (id, user) | checkPassword user && canSign tokenType (entityUserType user)
                           = signToken id tokenType [] `thenNotify` UserEvent UserLogin id (Just username)
                         | otherwise = throwM AuthenticationException
      checkPassword user = hmacSha256 (entityUserSalt user) (T.tail password) == entityUserPassword user
  getUser >>= toToken
-- | Monomorphic wrapper around the 'tokenType' selector — presumably to
-- disambiguate the field duplicated on 'LoginRequest' under
-- DuplicateRecordFields; confirm.
toTokenType :: Token -> TokenType
toTokenType = tokenType
-- | Re-issue a token.  For one-shot ('isOnce') token types the caller must
-- present the token's refresh secret via the @key@ query parameter; a
-- missing or mismatching key is rejected.  Other token types may refresh
-- freely.  The old token is always revoked before the new one is signed.
--
-- NOTE(review): the original condition was
-- @isNothing mayK && tokenRefreshSecret token /= fromJust mayK && …@,
-- which crashes via 'fromJust' whenever the key is absent, and never
-- validates a key that *is* present.  Rewritten with 'maybe' to express
-- the apparent intent totally — confirm against the API contract.
refresh :: Token -> Maybe Text -> App TokenResponse
refresh token mayK
  | isOnce (toTokenType token) && keyInvalid = throwM AuthenticationException
  | otherwise = do
      revokeToken token
      signToken (tokenUser token) (toTokenType token) (tokenClaims token)
        `thenNotify` UserEvent UserRefresh (tokenUser token) Nothing
  where
    -- True when no key was supplied, or it does not match the secret.
    keyInvalid = maybe True (/= tokenRefreshSecret token) mayK
-- | Issue a Ticket-type token for the caller.  NOTE(review): the
-- @[TicketType]@ argument is not bound here; by partial application it
-- becomes the claims argument of 'signToken' — confirm this is intended.
checkoutTicket :: Token -> [TicketType] -> App TokenResponse
checkoutTicket token = signToken (tokenUser token) Ticket
|
leptonyu/mint
|
corn-server/src/Service/User.hs
|
Haskell
|
bsd-3-clause
| 5,253
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -Wno-orphans #-}
-----------------------------------------------------------------------------
-- |
-- Module : GHC.TypeLits.Singletons.Internal
-- Copyright : (C) 2014 Richard Eisenberg
-- License : BSD-style (see LICENSE)
-- Maintainer : Ryan Scott
-- Stability : experimental
-- Portability : non-portable
--
-- Defines and exports singletons useful for the 'Natural', 'Symbol', and
-- 'Char' kinds. This exports the internal, unsafe constructors. Use import
-- "GHC.TypeLits.Singletons" for a safe interface.
--
----------------------------------------------------------------------------
module GHC.TypeLits.Singletons.Internal (
Sing,
Natural, Symbol, Char,
SNat(..), SSymbol(..), SChar(..),
withKnownNat, withKnownSymbol, withKnownChar,
Error, sError,
ErrorWithoutStackTrace, sErrorWithoutStackTrace,
Undefined, sUndefined,
KnownNat, TN.natVal, KnownSymbol, symbolVal, KnownChar, charVal,
type (^), (%^),
type (<=?), (%<=?),
-- * Defunctionalization symbols
ErrorSym0, ErrorSym1,
ErrorWithoutStackTraceSym0, ErrorWithoutStackTraceSym1,
UndefinedSym0,
type (^@#@$), type (^@#@$$), type (^@#@$$$),
type (<=?@#@$), type (<=?@#@$$), type (<=?@#@$$$)
) where
import Data.Bool.Singletons
import Data.Eq.Singletons
import Data.Kind
import Data.Ord.Singletons as O
import Data.Singletons
import Data.Singletons.Decide
import Data.Singletons.TH
import GHC.Show (appPrec, appPrec1)
import GHC.Stack (HasCallStack)
import GHC.TypeLits as TL
import qualified GHC.TypeNats as TN
import Unsafe.Coerce
import qualified Data.Text as T
import Data.Text ( Text )
----------------------------------------------------------------------
---- TypeLits singletons ---------------------------------------------
----------------------------------------------------------------------
type SNat :: Natural -> Type
-- | Singleton for type-level 'Natural': the nullary constructor simply
-- packages a 'KnownNat' dictionary.
data SNat (n :: Natural) = KnownNat n => SNat
type instance Sing = SNat
instance KnownNat n => SingI n where
  sing = SNat
instance SingKind Natural where
  type Demote Natural = Natural
  fromSing (SNat :: Sing n) = TN.natVal (Proxy :: Proxy n)
  toSing n = case TN.someNatVal n of
               SomeNat (_ :: Proxy n) -> SomeSing (SNat :: Sing n)
type SSymbol :: Symbol -> Type
-- | Singleton for type-level 'Symbol'; demotes to strict 'Text'.
data SSymbol (n :: Symbol) = KnownSymbol n => SSym
type instance Sing = SSymbol
instance KnownSymbol n => SingI n where
  sing = SSym
instance SingKind Symbol where
  type Demote Symbol = Text
  fromSing (SSym :: Sing n) = T.pack (symbolVal (Proxy :: Proxy n))
  toSing s = case someSymbolVal (T.unpack s) of
               SomeSymbol (_ :: Proxy n) -> SomeSing (SSym :: Sing n)
type SChar :: Char -> Type
-- | Singleton for type-level 'Char'.
data SChar (c :: Char) = KnownChar c => SChar
type instance Sing = SChar
instance KnownChar c => SingI c where
  sing = SChar
instance SingKind Char where
  type Demote Char = Char
  fromSing (SChar :: Sing c) = charVal (Proxy :: Proxy c)
  toSing sc = case someCharVal sc of
                SomeChar (_ :: Proxy c) -> SomeSing (SChar :: Sing c)
-- SDecide instances:
-- Decidable equality via the term-level same* witnesses; the Disproved
-- branch is only reachable if the singleton types themselves are broken,
-- hence the 'error'.
instance SDecide Natural where
  (SNat :: Sing n) %~ (SNat :: Sing m)
    | Just r <- TN.sameNat (Proxy :: Proxy n) (Proxy :: Proxy m)
    = Proved r
    | otherwise
    = Disproved (\Refl -> error errStr)
    where errStr = "Broken Natural singletons"
instance SDecide Symbol where
  (SSym :: Sing n) %~ (SSym :: Sing m)
    | Just r <- sameSymbol (Proxy :: Proxy n) (Proxy :: Proxy m)
    = Proved r
    | otherwise
    = Disproved (\Refl -> error errStr)
    where errStr = "Broken Symbol singletons"
instance SDecide Char where
  (SChar :: Sing n) %~ (SChar :: Sing m)
    | Just r <- sameChar (Proxy :: Proxy n) (Proxy :: Proxy m)
    = Proved r
    | otherwise
    = Disproved (\Refl -> error errStr)
    where errStr = "Broken Char singletons"
-- PEq instances
instance PEq Natural where
  type x == y = DefaultEq x y
instance PEq Symbol where
  type x == y = DefaultEq x y
instance PEq Char where
  type x == y = DefaultEq x y
-- need SEq instances for TypeLits kinds
-- The Nothing branches coerce SFalse to the required result singleton;
-- this relies on the term- and type-level equality tests agreeing.
instance SEq Natural where
  (SNat :: Sing n) %== (SNat :: Sing m)
    = case sameNat (Proxy :: Proxy n) (Proxy :: Proxy m) of
        Just Refl -> STrue
        Nothing -> unsafeCoerce SFalse
instance SEq Symbol where
  (SSym :: Sing n) %== (SSym :: Sing m)
    = case sameSymbol (Proxy :: Proxy n) (Proxy :: Proxy m) of
        Just Refl -> STrue
        Nothing -> unsafeCoerce SFalse
instance SEq Char where
  (SChar :: Sing n) %== (SChar :: Sing m)
    = case sameChar (Proxy :: Proxy n) (Proxy :: Proxy m) of
        Just Refl -> STrue
        Nothing -> unsafeCoerce SFalse
-- POrd instances
instance POrd Natural where
  type (a :: Natural) `Compare` (b :: Natural) = a `TN.CmpNat` b
instance POrd Symbol where
  type (a :: Symbol) `Compare` (b :: Symbol) = a `TL.CmpSymbol` b
instance POrd Char where
  type (a :: Char) `Compare` (b :: Char) = a `TL.CmpChar` b
-- SOrd instances
-- Term-level compare on the demoted values, coerced to the matching
-- type-level Ordering singleton.
instance SOrd Natural where
  a `sCompare` b = case fromSing a `compare` fromSing b of
                     LT -> unsafeCoerce SLT
                     EQ -> unsafeCoerce SEQ
                     GT -> unsafeCoerce SGT
instance SOrd Symbol where
  a `sCompare` b = case fromSing a `compare` fromSing b of
                     LT -> unsafeCoerce SLT
                     EQ -> unsafeCoerce SEQ
                     GT -> unsafeCoerce SGT
instance SOrd Char where
  a `sCompare` b = case fromSing a `compare` fromSing b of
                     LT -> unsafeCoerce SLT
                     EQ -> unsafeCoerce SEQ
                     GT -> unsafeCoerce SGT
-- Show instances
-- These are a bit special because the singleton constructor does not uniquely
-- determine the type being used in the constructor's return type (e.g., all Naturals
-- have the same singleton constructor, SNat). To compensate for this, we display
-- the type being used using visible type application. (Thanks to @cumber on #179
-- for suggesting this implementation.)
instance Show (SNat n) where
  showsPrec p n@SNat
    = showParen (p > appPrec)
      ( showString "SNat @"
        . showsPrec appPrec1 (TN.natVal n)
      )
instance Show (SSymbol s) where
  showsPrec p s@SSym
    = showParen (p > appPrec)
      ( showString "SSym @"
        . showsPrec appPrec1 (symbolVal s)
      )
instance Show (SChar c) where
  showsPrec p s@SChar
    = showParen (p > appPrec)
      ( showString "SChar @"
        . showsPrec appPrec1 (charVal s)
      )
-- Convenience functions
-- Matching on the constructor brings the packed Known* dictionary into
-- scope for the continuation.
-- | Given a singleton for @Nat@, call something requiring a
-- @KnownNat@ instance.
withKnownNat :: Sing n -> (KnownNat n => r) -> r
withKnownNat SNat f = f
-- | Given a singleton for @Symbol@, call something requiring
-- a @KnownSymbol@ instance.
withKnownSymbol :: Sing n -> (KnownSymbol n => r) -> r
withKnownSymbol SSym f = f
-- | Given a singleton for @Char@, call something requiring
-- a @KnownChar@ instance.
withKnownChar :: Sing n -> (KnownChar n => r) -> r
withKnownChar SChar f = f
-- | The promotion of 'error'. This version is more poly-kinded for
-- easier use.
-- An empty closed type family: it can never reduce, mirroring bottom.
type Error :: k0 -> k
type family Error (str :: k0) :: k where {}
$(genDefunSymbols [''Error])
instance SingI (ErrorSym0 :: Symbol ~> a) where
  sing = singFun1 sError
-- | The singleton for 'error'
sError :: HasCallStack => Sing (str :: Symbol) -> a
sError sstr = error (T.unpack (fromSing sstr))
-- | The promotion of 'errorWithoutStackTrace'. This version is more
-- poly-kinded for easier use.
type ErrorWithoutStackTrace :: k0 -> k
type family ErrorWithoutStackTrace (str :: k0) :: k where {}
$(genDefunSymbols [''ErrorWithoutStackTrace])
instance SingI (ErrorWithoutStackTraceSym0 :: Symbol ~> a) where
  sing = singFun1 sErrorWithoutStackTrace
-- | The singleton for 'errorWithoutStackTrace'.
sErrorWithoutStackTrace :: Sing (str :: Symbol) -> a
sErrorWithoutStackTrace sstr = errorWithoutStackTrace (T.unpack (fromSing sstr))
-- | The promotion of 'undefined'.
type Undefined :: k
type family Undefined :: k where {}
$(genDefunSymbols [''Undefined])
-- | The singleton for 'undefined'.
sUndefined :: HasCallStack => a
sUndefined = undefined
-- | The singleton analogue of '(TN.^)' for 'Natural's.
-- Computed at the term level, then coerced back to the (unknowable at this
-- point) result index @a ^ b@.
(%^) :: Sing a -> Sing b -> Sing (a ^ b)
sa %^ sb =
  let a = fromSing sa
      b = fromSing sb
      ex = TN.someNatVal (a ^ b)
  in
  case ex of
    SomeNat (_ :: Proxy ab) -> unsafeCoerce (SNat :: Sing ab)
infixr 8 %^
-- Defunctionalization symbols for type-level (^)
$(genDefunSymbols [''(^)])
instance SingI (^@#@$) where
  sing = singFun2 (%^)
instance SingI x => SingI ((^@#@$$) x) where
  sing = singFun1 (sing @x %^)
instance SingI1 (^@#@$$) where
  liftSing s = singFun1 (s %^)
-- | The singleton analogue of 'TN.<=?'
--
-- Note that, because of historical reasons in GHC's 'Natural' API, 'TN.<=?'
-- is incompatible (unification-wise) with 'O.<=' and the 'PEq', 'SEq',
-- 'POrd', and 'SOrd' instances for 'Natural'. @(a '<=?' b) ~ 'True@ does not
-- imply anything about @a 'O.<=' b@ or any other 'PEq' / 'POrd'
-- relationships.
--
-- (Be aware that 'O.<=' in the paragraph above refers to 'O.<=' from the
-- 'POrd' typeclass, exported from "Data.Ord.Singletons", and /not/
-- the 'TN.<=' from "GHC.TypeNats". The latter is simply a type alias for
-- @(a 'TN.<=?' b) ~ 'True@.)
--
-- This is provided here for the sake of completeness and for compatibility
-- with libraries with APIs built around '<=?'. New code should use
-- 'CmpNat', exposed through this library through the 'POrd' and 'SOrd'
-- instances for 'Natural'.
(%<=?) :: forall (a :: Natural) (b :: Natural). Sing a -> Sing b -> Sing (a <=? b)
sa %<=? sb = unsafeCoerce (sa %<= sb)
infix 4 %<=?
-- Defunctionalization symbols for (<=?)
$(genDefunSymbols [''(<=?)])
instance SingI ((<=?@#@$) @Natural) where
  sing = singFun2 (%<=?)
instance SingI x => SingI ((<=?@#@$$) @Natural x) where
  sing = singFun1 (sing @x %<=?)
instance SingI1 ((<=?@#@$$) @Natural) where
  liftSing s = singFun1 (s %<=?)
|
goldfirere/singletons
|
singletons-base/src/GHC/TypeLits/Singletons/Internal.hs
|
Haskell
|
bsd-3-clause
| 10,075
|
{-# LANGUAGE DeriveDataTypeable,OverloadedStrings #-}
module XMonad.Actions.XHints.Translate where
import Data.Typeable
import XMonad.Actions.XHints.State
import qualified Language.Bing as B
import Language.Bing (BingLanguage,BingContext,ClientId,ClientSecret,getAccessToken,execBing)
import Control.Monad.IO.Class (MonadIO,liftIO)
import Data.Text (Text)
import Control.Monad.State.Strict
import qualified Data.Text as T
import XMonad.Actions.XHints.Helpers (newTextHint)
-- | Uninhabited phantom tag identifying the translation hint's state slot.
data Translator deriving Typeable
-- | Translate @text@ from @from@ to @to@ through the Bing API, caching the
-- access token / context in the hint state so later calls can skip
-- re-authentication.  Any failure collapses to a generic Left message.
translateHint :: ClientId -> ClientSecret -> BingLanguage -> BingLanguage -> Text -> XHint Translator BingContext (Either Text Text)
translateHint clientId clientSecret from to text = do
  s <- get
  res <- B.runExceptT $ do
    -- Reuse a cached context when present, otherwise authenticate afresh.
    token <- case s of
      Nothing -> getAccessToken clientId clientSecret
      Just ctx -> return ctx
    flip B.runBing token $ do
      trans <- B.translateM text from to
      -- Also return the (possibly refreshed) context so it can be cached.
      ctx <- B.getBingCtx
      return (trans,ctx)
  case res of
    Right (trans,token) -> put (Just token) >> return (Right $ T.pack $ show trans)
    _ -> return $ Left "Error translating text"
-- | Wrap 'translateHint' as a text hint action.
-- NOTE(review): no top-level type signature; GHC infers one, but writing
-- out the intended type would improve -Wall cleanliness — confirm type.
translate clientId clientSecret from to = newTextHint $ translateHint clientId clientSecret from to
|
netogallo/XHints
|
src/XMonad/Actions/XHints/Translate.hs
|
Haskell
|
bsd-3-clause
| 1,214
|
{-# LANGUAGE OverloadedStrings #-}
module Air.Cli.Parser where
import Control.Applicative ((<$>), (<*>), (<*))
import Data.Char (chr)
import Data.ByteString hiding (map, elem)
import Data.Attoparsec hiding (satisfy)
import Data.Attoparsec.Combinator
import Data.Attoparsec.ByteString.Char8
import Air.Cli (Command(..))
import qualified Air.Domain as D
import Air.Test
-- | Parse one full input line into a 'Command'.
parseCommand :: ByteString -> Either String Command
parseCommand = parseOnly command
-- | All command alternatives.  attoparsec's choice backtracks on failure,
-- so overlapping prefixes ("create flatmate" vs "create bill") are safe.
command :: Parser Command
command = choice [
    createFlatmate
  , deactivateFlatmate
  , activateFlatmate
  , createBill
  , attendBill
  , flatmateInformation
  , payBill
  , flatmateBalance
  , flatmateDeposit
  , payment
  , quit
  , help
  ]
-- Parses the literal word "quit".
quit :: Parser Command
quit = do
  _ <- string "quit"
  return Quit
-- Parses the literal word "help".
help :: Parser Command
help = do
  _ <- string "help"
  return Help
-- eg parses the "create flatmate username Bob Marley"
-- The display name may contain letters (including accented ones) and spaces.
createFlatmate :: Parser Command
createFlatmate = do
  cmd "create" "flatmate"
  CreateFlatmate
    <$> (username <* spaces)
    <*> (many1 (letter <|> space))
-- eg parses the "deactivate flatmate username"
deactivateFlatmate :: Parser Command
deactivateFlatmate = do
  cmd "deactivate" "flatmate"
  DeactivateFlatmate <$> username
-- eg parses the "activate flatmate username"
activateFlatmate :: Parser Command
activateFlatmate = do
  cmd "activate" "flatmate"
  ActivateFlatmate <$> username
-- eg parses the "create bill billname [usera,userb]"
createBill :: Parser Command
createBill = do
  cmd "create" "bill"
  CreateBill
    <$> (billname <* spaces)
    <*> (listOf username)
-- eg parses the "attend bill user billname"
attendBill :: Parser Command
attendBill = do
  cmd "attend" "bill"
  AttendBill
    <$> (username <* spaces)
    <*> billname
-- eg parses the "information username"
flatmateInformation :: Parser Command
flatmateInformation = do
  spacesAfter $ string "information"
  FlatmateInformation <$> username
-- eg parses the "pay billname {number}"
-- NOTE(review): the old comment claimed a "bill" keyword that the parser
-- (and the test "pay common 10000") never matched; comment corrected.
-- Body rewritten with 'spacesAfter' for consistency with the other
-- single-keyword parsers; behavior is identical.
payBill :: Parser Command
payBill = do
  spacesAfter $ string "pay"
  PayBill
    <$> (billname <* spaces)
    <*> decimal
-- eg parses the "flatmate balance username"
flatmateBalance :: Parser Command
flatmateBalance = do
  cmd "flatmate" "balance"
  FlatmateBalance <$> username
-- eg parses the "flatmate deposit username {number}"
flatmateDeposit :: Parser Command
flatmateDeposit =
  FlatmateDeposit
    <$> (username <* spaces <* string "deposits" <* spaces)
    <*> decimal
-- eg parses the "payment [usera, userb] {number} description"
payment :: Parser Command
payment = do
  spacesAfter $ string "payment"
  Payment
    <$> ((listOf username) <* spaces)
    <*> (decimal <* spaces)
    <*> (many1 (letter <|> space))
-- A user name: one or more letters (including accented ones).
username = many1 letter
-- A bill name: one or more letters.
billname = many1 letter
-- Run two keyword parsers, each followed by mandatory whitespace.
cmd f s = do
  f
  spaces
  s
  spaces
-- Tools
-- Run a parser, then consume and discard trailing whitespace, keeping
-- the parser's result.
spacesAfter parser = parser <* spaces
-- Run a parser surrounded by opening and closing delimiter parsers,
-- keeping only the middle result.
brackets open parser close = open >> parser <* close
-- "[a, b, c]" style list: elements separated by a comma plus whitespace.
listOf p = brackets (char '[') (p `sepBy` (char ',' >> spaces)) (char ']')
spaces = many1 space
letter = accented_letter <|> letter_ascii
-- Latin-1/Unicode code points of Hungarian accented vowels, both cases.
accented_letter = do
  satisfy (flip elem (map chr [
    225, 237, 369, 337, 252, 246, 250, 243, 233
    , 193, 205, 368, 336, 220, 214, 218, 211, 201
    ]))
-- Local alternative combinator built on 'choice'; Control.Applicative's
-- (<|>) is not imported here, so there is no clash.
a <|> b = choice [a,b]
{-# INLINE (<|>) #-}
-- Test
-- One round-trip test per command form; each triple is (actual, expected,
-- label).
parserTests = [
    Equals (parseCommand "create flatmate user Bob User")
           (Right (CreateFlatmate "user" "Bob User"))
           "Parser Test: Create Flatmate"
  , Equals (parseCommand "deactivate flatmate user")
           (Right (DeactivateFlatmate "user"))
           "Parser Test: Deactivate Flatmate"
  , Equals (parseCommand "activate flatmate user")
           (Right (ActivateFlatmate "user"))
           "Parser Test: Activate Flatmate"
  , Equals (parseCommand "create bill billname [usera, userb, userc]")
           (Right (CreateBill "billname" ["usera", "userb", "userc"]))
           "Parser Test: Create Bill"
  , Equals (parseCommand "attend bill user bill")
           (Right (AttendBill "user" "bill"))
           "Parser Test: Attend Bill"
  , Equals (parseCommand "information user")
           (Right (FlatmateInformation "user"))
           "Parser Test: Flatmate Information"
  , Equals (parseCommand "pay common 10000")
           (Right (PayBill "common" 10000))
           "Parser Test: Pay Bill"
  , Equals (parseCommand "flatmate balance user")
           (Right (FlatmateBalance "user"))
           "Parser Test: Flatmate Balance"
  , Equals (parseCommand "user deposits 10000")
           (Right (FlatmateDeposit "user" 10000))
           "Parser Test: Flatmate Deposit"
  , Equals (parseCommand "payment [usera, userb] 100 This is a description")
           (Right (Payment ["usera", "userb"] 100 "This is a description"))
           "Parser Test: Payment"
  , Equals (parseCommand "quit") (Right Quit) "Parses Test: Quit"
  , Equals (parseCommand "help") (Right Help) "Parses Test: Help"
  ]
|
andorp/air
|
src/Air/Cli/Parser.hs
|
Haskell
|
bsd-3-clause
| 4,974
|
module ReplaceExperiment where
-- | Ignore the argument entirely and produce the letter @'p'@.
replaceWithP :: b -> Char
replaceWithP _ = 'p'
-- | Sample data: a list of optional strings shared by every demo below.
lms :: [Maybe [Char]]
lms = [Just "Ave", Nothing, Just "woohoo"]
-- | 'replaceWithP' pinned to the concrete type of 'lms': the unlifted
-- function consumes the whole list value at once.
replaceWithP' :: [Maybe [Char]] -> Char
replaceWithP' = replaceWithP
-- | One 'fmap': replace every element of an arbitrary functor.
liftedReplace :: Functor f => f a -> f Char
liftedReplace = fmap replaceWithP
-- | 'liftedReplace' pinned to the type of 'lms'.
liftedReplace' :: [Maybe [Char]] -> [Char]
liftedReplace' = liftedReplace
-- | Two composed 'fmap's: reach one functor level deeper (list, then Maybe).
twiceLifted :: (Functor f1, Functor f) => f (f1 a) -> f (f1 Char)
twiceLifted = (fmap . fmap) replaceWithP
twiceLifted' :: [Maybe [Char]] -> [Maybe Char]
twiceLifted' = twiceLifted
-- | Three composed 'fmap's: list, Maybe, then the inner list of Char.
thriceLifted :: (Functor f2, Functor f1, Functor f) => f (f1 (f2 a)) -> f (f1 (f2 Char))
thriceLifted = (fmap . fmap . fmap) replaceWithP
thriceLifted' :: [Maybe [Char]] -> [Maybe [Char]]
thriceLifted' = thriceLifted
-- | Print each demo's label and result, showing how each level of lifting
-- changes what gets replaced by 'p'.
main :: IO ()
main = do
    report "replaceWithP' lms: " (replaceWithP' lms)
    report "liftedReplace lms: " (liftedReplace lms)
    report "liftedReplace' lms: " (liftedReplace' lms)
    report "twiceLifted lms: " (twiceLifted lms)
    report "twiceLifted' lms: " (twiceLifted' lms)
    report "thriceLifted lms: " (thriceLifted lms)
    report "thriceLifted' lms: " (thriceLifted' lms)
  where
    -- Print a label (no newline) followed by the shown value.
    report :: Show a => String -> a -> IO ()
    report label value = putStr label >> print value
|
chengzh2008/hpffp
|
src/ch16-Functor/oneMoreList.hs
|
Haskell
|
bsd-3-clause
| 1,231
|
{-# language CPP #-}
-- | = Name
--
-- VK_AMD_shader_explicit_vertex_parameter - device extension
--
-- == VK_AMD_shader_explicit_vertex_parameter
--
-- [__Name String__]
-- @VK_AMD_shader_explicit_vertex_parameter@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
-- 22
--
-- [__Revision__]
-- 1
--
-- [__Extension and Version Dependencies__]
--
-- - Requires Vulkan 1.0
--
-- [__Contact__]
--
-- - Qun Lin
-- <https://github.com/KhronosGroup/Vulkan-Docs/issues/new?body=[VK_AMD_shader_explicit_vertex_parameter] @linqun%0A<<Here describe the issue or question you have about the VK_AMD_shader_explicit_vertex_parameter extension>> >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
-- 2016-05-10
--
-- [__IP Status__]
-- No known IP claims.
--
-- [__Interactions and External Dependencies__]
--
-- - This extension requires
-- <https://htmlpreview.github.io/?https://github.com/KhronosGroup/SPIRV-Registry/blob/master/extensions/AMD/SPV_AMD_shader_explicit_vertex_parameter.html SPV_AMD_shader_explicit_vertex_parameter>
--
-- - This extension provides API support for
-- <https://www.khronos.org/registry/OpenGL/extensions/AMD/AMD_shader_explicit_vertex_parameter.txt GL_AMD_shader_explicit_vertex_parameter>
--
-- [__Contributors__]
--
-- - Matthaeus G. Chajdas, AMD
--
-- - Qun Lin, AMD
--
-- - Daniel Rakos, AMD
--
-- - Graham Sellers, AMD
--
-- - Rex Xu, AMD
--
-- == Description
--
-- This extension adds support for the following SPIR-V extension in
-- Vulkan:
--
-- - <https://htmlpreview.github.io/?https://github.com/KhronosGroup/SPIRV-Registry/blob/master/extensions/AMD/SPV_AMD_shader_explicit_vertex_parameter.html SPV_AMD_shader_explicit_vertex_parameter>
--
-- == New Enum Constants
--
-- - 'AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME'
--
-- - 'AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION'
--
-- == Version History
--
-- - Revision 1, 2016-05-10 (Daniel Rakos)
--
-- - Initial draft
--
-- == See Also
--
-- No cross-references are available
--
-- == Document Notes
--
-- For more information, see the
-- <https://www.khronos.org/registry/vulkan/specs/1.3-extensions/html/vkspec.html#VK_AMD_shader_explicit_vertex_parameter Vulkan Specification>
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_AMD_shader_explicit_vertex_parameter ( AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION
, pattern AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION
, AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME
, pattern AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME
) where
import Data.String (IsString)
-- Generated constants: the extension's spec revision and registered name,
-- each as both a type-level literal and a matching value-level pattern.
type AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION = 1
-- No documentation found for TopLevel "VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION"
pattern AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION :: forall a . Integral a => a
pattern AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION = 1
type AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME = "VK_AMD_shader_explicit_vertex_parameter"
-- No documentation found for TopLevel "VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME"
pattern AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME = "VK_AMD_shader_explicit_vertex_parameter"
|
expipiplus1/vulkan
|
src/Vulkan/Extensions/VK_AMD_shader_explicit_vertex_parameter.hs
|
Haskell
|
bsd-3-clause
| 3,769
|
{-
(c) The University of Glasgow 2006
(c) The GRASP/AQUA Project, Glasgow University, 1992-1999
Analysis functions over data types. Specficially, detecting recursive types.
This stuff is only used for source-code decls; it's recorded in interface
files for imported data types.
-}
{-# LANGUAGE CPP #-}
module TcTyDecls(
calcRecFlags, RecTyInfo(..),
calcSynCycles,
checkClassCycles,
-- * Roles
RoleAnnots, extractRoleAnnots, emptyRoleAnnots, lookupRoleAnnots,
-- * Implicits
tcAddImplicits,
-- * Record selectors
mkRecSelBinds, mkOneRecordSelector
) where
#include "HsVersions.h"
import TcRnMonad
import TcEnv
import TcTypeable( mkTypeableBinds )
import TcBinds( tcRecSelBinds )
import TyCoRep( Type(..), TyBinder(..), delBinderVar )
import TcType
import TysWiredIn( unitTy )
import MkCore( rEC_SEL_ERROR_ID )
import HsSyn
import Class
import Type
import HscTypes
import TyCon
import ConLike
import DataCon
import Name
import NameEnv
import RdrName ( mkVarUnqual )
import Id
import IdInfo
import VarEnv
import VarSet
import NameSet
import Coercion ( ltRole )
import Digraph
import BasicTypes
import SrcLoc
import Unique ( mkBuiltinUnique )
import Outputable
import Util
import Maybes
import Data.List
import Bag
import FastString
import Control.Monad
{-
************************************************************************
* *
Cycles in type synonym declarations
* *
************************************************************************
Checking for class-decl loops is easy, because we don't allow class decls
in interface files.
We allow type synonyms in hi-boot files, but we *trust* hi-boot files,
so we don't check for loops that involve them. So we only look for synonym
loops in the module being compiled.
We check for type synonym and class cycles on the *source* code.
Main reasons:
a) Otherwise we'd need a special function to extract type-synonym tycons
from a type, whereas we already have the free vars pinned on the decl
b) If we checked for type synonym loops after building the TyCon, we
can't do a hoistForAllTys on the type synonym rhs, (else we fall into
a black hole) which seems unclean. Apart from anything else, it'd mean
that a type-synonym rhs could have for-alls to the right of an arrow,
which means adding new cases to the validity checker
Indeed, in general, checking for cycles beforehand means we need to
be less careful about black holes through synonym cycles.
The main disadvantage is that a cycle that goes via a type synonym in an
.hi-boot file can lead the compiler into a loop, because it assumes that cycles
only occur entirely within the source code of the module being compiled.
But hi-boot files are trusted anyway, so this isn't much worse than (say)
a kind error.
[ NOTE ----------------------------------------------
If we reverse this decision, this comment came from tcTyDecl1, and should
go back there
-- dsHsType, not tcHsKindedType, to avoid a loop. tcHsKindedType does hoisting,
-- which requires looking through synonyms... and therefore goes into a loop
-- on (erroneously) recursive synonyms.
-- Solution: do not hoist synonyms, because they'll be hoisted soon enough
-- when they are substituted
We'd also need to add back in this definition
synonymTyConsOfType :: Type -> [TyCon]
-- Does not look through type synonyms at all
-- Return a list of synonym tycons
synonymTyConsOfType ty
= nameEnvElts (go ty)
where
go :: Type -> NameEnv TyCon -- The NameEnv does duplicate elim
go (TyVarTy v) = emptyNameEnv
go (TyConApp tc tys) = go_tc tc tys
go (AppTy a b) = go a `plusNameEnv` go b
go (FunTy a b) = go a `plusNameEnv` go b
go (ForAllTy _ ty) = go ty
go_tc tc tys | isTypeSynonymTyCon tc = extendNameEnv (go_s tys)
(tyConName tc) tc
| otherwise = go_s tys
go_s tys = foldr (plusNameEnv . go) emptyNameEnv tys
---------------------------------------- END NOTE ]
-}
-- | Build the dependency-graph edges for the type-synonym cycle check:
-- one (decl, name, free-variable names) triple per 'SynDecl'.
-- Declarations that are not synonyms contribute no edge, mirroring
-- the pattern-match filtering of a list comprehension.
mkSynEdges :: [LTyClDecl Name] -> [(LTyClDecl Name, Name, [Name])]
mkSynEdges syn_decls = concatMap syn_edge syn_decls
  where
    syn_edge ldecl@(L _ (SynDecl { tcdLName = L _ name, tcdFVs = fvs }))
      = [(ldecl, name, nameSetElems fvs)]
    syn_edge _ = []
-- | Group the type-synonym declarations into strongly connected
-- components; any 'CyclicSCC' is an (erroneous) recursive synonym.
calcSynCycles :: [LTyClDecl Name] -> [SCC (LTyClDecl Name)]
calcSynCycles decls = stronglyConnCompFromEdgedVertices (mkSynEdges decls)
{- Note [Superclass cycle check]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The superclass cycle check for C decides if we can statically
guarantee that expanding C's superclass cycles transitively is
guaranteed to terminate. This is a Haskell98 requirement,
but one that we lift with -XUndecidableSuperClasses.
The worry is that a superclass cycle could make the type checker loop.
More precisely, with a constraint (Given or Wanted)
C ty1 .. tyn
one approach is to instantiate all of C's superclasses, transitively.
We can only do so if that set is finite.
This potential loop occurs only through superclasses. This, for
example, is fine
class C a where
op :: C b => a -> b -> b
even though C's full definition uses C.
Making the check static also makes it conservative. Eg
type family F a
class F a => C a
Here an instance of (F a) might mention C:
type instance F [a] = C a
and now we'd have a loop.
The static check works like this, starting with C
* Look at C's superclass predicates
* If any is a type-function application,
or is headed by a type variable, fail
* If any has C at the head, fail
* If any has a type class D at the head,
make the same test with D
A tricky point is: what if there is a type variable at the head?
Consider this:
class f (C f) => C f
class c => Id c
and now expand superclasses for constraint (C Id):
C Id
--> Id (C Id)
--> C Id
--> ....
Each step expands superclasses one layer, and clearly does not terminate.
-}
-- | Static superclass-cycle check for one class; see
-- Note [Superclass cycle check].  Conservative: may report a
-- "potential" cycle that would in fact terminate (lifted by
-- -XUndecidableSuperClasses).
checkClassCycles :: Class -> Maybe SDoc
-- Nothing <=> ok
-- Just err <=> possible cycle error
checkClassCycles cls
= do { (definite_cycle, err) <- go (unitNameSet (getName cls))
cls (mkTyVarTys (classTyVars cls))
; let herald | definite_cycle = text "Superclass cycle for"
| otherwise = text "Potential superclass cycle for"
; return (vcat [ herald <+> quotes (ppr cls)
, nest 2 err, hint]) }
where
hint = text "Use UndecidableSuperClasses to accept this"
-- Expand superclasses starting with (C a b), complaining
-- if you find the same class a second time, or a type function
-- or predicate headed by a type variable
--
-- NB: this code duplicates TcType.transSuperClasses, but
-- with more error message generation clobber
-- Make sure the two stay in sync.
go :: NameSet -> Class -> [Type] -> Maybe (Bool, SDoc)
-- so_far: classes already seen on this superclass chain
go so_far cls tys = firstJusts $
map (go_pred so_far) $
immSuperClasses cls tys
go_pred :: NameSet -> PredType -> Maybe (Bool, SDoc)
-- Nothing <=> ok
-- Just (True, err) <=> definite cycle
-- Just (False, err) <=> possible cycle
go_pred so_far pred -- NB: tcSplitTyConApp looks through synonyms
| Just (tc, tys) <- tcSplitTyConApp_maybe pred
= go_tc so_far pred tc tys
| hasTyVarHead pred
= Just (False, hang (text "one of whose superclass constraints is headed by a type variable:")
2 (quotes (ppr pred)))
| otherwise
= Nothing
go_tc :: NameSet -> PredType -> TyCon -> [Type] -> Maybe (Bool, SDoc)
go_tc so_far pred tc tys
-- A type-family head could expand to anything: only a possible cycle
| isFamilyTyCon tc
= Just (False, hang (text "one of whose superclass constraints is headed by a type family:")
2 (quotes (ppr pred)))
| Just cls <- tyConClass_maybe tc
= go_cls so_far cls tys
| otherwise -- Equality predicate, for example
= Nothing
go_cls :: NameSet -> Class -> [Type] -> Maybe (Bool, SDoc)
go_cls so_far cls tys
-- Same class twice on one chain: a definite cycle
| cls_nm `elemNameSet` so_far
= Just (True, text "one of whose superclasses is" <+> quotes (ppr cls))
-- Constraint tuples are transparent: recurse without recording them
| isCTupleClass cls
= go so_far cls tys
| otherwise
= do { (b,err) <- go (so_far `extendNameSet` cls_nm) cls tys
; return (b, text "one of whose superclasses is" <+> quotes (ppr cls)
$$ err) }
where
cls_nm = getName cls
{-
************************************************************************
* *
Deciding which type constructors are recursive
* *
************************************************************************
Identification of recursive TyCons
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The knot-tying parameters: @rec_details_list@ is an alist mapping @Name@s to
@TyThing@s.
Identifying a TyCon as recursive serves two purposes
1. Avoid infinite types. Non-recursive newtypes are treated as
"transparent", like type synonyms, after the type checker. If we did
this for all newtypes, we'd get infinite types. So we figure out for
each newtype whether it is "recursive", and add a coercion if so. In
effect, we are trying to "cut the loops" by identifying a loop-breaker.
2. Avoid infinite unboxing. This has nothing to do with newtypes.
Suppose we have
data T = MkT Int T
f (MkT x t) = f t
Well, this function diverges, but we don't want the strictness analyser
to diverge. But the strictness analyser will diverge because it looks
deeper and deeper into the structure of T. (I believe there are
examples where the function does something sane, and the strictness
analyser still diverges, but I can't see one now.)
Now, concerning (1), the FC2 branch currently adds a coercion for ALL
newtypes. I did this as an experiment, to try to expose cases in which
the coercions got in the way of optimisations. If it turns out that we
can indeed always use a coercion, then we don't risk recursive types,
and don't need to figure out what the loop breakers are.
For newtype *families* though, we will always have a coercion, so they
are always loop breakers! So you can easily adjust the current
algorithm by simply treating all newtype families as loop breakers (and
indeed type families). I think.
For newtypes, we label some as "recursive" such that
INVARIANT: there is no cycle of non-recursive newtypes
In any loop, only one newtype need be marked as recursive; it is
a "loop breaker". Labelling more than necessary as recursive is OK,
provided the invariant is maintained.
A newtype M.T is defined to be "recursive" iff
(a) it is declared in an hi-boot file (see RdrHsSyn.hsIfaceDecl)
(b) it is declared in a source file, but that source file has a
companion hi-boot file which declares the type
or (c) one can get from T's rhs to T via type
synonyms, or non-recursive newtypes *in M*
e.g. newtype T = MkT (T -> Int)
(a) is conservative; declarations in hi-boot files are always
made loop breakers. That's why in (b) we can restrict attention
to tycons in M, because any loops through newtypes outside M
will be broken by those newtypes
(b) ensures that a newtype is not treated as a loop breaker in one place
and later as a non-loop-breaker. This matters in GHCi particularly, when
a newtype T might be embedded in many types in the environment, and then
T's source module is compiled. We don't want T's recursiveness to change.
The "recursive" flag for algebraic data types is irrelevant (never consulted)
for types with more than one constructor.
An algebraic data type M.T is "recursive" iff
it has just one constructor, and
(a) it is declared in an hi-boot file (see RdrHsSyn.hsIfaceDecl)
(b) it is declared in a source file, but that source file has a
companion hi-boot file which declares the type
or (c) one can get from its arg types to T via type synonyms,
or by non-recursive newtypes or non-recursive product types in M
e.g. data T = MkT (T -> Int) Bool
Just like newtype in fact
A type synonym is recursive if one can get from its
right hand side back to it via type synonyms. (This is
reported as an error.)
A class is recursive if one can get from its superclasses
back to it. (This is an error too.)
Hi-boot types
~~~~~~~~~~~~~
A data type read from an hi-boot file will have an AbstractTyCon as its AlgTyConRhs
and will respond True to isAbstractTyCon. The idea is that we treat these as if one
could get from these types to anywhere. So when we see
module Baz where
import {-# SOURCE #-} Foo( T )
newtype S = MkS T
then we mark S as recursive, just in case. What that means is that if we see
import Baz( S )
newtype R = MkR S
then we don't need to look inside S to compute R's recursiveness. Since S is imported
(not from an hi-boot file), one cannot get from R back to S except via an hi-boot file,
and that means that some data type will be marked recursive along the way. So R is
unconditionally non-recursive (i.e. there'll be a loop breaker elsewhere if necessary)
This in turn means that we grovel through fewer interface files when computing
recursiveness, because we need only look at the type decls in the module being
compiled, plus the outer structure of directly-mentioned types.
-}
-- | Result of the recursion/role analysis: lookup functions keyed by
-- a TyCon's 'Name'.  Both are total for the tycons of the analysed group.
data RecTyInfo = RTI { rti_roles :: Name -> [Role]
, rti_is_rec :: Name -> RecFlag }
-- | Compute, for a group of tycons, which are loop breakers (marked
-- Recursive) and what their parameter roles are.  See the long
-- commentary above ("Deciding which type constructors are recursive").
calcRecFlags :: SelfBootInfo -> Bool -- hs-boot file?
-> RoleAnnots -> [TyCon] -> RecTyInfo
-- The 'boot_names' are the things declared in M.hi-boot, if M is the current module.
-- Any type constructors in boot_names are automatically considered loop breakers
-- Recursion of newtypes/data types can happen via
-- the class TyCon, so all_tycons includes the class tycons
calcRecFlags boot_details is_boot mrole_env all_tycons
= RTI { rti_roles = roles
, rti_is_rec = is_rec }
where
roles = inferRoles is_boot mrole_env all_tycons
----------------- Recursion calculation ----------------
is_rec n | n `elemNameSet` rec_names = Recursive
| otherwise = NonRecursive
-- Everything declared in the hi-boot companion is a loop breaker
boot_name_set = case boot_details of
NoSelfBoot -> emptyNameSet
SelfBoot { sb_tcs = tcs } -> tcs
rec_names = boot_name_set `unionNameSet`
nt_loop_breakers `unionNameSet`
prod_loop_breakers
-------------------------------------------------
-- NOTE
-- These edge-construction loops rely on
-- every loop going via tyclss, the types and classes
-- in the module being compiled. Stuff in interface
-- files should be correctly marked. If not (e.g. a
-- type synonym in a hi-boot file) we can get an infinite
-- loop. We could program round this, but it'd make the code
-- rather less nice, so I'm not going to do that yet.
single_con_tycons = [ tc | tc <- all_tycons
, not (tyConName tc `elemNameSet` boot_name_set)
-- Remove the boot_name_set because they are
-- going to be loop breakers regardless.
, isSingleton (tyConDataCons tc) ]
-- Both newtypes and data types, with exactly one data constructor
(new_tycons, prod_tycons) = partition isNewTyCon single_con_tycons
-- NB: we do *not* call isProductTyCon because that checks
-- for vanilla-ness of data constructors; and that depends
-- on empty existential type variables; and that is figured
-- out by tcResultType; which uses tcMatchTy; which uses
-- coreView; which calls expandSynTyCon_maybe; which uses
-- the recursiveness of the TyCon. Result... a black hole.
-- YUK YUK YUK
--------------- Newtypes ----------------------
nt_loop_breakers = mkNameSet (findLoopBreakers nt_edges)
is_rec_nt tc = tyConName tc `elemNameSet` nt_loop_breakers
-- is_rec_nt is a locally-used helper function
-- Edges: newtype -> local newtypes mentioned in its rhs
nt_edges = [(t, mk_nt_edges t) | t <- new_tycons]
mk_nt_edges nt -- Invariant: nt is a newtype
= [ tc | tc <- nameEnvElts (tyConsOfType (new_tc_rhs nt))
-- tyConsOfType looks through synonyms
, tc `elem` new_tycons ]
-- If not (tc `elem` new_tycons) we know that either it's a local *data* type,
-- or it's imported. Either way, it can't form part of a newtype cycle
--------------- Product types ----------------------
prod_loop_breakers = mkNameSet (findLoopBreakers prod_edges)
prod_edges = [(tc, mk_prod_edges tc) | tc <- prod_tycons]
mk_prod_edges tc -- Invariant: tc is a product tycon
= concatMap (mk_prod_edges1 tc) (dataConOrigArgTys (head (tyConDataCons tc)))
mk_prod_edges1 ptc ty = concatMap (mk_prod_edges2 ptc) (nameEnvElts (tyConsOfType ty))
-- Look through non-recursive local newtypes when chasing product edges
mk_prod_edges2 ptc tc
| tc `elem` prod_tycons = [tc] -- Local product
| tc `elem` new_tycons = if is_rec_nt tc -- Local newtype
then []
else mk_prod_edges1 ptc (new_tc_rhs tc)
-- At this point we know that either it's a local non-product data type,
-- or it's imported. Either way, it can't form part of a cycle
| otherwise = []
new_tc_rhs :: TyCon -> Type
new_tc_rhs tc = snd (newTyConRhs tc) -- Ignore the type variables
findLoopBreakers :: [(TyCon, [TyCon])] -> [Name]
-- Finds a set of tycons that cut all loops
findLoopBreakers deps
= go [(tc,tc,ds) | (tc,ds) <- deps]
where
-- For every cyclic SCC, take its first tycon as a loop breaker and
-- recurse on the rest of that SCC to cut any remaining inner loops.
go edges = [ name
| CyclicSCC ((tc,_,_) : edges') <- stronglyConnCompFromEdgedVerticesR edges,
name <- tyConName tc : go edges']
{-
************************************************************************
* *
Role annotations
* *
************************************************************************
-}
-- | Map from a tycon's 'Name' to its (located) user-written role annotation.
type RoleAnnots = NameEnv (LRoleAnnotDecl Name)
-- | Collect the role annotations of a declaration group, keyed by tycon name.
extractRoleAnnots :: TyClGroup Name -> RoleAnnots
extractRoleAnnots (TyClGroup { group_roles = roles })
= mkNameEnv [ (tycon, role_annot)
| role_annot@(L _ (RoleAnnotDecl (L _ tycon) _)) <- roles ]
-- | No role annotations at all.
emptyRoleAnnots :: RoleAnnots
emptyRoleAnnots = emptyNameEnv
-- | Look up the role annotation for a tycon name, if any.
lookupRoleAnnots :: RoleAnnots -> Name -> Maybe (LRoleAnnotDecl Name)
lookupRoleAnnots = lookupNameEnv
{-
************************************************************************
* *
Role inference
* *
************************************************************************
Note [Role inference]
~~~~~~~~~~~~~~~~~~~~~
The role inference algorithm walks over datatype definitions to infer the roles on the
parameters. Although these roles are stored in the tycons, we can perform this
algorithm on the built tycons, as long as we don't peek at an as-yet-unknown
roles field! Ah, the magic of laziness.
First, we choose appropriate initial roles. For families and classes, roles
(including initial roles) are N. For datatypes, we start with the role in the
role annotation (if any), or otherwise use Phantom. This is done in
initialRoleEnv1.
The function irGroup then propagates role information until it reaches a
fixpoint, preferring N over (R or P) and R over P. To aid in this, we have a
monad RoleM, which is a combination reader and state monad. In its state are
the current RoleEnv, which gets updated by role propagation, and an update
bit, which we use to know whether or not we've reached the fixpoint. The
environment of RoleM contains the tycon whose parameters we are inferring, and
a VarEnv from parameters to their positions, so we can update the RoleEnv.
Between tycons, this reader information is missing; it is added by
addRoleInferenceInfo.
There are two kinds of tycons to consider: algebraic ones (excluding classes)
and type synonyms. (Remember, families don't participate -- all their parameters
are N.) An algebraic tycon processes each of its datacons, in turn. Note that
a datacon's universally quantified parameters might be different from the parent
tycon's parameters, so we use the datacon's univ parameters in the mapping from
vars to positions. Note also that we don't want to infer roles for existentials
(they're all at N, too), so we put them in the set of local variables. As an
optimisation, we skip any tycons whose roles are already all Nominal, as there is
nowhere else for them to go. For synonyms, we just analyse their right-hand sides.
irType walks through a type, looking for uses of a variable of interest and
propagating role information. Because anything used under a phantom position
is at phantom and anything used under a nominal position is at nominal, the
irType function can assume that anything it sees is at representational. (The
other possibilities are pruned when they're encountered.)
The rest of the code is just plumbing.
How do we know that this algorithm is correct? It should meet the following
specification:
Let Z be a role context -- a mapping from variables to roles. The following
rules define the property (Z |- t : r), where t is a type and r is a role:
Z(a) = r' r' <= r
------------------------- RCVar
Z |- a : r
---------- RCConst
Z |- T : r -- T is a type constructor
Z |- t1 : r
Z |- t2 : N
-------------- RCApp
Z |- t1 t2 : r
forall i<=n. (r_i is R or N) implies Z |- t_i : r_i
roles(T) = r_1 .. r_n
---------------------------------------------------- RCDApp
Z |- T t_1 .. t_n : R
Z, a:N |- t : r
---------------------- RCAll
Z |- forall a:k.t : r
We also have the following rules:
For all datacon_i in type T, where a_1 .. a_n are universally quantified
and b_1 .. b_m are existentially quantified, and the arguments are t_1 .. t_p,
then if forall j<=p, a_1 : r_1 .. a_n : r_n, b_1 : N .. b_m : N |- t_j : R,
then roles(T) = r_1 .. r_n
roles(->) = R, R
roles(~#) = N, N
With -dcore-lint on, the output of this algorithm is checked in checkValidRoles,
called from checkValidTycon.
Note [Role-checking data constructor arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
data T a where
MkT :: Eq b => F a -> (a->a) -> T (G a)
Then we want to check the roles at which 'a' is used
in MkT's type. We want to work on the user-written type,
so we need to take into account
* the arguments: (F a) and (a->a)
* the context: C a b
* the result type: (G a) -- this is in the eq_spec
Note [Coercions in role inference]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Is (t |> co1) representationally equal to (t |> co2)? Of course they are! Changing
the kind of a type is totally irrelevant to the representation of that type. So,
we want to totally ignore coercions when doing role inference. This includes omitting
any type variables that appear in nominal positions but only within coercions.
-}
-- | In-progress role assignments for the tycons of the current group.
type RoleEnv = NameEnv [Role] -- from tycon names to roles
-- This, and any of the functions it calls, must *not* look at the roles
-- field of a tycon we are inferring roles about!
-- See Note [Role inference]
-- | Infer the parameter roles for a group of tycons, returning a total
-- lookup function.  The fixpoint ('irGroup') is computed once and shared
-- across all lookups; an unknown name is a panic, since the result map
-- covers exactly the tycons of the group.
inferRoles :: Bool -> RoleAnnots -> [TyCon] -> Name -> [Role]
inferRoles is_boot annots tycons = lookup_roles
  where
    final_env = irGroup (initialRoleEnv is_boot annots tycons) tycons
    lookup_roles name
      | Just roles <- lookupNameEnv final_env name = roles
      | otherwise = pprPanic "inferRoles" (ppr name)
-- | Seed the role environment with one entry per tycon; see
-- 'initialRoleEnv1' for the per-tycon starting roles.
initialRoleEnv :: Bool -> RoleAnnots -> [TyCon] -> RoleEnv
initialRoleEnv is_boot annots tycons
  = extendNameEnvList emptyNameEnv
      (map (initialRoleEnv1 is_boot annots) tycons)
-- | Initial roles for one tycon: all-Nominal for families; for algebraic
-- tycons and synonyms, the annotated role per visible binder (when a
-- well-formed annotation exists) with a per-tycon default otherwise.
initialRoleEnv1 :: Bool -> RoleAnnots -> TyCon -> (Name, [Role])
initialRoleEnv1 is_boot annots_env tc
| isFamilyTyCon tc = (name, map (const Nominal) bndrs)
| isAlgTyCon tc = (name, default_roles)
| isTypeSynonymTyCon tc = (name, default_roles)
| otherwise = pprPanic "initialRoleEnv1" (ppr tc)
where name = tyConName tc
bndrs = tyConBinders tc
visflags = map binderVisibility $ take (tyConArity tc) bndrs
-- Role annotations are only written for the Visible binders
num_exps = count (== Visible) visflags
-- if the number of annotations in the role annotation decl
-- is wrong, just ignore it. We check this in the validity check.
role_annots
= case lookupNameEnv annots_env name of
Just (L _ (RoleAnnotDecl _ annots))
| annots `lengthIs` num_exps -> map unLoc annots
_ -> replicate num_exps Nothing
default_roles = build_default_roles visflags role_annots
-- Walk binders and annotations in lockstep; invisible binders
-- consume no annotation and are always Nominal
build_default_roles (Visible : viss) (m_annot : ras)
= (m_annot `orElse` default_role) : build_default_roles viss ras
build_default_roles (_inv : viss) ras
= Nominal : build_default_roles viss ras
build_default_roles [] [] = []
build_default_roles _ _ = pprPanic "initialRoleEnv1 (2)"
(vcat [ppr tc, ppr role_annots])
-- Starting point for inference: Phantom is the bottom of the role
-- lattice, so inference can only strengthen it
default_role
| isClassTyCon tc = Nominal
| is_boot && isAbstractTyCon tc = Representational
| otherwise = Phantom
-- | Iterate role propagation over the group until no update occurs
-- (the fixpoint); 'runRoleM' reports whether the pass changed anything.
irGroup :: RoleEnv -> [TyCon] -> RoleEnv
irGroup env tcs
  | changed   = irGroup env' tcs
  | otherwise = env'
  where
    (env', changed) = runRoleM env (mapM_ irTyCon tcs)
-- | Propagate role information from one tycon: for algebraic tycons,
-- walk the stupid theta, the class (if any) and the data constructors;
-- for synonyms, walk the right-hand side; otherwise do nothing.
irTyCon :: TyCon -> RoleM ()
irTyCon tc
| isAlgTyCon tc
= do { old_roles <- lookupRoles tc
; unless (all (== Nominal) old_roles) $ -- also catches data families,
-- which don't want or need role inference
irTcTyVars tc $
do { mapM_ (irType emptyVarSet) (tyConStupidTheta tc) -- See #8958
; whenIsJust (tyConClass_maybe tc) irClass
; mapM_ irDataCon (visibleDataCons $ algTyConRhs tc) }}
| Just ty <- synTyConRhs_maybe tc
= irTcTyVars tc $
irType emptyVarSet ty
| otherwise
= return ()
-- any type variable used in an associated type must be Nominal
-- | Force Nominal on every class variable mentioned in the tyvar list
-- of one of the class's associated types.
irClass :: Class -> RoleM ()
irClass cls
= mapM_ ir_at (classATs cls)
where
cls_tvs = classTyVars cls
cls_tv_set = mkVarSet cls_tvs
ir_at at_tc
= mapM_ (updateRole Nominal) (varSetElems nvars)
where nvars = (mkVarSet $ tyConTyVars at_tc) `intersectVarSet` cls_tv_set
-- See Note [Role inference]
-- | Walk one data constructor's full signature (existential kinds,
-- eq-spec, context, and argument types), with the existentials placed
-- in the local (all-Nominal) set.
irDataCon :: DataCon -> RoleM ()
irDataCon datacon
= setRoleInferenceVars univ_tvs $
irExTyVars ex_tvs $ \ ex_var_set ->
mapM_ (irType ex_var_set)
(map tyVarKind ex_tvs ++ eqSpecPreds eq_spec ++ theta ++ arg_tys)
-- See Note [Role-checking data constructor arguments]
where
(univ_tvs, ex_tvs, eq_spec, theta, arg_tys, _res_ty)
= dataConFullSig datacon
-- | Walk a type, updating role information for each free variable of
-- interest.  Invariant: the type walked is itself in a Representational
-- position; Nominal/Phantom sub-positions are dispatched by go_app
-- before recursing (see Note [Role inference]).
irType :: VarSet -> Type -> RoleM ()
irType = go
where
go lcls (TyVarTy tv) = unless (tv `elemVarSet` lcls) $
updateRole Representational tv
-- The argument of a bare application must be Nominal (rule RCApp)
go lcls (AppTy t1 t2) = go lcls t1 >> markNominal lcls t2
go lcls (TyConApp tc tys) = do { roles <- lookupRolesX tc
; zipWithM_ (go_app lcls) roles tys }
-- The bound variable becomes local; its kind is walked at Nominal
go lcls (ForAllTy (Named tv _) ty)
= let lcls' = extendVarSet lcls tv in
markNominal lcls (tyVarKind tv) >> go lcls' ty
go lcls (ForAllTy (Anon arg) res)
= go lcls arg >> go lcls res
go _ (LitTy {}) = return ()
-- See Note [Coercions in role inference]
go lcls (CastTy ty _) = go lcls ty
go _ (CoercionTy _) = return ()
go_app _ Phantom _ = return () -- nothing to do here
go_app lcls Nominal ty = markNominal lcls ty -- all vars below here are N
go_app lcls Representational ty = go lcls ty
-- | Set the tycon under inference and bring its type variables into
-- scope one by one, marking each variable's kind as Nominal.
irTcTyVars :: TyCon -> RoleM a -> RoleM a
irTcTyVars tc thing
= setRoleInferenceTc (tyConName tc) $ go (tyConTyVars tc)
where
go [] = thing
go (tv:tvs) = do { markNominal emptyVarSet (tyVarKind tv)
; addRoleInferenceVar tv $ go tvs }
-- | Accumulate existential variables into a local set (they take no
-- part in inference), marking each one's kind as Nominal, then pass
-- the completed set to the continuation.
irExTyVars :: [TyVar] -> (TyVarSet -> RoleM a) -> RoleM a
irExTyVars orig_tvs thing = go emptyVarSet orig_tvs
where
go lcls [] = thing lcls
go lcls (tv:tvs) = do { markNominal lcls (tyVarKind tv)
; go (extendVarSet lcls tv) tvs }
-- | Update every type variable free in the type (excluding the local
-- set) to Nominal.  Coercions are skipped entirely; see
-- Note [Coercions in role inference].
markNominal :: TyVarSet -- local variables
-> Type -> RoleM ()
markNominal lcls ty = let nvars = get_ty_vars ty `minusVarSet` lcls in
mapM_ (updateRole Nominal) (varSetElems nvars)
where
-- get_ty_vars gets all the tyvars (no covars!) from a type *without*
-- recurring into coercions. Recall: coercions are totally ignored during
-- role inference. See [Coercions in role inference]
get_ty_vars (TyVarTy tv) = unitVarSet tv
get_ty_vars (AppTy t1 t2) = get_ty_vars t1 `unionVarSet` get_ty_vars t2
get_ty_vars (TyConApp _ tys) = foldr (unionVarSet . get_ty_vars) emptyVarSet tys
get_ty_vars (ForAllTy bndr ty)
= get_ty_vars ty `delBinderVar` bndr
`unionVarSet` (tyCoVarsOfType $ binderType bndr)
get_ty_vars (LitTy {}) = emptyVarSet
get_ty_vars (CastTy ty _) = get_ty_vars ty
get_ty_vars (CoercionTy _) = emptyVarSet
-- like lookupRoles, but with Nominal tags at the end for oversaturated TyConApps
-- | The infinite Nominal tail lets callers zip roles against the
-- argument list of an oversaturated application without running out.
lookupRolesX :: TyCon -> RoleM [Role]
lookupRolesX tc = fmap (++ repeat Nominal) (lookupRoles tc)
-- gets the roles either from the environment or the tycon
-- | Prefer the in-progress environment (tycons of the current group);
-- fall back to the roles already stored on the tycon (imported tycons).
lookupRoles :: TyCon -> RoleM [Role]
lookupRoles tc
  = do { env <- getRoleEnv
       ; return (lookupNameEnv env (tyConName tc) `orElse` tyConRoles tc) }
-- tries to update a role; won't ever update a role "downwards"
-- | Translate the tyvar to its parameter position in the current tycon
-- and delegate; the monotonicity ("never downwards") check itself is
-- done by 'updateRoleEnv' via ltRole.
updateRole :: Role -> TyVar -> RoleM ()
updateRole role tv
= do { var_ns <- getVarNs
; name <- getTyConName
; case lookupVarEnv var_ns tv of
Nothing -> pprPanic "updateRole" (ppr name $$ ppr tv $$ ppr var_ns)
Just n -> updateRoleEnv name n role }
-- the state in the RoleM monad
-- | State: the evolving role environment plus a dirty bit recording
-- whether this pass changed anything (drives the irGroup fixpoint).
data RoleInferenceState = RIS { role_env :: RoleEnv
, update :: Bool }
-- the environment in the RoleM monad
-- | Maps each in-scope type variable to its parameter position.
type VarPositions = VarEnv Int
-- See [Role inference]
-- | Combined reader (tycon name, var positions, their count) and
-- state ('RoleInferenceState') monad, hand-rolled as a function.
newtype RoleM a = RM { unRM :: Maybe Name -- of the tycon
-> VarPositions
-> Int -- size of VarPositions
-> RoleInferenceState
-> (a, RoleInferenceState) }
-- Standard reader+state plumbing: the three reader arguments are
-- threaded unchanged; the state flows left-to-right through (>>=).
instance Functor RoleM where
fmap = liftM
instance Applicative RoleM where
pure x = RM $ \_ _ _ state -> (x, state)
(<*>) = ap
instance Monad RoleM where
a >>= f = RM $ \m_info vps nvps state ->
let (a', state') = unRM a m_info vps nvps state in
unRM (f a') m_info vps nvps state'
-- | Run a role-inference pass over an initial environment, returning
-- the final environment and whether any role was updated (the
-- fixpoint test for 'irGroup').  Reader info starts empty: no tycon,
-- no variable positions.
runRoleM :: RoleEnv -> RoleM () -> (RoleEnv, Bool)
runRoleM env thing = (env', update)
where RIS { role_env = env', update = update }
= snd $ unRM thing Nothing emptyVarEnv 0 state
state = RIS { role_env = env
, update = False }
-- | Enter a tycon's scope.  The assertions check we are not already
-- inside another tycon and that no variables are in scope yet.
setRoleInferenceTc :: Name -> RoleM a -> RoleM a
setRoleInferenceTc name thing = RM $ \m_name vps nvps state ->
ASSERT( isNothing m_name )
ASSERT( isEmptyVarEnv vps )
ASSERT( nvps == 0 )
unRM thing (Just name) vps nvps state
-- | Bring one more type variable into scope, assigning it the next
-- parameter position (nvps) and bumping the count.
addRoleInferenceVar :: TyVar -> RoleM a -> RoleM a
addRoleInferenceVar tv thing
= RM $ \m_name vps nvps state ->
ASSERT( isJust m_name )
unRM thing m_name (extendVarEnv vps tv nvps) (nvps+1) state
-- | Replace the in-scope variables wholesale (used per data constructor,
-- whose universals may differ from the parent tycon's tyvars).
-- The size field is deliberately a panic thunk — presumably never
-- demanded on this path; NOTE(review): confirm nothing forces it.
setRoleInferenceVars :: [TyVar] -> RoleM a -> RoleM a
setRoleInferenceVars tvs thing
= RM $ \m_name _vps _nvps state ->
ASSERT( isJust m_name )
unRM thing m_name (mkVarEnv (zip tvs [0..])) (panic "setRoleInferenceVars")
state
-- | Read the current role environment out of the state.
getRoleEnv :: RoleM RoleEnv
getRoleEnv = RM $ \_ _ _ state@(RIS { role_env = env }) -> (env, state)
-- | Read the variable-position map from the reader context.
getVarNs :: RoleM VarPositions
getVarNs = RM $ \_ vps _ state -> (vps, state)
-- | The tycon currently under inference; panics if called outside a
-- 'setRoleInferenceTc' scope.
getTyConName :: RoleM Name
getTyConName = RM $ \m_name _ _ state ->
case m_name of
Nothing -> panic "getTyConName"
Just name -> (name, state)
-- | Raise the role of parameter n of the named tycon to 'role', but
-- only if that is a strict increase in the role lattice (ltRole);
-- otherwise the state (including the dirty bit) is left untouched.
updateRoleEnv :: Name -> Int -> Role -> RoleM ()
updateRoleEnv name n role
= RM $ \_ _ _ state@(RIS { role_env = role_env }) -> ((),
case lookupNameEnv role_env name of
Nothing -> pprPanic "updateRoleEnv" (ppr name)
Just roles -> let (before, old_role : after) = splitAt n roles in
if role `ltRole` old_role
then let roles' = before ++ role : after
role_env' = extendNameEnv role_env name roles' in
RIS { role_env = role_env', update = True }
else state )
{- *********************************************************************
* *
Building implicits
* *
********************************************************************* -}
tcAddImplicits :: [TyCon] -> TcM TcGblEnv
-- Given a [TyCon], add to the TcGblEnv
-- * extend the TypeEnv with their implicitTyThings
-- * extend the TypeEnv with any default method Ids
-- * add bindings for record selectors
-- * add bindings for type representations for the TyThings
tcAddImplicits tycons
-- discardWarnings: these bindings are compiler-generated, so warnings
-- from them would not be actionable — NOTE(review): presumed intent.
= discardWarnings $
tcExtendGlobalEnvImplicit implicit_things $
tcExtendGlobalValEnv def_meth_ids $
do { traceTc "tcAddImplicits" $ vcat
[ text "tycons" <+> ppr tycons
, text "implicits" <+> ppr implicit_things ]
; gbl_env <- mkTypeableBinds tycons
-- Typecheck the record-selector bindings under the env extended
-- with the Typeable bindings
; gbl_env <- setGblEnv gbl_env $
tcRecSelBinds (mkRecSelBinds tycons)
; return gbl_env }
where
implicit_things = concatMap implicitTyConThings tycons
def_meth_ids = mkDefaultMethodIds tycons
-- | One exported Id per default method (vanilla or generic) of each
-- class tycon in the group.
mkDefaultMethodIds :: [TyCon] -> [Id]
-- We want to put the default-method Ids (both vanilla and generic)
-- into the type environment so that they are found when we typecheck
-- the filled-in default methods of each instance declaration
-- See Note [Default method Ids and Template Haskell]
mkDefaultMethodIds tycons
= [ mkExportedVanillaId dm_name (mk_dm_ty cls sel_id dm_spec)
| tc <- tycons
, Just cls <- [tyConClass_maybe tc]
, (sel_id, Just (dm_name, dm_spec)) <- classOpItems cls ]
where
-- Vanilla default methods share the selector's type; generic ones
-- quantify the class tyvars and add the class constraint up front
mk_dm_ty :: Class -> Id -> DefMethSpec Type -> Type
mk_dm_ty _ sel_id VanillaDM = idType sel_id
mk_dm_ty cls _ (GenericDM dm_ty) = mkSpecSigmaTy cls_tvs [pred] dm_ty
where
cls_tvs = classTyVars cls
pred = mkClassPred cls (mkTyVarTys cls_tvs)
{-
************************************************************************
* *
Building record selectors
* *
************************************************************************
-}
{-
Note [Default method Ids and Template Haskell]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this (Trac #4169):
class Numeric a where
fromIntegerNum :: a
fromIntegerNum = ...
ast :: Q [Dec]
ast = [d| instance Numeric Int |]
When we typecheck 'ast' we have done the first pass over the class decl
(in tcTyClDecls), but we have not yet typechecked the default-method
declarations (because they can mention value declarations). So we
must bring the default method Ids into scope first (so they can be seen
when typechecking the [d| .. |] quote), and typecheck them later.
-}
{-
************************************************************************
* *
Building record selectors
* *
************************************************************************
-}
mkRecSelBinds :: [TyCon] -> HsValBinds Name
-- NB We produce *un-typechecked* bindings, rather like 'deriving'
--    This makes life easier, because the later type checking will add
--    all necessary type abstractions and applications
mkRecSelBinds tycons
  = ValBindsOut binds sigs
  where
    -- One (signature, binding) pair per field of every TyCon
    (sigs, binds) = unzip [ mkRecSelBind (tc, fld)
                          | tc  <- tycons
                          , fld <- tyConFieldLabels tc ]
-- | Build the signature and binding for one record selector of a
-- data-type field; the candidate constructors are all of the TyCon's.
mkRecSelBind :: (TyCon, FieldLabel) -> (LSig Name, (RecFlag, LHsBinds Name))
mkRecSelBind (tycon, fl)
  = mkOneRecordSelector all_cons (RecSelData tycon) fl
  where
    all_cons = map RealDataCon (tyConDataCons tycon)
-- | Build the 'IdSig' and the (un-typechecked) binding for one record
-- selector, given all candidate constructors, the selector's parent,
-- and the field label.  "Naughty" selectors (those whose field type
-- mentions an existential) get a dummy @()@ binding instead; see
-- Note [Naughty record selectors].
mkOneRecordSelector :: [ConLike] -> RecSelParent -> FieldLabel
                    -> (LSig Name, (RecFlag, LHsBinds Name))
mkOneRecordSelector all_cons idDetails fl
  = (L loc (IdSig sel_id), (NonRecursive, unitBag (L loc sel_bind)))
  where
    loc      = getSrcSpan sel_name
    lbl      = flLabel fl
    sel_name = flSelector fl

    sel_id = mkExportedLocalId rec_details sel_name sel_ty
    rec_details = RecSelId { sel_tycon = idDetails, sel_naughty = is_naughty }

    -- Find a representative constructor, con1
    cons_w_field = conLikesWithFields all_cons [lbl]
    con1 = ASSERT( not (null cons_w_field) ) head cons_w_field

    -- Selector type; Note [Polymorphic selectors]
    field_ty    = conLikeFieldType con1 lbl
    data_tvs    = tyCoVarsOfTypeWellScoped data_ty
    data_tv_set = mkVarSet data_tvs
    -- Naughty iff the field type mentions a tyvar not free in the data type
    is_naughty  = not (tyCoVarsOfType field_ty `subVarSet` data_tv_set)
    (field_tvs, field_theta, field_tau) = tcSplitSigmaTy field_ty
    sel_ty | is_naughty = unitTy  -- See Note [Naughty record selectors]
           | otherwise  = mkSpecForAllTys data_tvs          $
                          mkPhiTy (conLikeStupidTheta con1) $  -- Urgh!
                          mkFunTy data_ty                   $
                          mkSpecForAllTys field_tvs         $
                          mkPhiTy field_theta               $
                          -- req_theta is empty for normal DataCon
                          mkPhiTy req_theta                 $
                          field_tau

    -- Make the binding: sel (C2 { fld = x }) = x
    --                   sel (C7 { fld = x }) = x
    --    where cons_w_field = [C2,C7]
    sel_bind = mkTopFunBind Generated sel_lname alts
      where
        alts | is_naughty = [mkSimpleMatch [] unit_rhs]
             | otherwise  = map mk_match cons_w_field ++ deflt
    mk_match con = mkSimpleMatch [L loc (mk_sel_pat con)]
                                 (L loc (HsVar (L loc field_var)))
    mk_sel_pat con = ConPatIn (L loc (getName con)) (RecCon rec_fields)
    rec_fields = HsRecFields { rec_flds = [rec_field], rec_dotdot = Nothing }
    rec_field  = noLoc (HsRecField
                         { hsRecFieldLbl
                            = L loc (FieldOcc (L loc $ mkVarUnqual lbl) sel_name)
                         , hsRecFieldArg = L loc (VarPat (L loc field_var))
                         , hsRecPun = False })
    sel_lname = L loc sel_name
    field_var = mkInternalName (mkBuiltinUnique 1) (getOccName sel_name) loc

    -- Add catch-all default case unless the case is exhaustive
    -- We do this explicitly so that we get a nice error message that
    -- mentions this particular record selector
    deflt | all dealt_with all_cons = []
          | otherwise = [mkSimpleMatch [L loc (WildPat placeHolderType)]
                           (mkHsApp (L loc (HsVar
                                       (L loc (getName rEC_SEL_ERROR_ID))))
                                    (L loc (HsLit msg_lit)))]

    -- Do not add a default case unless there are unmatched
    -- constructors.  We must take account of GADTs, else we
    -- get overlap warning messages from the pattern-match checker
    -- NB: we need to pass type args for the *representation* TyCon
    --     to dataConCannotMatch, hence the calculation of inst_tys
    --     This matters in data families
    --        data instance T Int a where
    --           A :: { fld :: Int } -> T Int Bool
    --           B :: { fld :: Int } -> T Int Char
    dealt_with :: ConLike -> Bool
    dealt_with (PatSynCon _) = False -- We can't predict overlap
    dealt_with con@(RealDataCon dc) =
      con `elem` cons_w_field || dataConCannotMatch inst_tys dc

    (univ_tvs, _, eq_spec, _, req_theta, _, data_ty) = conLikeFullSig con1

    eq_subst = mkTvSubstPrs (map eqSpecPair eq_spec)
    inst_tys = substTyVars eq_subst univ_tvs

    unit_rhs = mkLHsTupleExpr []
    msg_lit  = HsStringPrim "" (fastStringToByteString lbl)
{-
Note [Polymorphic selectors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We take care to build the type of a polymorphic selector in the right
order, so that visible type application works.
data Ord a => T a = MkT { field :: forall b. (Num a, Show b) => (a, b) }
We want
field :: forall a. Ord a => T a -> forall b. (Num a, Show b) => (a, b)
Note [Naughty record selectors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A "naughty" field is one for which we can't define a record
selector, because an existential type variable would escape. For example:
data T = forall a. MkT { x,y::a }
We obviously can't define
x (MkT v _) = v
Nevertheless we *do* put a RecSelId into the type environment
so that if the user tries to use 'x' as a selector we can bleat
helpfully, rather than saying unhelpfully that 'x' is not in scope.
Hence the sel_naughty flag, to identify record selectors that don't really exist.
In general, a field is "naughty" if its type mentions a type variable that
isn't in the result type of the constructor. Note that this *allows*
GADT record selectors (Note [GADT record selectors]) whose types may look
like sel :: T [a] -> a
For naughty selectors we make a dummy binding
sel = ()
so that the later type-check will add them to the environment, and they'll be
exported. The function is never called, because the typechecker spots the
sel_naughty field.
Note [GADT record selectors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For GADTs, we require that all constructors with a common field 'f' have the same
result type (modulo alpha conversion). [Checked in TcTyClsDecls.checkValidTyCon]
E.g.
data T where
T1 { f :: Maybe a } :: T [a]
T2 { f :: Maybe a, y :: b } :: T [a]
T3 :: T Int
and now the selector takes that result type as its argument:
f :: forall a. T [a] -> Maybe a
Details: the "real" types of T1,T2 are:
T1 :: forall r a. (r~[a]) => a -> T r
T2 :: forall r a b. (r~[a]) => a -> b -> T r
So the selector looks like this:
f :: forall a. T [a] -> Maybe a
f (a:*) (t:T [a])
= case t of
T1 c (g:[a]~[c]) (v:Maybe c) -> v `cast` Maybe (right (sym g))
T2 c d (g:[a]~[c]) (v:Maybe c) (w:d) -> v `cast` Maybe (right (sym g))
T3 -> error "T3 does not have field f"
Note the forall'd tyvars of the selector are just the free tyvars
of the result type; there may be other tyvars in the constructor's
type (e.g. 'b' in T2).
Note the need for casts in the result!
Note [Selector running example]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's OK to combine GADTs and type families. Here's a running example:
data instance T [a] where
T1 { fld :: b } :: T [Maybe b]
The representation type looks like this
data :R7T a where
T1 { fld :: b } :: :R7T (Maybe b)
and there's coercion from the family type to the representation type
:CoR7T a :: T [a] ~ :R7T a
The selector we want for fld looks like this:
fld :: forall b. T [Maybe b] -> b
fld = /\b. \(d::T [Maybe b]).
case d `cast` :CoR7T (Maybe b) of
T1 (x::b) -> x
The scrutinee of the case has type :R7T (Maybe b), which can be
gotten by applying the eq_spec to the univ_tvs of the data con.
-}
|
nushio3/ghc
|
compiler/typecheck/TcTyDecls.hs
|
Haskell
|
bsd-3-clause
| 45,303
|
{-# LANGUAGE ViewPatterns, TemplateHaskell #-}
module Data.TrieMap.Representation.TH.ReprMonad (
ReprMonad,
liftQuasi,
recurse,
getInstance,
outputInstance,
mustBreak,
execReprMonad,
forceDefaultListRep) where
import Data.TrieMap.Representation.Class
import Data.TrieMap.Representation.TH.Utils
import Control.Monad
import Language.Haskell.TH.Syntax
import Language.Haskell.TH.ExpandSyns
-- | Known @Repr@ instances: each type constructor maps to its argument
-- type variables together with the representation type built from them.
type Instances = [(Name, ([Name], Type))]

-- | Code-generation monad over TH's 'Q': reads a "force default list
-- reps" flag, threads the table of known instances and the set of
-- in-progress tycons, and accumulates output declarations.
newtype ReprMonad a = ReprMonad {runReprMonad ::
  Bool         -- whether to force default list reps
  -> Instances -- tycons of known instances
  -> [Name]    -- tycons of instances in progress (breakpoints of recursive loopies)
  -> Q ([Dec], Instances, a) -- output decs, new known instances
  }
instance Monad ReprMonad where
  -- 'return' emits no declarations and leaves the instance table alone
  return x = ReprMonad $ \ _ knowns _ -> return ([], knowns, x)
  -- '>>=' threads the instance table left-to-right and concatenates the
  -- declarations emitted by both computations, in order
  m >>= k = ReprMonad $ \ def knowns breaks -> do
    (outDecs, knowns', a) <- runReprMonad m def knowns breaks
    (outDecs', knowns'', b) <- runReprMonad (k a) def knowns' breaks
    return (outDecs ++ outDecs', knowns'', b)
  -- Failure delegates to Q's 'fail', discarding any accumulated state
  fail err = ReprMonad $ \ _ _ _ -> fail err
instance Functor ReprMonad where
  -- Derived from the Monad instance
  fmap = liftM
-- | Lift a plain 'Q' computation into 'ReprMonad', emitting no
-- declarations and leaving the known-instance table untouched.
liftQuasi :: Q a -> ReprMonad a
liftQuasi q = ReprMonad $ \ _ knowns _ ->
  fmap (\ result -> ([], knowns, result)) q
instance Quasi ReprMonad where
  -- Every operation simply delegates to the underlying 'Q' monad ...
  qNewName = liftQuasi . qNewName
  qReport b str = liftQuasi (qReport b str)
  -- ... except recovery, which must pass the monad's environment and
  -- state into both the recovery handler and the main computation
  qRecover m k = ReprMonad $ \ def knowns breaks -> qRecover (runReprMonad m def knowns breaks) (runReprMonad k def knowns breaks)
  qReify = liftQuasi . qReify
  qClassInstances name typs = liftQuasi (qClassInstances name typs)
  qLocation = liftQuasi qLocation
  qRunIO = liftQuasi . qRunIO
-- | Append an element to a list unless it is already present, keeping
-- the existing order (and the existing occurrence, if any) intact.
insNub :: Eq a => a -> [a] -> [a]
insNub z = go
  where
    go []          = [z]           -- not found: append at the end
    go ws@(w:rest)
      | z == w     = ws            -- already present: keep list unchanged
      | otherwise  = w : go rest
-- | Run a computation with the given tycon added to the set of
-- in-progress breakpoints, so recursive occurrences can be detected.
recurse :: Name -> ReprMonad a -> ReprMonad a
recurse breakTy m = ReprMonad $ \ def knowns breaks -> runReprMonad m def knowns (breakTy `insNub` breaks)
-- | Emit the given declarations; when the instance head decomposes into
-- a type constructor applied to arguments, also record it in the
-- known-instance table for later 'getInstance' lookups.
outputInstance :: Type -> Type -> [Dec] -> ReprMonad ()
outputInstance ty tyRep decs = ReprMonad $ \ _ knowns _ -> case decompose' ty of
  Just (tyCon, tyArgs)
    -> return (decs, (tyCon, (tyArgs, tyRep)):knowns, ())
  _ -> return (decs, knowns, ())
-- | Look up a known representation for the given type.  For an applied
-- 'ConT' head, returns the recorded representation type with the
-- instance's type variables substituted by the actual arguments;
-- 'Nothing' if the tycon is unknown or the head is not a 'ConT'.
getInstance :: Type -> ReprMonad (Maybe Type)
getInstance typ = case decompose typ of
  (ConT tyCon, tyArgs) -> ReprMonad $ \ _ knowns _ -> case lookup tyCon knowns of
    Nothing -> return ([], knowns, Nothing)
    Just (tyArgs', tyRep) -> return ([], knowns, Just $ foldr substInType tyRep (zip tyArgs' tyArgs))
  _ -> return Nothing
-- | Is this tycon currently in progress (i.e. would recursing into it
-- create a loop)?
mustBreak :: Name -> ReprMonad Bool
mustBreak tyCon = ReprMonad $ \ _ knowns breaks -> return ([], knowns, tyCon `elem` breaks)
-- | Run a 'ReprMonad' computation and return only the declarations it
-- produced.  The initial known-instance table is seeded from every
-- 'Repr' instance currently visible to the compiler.
execReprMonad :: Bool -> ReprMonad a -> Q [Dec]
execReprMonad def m = do
  ClassI _ instances <- reify ''Repr
  -- Each visible instance head becomes (tycon, (args, Rep (tycon args)))
  let instanceHeads = [(tyConName, (tyArgs, ConT ''Rep `AppT` compose tyConName tyArgs))
                      | ClassInstance{ci_tys = [decompose' -> Just (tyConName, tyArgs)]} <- instances]
  (decs, _, _) <- runReprMonad m def instanceHeads []
  return decs
-- | Read the flag that says whether default list representations are forced.
forceDefaultListRep :: ReprMonad Bool
forceDefaultListRep = ReprMonad $ \ def known _ -> return ([], known, def)
|
lowasser/TrieMap
|
Data/TrieMap/Representation/TH/ReprMonad.hs
|
Haskell
|
bsd-3-clause
| 3,101
|
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[StgSyn]{Shared term graph (STG) syntax for spineless-tagless code generation}
This data type represents programs just before code generation (conversion to
@Cmm@): basically, what we have is a stylised form of @CoreSyntax@, the style
being one that happens to be ideally suited to spineless tagless code
generation.
-}
{-# LANGUAGE CPP #-}
module StgSyn (
GenStgArg(..),
GenStgLiveVars,
GenStgBinding(..), GenStgExpr(..), GenStgRhs(..),
GenStgAlt, AltType(..),
UpdateFlag(..), isUpdatable,
StgBinderInfo,
noBinderInfo, stgSatOcc, stgUnsatOcc, satCallsOnly,
combineStgBinderInfo,
-- a set of synonyms for the most common (only :-) parameterisation
StgArg, StgLiveVars,
StgBinding, StgExpr, StgRhs, StgAlt,
-- StgOp
StgOp(..),
-- utils
topStgBindHasCafRefs, stgArgHasCafRefs, stgRhsArity,
isDllConApp,
stgArgType,
stripStgTicksTop,
pprStgBinding, pprStgBindings,
pprStgLVs
) where
#include "HsVersions.h"
import CoreSyn ( AltCon, Tickish )
import CostCentre ( CostCentreStack )
import Data.List ( intersperse )
import DataCon
import DynFlags
import FastString
import ForeignCall ( ForeignCall )
import Id
import IdInfo ( mayHaveCafRefs )
import Literal ( Literal, literalType )
import Module ( Module )
import Outputable
import Packages ( isDllName )
import Platform
import PprCore ( {- instances -} )
import PrimOp ( PrimOp, PrimCall )
import TyCon ( PrimRep(..) )
import TyCon ( TyCon )
import Type ( Type )
import Type ( typePrimRep )
import UniqSet
import Unique ( Unique )
import UniqFM
import Util
{-
************************************************************************
* *
\subsection{@GenStgBinding@}
* *
************************************************************************
As usual, expressions are interesting; other things are boring. Here
are the boring things [except note the @GenStgRhs@], parameterised
with respect to binder and occurrence information (just as in
@CoreSyn@):
-}
-- | An STG binding group: a single non-recursive binding, or a group of
-- mutually-recursive ones.
data GenStgBinding bndr occ
  = StgNonRec bndr (GenStgRhs bndr occ)
  | StgRec    [(bndr, GenStgRhs bndr occ)]
{-
************************************************************************
* *
\subsection{@GenStgArg@}
* *
************************************************************************
-}
-- | STG arguments are atoms only: a variable occurrence or a literal.
data GenStgArg occ
  = StgVarArg occ
  | StgLitArg Literal
-- | Does this constructor application refer to
-- anything in a different *Windows* DLL?
-- If so, we can't allocate it statically
isDllConApp :: DynFlags -> Module -> DataCon -> [StgArg] -> Bool
isDllConApp dflags this_mod con args
  -- Only relevant on Windows (MinGW32); elsewhere we answer False
  | platformOS (targetPlatform dflags) == OSMinGW32
  = isDllName dflags this_pkg this_mod (dataConName con) || any is_dll_arg args
  | otherwise = False
  where
    -- NB: typePrimRep is legit because any free variables won't have
    -- unlifted type (there are no unlifted things at top level)
    is_dll_arg :: StgArg -> Bool
    is_dll_arg (StgVarArg v) = isAddrRep (typePrimRep (idType v))
                            && isDllName dflags this_pkg this_mod (idName v)
    is_dll_arg _             = False

    this_pkg = thisPackage dflags
-- True of machine addresses; these are the things that don't
-- work across DLLs. The key point here is that VoidRep comes
-- out False, so that a top level nullary GADT constructor is
-- False for isDllConApp
-- data T a where
-- T1 :: T Int
-- gives
-- T1 :: forall a. (a~Int) -> T a
-- and hence the top-level binding
-- $WT1 :: T Int
-- $WT1 = T1 Int (Coercion (Refl Int))
-- The coercion argument here gets VoidRep
-- | Is this a machine-address representation (the kind that can point
-- across DLL boundaries)?  VoidRep deliberately comes out False.
isAddrRep :: PrimRep -> Bool
isAddrRep rep = case rep of
  AddrRep -> True
  PtrRep  -> True
  _       -> False
-- | Type of an @StgArg@
--
-- Very half baked because we have lost the type arguments.
stgArgType :: StgArg -> Type
stgArgType (StgVarArg v)   = idType v
stgArgType (StgLitArg lit) = literalType lit
-- | Strip ticks of a given type from an STG expression
stripStgTicksTop :: (Tickish Id -> Bool) -> StgExpr -> ([Tickish Id], StgExpr)
-- Peel off leading StgTick nodes whose tick satisfies the predicate,
-- returning the collected ticks (outermost first) and the residue.
stripStgTicksTop keep = loop []
  where
    loop acc expr = case expr of
      StgTick t e | keep t -> loop (t : acc) e
      _                    -> (reverse acc, expr)
{-
************************************************************************
* *
\subsection{STG expressions}
* *
************************************************************************
The @GenStgExpr@ data type is parameterised on binder and occurrence
info, as before.
************************************************************************
* *
\subsubsection{@GenStgExpr@ application}
* *
************************************************************************
An application is of a function to a list of atoms [not expressions].
Operationally, we want to push the arguments on the stack and call the
function. (If the arguments were expressions, we would have to build
their closures first.)
There is no constructor for a lone variable; it would appear as
@StgApp var []@.
-}
-- | A set of live variables (occurrences).
type GenStgLiveVars occ = UniqSet occ

-- | STG expressions, parameterised over binder and occurrence info.
data GenStgExpr bndr occ
  = StgApp
        occ             -- function
        [GenStgArg occ] -- arguments; may be empty
{-
************************************************************************
*                                                                      *
\subsubsection{@StgConApp@ and @StgPrimApp@---saturated applications}
*                                                                      *
************************************************************************

There are specialised forms of application, for constructors,
primitives, and literals.
-}
  | StgLit      Literal

        -- StgConApp is vital for returning unboxed tuples
        -- which can't be let-bound first
  | StgConApp   DataCon
                [GenStgArg occ] -- Saturated

  | StgOpApp    StgOp           -- Primitive op or foreign call
                [GenStgArg occ] -- Saturated
                Type            -- Result type
                                -- We need to know this so that we can
                                -- assign result registers
{-
************************************************************************
*                                                                      *
\subsubsection{@StgLam@}
*                                                                      *
************************************************************************

StgLam is used *only* during CoreToStg's work. Before CoreToStg has
finished it encodes (\x -> e) as (let f = \x -> e in f)
-}
  | StgLam
        [bndr]
        StgExpr    -- Body of lambda
{-
************************************************************************
*                                                                      *
\subsubsection{@GenStgExpr@: case-expressions}
*                                                                      *
************************************************************************

This has the same boxed/unboxed business as Core case expressions.
-}
  | StgCase
        (GenStgExpr bndr occ)
                    -- the thing to examine
        bndr        -- binds the result of evaluating the scrutinee
        AltType
        [GenStgAlt bndr occ]
                    -- The DEFAULT case is always *first*
                    -- if it is there at all
{-
************************************************************************
*                                                                      *
\subsubsection{@GenStgExpr@: @let(rec)@-expressions}
*                                                                      *
************************************************************************

The various forms of let(rec)-expression encode most of the
interesting things we want to do.
\begin{enumerate}
\item
\begin{verbatim}
let-closure x = [free-vars] [args] expr
in e
\end{verbatim}
is equivalent to
\begin{verbatim}
let x = (\free-vars -> \args -> expr) free-vars
\end{verbatim}
\tr{args} may be empty (and is for most closures).  It isn't under
circumstances like this:
\begin{verbatim}
let x = (\y -> y+z)
\end{verbatim}
This gets mangled to
\begin{verbatim}
let-closure x = [z] [y] (y+z)
\end{verbatim}
The idea is that we compile code for @(y+z)@ in an environment in which
@z@ is bound to an offset from \tr{Node}, and @y@ is bound to an
offset from the stack pointer.

(A let-closure is an @StgLet@ with a @StgRhsClosure@ RHS.)

\item
\begin{verbatim}
let-constructor x = Constructor [args]
in e
\end{verbatim}

(A let-constructor is an @StgLet@ with a @StgRhsCon@ RHS.)

\item
Letrec-expressions are essentially the same deal as
let-closure/let-constructor, so we use a common structure and
distinguish between them with an @is_recursive@ boolean flag.

\item
\begin{verbatim}
let-unboxed u = an arbitrary arithmetic expression in unboxed values
in e
\end{verbatim}
All the stuff on the RHS must be fully evaluated.
No function calls either!

(We've backed away from this toward case-expressions with
suitably-magical alts ...)

\item
~[Advanced stuff here! Not to start with, but makes pattern matching
generate more efficient code.]

\begin{verbatim}
let-escapes-not fail = expr
in e'
\end{verbatim}
Here the idea is that @e'@ guarantees not to put @fail@ in a data structure,
or pass it to another function. All @e'@ will ever do is tail-call @fail@.
Rather than build a closure for @fail@, all we need do is to record the stack
level at the moment of the @let-escapes-not@; then entering @fail@ is just
a matter of adjusting the stack pointer back down to that point and entering
the code for it.

Another example:
\begin{verbatim}
f x y = let z = huge-expression in
        if y==1 then z else
        if y==2 then z else
        1
\end{verbatim}

(A let-escapes-not is an @StgLetNoEscape@.)

\item
We may eventually want:
\begin{verbatim}
let-literal x = Literal
in e
\end{verbatim}
\end{enumerate}

And so the code for let(rec)-things:
-}
  | StgLet
        (GenStgBinding bndr occ)    -- right hand sides (see below)
        (GenStgExpr bndr occ)       -- body

  | StgLetNoEscape
        (GenStgBinding bndr occ)    -- right hand sides (see below)
        (GenStgExpr bndr occ)       -- body
{-
%************************************************************************
%*                                                                      *
\subsubsection{@GenStgExpr@: @hpc@, @scc@ and other debug annotations}
%*                                                                      *
%************************************************************************

Finally for @hpc@ expressions we introduce a new STG construct.
-}
  | StgTick
    (Tickish bndr)
    (GenStgExpr bndr occ)       -- sub expression

-- END of GenStgExpr
{-
************************************************************************
* *
\subsection{STG right-hand sides}
* *
************************************************************************
Here's the rest of the interesting stuff for @StgLet@s; the first
flavour is for closures:
-}
-- | The right-hand side of an STG binding: either a closure or a
-- saturated constructor application.
data GenStgRhs bndr occ
  = StgRhsClosure
        CostCentreStack         -- CCS to be attached (default is CurrentCCS)
        StgBinderInfo           -- Info about how this binder is used (see below)
        [occ]                   -- non-global free vars; a list, rather than
                                -- a set, because order is important
        !UpdateFlag             -- ReEntrant | Updatable | SingleEntry
        [bndr]                  -- arguments; if empty, then not a function;
                                -- as above, order is important.
        (GenStgExpr bndr occ)   -- body
{-
An example may be in order.  Consider:
\begin{verbatim}
let t = \x -> \y -> ... x ... y ... p ... q in e
\end{verbatim}
Pulling out the free vars and stylising somewhat, we get the equivalent:
\begin{verbatim}
let t = (\[p,q] -> \[x,y] -> ... x ... y ... p ...q) p q
\end{verbatim}
Stg-operationally, the @[x,y]@ are on the stack, the @[p,q]@ are
offsets from @Node@ into the closure, and the code ptr for the closure
will be exactly that in parentheses above.

The second flavour of right-hand-side is for constructors (simple but important):
-}
  | StgRhsCon
        CostCentreStack  -- CCS to be attached (default is CurrentCCS).
                         -- Top-level (static) ones will end up with
                         -- DontCareCCS, because we don't count static
                         -- data in heap profiles, and we don't set CCCS
                         -- from static closure.
        DataCon          -- constructor
        [GenStgArg occ]  -- args
-- | Arity of an RHS: the number of value binders of a closure;
-- a constructor application has arity 0.
stgRhsArity :: StgRhs -> Int
stgRhsArity (StgRhsClosure _ _ _ _ bndrs _)
  = ASSERT( all isId bndrs ) length bndrs
  -- The arity never includes type parameters, but they should have gone by now
stgRhsArity (StgRhsCon _ _ _) = 0
-- Note [CAF consistency]
-- ~~~~~~~~~~~~~~~~~~~~~~
--
-- `topStgBindHasCafRefs` is only used by an assert (`consistentCafInfo` in
-- `CoreToStg`) to make sure CAF-ness predicted by `TidyPgm` is consistent with
-- reality.
--
-- Specifically, if the RHS mentions any Id that itself is marked
-- `MayHaveCafRefs`; or if the binding is a top-level updateable thunk; then the
-- `Id` for the binding should be marked `MayHaveCafRefs`. The potential trouble
-- is that `TidyPgm` computed the CAF info on the `Id` but some transformations
-- have taken place since then.
-- | Does a top-level STG binding (possibly) refer to a CAF?
-- Used only by the `consistentCafInfo` assertion in CoreToStg;
-- see Note [CAF consistency] above.
topStgBindHasCafRefs :: GenStgBinding bndr Id -> Bool
topStgBindHasCafRefs (StgNonRec _ rhs)
  = topRhsHasCafRefs rhs
topStgBindHasCafRefs (StgRec binds)
  -- Fused: avoids the intermediate list of `any f (map snd binds)`
  = any (topRhsHasCafRefs . snd) binds
-- | Does a top-level RHS count as (or refer to) a CAF?  An updatable
-- top-level thunk is itself a CAF; otherwise look inside the body/args.
topRhsHasCafRefs :: GenStgRhs bndr Id -> Bool
topRhsHasCafRefs (StgRhsClosure _ _ _ upd _ body)
  = -- See Note [CAF consistency]
    isUpdatable upd || exprHasCafRefs body
topRhsHasCafRefs (StgRhsCon _ _ args)
  = any stgArgHasCafRefs args
-- | Structural recursion: does any Id occurring in the expression
-- refer to a CAF?  See Note [CAF consistency].
exprHasCafRefs :: GenStgExpr bndr Id -> Bool
exprHasCafRefs (StgApp f args)
  = stgIdHasCafRefs f || any stgArgHasCafRefs args
exprHasCafRefs StgLit{}
  = False
exprHasCafRefs (StgConApp _ args)
  = any stgArgHasCafRefs args
exprHasCafRefs (StgOpApp _ args _)
  = any stgArgHasCafRefs args
exprHasCafRefs (StgLam _ body)
  = exprHasCafRefs body
exprHasCafRefs (StgCase scrt _ _ alts)
  = exprHasCafRefs scrt || any altHasCafRefs alts
exprHasCafRefs (StgLet bind body)
  = bindHasCafRefs bind || exprHasCafRefs body
exprHasCafRefs (StgLetNoEscape bind body)
  = bindHasCafRefs bind || exprHasCafRefs body
exprHasCafRefs (StgTick _ expr)
  = exprHasCafRefs expr
-- | Does a (nested) binding group refer to any CAF?
bindHasCafRefs :: GenStgBinding bndr Id -> Bool
bindHasCafRefs (StgNonRec _ rhs)
  = rhsHasCafRefs rhs
bindHasCafRefs (StgRec binds)
  -- Fused: avoids the intermediate list of `any f (map snd binds)`
  = any (rhsHasCafRefs . snd) binds
-- | Does a (nested, non-top-level) RHS refer to any CAF?
-- Unlike 'topRhsHasCafRefs', updatability alone does not matter here.
rhsHasCafRefs :: GenStgRhs bndr Id -> Bool
rhsHasCafRefs (StgRhsClosure _ _ _ _ _ body)
  = exprHasCafRefs body
rhsHasCafRefs (StgRhsCon _ _ args)
  = any stgArgHasCafRefs args
-- | Does a case alternative's right-hand side refer to any CAF?
altHasCafRefs :: GenStgAlt bndr Id -> Bool
altHasCafRefs (_, _, rhs) = exprHasCafRefs rhs
-- | Literal arguments never have CAF refs; variable arguments might.
stgArgHasCafRefs :: GenStgArg Id -> Bool
stgArgHasCafRefs (StgVarArg id)
  = stgIdHasCafRefs id
stgArgHasCafRefs _
  = False
stgIdHasCafRefs :: Id -> Bool
stgIdHasCafRefs id =
  -- We are looking for occurrences of an Id that is bound at top level, and may
  -- have CAF refs. At this point (after TidyPgm) top-level Ids (whether
  -- imported or defined in this module) are GlobalIds, so the test is easy.
  isGlobalId id && mayHaveCafRefs (idCafInfo id)
-- Here's the @StgBinderInfo@ type, and its combining op:
-- | How a binder is used, as far as code generation cares.
data StgBinderInfo
  = NoStgBinderInfo
  | SatCallsOnly        -- All occurrences are *saturated* *function* calls
                        -- This means we don't need to build an info table and
                        -- slow entry code for the thing
                        -- Thunks never get this value

noBinderInfo, stgUnsatOcc, stgSatOcc :: StgBinderInfo
noBinderInfo = NoStgBinderInfo
stgUnsatOcc  = NoStgBinderInfo   -- an unsaturated occurrence forfeits the optimisation
stgSatOcc    = SatCallsOnly

satCallsOnly :: StgBinderInfo -> Bool
satCallsOnly SatCallsOnly    = True
satCallsOnly NoStgBinderInfo = False

-- Combining info from two occurrences: only SatCallsOnly survives meeting itself
combineStgBinderInfo :: StgBinderInfo -> StgBinderInfo -> StgBinderInfo
combineStgBinderInfo SatCallsOnly SatCallsOnly = SatCallsOnly
combineStgBinderInfo _            _            = NoStgBinderInfo

--------------
pp_binder_info :: StgBinderInfo -> SDoc
pp_binder_info NoStgBinderInfo = empty
pp_binder_info SatCallsOnly    = text "sat-only"
{-
************************************************************************
* *
\subsection[Stg-case-alternatives]{STG case alternatives}
* *
************************************************************************
Very like in @CoreSyntax@ (except no type-world stuff).
The type constructor is guaranteed not to be abstract; that is, we can
see its representation. This is important because the code generator
uses it to determine return conventions etc. But it's not trivial
where there's a module loop involved, because some versions of a type
constructor might not have all the constructors visible. So
mkStgAlgAlts (in CoreToStg) ensures that it gets the TyCon from the
constructors or literals (which are guaranteed to have the Real McCoy)
rather than from the scrutinee type.
-}
-- | A case alternative: constructor, binders, and right-hand side.
type GenStgAlt bndr occ
  = (AltCon,            -- alts: data constructor,
     [bndr],            -- constructor's parameters,
     GenStgExpr bndr occ)       -- ...right-hand side.

-- | The kind of scrutinee a case expression examines; drives return
-- conventions in the code generator.
data AltType
  = PolyAlt             -- Polymorphic (a type variable)
  | UbxTupAlt Int       -- Unboxed tuple of this arity
  | AlgAlt    TyCon     -- Algebraic data type; the AltCons will be DataAlts
  | PrimAlt   TyCon     -- Primitive data type; the AltCons will be LitAlts
{-
************************************************************************
* *
\subsection[Stg]{The Plain STG parameterisation}
* *
************************************************************************
This happens to be the only one we use at the moment.
-}
-- Plain STG: both binders and occurrences instantiated to 'Id'.
type StgBinding  = GenStgBinding  Id Id
type StgArg      = GenStgArg      Id
type StgLiveVars = GenStgLiveVars Id
type StgExpr     = GenStgExpr     Id Id
type StgRhs      = GenStgRhs      Id Id
type StgAlt      = GenStgAlt      Id Id
{-
************************************************************************
* *
\subsubsection[UpdateFlag-datatype]{@UpdateFlag@}
* *
************************************************************************
This is also used in @LambdaFormInfo@ in the @ClosureInfo@ module.
A @ReEntrant@ closure may be entered multiple times, but should not be
updated or blackholed. An @Updatable@ closure should be updated after
evaluation (and may be blackholed during evaluation). A @SingleEntry@
closure will only be entered once, and so need not be updated but may
safely be blackholed.
-}
-- | How a closure may be entered/updated; see the comment block above.
data UpdateFlag = ReEntrant | Updatable | SingleEntry

instance Outputable UpdateFlag where
    ppr u = char $ case u of
                     ReEntrant   -> 'r'
                     Updatable   -> 'u'
                     SingleEntry -> 's'

-- | Only 'Updatable' closures are overwritten with their value after entry.
isUpdatable :: UpdateFlag -> Bool
isUpdatable ReEntrant   = False
isUpdatable SingleEntry = False
isUpdatable Updatable   = True
{-
************************************************************************
* *
\subsubsection{StgOp}
* *
************************************************************************
An StgOp allows us to group together PrimOps and ForeignCalls.
It's quite useful to move these around together, notably
in StgOpApp and COpStmt.
-}
-- | A primitive operation or foreign call, grouped so they can be
-- moved around together (notably in StgOpApp and COpStmt).
data StgOp
  = StgPrimOp  PrimOp

  | StgPrimCallOp PrimCall

  | StgFCallOp ForeignCall Unique
        -- The Unique is occasionally needed by the C pretty-printer
        -- (which lacks a unique supply), notably when generating a
        -- typedef for foreign-export-dynamic
{-
************************************************************************
* *
\subsection[Stg-pretty-printing]{Pretty-printing}
* *
************************************************************************
Robin Popplestone asked for semi-colon separators on STG binds; here's
hoping he likes terminators instead... Ditto for case alternatives.
-}
-- | Pretty-print one binding group, terminating each binding with ';'.
pprGenStgBinding :: (OutputableBndr bndr, Outputable bdee, Ord bdee)
                 => GenStgBinding bndr bdee -> SDoc

pprGenStgBinding (StgNonRec bndr rhs)
  = hang (hsep [pprBndr LetBind bndr, equals])
        4 (ppr rhs <> semi)

pprGenStgBinding (StgRec pairs)
  = vcat $ ifPprDebug (text "{- StgRec (begin) -}") :
           map (ppr_bind) pairs ++ [ifPprDebug (text "{- StgRec (end) -}")]
  where
    ppr_bind (bndr, expr)
      = hang (hsep [pprBndr LetBind bndr, equals])
             4 (ppr expr <> semi)
-- | Pretty-print one plain STG binding.
pprStgBinding :: StgBinding -> SDoc
pprStgBinding = pprGenStgBinding   -- eta-reduced; was `bind = pprGenStgBinding bind`

-- | Pretty-print a list of bindings separated by blank lines.
pprStgBindings :: [StgBinding] -> SDoc
pprStgBindings = vcat . intersperse blankLine . map pprGenStgBinding
instance (Outputable bdee) => Outputable (GenStgArg bdee) where
    ppr = pprStgArg

instance (OutputableBndr bndr, Outputable bdee, Ord bdee)
                => Outputable (GenStgBinding bndr bdee) where
    ppr = pprGenStgBinding

instance (OutputableBndr bndr, Outputable bdee, Ord bdee)
                => Outputable (GenStgExpr bndr bdee) where
    ppr = pprStgExpr

instance (OutputableBndr bndr, Outputable bdee, Ord bdee)
                => Outputable (GenStgRhs bndr bdee) where
    ppr rhs = pprStgRhs rhs

-- | An atom prints as its variable or its literal.
pprStgArg :: (Outputable bdee) => GenStgArg bdee -> SDoc
pprStgArg (StgVarArg var) = ppr var
pprStgArg (StgLitArg con) = ppr con
-- | Pretty-print an STG expression, one equation per constructor.
pprStgExpr :: (OutputableBndr bndr, Outputable bdee, Ord bdee)
           => GenStgExpr bndr bdee -> SDoc
-- special case
pprStgExpr (StgLit lit)     = ppr lit

-- general case
pprStgExpr (StgApp func args)
  = hang (ppr func) 4 (sep (map (ppr) args))

pprStgExpr (StgConApp con args)
  = hsep [ ppr con, brackets (interppSP args)]

pprStgExpr (StgOpApp op args _)
  = hsep [ pprStgOp op, brackets (interppSP args)]

pprStgExpr (StgLam bndrs body)
  = sep [ char '\\' <+> ppr_list (map (pprBndr LambdaBind) bndrs)
            <+> text "->",
          pprStgExpr body ]
  where ppr_list = brackets . fsep . punctuate comma

-- special case: let v = <very specific thing>
--               in
--               let ...
--               in
--               ...
--
-- Very special!  Suspicious! (SLPJ)

{-
pprStgExpr (StgLet srt (StgNonRec bndr (StgRhsClosure cc bi free_vars upd_flag args rhs))
                        expr@(StgLet _ _))
  = ($$)
      (hang (hcat [text "let { ", ppr bndr, ptext (sLit " = "),
                          ppr cc,
                          pp_binder_info bi,
                          text " [", ifPprDebug (interppSP free_vars), ptext (sLit "] \\"),
                          ppr upd_flag, text " [",
                          interppSP args, char ']'])
            8 (sep [hsep [ppr rhs, text "} in"]]))
      (ppr expr)
-}

-- special case: let ... in let ...
pprStgExpr (StgLet bind expr@(StgLet _ _))
  = ($$)
      (sep [hang (text "let {")
                2 (hsep [pprGenStgBinding bind, text "} in"])])
      (ppr expr)

-- general case
pprStgExpr (StgLet bind expr)
  = sep [hang (text "let {") 2 (pprGenStgBinding bind),
           hang (text "} in ") 2 (ppr expr)]

pprStgExpr (StgLetNoEscape bind expr)
  = sep [hang (text "let-no-escape {")
                2 (pprGenStgBinding bind),
           hang (text "} in ")
                2 (ppr expr)]

-- Ticks are only shown when -dppr-ticks is on
pprStgExpr (StgTick tickish expr)
  = sdocWithDynFlags $ \dflags ->
    if gopt Opt_PprShowTicks dflags
    then sep [ ppr tickish, pprStgExpr expr ]
    else pprStgExpr expr

pprStgExpr (StgCase expr bndr alt_type alts)
  = sep [sep [text "case",
           nest 4 (hsep [pprStgExpr expr,
             ifPprDebug (dcolon <+> ppr alt_type)]),
           text "of", pprBndr CaseBind bndr, char '{'],
           nest 2 (vcat (map pprStgAlt alts)),
           char '}']
-- | Pretty-print one case alternative as @Con b1 .. bn -> rhs;@.
pprStgAlt :: (OutputableBndr bndr, Outputable occ, Ord occ)
          => GenStgAlt bndr occ -> SDoc
pprStgAlt (con, params, expr) = hang lhs 4 rhs
  where
    lhs = hsep [ppr con, sep (map (pprBndr CasePatBind) params), text "->"]
    rhs = ppr expr <> semi
-- | Pretty-print an STG operation (primop, prim call, or foreign call).
pprStgOp :: StgOp -> SDoc
pprStgOp stg_op = case stg_op of
  StgPrimOp     op   -> ppr op
  StgPrimCallOp op   -> ppr op
  StgFCallOp    op _ -> ppr op
instance Outputable AltType where
  -- One tag word per alternative kind, plus its payload where present.
  ppr alt = case alt of
    PolyAlt     -> text "Polymorphic"
    UbxTupAlt n -> text "UbxTup" <+> ppr n
    AlgAlt tc   -> text "Alg" <+> ppr tc
    PrimAlt tc  -> text "Prim" <+> ppr tc
-- | Show a live-variable set; suppressed in user style and when empty.
pprStgLVs :: Outputable occ => GenStgLiveVars occ -> SDoc
pprStgLVs lvs
  = getPprStyle $ \ sty ->
      if not (userStyle sty) && not (isEmptyUniqSet lvs)
        then hcat [text "{-lvs:", pprUFM lvs interpp'SP, text "-}"]
        else empty
-- | Pretty-print the right-hand side of an STG binding.
pprStgRhs :: (OutputableBndr bndr, Outputable bdee, Ord bdee)
          => GenStgRhs bndr bdee -> SDoc

-- special case: a closure with exactly one free variable, no arguments,
-- and a bare-variable body is printed on one compact line
pprStgRhs (StgRhsClosure cc bi [free_var] upd_flag [{-no args-}] (StgApp func []))
  = hcat [ ppr cc,
           pp_binder_info bi,
           brackets (ifPprDebug (ppr free_var)),
           text " \\", ppr upd_flag, ptext (sLit " [] "), ppr func ]

-- general case; the cost centre is only shown when profiling is on
pprStgRhs (StgRhsClosure cc bi free_vars upd_flag args body)
  = sdocWithDynFlags $ \dflags ->
    hang (hsep [if gopt Opt_SccProfilingOn dflags then ppr cc else empty,
                pp_binder_info bi,
                ifPprDebug (brackets (interppSP free_vars)),
                char '\\' <> ppr upd_flag, brackets (interppSP args)])
         4 (ppr body)

-- saturated constructor application
pprStgRhs (StgRhsCon cc con args)
  = hcat [ ppr cc,
           space, ppr con, text "! ", brackets (interppSP args)]
|
vTurbine/ghc
|
compiler/stgSyn/StgSyn.hs
|
Haskell
|
bsd-3-clause
| 27,004
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE GADTs #-}
module Oracle.DiffOracleOld where
import qualified Data.IntMap as M
import Data.Maybe
import Data.List
import Debug.Trace
import Oracle.Internal
import Language.Clojure.Lang
import Language.Clojure.AST
import Util.UnixDiff
-- | A diff operation is just a 'Path' (I = insert, D = delete, M = match).
type DiffOp = Path

-- | Oracle answering alignment queries from a precomputed line diff.
data DiffOracle = DiffOracle DelInsMap

-- | Line-indexed maps: deletions (first component), insertions (second).
type DelInsMap = (M.IntMap DiffOp, M.IntMap DiffOp)
-- | Pointwise (left-biased) union of two deletion/insertion map pairs.
unionDelInsMap :: DelInsMap -> DelInsMap -> DelInsMap
unionDelInsMap (delA, insA) (delB, insB) =
  (delA `M.union` delB, insA `M.union` insB)
-- | Fold a list of diff actions into the deletion/insertion lookup maps.
-- Copies contribute nothing; inserts and deletes record their line number.
buildOracle :: [DiffAction] -> DelInsMap
buildOracle = foldr (unionDelInsMap . classify) (M.empty, M.empty)
  where
    classify (Copy _) = (M.empty, M.empty)
    classify (Ins i)  = (M.empty, M.singleton i I)
    classify (Del i)  = (M.singleton i D, M.empty)
-- | Classify how a source range relates to a destination range, given
-- the deletion and insertion maps of the line diff.
askOracle :: DiffOracle -> LineRange -> LineRange -> [Path]
askOracle (DiffOracle (delMap, insMap)) srcRange dstRange
  | srcCovered && dstCovered && aligned = []
  | srcCovered && not aligned           = [ D ]
  | dstCovered && not aligned           = [ I ]
  | otherwise                           = [ M ]
  where
    srcCovered = containsRange delMap srcRange
    dstCovered = containsRange insMap dstRange
    aligned    = inSync srcRange dstRange
-- dstSpan = findSpan insMap dstRange
-- srcSpan = findSpan delMap srcRange
-- dstOffset = calculateOffset (delMap, insMap) dstStart
-- srcOffset = calculateOffset (delMap, insMap) srcStart
-- dstSpan = (Range (dstStart + dstOffset) (dstEnd + dstOffset))
-- srcSpan = (Range (srcStart - srcOffset) (srcEnd - srcOffset))
-- | Two ranges are in sync when they begin on the same line.
inSync :: LineRange -> LineRange -> Bool
inSync (Range srcStart _) (Range dstStart _) = srcStart == dstStart
-- | Widen a range while its endpoints still hit entries of the map
-- (start moves down first, then end moves up), then step one line back
-- inside on both sides.
findSpan :: M.IntMap DiffOp -> LineRange -> LineRange
findSpan m (Range start end) = widen start end
  where
    widen s e
      | s `M.member` m = widen (s - 1) e
      | e `M.member` m = widen s (e + 1)
      | otherwise      = Range (s + 1) (e - 1)
-- | Net line offset at position @i@: every insertion at or before @i@
-- shifts following lines by -1, every deletion by +1.
calculateOffset :: DelInsMap -> Int -> Int
calculateOffset (del, ins) i = sum (map weight (M.elems insBefore ++ M.elems delBefore))
  where
    -- keys strictly below i+1, i.e. all entries at or before line i
    (insBefore, _) = M.split (i + 1) ins
    (delBefore, _) = M.split (i + 1) del
    -- 'buildOracle' only ever stores I and D, but stay total: the old
    -- code had no clause for M and would crash on it. Any other op
    -- shifts nothing.
    weight I = -1
    weight D = 1
    weight _ = 0
-- | Does any line of the (inclusive) range appear in the map?
-- An empty range (start > end) intersects nothing.
intersectsRange :: M.IntMap DiffOp -> LineRange -> Bool
intersectsRange m (Range start end) = any (`M.member` m) [start .. end]
-- | Does every line of the (inclusive) range appear in the map?
-- An empty range (start > end) is vacuously contained.
containsRange :: M.IntMap DiffOp -> LineRange -> Bool
containsRange m (Range start end) = all (`M.member` m) [start .. end]
instance (Monad m) => OracleF DiffOracle m where
  -- Both subtrees are assumed to carry a source range at this point;
  -- 'fromJust' encodes that assumption, as in the original code.
  callF oracle src dst =
    return (askOracle oracle (fromJust (extractRange src)) (fromJust (extractRange dst)))
instance (Monad m) => OracleP DiffOracle m where
  -- Empty-vs-empty needs no edits; a one-sided product is a pure
  -- insertion or deletion.
  callP _ An An          = return []
  callP _ An (_ `Ac` _)  = return [ I ]
  callP _ (_ `Ac` _) An  = return [ D ]
  -- Both sides present: decide from which of the two carries a range.
  callP o (s `Ac` _) (d `Ac` _) =
    return $ case (extractRange s, extractRange d) of
      (Nothing, Nothing)         -> [ M ]
      (Just _, Nothing)          -> [ D ]
      (Nothing, Just _)          -> [ I ]
      (Just sRange, Just dRange) -> askOracle o sRange dRange
-- | Shows the underlying (deletions, insertions) map pair.
instance Show DiffOracle where
  show (DiffOracle m) = show m
|
nazrhom/vcs-clojure
|
src/Oracle/DiffOracleOld.hs
|
Haskell
|
bsd-3-clause
| 3,890
|
-- mathsprimitives.hs
module Math.MathsPrimitives where
-- primitive operations on sequences (lists) of numbers
-- used in implementation of vectors, matrices, polynomials, cyclotomic fields, etc
import List (transpose)
infixr 8 */, *//
infixl 7 $*, $., $$*
infixl 6 $+, $-, $$+, $$-
-- | Elementwise addition of numeric sequences.  The unmatched tail of
-- the longer argument is kept as-is, so the result has the length of
-- the longer input.
(x:xs) $+ (y:ys) = (x + y) : (xs $+ ys)
xs     $+ []     = xs
[]     $+ ys     = ys

-- | Elementwise subtraction, via addition of the negated sequence.
xs $- ys = xs $+ map negate ys
-- | Scalar multiplication: scale every element of the sequence by @a@.
a */ xs = [ a * x | x <- xs ]
-- | Polynomial multiplication on coefficient lists (lowest degree first),
-- using (a + x*A)(b + x*B) = ab + x*(aB + Ab) + x^2*(A*B).
[]     $* _      = []
_      $* []     = []
(a:as) $* (b:bs) = constTerm $+ shift crossTerm $+ shift (shift tailTerm)
  where
    constTerm = [a * b]
    crossTerm = map (a *) bs $+ map (* b) as
    tailTerm  = as $* bs

-- | Multiply by x: prepend a zero coefficient (the empty poly stays empty).
shift [] = []
shift cs = 0 : cs
-- | Dot (inner, scalar) product of two vectors; extra elements of the
-- longer vector are ignored.
u $. v = sum [ x * y | (x, y) <- zip u v ]
-- | Tensor (outer) product of vectors: the matrix of all pairwise products.
us $** v = [ map (u *) v | u <- us ]
-- matrix operations

-- | Elementwise matrix addition (shapes assumed to agree).
m1 $$+ m2 = [ zipWith (+) r s | (r, s) <- zip m1 m2 ]

-- | Elementwise matrix subtraction (shapes assumed to agree).
m1 $$- m2 = [ zipWith (-) r s | (r, s) <- zip m1 m2 ]
-- | Matrix multiplication: each result row is a row of @a@ dotted with
-- every column of @b@ (columns obtained once via 'transpose').
a $$* b = go a
  where
    cols = transpose b
    go []       = []
    -- Force each freshly built row to WHNF before consing (as the
    -- original did with ($!)) to avoid a chain of row thunks.
    go (u : us) = ((:) $! map (u $.) cols) (go us)
-- | Scalar multiplication of a matrix: scale every entry by @k@.
k *// rows = [ map (k *) row | row <- rows ]
fMatrix f n = [[f i j | j <- [1..n]] | i <- [1..n]]
-- | Running sums of a sequence: [x1, x1+x2, x1+x2+x3, ...].
partialSums []       = []
partialSums (x : xs) = scanl (+) x xs

-- | Running products of a sequence.
partialProducts []       = []
partialProducts (x : xs) = scanl (*) x xs

-- | The infinite list 0!, 1!, 2!, ... defined by self-reference.
factorials :: [Integer]
factorials = 1 : zipWith (*) factorials [1 ..]
-- | A class for types which represent mathematical functions:
-- composition, differentiation and integration.
class FunctionRep f where
    compose :: f -> f -> f   -- ^ @compose f g@ represents f . g
    deriv :: f -> f          -- ^ first derivative
    integ :: f -> f          -- ^ an antiderivative
    nthderiv :: Int -> f -> f
    -- default: iterate 'deriv' n times
    nthderiv n f = iterate deriv f !! n
{-
-- action on the left
[] <. _ = []
(row:rows) <. xs =
sum (zipWith (*) row xs) : (rows <. xs)
-- action on the right
v .> m = doApplyRightMx [] v m
where
doApplyRightMx ys [] [] = foldl1 (zipWith (+)) ys
doApplyRightMx ys (x:xs) (row:rows) = doApplyRightMx (map (x *) row : ys) xs rows
-}
|
nfjinjing/bench-euler
|
src/Math/MathsPrimitives.hs
|
Haskell
|
bsd-3-clause
| 2,012
|
{-# LANGUAGE TypeSynonymInstances,GeneralizedNewtypeDeriving,MultiParamTypeClasses,FlexibleInstances #-}
module MigrationsTest
( tests
)
where
import Test.HUnit
import Control.Monad.Identity ( runIdentity, Identity )
import qualified Data.Map as Map
import Data.Time.Clock ( UTCTime )
import Database.Schema.Migrations
import Database.Schema.Migrations.Store
import Database.Schema.Migrations.Migration
import Database.Schema.Migrations.Backend
-- | All tests exported by this module.
tests :: [Test]
tests = migrationsToApplyTests

-- | A fake backend: simply the list of already-applied migrations.
type TestBackend = [Migration]
newtype TestM a = TestM (Identity a) deriving (Monad)
-- | 'getCurrentTime' is never consulted by these tests, hence 'undefined'.
instance MonadMigration TestM where
    getCurrentTime = undefined
-- | Minimal 'Backend' over a plain migration list; only the operations
-- exercised by 'migrationsToApply' ('isBootstrapped', 'getMigrations')
-- are implemented.
instance Backend TestBackend TestM where
    getBootstrapMigration _ = undefined
    isBootstrapped _ = return True
    applyMigration _ _ = undefined
    revertMigration _ _ = undefined
    getMigrations b = return $ map mId b
-- |Given a backend and a store, what are the list of migrations
-- missing in the backend that are available in the store?
-- Components: (store contents, applied migrations, target migration,
-- expected plan).
type MissingMigrationTestCase = (MigrationMap, TestBackend, Migration,
                                 [Migration])
-- | Fixed timestamp shared by all test migrations.  The literal is
-- known-valid, so 'read' cannot fail here.
ts :: UTCTime
ts = read "2009-04-15 10:02:06 UTC"

-- | Template migration.  'mId' is deliberately left 'undefined' and must
-- be overridden by every use site (via record update).
blankMigration :: Migration
blankMigration = Migration { mTimestamp = ts
                           , mId = undefined
                           , mDesc = Nothing
                           , mApply = ""
                           , mRevert = Nothing
                           , mDeps = []
                           }
-- | Fixture cases: the plan contains exactly the migrations not yet
-- applied by the backend, including dependency "two" on "one".
missingMigrationsTestcases :: [MissingMigrationTestCase]
missingMigrationsTestcases = [ (m, [], one, [one])
                             , (m, [one], one, [])
                             , (m, [one], two, [two])
                             , (m, [one, two], one, [])
                             , (m, [one, two], two, [])
                             ]
    where
      one = blankMigration { mId = "one" }
      two = blankMigration { mId = "two", mDeps = ["one"] }
      m = Map.fromList [ (mId e, e) | e <- [one, two] ]
-- | Build one HUnit test: compute the migrations to apply for
-- 'theMigration' against 'backend' and compare with 'expected'.
mkTest :: MissingMigrationTestCase -> Test
mkTest (mapping, backend, theMigration, expected) =
    -- NOTE(review): the irrefutable 'Right' match crashes with an opaque
    -- pattern-match failure if a fixture's dependency graph is invalid --
    -- acceptable in a test module, but worth knowing.
    let Right graph = depGraphFromMapping mapping
        storeData = StoreData mapping graph
        TestM act = migrationsToApply storeData backend theMigration
        result = runIdentity act
    in expected ~=? result
-- | One test per fixture case in 'missingMigrationsTestcases'.
migrationsToApplyTests :: [Test]
migrationsToApplyTests = map mkTest missingMigrationsTestcases
|
creswick/dbmigrations
|
test/MigrationsTest.hs
|
Haskell
|
bsd-3-clause
| 2,438
|
module ParsecExpr
{-# DEPRECATED "This module has moved to Text.ParserCombinators.Parsec.Expr" #-}
(module Text.ParserCombinators.Parsec.Expr) where
import Text.ParserCombinators.Parsec.Expr
|
FranklinChen/hugs98-plus-Sep2006
|
fptools/hslibs/text/parsec/ParsecExpr.hs
|
Haskell
|
bsd-3-clause
| 191
|
-- Copyright 2004-present Facebook. All Rights Reserved.
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE CPP #-}
module Haxl.Core.CallGraph where
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
#if __GLASGOW_HASKELL__ < 804
import Data.Monoid
#endif
import Data.Text (Text)
import qualified Data.Text as Text
-- | A module name
type ModuleName = Text

-- | An unqualified function
type Function = Text

-- | A qualified function: module name plus unqualified function name
data QualFunction = QualFunction ModuleName Function deriving (Eq, Ord)
-- | Rendered as @Module.function@.
instance Show QualFunction where
  show (QualFunction modName funName) =
    Text.unpack (modName <> Text.pack "." <> funName)
-- | Represents an edge between a parent function which calls a child function
-- in the call graph
type FunctionCall = (QualFunction, QualFunction)

-- | An edge list which represents the dependencies between function calls,
-- paired with per-function annotation text
type CallGraph = ([FunctionCall], Map QualFunction Text)
-- | Synthetic root under which every traced call is hung.
mainFunction :: QualFunction
mainFunction = QualFunction (Text.pack "MAIN") (Text.pack "main")
-- | A call graph with no edges and no annotations.
emptyCallGraph :: CallGraph
emptyCallGraph = (mempty, Map.empty)
|
facebook/Haxl
|
Haxl/Core/CallGraph.hs
|
Haskell
|
bsd-3-clause
| 1,071
|
{-
(c) The University of Glasgow 2006
(c) The AQUA Project, Glasgow University, 1996-1998
TcTyClsDecls: Typecheck type and class declarations
-}
{-# LANGUAGE TupleSections, CPP #-}
module ETA.TypeCheck.TcTyClsDecls (
tcTyAndClassDecls, tcAddImplicits,
-- Functions used by TcInstDcls to check
-- data/type family instance declarations
kcDataDefn, tcConDecls, dataDeclChecks, checkValidTyCon,
tcFamTyPats, tcTyFamInstEqn, famTyConShape,
tcAddTyFamInstCtxt, tcAddDataFamInstCtxt,
wrongKindOfFamily, dataConCtxt, badDataConTyCon
) where
import ETA.HsSyn.HsSyn
import ETA.Main.HscTypes
import ETA.Iface.BuildTyCl
import ETA.TypeCheck.TcRnMonad
import ETA.TypeCheck.TcEnv
import ETA.TypeCheck.TcValidity
import ETA.TypeCheck.TcHsSyn
import ETA.TypeCheck.TcSimplify( growThetaTyVars )
import ETA.TypeCheck.TcBinds( tcRecSelBinds )
import ETA.TypeCheck.TcTyDecls
import ETA.TypeCheck.TcClassDcl
import ETA.TypeCheck.TcHsType
import ETA.TypeCheck.TcMType
import ETA.TypeCheck.TcType
import ETA.Prelude.TysWiredIn( unitTy )
import ETA.TypeCheck.FamInst
import ETA.Types.FamInstEnv( isDominatedBy, mkCoAxBranch, mkBranchedCoAxiom )
import ETA.Types.Coercion( pprCoAxBranch, ltRole )
import ETA.Types.Type
import ETA.Types.TypeRep -- for checkValidRoles
import ETA.Types.Kind
import ETA.Types.Class
import ETA.Types.CoAxiom
import ETA.Types.TyCon
import ETA.BasicTypes.DataCon
import ETA.BasicTypes.Id
import ETA.Core.MkCore ( rEC_SEL_ERROR_ID )
import ETA.BasicTypes.IdInfo
import ETA.BasicTypes.Var
import ETA.BasicTypes.VarEnv
import ETA.BasicTypes.VarSet
import ETA.BasicTypes.Module
import ETA.BasicTypes.Name
import ETA.BasicTypes.NameSet
import ETA.BasicTypes.NameEnv
import ETA.Utils.Outputable
import qualified ETA.Utils.Outputable as Outputable
import ETA.Utils.Maybes
import ETA.Types.Unify
import ETA.Utils.Util
import ETA.BasicTypes.SrcLoc
import ETA.Utils.ListSetOps
import ETA.Utils.Digraph
import ETA.Main.DynFlags
import ETA.Utils.FastString
import ETA.BasicTypes.Unique ( mkBuiltinUnique )
import ETA.BasicTypes.BasicTypes
import ETA.Utils.Bag
import Control.Monad
import Data.List
#include "HsVersions.h"
{-
************************************************************************
* *
\subsection{Type checking for type and class declarations}
* *
************************************************************************
Note [Grouping of type and class declarations]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
tcTyAndClassDecls is called on a list of `TyClGroup`s. Each group is a strongly
connected component of mutually dependent types and classes. We kind check and
type check each group separately to enhance kind polymorphism. Take the
following example:
type Id a = a
data X = X (Id Int)
If we were to kind check the two declarations together, we would give Id the
kind * -> *, since we apply it to an Int in the definition of X. But we can do
better than that, since Id really is kind polymorphic, and should get kind
forall (k::BOX). k -> k. Since it does not depend on anything else, it can be
kind-checked by itself, hence getting the most general kind. We then kind check
X, which works fine because we then know the polymorphic kind of Id, and simply
instantiate k to *.
Note [Check role annotations in a second pass]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Role inference potentially depends on the types of all of the datacons declared
in a mutually recursive group. The validity of a role annotation, in turn,
depends on the result of role inference. Because the types of datacons might
be ill-formed (see #7175 and Note [Checking GADT return types]) we must check
*all* the tycons in a group for validity before checking *any* of the roles.
Thus, we take two passes over the resulting tycons, first checking for general
validity and then checking for valid role annotations.
-}
-- | Type-check all type and class declaration groups of a module.
-- See Note [Grouping of type and class declarations].
tcTyAndClassDecls :: ModDetails
                  -> [TyClGroup Name]   -- Mutually-recursive groups in dependency order
                  -> TcM TcGblEnv       -- Input env extended by types and classes
                                        -- and their implicit Ids,DataCons
-- Fails if there are any errors
tcTyAndClassDecls boot_details tyclds_s
  = checkNoErrs $       -- The code recovers internally, but if anything gave rise to
                        -- an error we'd better stop now, to avoid a cascade
    fold_env tyclds_s   -- Type check each group in dependency order folding the global env
  where
    fold_env :: [TyClGroup Name] -> TcM TcGblEnv
    fold_env [] = getGblEnv
    fold_env (tyclds:tyclds_s)
      = do { tcg_env <- tcTyClGroup boot_details tyclds
           ; setGblEnv tcg_env $ fold_env tyclds_s }
           -- remaining groups are typechecked in the extended global env
tcTyClGroup :: ModDetails -> TyClGroup Name -> TcM TcGblEnv
-- Typecheck one strongly-connected component of type and class decls
tcTyClGroup boot_details tyclds
  = do {    -- Step 1: kind-check this group and returns the final
            -- (possibly-polymorphic) kind of each TyCon and Class
            -- See Note [Kind checking for type and class decls]
         names_w_poly_kinds <- kcTyClGroup tyclds
       ; traceTc "tcTyAndCl generalized kinds" (ppr names_w_poly_kinds)

            -- Step 2: type-check all groups together, returning
            -- the final TyCons and Classes
       ; let role_annots = extractRoleAnnots tyclds
             decls = group_tyclds tyclds
         -- fixM ties the knot: the TyCons being built are visible
         -- (lazily) to their own type-checking pass
       ; tyclss <- fixM $ \ rec_tyclss -> do
           { is_boot <- tcIsHsBootOrSig
           ; let rec_flags = calcRecFlags boot_details is_boot
                                          role_annots rec_tyclss

                 -- Populate environment with knot-tied ATyCon for TyCons
                 -- NB: if the decls mention any ill-staged data cons
                 -- (see Note [Recursion and promoting data constructors])
                 -- we will have failed already in kcTyClGroup, so no worries here
           ; tcExtendRecEnv (zipRecTyClss names_w_poly_kinds rec_tyclss) $

                 -- Also extend the local type envt with bindings giving
                 -- the (polymorphic) kind of each knot-tied TyCon or Class
                 -- See Note [Type checking recursive type and class declarations]
             tcExtendKindEnv names_w_poly_kinds $

                 -- Kind and type check declarations for this group
             concatMapM (tcTyClDecl rec_flags) decls }

           -- Step 3: Perform the validity check
           -- We can do this now because we are done with the recursive knot
           -- Do it before Step 4 (adding implicit things) because the latter
           -- expects well-formed TyCons
       ; tcExtendGlobalEnv tyclss $ do
       { traceTc "Starting validity check" (ppr tyclss)
       ; checkNoErrs $
         mapM_ (recoverM (return ()) . checkValidTyCl) tyclss
           -- We recover, which allows us to report multiple validity errors
           -- the checkNoErrs is necessary to fix #7175.
       ; mapM_ (recoverM (return ()) . checkValidRoleAnnots role_annots) tyclss
           -- See Note [Check role annotations in a second pass]

           -- Step 4: Add the implicit things;
           -- we want them in the environment because
           -- they may be mentioned in interface files
       ; tcExtendGlobalValEnv (mkDefaultMethodIds tyclss) $
         tcAddImplicits tyclss } }
-- | Extend the global environment with the implicit things of the given
-- type/class TyThings (data cons, class ops, ...) and type-check their
-- record-selector bindings.
tcAddImplicits :: [TyThing] -> TcM TcGblEnv
tcAddImplicits tyclss
  = tcExtendGlobalEnvImplicit (concatMap implicitTyThings tyclss) $
      tcRecSelBinds (mkRecSelBinds tyclss)
zipRecTyClss :: [(Name, Kind)]
             -> [TyThing]           -- Knot-tied
             -> [(Name,TyThing)]
-- Build a name-TyThing mapping for the things bound by decls
-- being careful not to look at the [TyThing]
-- The TyThings in the result list must have a visible ATyCon,
-- because typechecking types (in, say, tcTyClDecl) looks at this outer constructor
zipRecTyClss kind_pairs rec_things
  = [ (name, ATyCon (get name)) | (name, _kind) <- kind_pairs ]
  where
    rec_type_env :: TypeEnv
    -- NB: rec_things is the knot-tied list; mkTypeEnv is only forced
    -- when 'get' is demanded, after the knot is tied
    rec_type_env = mkTypeEnv rec_things

    get name = case lookupTypeEnv rec_type_env name of
      Just (ATyCon tc) -> tc
      other            -> pprPanic "zipRecTyClss" (ppr name <+> ppr other)
{-
************************************************************************
* *
Kind checking
* *
************************************************************************
Note [Kind checking for type and class decls]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Kind checking is done thus:
1. Make up a kind variable for each parameter of the *data* type, class,
and closed type family decls, and extend the kind environment (which is
in the TcLclEnv)
2. Dependency-analyse the type *synonyms* (which must be non-recursive),
and kind-check them in dependency order. Extend the kind envt.
3. Kind check the data type and class decls
Synonyms are treated differently to data type and classes,
because a type synonym can be an unboxed type
type Foo = Int#
and a kind variable can't unify with UnboxedTypeKind
So we infer their kinds in dependency order
We need to kind check all types in the mutually recursive group
before we know the kind of the type variables. For example:
class C a where
op :: D b => a -> b -> b
class D c where
bop :: (Monad c) => ...
Here, the kind of the locally-polymorphic type variable "b"
depends on *all the uses of class D*. For example, the use of
Monad c in bop's type signature means that D must have kind Type->Type.
However type synonyms work differently. They can have kinds which don't
just involve (->) and *:
type R = Int# -- Kind #
type S a = Array# a -- Kind * -> #
type T a b = (# a,b #) -- Kind * -> * -> (# a,b #)
So we must infer their kinds from their right-hand sides *first* and then
use them, whereas for the mutually recursive data types D we bring into
scope kind bindings D -> k, where k is a kind variable, and do inference.
Open type families
~~~~~~~~~~~~~~~~~~
This treatment of type synonyms only applies to Haskell 98-style synonyms.
General type functions can be recursive, and hence, appear in `alg_decls'.
The kind of an open type family is solely determined by its kind signature;
hence, only kind signatures participate in the construction of the initial
kind environment (as constructed by `getInitialKind'). In fact, we ignore
instances of families altogether in the following. However, we need to include
the kinds of *associated* families into the construction of the initial kind
environment. (This is handled by `allDecls').
-}
kcTyClGroup :: TyClGroup Name -> TcM [(Name,Kind)]
-- Kind check this group, kind generalize, and return the resulting local env
-- This binds the TyCons and Classes of the group, but not the DataCons
-- See Note [Kind checking for type and class decls]
kcTyClGroup (TyClGroup { group_tyclds = decls })
  = do  { mod <- getModule
        ; traceTc "kcTyClGroup" (ptext (sLit "module") <+> ppr mod $$ vcat (map ppr decls))

          -- Kind checking;
          --    1. Bind kind variables for non-synonyms
          --    2. Kind-check synonyms, and bind kinds of those synonyms
          --    3. Kind-check non-synonyms
          --    4. Generalise the inferred kinds
          -- See Note [Kind checking for type and class decls]

          -- Step 1: Bind kind variables for non-synonyms
        ; let (syn_decls, non_syn_decls) = partition (isSynDecl . unLoc) decls
        ; initial_kinds <- getInitialKinds non_syn_decls
        ; traceTc "kcTyClGroup: initial kinds" (ppr initial_kinds)

          -- Step 2: Set initial envt, kind-check the synonyms
        ; lcl_env <- tcExtendKindEnv2 initial_kinds $
                     kcSynDecls (calcSynCycles syn_decls)

          -- Step 3: Set extended envt, kind-check the non-synonyms
        ; setLclEnv lcl_env $
          mapM_ kcLTyClDecl non_syn_decls

          -- Step 4: generalisation
          -- Kind checking done for this group
          -- Now we have to kind generalize the flexis
        ; res <- concatMapM (generaliseTCD (tcl_env lcl_env)) decls

        ; traceTc "kcTyClGroup result" (ppr res)
        ; return res }

  where
    generalise :: TcTypeEnv -> Name -> TcM (Name, Kind)
    -- For polymorphic things this is a no-op
    generalise kind_env name
      = do { let kc_kind = case lookupNameEnv kind_env name of
                             Just (AThing k) -> k
                             _ -> pprPanic "kcTyClGroup" (ppr name $$ ppr kind_env)
           ; kvs <- kindGeneralize (tyVarsOfType kc_kind)
           ; kc_kind' <- zonkTcKind kc_kind    -- Make sure kc_kind' has the final,
                                               -- skolemised kind variables
           ; traceTc "Generalise kind" (vcat [ ppr name, ppr kc_kind, ppr kvs, ppr kc_kind' ])
           ; return (name, mkForAllTys kvs kc_kind') }

    -- Generalise one declaration: a class also generalises each of its
    -- associated families
    generaliseTCD :: TcTypeEnv -> LTyClDecl Name -> TcM [(Name, Kind)]
    generaliseTCD kind_env (L _ decl)
      | ClassDecl { tcdLName = (L _ name), tcdATs = ats } <- decl
      = do { first <- generalise kind_env name
           ; rest <- mapM ((generaliseFamDecl kind_env) . unLoc) ats
           ; return (first : rest) }

      | FamDecl { tcdFam = fam } <- decl
      = do { res <- generaliseFamDecl kind_env fam
           ; return [res] }

      | otherwise
      = do { res <- generalise kind_env (tcdName decl)
           ; return [res] }

    generaliseFamDecl :: TcTypeEnv -> FamilyDecl Name -> TcM (Name, Kind)
    generaliseFamDecl kind_env (FamilyDecl { fdLName = L _ name })
      = generalise kind_env name
-- | Map every declared name to a promotion-error placeholder, so that
-- premature uses during kind checking are reported cleanly.
mk_thing_env :: [LTyClDecl Name] -> [(Name, TcTyThing)]
mk_thing_env = concatMap entries
  where
    -- A class contributes its own name plus each associated family.
    entries (L _ (ClassDecl { tcdLName = L _ nm, tcdATs = ats }))
      = (nm, APromotionErr ClassPE)
        : [ (unLoc (fdLName (unLoc at)), APromotionErr TyConPE) | at <- ats ]
    -- Every other declaration contributes just its own name.
    entries decl
      = [ (tcdName (unLoc decl), APromotionErr TyConPE) ]
-- | Initial-kind pass over a list of declarations, run with the
-- promotion-error placeholders of 'mk_thing_env' in scope.
getInitialKinds :: [LTyClDecl Name] -> TcM [(Name, TcTyThing)]
getInitialKinds decls
  = tcExtendKindEnv2 (mk_thing_env decls) $
      concatMapM (addLocM getInitialKind) decls
getInitialKind :: TyClDecl Name -> TcM [(Name, TcTyThing)]
-- Allocate a fresh kind variable for each TyCon and Class
-- For each tycon, return (tc, AThing k)
--                 where k is the kind of tc, derived from the LHS
--                       of the definition (and probably including
--                       kind unification variables)
--      Example: data T a b = ...
--      return (T, kv1 -> kv2 -> kv3)
--
-- This pass deals with (ie incorporates into the kind it produces)
--   * The kind signatures on type-variable binders
--   * The result kinds signature on a TyClDecl
--
-- ALSO for each datacon, return (dc, APromotionErr RecDataConPE)
-- See Note [Recursion and promoting data constructors]
--
-- No family instances are passed to getInitialKinds

-- Classes: kind the class head (constraintKind) plus the associated families
getInitialKind decl@(ClassDecl { tcdLName = L _ name, tcdTyVars = ktvs, tcdATs = ats })
  = do { (cl_kind, inner_prs) <-
           kcHsTyVarBndrs (hsDeclHasCusk decl) ktvs $
           do { inner_prs <- getFamDeclInitialKinds ats
              ; return (constraintKind, inner_prs) }
       ; let main_pr = (name, AThing cl_kind)
       ; return (main_pr : inner_prs) }

-- Data/newtype: result kind from the signature if present, else *
getInitialKind decl@(DataDecl { tcdLName = L _ name
                              , tcdTyVars = ktvs
                              , tcdDataDefn = HsDataDefn { dd_kindSig = m_sig
                                                         , dd_cons = cons' } })
  = let cons = cons' -- AZ list monad coming
    in
    do { (decl_kind, _) <-
           kcHsTyVarBndrs (hsDeclHasCusk decl) ktvs $
           do { res_k <- case m_sig of
                           Just ksig -> tcLHsKind ksig
                           Nothing   -> return liftedTypeKind
              ; return (res_k, ()) }
       ; let main_pr = (name, AThing decl_kind)
             -- data constructors get promotion-error placeholders here
             inner_prs = [ (unLoc con, APromotionErr RecDataConPE)
                         | L _ con' <- cons, con <- con_names con' ]
       ; return (main_pr : inner_prs) }

getInitialKind (FamDecl { tcdFam = decl })
  = getFamDeclInitialKind decl

-- Synonyms are kind-checked separately (see kcSynDecls); reaching here is a bug
getInitialKind decl@(SynDecl {})
  = pprPanic "getInitialKind" (ppr decl)
---------------------------------
-- | Initial kinds for a list of (associated) family declarations,
-- with promotion-error placeholders for the family names in scope.
getFamDeclInitialKinds :: [LFamilyDecl Name] -> TcM [(Name, TcTyThing)]
getFamDeclInitialKinds decls
  = tcExtendKindEnv2 [ (n, APromotionErr TyConPE)
                     | L _ (FamilyDecl { fdLName = L _ n }) <- decls] $
    concatMapM (addLocM getFamDeclInitialKind) decls
-- | Initial kind of one family declaration: use the kind signature when
-- given; otherwise * for families with a CUSK, or a fresh meta kind
-- variable to be solved by inference.
getFamDeclInitialKind :: FamilyDecl Name
                      -> TcM [(Name, TcTyThing)]
getFamDeclInitialKind decl@(FamilyDecl { fdLName = L _ name
                                       , fdTyVars = ktvs
                                       , fdKindSig = ksig })
  = do { (fam_kind, _) <-
           kcHsTyVarBndrs (famDeclHasCusk decl) ktvs $
           do { res_k <- case ksig of
                           Just k -> tcLHsKind k
                           Nothing
                             | famDeclHasCusk decl -> return liftedTypeKind
                             | otherwise           -> newMetaKindVar
              ; return (res_k, ()) }
       ; return [ (name, AThing fam_kind) ] }
----------------
-- | Kind-check type synonyms in dependency order, threading each
-- synonym's kind binding through to the later groups.
kcSynDecls :: [SCC (LTyClDecl Name)]
           -> TcM TcLclEnv -- Kind bindings
kcSynDecls [] = getLclEnv
kcSynDecls (grp : grps)
  = do { (name, kind) <- kcSynDecl1 grp
       ; tcExtendKindEnv [(name, kind)] (kcSynDecls grps) }
-- | Kind-check one SCC of synonyms.  Synonyms must be non-recursive, so
-- a cyclic SCC is reported and we fail outright to avoid an error
-- cascade of out-of-scope tycons.
kcSynDecl1 :: SCC (LTyClDecl Name)
           -> TcM (Name,TcKind) -- Kind bindings
kcSynDecl1 (AcyclicSCC (L _ decl)) = kcSynDecl decl
kcSynDecl1 (CyclicSCC decls)       = recSynErr decls >> failM
-- | Kind-check a single type-synonym declaration: the synonym's kind is
-- derived from its binders and the kind of its right-hand side.
kcSynDecl :: TyClDecl Name -> TcM (Name, TcKind)
kcSynDecl decl@(SynDecl { tcdTyVars = hs_tvs, tcdLName = L _ name
                        , tcdRhs = rhs })
  -- Returns a possibly-unzonked kind
  = tcAddDeclCtxt decl $
    do { (syn_kind, _) <-
           kcHsTyVarBndrs (hsDeclHasCusk decl) hs_tvs $
           do { traceTc "kcd1" (ppr name <+> brackets (ppr hs_tvs))
              ; (_, rhs_kind) <- tcLHsType rhs
              ; traceTc "kcd2" (ppr name)
              ; return (rhs_kind, ()) }
       ; return (name, syn_kind) }
-- only synonyms should ever reach this function
kcSynDecl decl = pprPanic "kcSynDecl" (ppr decl)
------------------------------------------------------------------------
-- | Kind-check a located decl, first pushing its source span and
-- declaration context for error messages.
kcLTyClDecl :: LTyClDecl Name -> TcM ()
-- See Note [Kind checking for type and class decls]
kcLTyClDecl (L loc decl)
  = setSrcSpan loc $ tcAddDeclCtxt decl $ kcTyClDecl decl
kcTyClDecl :: TyClDecl Name -> TcM ()
-- This function is used solely for its side effect on kind variables
-- NB kind signatures on the type variables and
--    result kind signature have already been dealt with
--    by getInitialKind, so we can ignore them here.

kcTyClDecl (DataDecl { tcdLName = L _ name, tcdTyVars = hs_tvs, tcdDataDefn = defn })
  | HsDataDefn { dd_cons = cons, dd_kindSig = Just _ } <- defn
  = mapM_ (wrapLocM kcConDecl) cons
    -- hs_tvs and dd_kindSig already dealt with in getInitialKind
    -- If dd_kindSig is Just, this must be a GADT-style decl,
    --        (see invariants of DataDefn declaration)
    -- so (a) we don't need to bring the hs_tvs into scope, because the
    --        ConDecls bind all their own variables
    --    (b) dd_ctxt is not allowed for GADT-style decls, so we can ignore it

  | HsDataDefn { dd_ctxt = ctxt, dd_cons = cons } <- defn
  = kcTyClTyVars name hs_tvs $
    do  { _ <- tcHsContext ctxt
        ; mapM_ (wrapLocM kcConDecl) cons }

-- synonyms were handled earlier by kcSynDecls
kcTyClDecl decl@(SynDecl {}) = pprPanic "kcTyClDecl" (ppr decl)

kcTyClDecl (ClassDecl { tcdLName = L _ name, tcdTyVars = hs_tvs
                      , tcdCtxt = ctxt, tcdSigs = sigs })
  = kcTyClTyVars name hs_tvs $
    do  { _ <- tcHsContext ctxt
        ; mapM_ (wrapLocM kc_sig) sigs }
  where
    -- only method/generic-default signatures contribute kind constraints
    kc_sig (TypeSig _ op_ty _)  = discardResult (tcHsLiftedType op_ty)
    kc_sig (GenericSig _ op_ty) = discardResult (tcHsLiftedType op_ty)
    kc_sig _                    = return ()

-- closed type families look at their equations, but other families don't
-- do anything here
kcTyClDecl (FamDecl (FamilyDecl { fdLName = L _ fam_tc_name
                                , fdTyVars = hs_tvs
                                , fdInfo = ClosedTypeFamily eqns }))
  = do { tc_kind <- kcLookupKind fam_tc_name
       ; let fam_tc_shape = ( fam_tc_name, length (hsQTvBndrs hs_tvs), tc_kind)
       ; mapM_ (kcTyFamInstEqn fam_tc_shape) eqns }
kcTyClDecl (FamDecl {}) = return ()
-------------------
-- | Kind-check one data constructor declaration: its existential
-- binders, context, argument types and result type.  Run only for the
-- kind-unification side effects; the result is discarded.
kcConDecl :: ConDecl Name -> TcM ()
kcConDecl (ConDecl { con_names = names, con_qvars = ex_tvs
                   , con_cxt = ex_ctxt, con_details = details
                   , con_res = res })
  = addErrCtxt (dataConCtxtName names) $
         -- the 'False' says that the existentials don't have a CUSK, as the
         -- concept doesn't really apply here. We just need to bring the variables
         -- into scope!
    do { _ <- kcHsTyVarBndrs False ex_tvs $
              do { _ <- tcHsContext ex_ctxt
                 ; mapM_ (tcHsOpenType . getBangType) (hsConDeclArgTys details)
                 ; _ <- tcConRes res
                 -- the returned kind is never inspected, hence the panic value
                 ; return (panic "kcConDecl", ()) }
       ; return () }
{-
Note [Recursion and promoting data constructors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We don't want to allow promotion in a strongly connected component
when kind checking.
Consider:
data T f = K (f (K Any))
When kind checking the `data T' declaration the local env contains the
mappings:
T -> AThing <some initial kind>
K -> ARecDataCon
APromotionErr is only used for DataCons, and only used during type checking
in tcTyClGroup.
************************************************************************
* *
\subsection{Type checking}
* *
************************************************************************
Note [Type checking recursive type and class declarations]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
At this point we have completed *kind-checking* of a mutually
recursive group of type/class decls (done in kcTyClGroup). However,
we discarded the kind-checked types (eg RHSs of data type decls);
note that kcTyClDecl returns (). There are two reasons:
* It's convenient, because we don't have to rebuild a
kinded HsDecl (a fairly elaborate type)
* It's necessary, because after kind-generalisation, the
TyCons/Classes may now be kind-polymorphic, and hence need
to be given kind arguments.
Example:
data T f a = MkT (f a) (T f a)
During kind-checking, we give T the kind T :: k1 -> k2 -> *
and figure out constraints on k1, k2 etc. Then we generalise
to get T :: forall k. (k->*) -> k -> *
So now the (T f a) in the RHS must be elaborated to (T k f a).
However, during tcTyClDecl of T (above) we will be in a recursive
"knot". So we aren't allowed to look at the TyCon T itself; we are only
allowed to put it (lazily) in the returned structures. But when
kind-checking the RHS of T's decl, we *do* need to know T's kind (so
that we can correctly elaborate (T k f a). How can we get T's kind
without looking at T? Delicate answer: during tcTyClDecl, we extend
*Global* env with T -> ATyCon (the (not yet built) TyCon for T)
*Local* env with T -> AThing (polymorphic kind of T)
Then:
* During TcHsType.kcTyVar we look in the *local* env, to get the
known kind for T.
* But in TcHsType.ds_type (and ds_var_app in particular) we look in
the *global* env to get the TyCon. But we must be careful not to
force the TyCon or we'll get a loop.
This fancy footwork (with two bindings for T) is only necessary for the
TyCons or Classes of this recursive group. Earlier, finished groups,
live in the global env only.
-}
-- | Type-check one located type/class declaration, pushing the source
-- span and declaration context for error messages first.
tcTyClDecl :: RecTyInfo -> LTyClDecl Name -> TcM [TyThing]
tcTyClDecl rec_info (L loc decl)
  = setSrcSpan loc $ tcAddDeclCtxt decl $
    traceTc "tcTyAndCl-x" (ppr decl) >>
    tcTyClDecl1 NoParentTyCon rec_info decl
  -- "type family" declarations
-- | Type-check one type or class declaration, given its parent (which is
-- 'NoParentTyCon' except for associated types inside a class).
-- One equation per declaration form: family, synonym, data/newtype, class.
tcTyClDecl1 :: TyConParent -> RecTyInfo -> TyClDecl Name -> TcM [TyThing]
tcTyClDecl1 parent _rec_info (FamDecl { tcdFam = fd })
  = tcFamDecl1 parent fd

  -- "type" synonym declaration
tcTyClDecl1 _parent rec_info
            (SynDecl { tcdLName = L _ tc_name, tcdTyVars = tvs, tcdRhs = rhs })
  = ASSERT( isNoParent _parent )
    tcTyClTyVars tc_name tvs $ \ tvs' kind ->
    tcTySynRhs rec_info tc_name tvs' kind rhs

  -- "data/newtype" declaration
tcTyClDecl1 _parent rec_info
            (DataDecl { tcdLName = L _ tc_name, tcdTyVars = tvs, tcdDataDefn = defn })
  = ASSERT( isNoParent _parent )
    tcTyClTyVars tc_name tvs $ \ tvs' kind ->
    tcDataDefn rec_info tc_name tvs' kind defn

tcTyClDecl1 _parent rec_info
            (ClassDecl { tcdLName = L _ class_name, tcdTyVars = tvs
                       , tcdCtxt = ctxt, tcdMeths = meths
                       , tcdFDs = fundeps, tcdSigs = sigs
                       , tcdATs = ats, tcdATDefs = at_defs })
  = ASSERT( isNoParent _parent )
    do { (clas, tvs', gen_dm_env) <- fixM $ \ ~(clas,_,_) ->
           -- Lazy fixM knot: 'clas' is only used lazily below, to name
           -- the class TyCon before the Class itself is built
           tcTyClTyVars class_name tvs $ \ tvs' kind ->
           do { MASSERT( isConstraintKind kind )
              -- This little knot is just so we can get
              -- hold of the name of the class TyCon, which we
              -- need to look up its recursiveness
              ; let tycon_name = tyConName (classTyCon clas)
                    tc_isrec = rti_is_rec rec_info tycon_name
                    roles = rti_roles rec_info tycon_name

              ; ctxt' <- tcHsContext ctxt
              ; ctxt' <- zonkTcTypeToTypes emptyZonkEnv ctxt'
                  -- Squeeze out any kind unification variables
              ; fds' <- mapM (addLocM tc_fundep) fundeps
              ; (sig_stuff, gen_dm_env) <- tcClassSigs class_name sigs meths
              ; at_stuff <- tcClassATs class_name (AssocFamilyTyCon clas) ats at_defs
              ; mindef <- tcClassMinimalDef class_name sigs sig_stuff
              ; clas <- buildClass
                          class_name tvs' roles ctxt' fds' at_stuff
                          sig_stuff mindef tc_isrec
              ; traceTc "tcClassDecl" (ppr fundeps $$ ppr tvs' $$ ppr fds')
              ; return (clas, tvs', gen_dm_env) }

         -- Build the Ids for any generic default methods, giving each the
         -- polymorphic type  forall tvs'. C tvs' => <generic default type>
       ; let { gen_dm_ids = [ AnId (mkExportedLocalId VanillaId gen_dm_name gen_dm_ty)
                            | (sel_id, GenDefMeth gen_dm_name) <- classOpItems clas
                            , let gen_dm_tau = expectJust "tcTyClDecl1" $
                                               lookupNameEnv gen_dm_env (idName sel_id)
                            , let gen_dm_ty = mkSigmaTy tvs'
                                                        [mkClassPred clas (mkTyVarTys tvs')]
                                                        gen_dm_tau
                            ]
             ; class_ats = map ATyCon (classATs clas) }

       ; return (ATyCon (classTyCon clas) : gen_dm_ids ++ class_ats ) }
         -- NB: Order is important due to the call to `mkGlobalThings' when
         --     tying the type and class declaration type checking knot.
  where
    -- Type-check one functional dependency (lhs tyvars, rhs tyvars)
    tc_fundep (tvs1, tvs2) = do { tvs1' <- mapM (tc_fd_tyvar . unLoc) tvs1 ;
                                ; tvs2' <- mapM (tc_fd_tyvar . unLoc) tvs2 ;
                                ; return (tvs1', tvs2') }
    tc_fd_tyvar name   -- Scoped kind variables are bound to unification variables
                       -- which are now fixed, so we can zonk
      = do { tv <- tcLookupTyVar name
           ; ty <- zonkTyVarOcc emptyZonkEnv tv
                  -- Squeeze out any kind unification variables
           ; case getTyVar_maybe ty of
               Just tv' -> return tv'
               Nothing  -> pprPanic "tc_fd_tyvar" (ppr name $$ ppr tv $$ ppr ty) }
-- | Type-check one family declaration (open type family, closed type
-- family, or data family), producing the family TyCon and, for a
-- non-empty closed family, its CoAxiom as well.
tcFamDecl1 :: TyConParent -> FamilyDecl Name -> TcM [TyThing]
tcFamDecl1 parent
           (FamilyDecl {fdInfo = OpenTypeFamily, fdLName = L _ tc_name, fdTyVars = tvs})
  = tcTyClTyVars tc_name tvs $ \ tvs' kind -> do
    { traceTc "open type family:" (ppr tc_name)
    ; checkFamFlag tc_name    -- requires -XTypeFamilies
    ; tycon <- buildFamilyTyCon tc_name tvs' OpenSynFamilyTyCon kind parent
    ; return [ATyCon tycon] }

tcFamDecl1 parent
           (FamilyDecl { fdInfo = ClosedTypeFamily eqns
                       , fdLName = lname@(L _ tc_name), fdTyVars = tvs })
  -- Closed type families are a little tricky, because they contain the definition
  -- of both the type family and the equations for a CoAxiom.
  -- Note: eqns might be empty, in a hs-boot file!
  = do { traceTc "closed type family:" (ppr tc_name)
         -- the variables in the header have no scope:
       ; (tvs', kind) <- tcTyClTyVars tc_name tvs $ \ tvs' kind ->
                         return (tvs', kind)

       ; checkFamFlag tc_name -- make sure we have -XTypeFamilies

         -- Process the equations, creating CoAxBranches
       ; tc_kind <- kcLookupKind tc_name
       ; let fam_tc_shape = (tc_name, length (hsQTvBndrs tvs), tc_kind)

       ; branches <- mapM (tcTyFamInstEqn fam_tc_shape) eqns

         -- we need the tycon that we will be creating, but it's in scope.
         -- just look it up.
       ; fam_tc <- tcLookupLocatedTyCon lname

         -- create a CoAxiom, with the correct src location. It is Vitally
         -- Important that we do not pass the branches into
         -- newFamInstAxiomName. They have types that have been zonked inside
         -- the knot and we will die if we look at them. This is OK here
         -- because there will only be one axiom, so we don't need to
         -- differentiate names.
         -- See [Zonking inside the knot] in TcHsType
       ; loc <- getSrcSpanM
       ; co_ax_name <- newFamInstAxiomName loc tc_name []

         -- mkBranchedCoAxiom will fail on an empty list of branches, but
         -- we'll never look at co_ax in this case
       ; let co_ax = mkBranchedCoAxiom co_ax_name fam_tc branches

         -- now, finally, build the TyCon
       ; let syn_rhs = if null eqns
                       then AbstractClosedSynFamilyTyCon
                       else ClosedSynFamilyTyCon co_ax
       ; tycon <- buildFamilyTyCon tc_name tvs' syn_rhs kind parent

       ; let result = if null eqns
                      then [ATyCon tycon]
                      else [ATyCon tycon, ACoAxiom co_ax]
       ; return result }
-- We check for instance validity later, when doing validity checking for
-- the tycon

tcFamDecl1 parent
           (FamilyDecl {fdInfo = DataFamily, fdLName = L _ tc_name, fdTyVars = tvs})
  = tcTyClTyVars tc_name tvs $ \ tvs' kind -> do
  { traceTc "data family:" (ppr tc_name)
  ; checkFamFlag tc_name
  ; extra_tvs <- tcDataKindSig kind   -- invent tyvars for any leftover kind arrows
  ; let final_tvs = tvs' ++ extra_tvs -- we may not need these
        roles     = map (const Nominal) final_tvs
        tycon = buildAlgTyCon tc_name final_tvs roles Nothing []
                              DataFamilyTyCon Recursive
                              False   -- Not promotable to the kind level
                              True    -- GADT syntax
                              parent
  ; return [ATyCon tycon] }
-- | Type-check the right-hand side of a type synonym declaration and
-- build the resulting synonym TyCon.  The RHS is checked against the
-- synonym's kind and then zonked into the Type world.
tcTySynRhs :: RecTyInfo
           -> Name
           -> [TyVar] -> Kind
           -> LHsType Name -> TcM [TyThing]
tcTySynRhs rec_info tc_name tvs kind hs_ty
  = do { lcl_env <- getLclEnv
       ; traceTc "tc-syn" (ppr tc_name $$ ppr (tcl_env lcl_env))
       ; checked_rhs <- tcCheckLHsType hs_ty kind
       ; zonked_rhs  <- zonkTcTypeToType emptyZonkEnv checked_rhs
       ; tycon <- buildSynonymTyCon tc_name tvs (rti_roles rec_info tc_name)
                                    zonked_rhs kind
       ; return [ATyCon tycon] }
-- | Type-check the definition (context, kind signature, constructors) of
-- a @data@ or @newtype@ declaration and build its TyCon via a 'fixM'
-- knot (the constructors mention the TyCon being built).
tcDataDefn :: RecTyInfo -> Name
           -> [TyVar] -> Kind
           -> HsDataDefn Name -> TcM [TyThing]
  -- NB: not used for newtype/data instances (whether associated or not)
tcDataDefn rec_info tc_name tvs kind
           (HsDataDefn { dd_ND = new_or_data, dd_cType = cType
                       , dd_ctxt = ctxt, dd_kindSig = mb_ksig
                       , dd_cons = cons' })
 = let cons = cons' -- AZ List monad coming
   in do { extra_tvs <- tcDataKindSig kind
      ; let final_tvs = tvs ++ extra_tvs
            roles = rti_roles rec_info tc_name
      ; stupid_tc_theta <- tcHsContext ctxt
      ; stupid_theta <- zonkTcTypeToTypes emptyZonkEnv stupid_tc_theta
      ; kind_signatures <- xoptM Opt_KindSignatures
      ; is_boot <- tcIsHsBootOrSig -- Are we compiling an hs-boot file?

        -- Check that we don't use kind signatures without Glasgow extensions
      ; case mb_ksig of
          Nothing -> return ()
          Just hs_k -> do { checkTc (kind_signatures) (badSigTyDecl tc_name)
                          ; tc_kind <- tcLHsKind hs_k
                          ; checkKind kind tc_kind
                          ; return () }

      ; gadt_syntax <- dataDeclChecks tc_name new_or_data stupid_theta cons

        -- Knot: 'tycon' is used lazily inside to build the constructors'
        -- result type
      ; tycon <- fixM $ \ tycon -> do
          { let res_ty = mkTyConApp tycon (mkTyVarTys final_tvs)
          ; data_cons <- tcConDecls new_or_data tycon (final_tvs, res_ty) cons
          ; tc_rhs <-
              if null cons && is_boot            -- In a hs-boot file, empty cons means
              then return totallyAbstractTyConRhs -- "don't know"; hence totally Abstract
              else case new_or_data of
                     DataType -> return (mkDataTyConRhs data_cons)
                     NewType  -> ASSERT( not (null data_cons) )
                                 mkNewTyConRhs tc_name tycon (head data_cons)
          ; return (buildAlgTyCon tc_name final_tvs roles (fmap unLoc cType)
                                  stupid_theta tc_rhs
                                  (rti_is_rec rec_info tc_name)
                                  (rti_promotable rec_info)
                                  gadt_syntax NoParentTyCon) }
      ; return [ATyCon tycon] }
{-
************************************************************************
* *
Typechecking associated types (in class decls)
(including the associated-type defaults)
* *
************************************************************************
Note [Associated type defaults]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The following is an example of associated type defaults:
class C a where
data D a
type F a b :: *
type F a Z = [a] -- Default
type F a (S n) = F a n -- Default
Note that:
- We can have more than one default definition for a single associated type,
as long as they do not overlap (same rules as for instances)
- We can get default definitions only for type families, not data families
-}
-- | Type-check the associated type declarations of a class, pairing each
-- with its default definitions (if any).  Rejects defaults that name a
-- type that is not an associated type of this class.
tcClassATs :: Name                  -- The class name (not knot-tied)
           -> TyConParent           -- The class parent of this associated type
           -> [LFamilyDecl Name]    -- Associated types.
           -> [LTyFamDefltEqn Name] -- Associated type defaults.
           -> TcM [ClassATItem]
tcClassATs class_name parent ats at_defs
  = do {  -- Complain about associated type defaults for non associated-types
         sequence_ [ failWithTc (badATErr class_name n)
                   | n <- map at_def_tycon at_defs
                   , not (n `elemNameSet` at_names) ]
       ; mapM tc_at ats }
  where
    at_def_tycon :: LTyFamDefltEqn Name -> Name
    at_def_tycon (L _ eqn) = unLoc (tfe_tycon eqn)

    at_fam_name :: LFamilyDecl Name -> Name
    at_fam_name (L _ decl) = unLoc (fdLName decl)

    at_names = mkNameSet (map at_fam_name ats)

    at_defs_map :: NameEnv [LTyFamDefltEqn Name]
    -- Maps an AT in 'ats' to a list of all its default defs in 'at_defs'
    at_defs_map = foldr (\at_def nenv -> extendNameEnv_C (++) nenv
                                           (at_def_tycon at_def) [at_def])
                        emptyNameEnv at_defs

    -- NB: the [ATyCon fam_tc] pattern is irrefutable only because
    -- tcFamDecl1 returns a singleton for an associated family
    tc_at at = do { [ATyCon fam_tc] <- addLocM (tcFamDecl1 parent) at
                  ; let at_defs = lookupNameEnv at_defs_map (at_fam_name at)
                                  `orElse` []
                  ; atd <- tcDefaultAssocDecl fam_tc at_defs
                  ; return (ATI fam_tc atd) }
-------------------------
tcDefaultAssocDecl :: TyCon              -- ^ Family TyCon
                   -> [LTyFamDefltEqn Name] -- ^ Defaults
                   -> TcM (Maybe (Type, SrcSpan)) -- ^ Type checked RHS
tcDefaultAssocDecl _ []
  = return Nothing  -- No default declaration

tcDefaultAssocDecl _ (d1:_:_)
  = failWithTc (ptext (sLit "More than one default declaration for")
                <+> ppr (tfe_tycon (unLoc d1)))

tcDefaultAssocDecl fam_tc [L loc (TyFamEqn { tfe_tycon = L _ tc_name
                                           , tfe_pats = hs_tvs
                                           , tfe_rhs = rhs })]
  = setSrcSpan loc $
    tcAddFamInstCtxt (ptext (sLit "default type instance")) tc_name $
    tcTyClTyVars tc_name hs_tvs $ \ tvs rhs_kind ->
    do { traceTc "tcDefaultAssocDecl" (ppr tc_name)
       ; checkTc (isTypeFamilyTyCon fam_tc) (wrongKindOfFamily fam_tc)
       ; let (fam_name, fam_pat_arity, _) = famTyConShape fam_tc
       ; ASSERT( fam_name == tc_name )
         checkTc (length (hsQTvBndrs hs_tvs) == fam_pat_arity)
                 (wrongNumberOfParmsErr fam_pat_arity)
       ; rhs_ty <- tcCheckLHsType rhs rhs_kind
       ; rhs_ty <- zonkTcTypeToType emptyZonkEnv rhs_ty
         -- Substitute the default RHS so it mentions the family TyCon's
         -- own tyvars rather than those bound by this equation
       ; let fam_tc_tvs = tyConTyVars fam_tc
             subst = zipTopTvSubst tvs (mkTyVarTys fam_tc_tvs)
       ; return ( ASSERT( equalLength fam_tc_tvs tvs )
                  Just (substTy subst rhs_ty, loc) ) }
   -- We check for well-formedness and validity later, in checkValidClass
-- We check for well-formedness and validity later, in checkValidClass
-------------------------
-- | Kind-check (only) one equation of a type family instance; the
-- elaborated result is thrown away.  See Note [tc_fam_ty_pats vs tcFamTyPats].
kcTyFamInstEqn :: FamTyConShape -> LTyFamInstEqn Name -> TcM ()
kcTyFamInstEqn fam_tc_shape
    (L loc (TyFamEqn { tfe_pats = pats, tfe_rhs = hs_ty }))
  = setSrcSpan loc $
    discardResult $
    tc_fam_ty_pats fam_tc_shape pats $ \ res_kind ->
      discardResult (tcCheckLHsType hs_ty res_kind)
-- | Type-check one equation of a type family instance, producing a
-- 'CoAxBranch'.
tcTyFamInstEqn :: FamTyConShape -> LTyFamInstEqn Name -> TcM CoAxBranch
-- Needs to be here, not in TcInstDcls, because closed families
-- (typechecked here) have TyFamInstEqns
tcTyFamInstEqn fam_tc_shape@(fam_tc_name,_,_)
    (L loc (TyFamEqn { tfe_tycon = L _ eqn_tc_name
                     , tfe_pats = pats
                     , tfe_rhs = hs_ty }))
  = setSrcSpan loc $
    tcFamTyPats fam_tc_shape pats (discardResult . (tcCheckLHsType hs_ty)) $
       \tvs' pats' res_kind ->
    do { -- An equation inside "type family F where ..." must be for F itself
         checkTc (fam_tc_name == eqn_tc_name)
                 (wrongTyFamName fam_tc_name eqn_tc_name)
       ; rhs_ty <- tcCheckLHsType hs_ty res_kind
       ; rhs_ty <- zonkTcTypeToType emptyZonkEnv rhs_ty
       ; traceTc "tcTyFamInstEqn" (ppr fam_tc_name <+> ppr tvs')
         -- don't print out the pats here, as they might be zonked inside the knot
       ; return (mkCoAxBranch tvs' pats' rhs_ty loc) }
-- | Kind-check the definition part of a @data instance@ declaration:
-- the context, each constructor, and the optional result-kind signature.
kcDataDefn :: HsDataDefn Name -> TcKind -> TcM ()
-- Used for 'data instance' only
-- Ordinary 'data' is handled by kcTyClDec
kcDataDefn (HsDataDefn { dd_ctxt = ctxt, dd_cons = cons, dd_kindSig = mb_kind }) res_k
  = do { void (tcHsContext ctxt)
       ; checkNoErrs (mapM_ (wrapLocM kcConDecl) cons)
         -- See Note [Failing early in kcDataDefn]
       ; kcResultKind mb_kind res_k }
------------------
-- | Unify a @data instance@'s declared result kind (if any) with the
-- expected one.  An omitted signature defaults to @*@:
--     type family F a   ===   type family F a :: *
kcResultKind :: Maybe (LHsKind Name) -> Kind -> TcM ()
kcResultKind mb_kind res_k
  = case mb_kind of
      Nothing   -> checkKind res_k liftedTypeKind
      Just hs_k -> tcLHsKind hs_k >>= \ k' -> checkKind k' res_k
{-
Kind check type patterns and kind annotate the embedded type variables.
type instance F [a] = rhs
* Here we check that a type instance matches its kind signature, but we do
not check whether there is a pattern for each type index; the latter
check is only required for type synonym instances.
Note [tc_fam_ty_pats vs tcFamTyPats]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
tc_fam_ty_pats does the type checking of the patterns, but it doesn't
zonk or generate any desugaring. It is used when kind-checking closed
type families.
tcFamTyPats type checks the patterns, zonks, and then calls thing_inside
to generate a desugaring. It is used during type-checking (not kind-checking).
Note [Type-checking type patterns]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When typechecking the patterns of a family instance declaration, we can't
rely on using the family TyCon, because this is sometimes called
from within a type-checking knot. (Specifically for closed type families.)
The type FamTyConShape gives just enough information to do the job.
The "arity" field of FamTyConShape is the *visible* arity of the family
type constructor, i.e. what the users sees and writes, not including kind
arguments.
See also Note [tc_fam_ty_pats vs tcFamTyPats]
Note [Failing early in kcDataDefn]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We need to use checkNoErrs when calling kcConDecl. This is because kcConDecl
calls tcConDecl, which checks that the return type of a GADT-like constructor
is actually an instance of the type head. Without the checkNoErrs, potentially
two bad things could happen:
1) Duplicate error messages, because tcConDecl will be called again during
*type* checking (as opposed to kind checking)
2) If we just keep blindly forging forward after both kind checking and type
checking, we can get a panic in rejigConRes. See Trac #8368.
-}
-----------------
type FamTyConShape = (Name, Arity, Kind)  -- See Note [Type-checking type patterns]

-- | Extract the 'FamTyConShape' of a family TyCon: its name, its
-- *visible* arity (type variables that are not kind variables), and
-- its full kind.
famTyConShape :: TyCon -> FamTyConShape
famTyConShape fam_tc = (name, vis_arity, kind)
  where
    name      = tyConName fam_tc
    vis_arity = length (filterOut isKindVar (tyConTyVars fam_tc))
    kind      = tyConKind fam_tc
-- | Kind-check the type patterns of a family instance equation, without
-- zonking or quantifying.  See Note [tc_fam_ty_pats vs tcFamTyPats].
tc_fam_ty_pats :: FamTyConShape
               -> HsWithBndrs Name [LHsType Name] -- Patterns
               -> (TcKind -> TcM ())              -- Kind checker for RHS
                                                  -- result is ignored
               -> TcM ([Kind], [Type], Kind)
-- Check the type patterns of a type or data family instance
--     type instance F <pat1> <pat2> = <type>
-- The 'tyvars' are the free type variables of pats
--
-- NB: The family instance declaration may be an associated one,
-- nested inside an instance decl, thus
--        instance C [a] where
--          type F [a] = ...
-- In that case, the type variable 'a' will *already be in scope*
-- (and, if C is poly-kinded, so will its kind parameter).
tc_fam_ty_pats (name, arity, kind)
               (HsWB { hswb_cts = arg_pats, hswb_kvs = kvars, hswb_tvs = tvars })
               kind_checker
  = do { let (fam_kvs, fam_body) = splitForAllTys kind

         -- We wish to check that the pattern has the right number of arguments
         -- in checkValidFamPats (in TcValidity), so we can do the check *after*
         -- we're done with the knot. But, the splitKindFunTysN below will panic
         -- if there are *too many* patterns. So, we do a preliminary check here.
         -- Note that we don't have enough information at hand to do a full check,
         -- as that requires the full declared arity of the family, which isn't
         -- nearby.
       ; checkTc (length arg_pats == arity) $
         wrongNumberOfParmsErr arity

         -- Instantiate with meta kind vars
       ; fam_arg_kinds <- mapM (const newMetaKindVar) fam_kvs
       ; loc <- getSrcSpanM
       ; let (arg_kinds, res_kind)
                 = splitKindFunTysN (length arg_pats) $
                   substKiWith fam_kvs fam_arg_kinds fam_body
             hs_tvs = HsQTvs { hsq_kvs = kvars
                             , hsq_tvs = userHsTyVarBndrs loc tvars }

         -- Kind-check and quantify
         -- See Note [Quantifying over family patterns]
       ; typats <- tcHsTyVarBndrs hs_tvs $ \ _ ->
                   do { kind_checker res_kind
                      ; tcHsArgTys (quotes (ppr name)) arg_pats arg_kinds }
       ; return (fam_arg_kinds, typats, res_kind) }
-- See Note [tc_fam_ty_pats vs tcFamTyPats]
-- | Type-check family-instance patterns, then quantify and zonk them
-- into the Type world before passing them to the continuation.
-- See Note [tc_fam_ty_pats vs tcFamTyPats].
tcFamTyPats :: FamTyConShape
            -> HsWithBndrs Name [LHsType Name] -- patterns
            -> (TcKind -> TcM ())              -- kind-checker for RHS
            -> ([TKVar]          -- Kind and type variables
                -> [TcType]      -- Kind and type arguments
                -> Kind -> TcM a)
            -> TcM a
tcFamTyPats fam_shape@(name,_,_) pats kind_checker thing_inside
  = do { (fam_arg_kinds, typats, res_kind)
            <- tc_fam_ty_pats fam_shape pats kind_checker
       ; let all_args = fam_arg_kinds ++ typats

            -- Find free variables (after zonking) and turn
            -- them into skolems, so that we don't subsequently
            -- replace a meta kind var with AnyK
            -- Very like kindGeneralize
       ; qtkvs <- quantifyTyVars emptyVarSet (tyVarsOfTypes all_args)

            -- Zonk the patterns etc into the Type world
       ; (ze, qtkvs') <- zonkTyBndrsX emptyZonkEnv qtkvs
       ; all_args'    <- zonkTcTypeToTypes ze all_args
       ; res_kind'    <- zonkTcTypeToType  ze res_kind

       ; traceTc "tcFamTyPats" (ppr name)
            -- don't print out too much, as we might be in the knot
       ; tcExtendTyVarEnv qtkvs' $
         thing_inside qtkvs' all_args' res_kind' }
{-
Note [Quantifying over family patterns]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We need to quantify over two different lots of kind variables:
First, the ones that come from the kinds of the tyvar args of
tcTyVarBndrsKindGen, as usual
data family Dist a
-- Proxy :: forall k. k -> *
data instance Dist (Proxy a) = DP
-- Generates data DistProxy = DP
-- ax8 k (a::k) :: Dist * (Proxy k a) ~ DistProxy k a
-- The 'k' comes from the tcTyVarBndrsKindGen (a::k)
Second, the ones that come from the kind argument of the type family
which we pick up using the (tyVarsOfTypes typats) in the result of
the thing_inside of tcHsTyvarBndrsGen.
-- Any :: forall k. k
data instance Dist Any = DA
-- Generates data DistAny k = DA
-- ax7 k :: Dist k (Any k) ~ DistAny k
-- The 'k' comes from kindGeneralizeKinds (Any k)
Note [Quantified kind variables of a family pattern]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider type family KindFam (p :: k1) (q :: k1)
data T :: Maybe k1 -> k2 -> *
type instance KindFam (a :: Maybe k) b = T a b -> Int
The HsBSig for the family patterns will be ([k], [a])
Then in the family instance we want to
* Bring into scope [ "k" -> k:BOX, "a" -> a:k ]
* Kind-check the RHS
* Quantify the type instance over k and k', as well as a,b, thus
type instance [k, k', a:Maybe k, b:k']
KindFam (Maybe k) k' a b = T k k' a b -> Int
Notice that in the third step we quantify over all the visibly-mentioned
type variables (a,b), but also over the implicitly mentioned kind variables
(k, k'). In this case one is bound explicitly but often there will be
none. The role of the kind signature (a :: Maybe k) is to add a constraint
that 'a' must have that kind, and to bring 'k' into scope.
************************************************************************
* *
Data types
* *
************************************************************************
-}
-- | Sanity checks on a data/newtype declaration: GADT-syntax and
-- empty-data-decl extensions, no stupid theta with GADT syntax, and
-- newtype single-constructor restriction.  Returns whether the
-- declaration uses GADT syntax.
dataDeclChecks :: Name -> NewOrData -> ThetaType -> [LConDecl Name] -> TcM Bool
dataDeclChecks tc_name new_or_data stupid_theta cons
  = do {   -- Check that we don't use GADT syntax in H98 world
         gadtSyntax_ok <- xoptM Opt_GADTSyntax
       ; let gadt_syntax = consUseGadtSyntax cons
       ; checkTc (gadtSyntax_ok || not gadt_syntax) (badGadtDecl tc_name)

           -- Check that the stupid theta is empty for a GADT-style declaration
       ; checkTc (null stupid_theta || not gadt_syntax) (badStupidTheta tc_name)

         -- Check that a newtype has exactly one constructor
         -- Do this before checking for empty data decls, so that
         -- we don't suggest -XEmptyDataDecls for newtypes
       ; checkTc (new_or_data == DataType || isSingleton cons)
                 (newtypeConError tc_name (length cons))

         -- Check that there's at least one condecl,
         -- or else we're reading an hs-boot file, or -XEmptyDataDecls
       ; empty_data_decls <- xoptM Opt_EmptyDataDecls
       ; is_boot <- tcIsHsBootOrSig  -- Are we compiling an hs-boot file?
       ; checkTc (not (null cons) || empty_data_decls || is_boot)
                 (emptyConDeclsErr tc_name)
       ; return gadt_syntax }
-----------------------------------
-- | Do these constructor declarations use GADT syntax?  It suffices to
-- inspect the first one, because all constructors of a declaration have
-- the same shape.
consUseGadtSyntax :: [LConDecl a] -> Bool
consUseGadtSyntax cons
  = case cons of
      L _ (ConDecl { con_res = ResTyGADT _ _ }) : _ -> True
      _                                             -> False
-----------------------------------
-- | Type-check every constructor declaration of a data/newtype,
-- concatenating the resulting DataCons (a single declaration may bind
-- several constructor names).
tcConDecls :: NewOrData -> TyCon -> ([TyVar], Type)
           -> [LConDecl Name] -> TcM [DataCon]
tcConDecls new_or_data rep_tycon (tmpl_tvs, res_tmpl) cons
  = concatMapM tc_one cons
  where
    -- Check one located constructor declaration
    tc_one = addLocM (tcConDecl new_or_data rep_tycon tmpl_tvs res_tmpl)
-- | Type-check one constructor declaration, producing one DataCon per
-- constructor name it binds.
tcConDecl :: NewOrData
          -> TyCon             -- Representation tycon
          -> [TyVar] -> Type   -- Return type template (with its template tyvars)
                               --    (tvs, T tys), where T is the family TyCon
          -> ConDecl Name
          -> TcM [DataCon]

tcConDecl new_or_data rep_tycon tmpl_tvs res_tmpl        -- Data types
          (ConDecl { con_names = names
                   , con_qvars = hs_tvs, con_cxt = hs_ctxt
                   , con_details = hs_details, con_res = hs_res_ty })
  = addErrCtxt (dataConCtxtName names) $
    do { traceTc "tcConDecl 1" (ppr names)
       ; (ctxt, arg_tys, res_ty, field_lbls, stricts)
           <- tcHsTyVarBndrs hs_tvs $ \ _ ->
              do { ctxt    <- tcHsContext hs_ctxt
                 ; details <- tcConArgs new_or_data hs_details
                 ; res_ty  <- tcConRes hs_res_ty
                 ; let (field_lbls, btys) = details
                       (arg_tys, stricts) = unzip btys
                 ; return (ctxt, arg_tys, res_ty, field_lbls, stricts)
                 }

         -- Generalise the kind variables (returning quantified TcKindVars)
         -- and quantify the type variables (substituting their kinds)
         -- REMEMBER: 'tkvs' are:
         --    ResTyH98:  the *existential* type variables only
         --    ResTyGADT: *all* the quantified type variables
         -- c.f. the comment on con_qvars in HsDecls
       ; tkvs <- case res_ty of
                   ResTyH98 -> quantifyTyVars (mkVarSet tmpl_tvs)
                                              (tyVarsOfTypes (ctxt++arg_tys))
                   ResTyGADT _ res_ty -> quantifyTyVars emptyVarSet
                                                        (tyVarsOfTypes (res_ty:ctxt++arg_tys))

         -- Zonk to Types
       ; (ze, qtkvs) <- zonkTyBndrsX emptyZonkEnv tkvs
       ; arg_tys <- zonkTcTypeToTypes ze arg_tys
       ; ctxt    <- zonkTcTypeToTypes ze ctxt
       ; res_ty  <- case res_ty of
                      ResTyH98        -> return ResTyH98
                      ResTyGADT ls ty -> ResTyGADT ls <$> zonkTcTypeToType ze ty

         -- Figure out the universal/existential split and GADT
         -- equalities; see Note [Checking GADT return types]
       ; let (univ_tvs, ex_tvs, eq_preds, res_ty') = rejigConRes tmpl_tvs res_tmpl qtkvs res_ty

       ; fam_envs <- tcGetFamInstEnvs
       ; let
           buildOneDataCon (L _ name) = do
             { is_infix <- tcConIsInfix name hs_details res_ty
             ; buildDataCon fam_envs name is_infix
                            stricts field_lbls
                            univ_tvs ex_tvs eq_preds ctxt arg_tys
                            res_ty' rep_tycon
               -- NB: we put data_tc, the type constructor gotten from the
               -- constructor type signature into the data constructor;
               -- that way checkValidDataCon can complain if it's wrong.
             }
       ; mapM buildOneDataCon names
       }
-- | Decide whether a data constructor should be treated as infix.
-- H98-style constructors are infix iff declared with infix syntax.
-- GADT-style constructors have no infix declaration syntax, so we use
-- the heuristic in Note [Infix GADT constructors]: operator name, two
-- arguments, and an explicit fixity declaration.
tcConIsInfix :: Name
             -> HsConDetails (LHsType Name) (Located [LConDeclField Name])
             -> ResType Type
             -> TcM Bool
tcConIsInfix _ details ResTyH98
  = return $ case details of
      InfixCon {} -> True
      _           -> False
tcConIsInfix con details (ResTyGADT _ _)
  = case details of
      InfixCon {} -> return True
      RecCon {}   -> return False
      PrefixCon [_, _]              -- exactly two arguments, and ...
        | isSymOcc (getOccName con) -- ... an operator name: consult fixities
        -> do { fix_env <- getFixityEnv
              ; return (con `elemNameEnv` fix_env) }
      PrefixCon _ -> return False
-- | Type-check the argument types of a constructor, returning the field
-- names (empty for non-record constructors) paired one-to-one with the
-- checked argument types and their strictness annotations.
tcConArgs :: NewOrData -> HsConDeclDetails Name
          -> TcM ([Name], [(TcType, HsSrcBang)])
tcConArgs new_or_data (PrefixCon btys)
  = do { btys' <- mapM (tcConArg new_or_data) btys
       ; return ([], btys') }
tcConArgs new_or_data (InfixCon bty1 bty2)
  = do { bty1' <- tcConArg new_or_data bty1
       ; bty2' <- tcConArg new_or_data bty2
       ; return ([], [bty1', bty2']) }
tcConArgs new_or_data (RecCon fields)
  = do { btys' <- mapM (tcConArg new_or_data) btys
       ; return (field_names, btys') }
  where
    -- We need a one-to-one mapping from field_names to btys
    combined = map (\(L _ f) -> (cd_fld_names f,cd_fld_type f)) (unLoc fields)
    -- A single field declaration may bind several names at one type;
    -- flatten to one (name, type) pair per name
    explode (ns,ty) = zip (map unLoc ns) (repeat ty)
    exploded = concatMap explode combined
    (field_names,btys) = unzip exploded
-- | Type-check one constructor argument, pairing the checked type with
-- its source-level strictness/unpack annotation.
tcConArg :: NewOrData -> LHsType Name -> TcM (TcType, HsSrcBang)
tcConArg new_or_data bty
  = do { traceTc "tcConArg 1" (ppr bty)
       ; ty <- tcHsConArgType new_or_data bty
       ; traceTc "tcConArg 2" (ppr bty)
       ; let bang = getBangStrictness bty
       ; return (ty, bang) }
-- | Type-check a constructor's declared result type: nothing to do for
-- H98 style; check the GADT return type as a lifted type otherwise.
tcConRes :: ResType (LHsType Name) -> TcM (ResType Type)
tcConRes ResTyH98              = return ResTyH98
tcConRes (ResTyGADT ls res_ty) = ResTyGADT ls <$> tcHsLiftedType res_ty
{-
Note [Infix GADT constructors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We do not currently have syntax to declare an infix constructor in GADT syntax,
but it makes a (small) difference to the Show instance. So as a slightly
ad-hoc solution, we regard a GADT data constructor as infix if
a) it is an operator symbol
b) it has two arguments
c) there is a fixity declaration for it
For example:
infix 6 (:--:)
data T a where
(:--:) :: t1 -> t2 -> T Int
Note [Checking GADT return types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
There is a delicacy around checking the return types of a datacon. The
central problem is dealing with a declaration like
data T a where
MkT :: a -> Q a
Note that the return type of MkT is totally bogus. When creating the T
tycon, we also need to create the MkT datacon, which must have a "rejigged"
return type. That is, the MkT datacon's type must be transformed to have
a uniform return type with explicit coercions for GADT-like type parameters.
This rejigging is what rejigConRes does. The problem is, though, that checking
that the return type is appropriate is much easier when done over *Type*,
not *HsType*.
So, we want to make rejigConRes lazy and then check the validity of the return
type in checkValidDataCon. But, if the return type is bogus, rejigConRes can't
work -- it will have a failed pattern match. Luckily, if we run
checkValidDataCon before ever looking at the rejigged return type
(checkValidDataCon checks the dataConUserType, which is not rejigged!), we
catch the error before forcing the rejigged type and panicking.
-}
-- Example
-- data instance T (b,c) where
-- TI :: forall e. e -> T (e,e)
--
-- The representation tycon looks like this:
-- data :R7T b c where
-- TI :: forall b1 c1. (b1 ~ c1) => b1 -> :R7T b1 c1
-- In this case orig_res_ty = T (e,e)
-- | "Rejig" a data constructor's result type: split its type variables
-- into universals and existentials and compute the GADT equality
-- predicates relating universals to the declared result type.
-- NB: laziness matters here — callers may be inside the typechecking
-- knot, so the univ/ex/eq components must only be forced after the knot
-- is tied (see Note [Checking GADT return types]).
--
-- Example
--   data instance T (b,c) where
--     TI :: forall e. e -> T (e,e)
--
-- The representation tycon looks like this:
--   data :R7T b c where
--     TI :: forall b1 c1. (b1 ~ c1) => b1 -> :R7T b1 c1
-- In this case orig_res_ty = T (e,e)
rejigConRes :: [TyVar] -> Type  -- Template for result type; e.g.
                                -- data instance T [a] b c = ...
                                --      gives template ([a,b,c], T [a] b c)
            -> [TyVar]          -- where MkT :: forall x y z. ...
            -> ResType Type
            -> ([TyVar],        -- Universal
                [TyVar],        -- Existential (distinct OccNames from univs)
                [(TyVar,Type)], -- Equality predicates
                Type)           -- Typechecked return type
-- We don't check that the TyCon given in the ResTy is
-- the same as the parent tycon, because checkValidDataCon will do it

rejigConRes tmpl_tvs res_ty dc_tvs ResTyH98
  = (tmpl_tvs, dc_tvs, [], res_ty)
    -- In H98 syntax the dc_tvs are the existential ones
    --    data T a b c = forall d e. MkT ...
    -- The {a,b,c} are tc_tvs, and {d,e} are dc_tvs

rejigConRes tmpl_tvs res_tmpl dc_tvs (ResTyGADT _ res_ty)
    -- E.g.  data T [a] b c where
    --         MkT :: forall x y z. T [(x,y)] z z
    -- Then we generate
    --   Univ tyvars   Eq-spec
    --       a           a~(x,y)
    --       b           b~z
    --       z
    -- Existentials are the leftover type vars: [x,y]
    -- So we return ([a,b,z], [x,y], [a~(x,y),b~z], T [(x,y)] z z)
  = (univ_tvs, ex_tvs, eq_spec, res_ty)
  where
    Just subst = tcMatchTy (mkVarSet tmpl_tvs) res_tmpl res_ty
        -- This 'Just' pattern is sure to match, because if not
        -- checkValidDataCon will complain first.
        -- See Note [Checking GADT return types]

    -- /Lazily/ figure out the univ_tvs etc
    -- Each univ_tv is either a dc_tv or a tmpl_tv
    (univ_tvs, eq_spec) = foldr choose ([], []) tmpl_tvs
    choose tmpl (univs, eqs)
      | Just ty <- lookupTyVar subst tmpl
      = case tcGetTyVar_maybe ty of
          Just tv | not (tv `elem` univs)
                  -> (tv:univs,   eqs)
          _other  -> (new_tmpl:univs, (new_tmpl,ty):eqs)
                  where  -- see Note [Substitution in template variables kinds]
                    new_tmpl = updateTyVarKind (substTy subst) tmpl
      -- Fixed stale panic tag: this function was once called
      -- tcResultType; the message now names the actual function
      | otherwise = pprPanic "rejigConRes" (ppr res_ty)
    ex_tvs = dc_tvs `minusList` univ_tvs
{-
Note [Substitution in template variables kinds]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
data List a = Nil | Cons a (List a)
data SList s as where
SNil :: SList s Nil
We call rejigConRes with
tmpl_tvs = [(k :: BOX), (s :: k -> *), (as :: List k)]
res_tmpl = SList k s as
res_ty = ResTyGADT (SList k1 (s1 :: k1 -> *) (Nil k1))
We get subst:
k -> k1
s -> s1
as -> Nil k1
Now we want to find out the universal variables and the equivalences
between some of them and types (GADT).
In this example, k and s are mapped to plain variables which are not
already present in the universal set, so we just add them without any
coercion.
But 'as' is mapped to 'Nil k1', so we add 'as' to the universal set,
and add the equivalence with 'Nil k1' in 'eqs'.
The problem is that with kind polymorphism, as's kind may now contain
kind variables, and we have to apply the template substitution to it,
which is why we create new_tmpl.
The template substitution only maps kind variables to kind variables,
since GADTs are not kind indexed.
************************************************************************
* *
Validity checking
* *
************************************************************************
Validity checking is done once the mutually-recursive knot has been
tied, so we can look at things freely.
-}
-- | Report an error for each superclass cycle involving this class.
checkClassCycleErrs :: Class -> TcM ()
checkClassCycleErrs = mapM_ recClsErr . calcClassCycles
-- | Validity-check one TyThing produced by type-checking a declaration.
-- Runs after the recursive knot is tied, so it may freely inspect types.
checkValidTyCl :: TyThing -> TcM ()
checkValidTyCl thing
  = setSrcSpan (getSrcSpan thing) $
    addTyThingCtxt thing $
    case thing of
      ATyCon tc -> checkValidTyCon tc
      AnId _    -> return ()  -- Generic default methods are checked
                              -- with their parent class
      ACoAxiom _ -> return () -- Axioms checked with their parent
                              -- closed family tycon
      -- Anything else is unexpected here; trace rather than crash
      _         -> pprTrace "checkValidTyCl" (ppr thing) $ return ()
-------------------------
-- For data types declared with record syntax, we require
-- that each constructor that has a field 'f'
--      (a) has the same result type
--      (b) has the same type for 'f'
-- modulo alpha conversion of the quantified type variables
-- of the constructor.
--
-- Note that we allow existentials to match because the
-- fields can never meet. E.g
--      data T where
--        T1 { f1 :: b, f2 :: a, f3 ::Int } :: T
--        T2 { f1 :: c, f2 :: c, f3 ::Int } :: T
-- Here we do not complain about f1,f2 because they are existential

-- | Validity-check one TyCon: dispatches on whether it is a class, a
-- type synonym, a type family, or a plain data/newtype TyCon.
checkValidTyCon :: TyCon -> TcM ()
checkValidTyCon tc
  | Just cl <- tyConClass_maybe tc
  = checkValidClass cl

  | Just syn_rhs <- synTyConRhs_maybe tc
  = checkValidType syn_ctxt syn_rhs

  | Just fam_flav <- famTyConFlav_maybe tc
  = case fam_flav of
    { ClosedSynFamilyTyCon ax -> checkValidClosedCoAxiom ax
    ; AbstractClosedSynFamilyTyCon ->
      -- Omitting the equations is legal only in an hs-boot file
      do { hsBoot <- tcIsHsBootOrSig
         ; checkTc hsBoot $
           ptext (sLit "You may omit the equations in a closed type family") $$
           ptext (sLit "only in a .hs-boot file") }
    ; OpenSynFamilyTyCon -> return ()
    ; BuiltInSynFamTyCon _ -> return () }

  | otherwise
  = do { -- Check the context on the data decl
         traceTc "cvtc1" (ppr tc)
       ; checkValidTheta (DataTyCtxt name) (tyConStupidTheta tc)

       ; traceTc "cvtc2" (ppr tc)
       ; dflags <- getDynFlags
       ; existential_ok <- xoptM Opt_ExistentialQuantification
       ; gadt_ok        <- xoptM Opt_GADTs
       ; let ex_ok = existential_ok || gadt_ok  -- Data cons can have existential context
       ; mapM_ (checkValidDataCon dflags ex_ok tc) data_cons

         -- Check that fields with the same name share a type
       ; mapM_ check_fields groups }

  where
    syn_ctxt  = TySynCtxt name
    name      = tyConName tc
    data_cons = tyConDataCons tc

    groups = equivClasses cmp_fld (concatMap get_fields data_cons)
    cmp_fld (f1,_) (f2,_) = f1 `compare` f2
    get_fields con = dataConFieldLabels con `zip` repeat con
        -- dataConFieldLabels may return the empty list, which is fine
        -- See Note [GADT record selectors] in MkId.lhs

    -- We must check (a) that the named field has the same
    --                   type in each constructor
    --               (b) that those constructors have the same result type
    --
    -- However, the constructors may have differently named type variable
    -- and (worse) we don't know how they correspond to each other. E.g.
    --     C1 :: forall a b. { f :: a, g :: b } -> T a b
    --     C2 :: forall d c. { f :: c, g :: c } -> T c d
    --
    -- So what we do is to use Unify.tcMatchTys to compare the first candidate's
    -- result type against other candidates' types BOTH WAYS ROUND.
    -- If they magically agree, take the substitution and
    -- apply them to the latter ones, and see if they match perfectly.
    check_fields ((label, con1) : other_fields)
        -- These fields all have the same name, but are from
        -- different constructors in the data type
        = recoverM (return ()) $ mapM_ checkOne other_fields
                -- Check that all the fields in the group have the same type
                -- NB: this check assumes that all the constructors of a given
                -- data type use the same type variables
        where
          (tvs1, _, _, res1) = dataConSig con1
          ts1 = mkVarSet tvs1
          fty1 = dataConFieldType con1 label

          checkOne (_, con2)    -- Do it bothways to ensure they are structurally identical
            = do { checkFieldCompat label con1 con2 ts1 res1 res2 fty1 fty2
                 ; checkFieldCompat label con2 con1 ts2 res2 res1 fty2 fty1 }
            where
              (tvs2, _, _, res2) = dataConSig con2
              ts2 = mkVarSet tvs2
              fty2 = dataConFieldType con2 label
    check_fields [] = panic "checkValidTyCon/check_fields []"
-- | Validity-check the axiom of a closed type family: each branch must
-- be a valid family instance, and we warn about branches made
-- inaccessible by earlier ones.
checkValidClosedCoAxiom :: CoAxiom Branched -> TcM ()
checkValidClosedCoAxiom (CoAxiom { co_ax_branches = branches, co_ax_tc = tc })
 = tcAddClosedTypeFamilyDeclCtxt tc $
   do { brListFoldlM_ check_accessibility [] branches
      ; void $ brListMapM (checkValidTyFamInst Nothing tc) branches }
   where
     check_accessibility :: [CoAxBranch]       -- prev branches (in reverse order)
                         -> CoAxBranch         -- cur branch
                         -> TcM [CoAxBranch]   -- cur : prev
     -- Check whether the branch is dominated by earlier
     -- ones and hence is inaccessible
     check_accessibility prev_branches cur_branch
       = do { when (cur_branch `isDominatedBy` prev_branches) $
              addWarnAt (coAxBranchSpan cur_branch) $
              inaccessibleCoAxBranch tc cur_branch
            ; return (cur_branch : prev_branches) }
-- | Check that two constructors sharing a record field 'fld' have
-- matching result types and matching field types (matched one way;
-- the caller invokes this both ways round for structural identity).
checkFieldCompat :: Name -> DataCon -> DataCon -> TyVarSet
                 -> Type -> Type -> Type -> Type -> TcM ()
checkFieldCompat fld con1 con2 tvs1 res1 res2 fty1 fty2
  = do { checkTc (isJust mb_subst1) (resultTypeMisMatch fld con1 con2)
       ; checkTc (isJust mb_subst2) (fieldTypeMisMatch fld con1 con2) }
  where
    -- First match the result types, then extend that substitution by
    -- matching the field types
    mb_subst1 = tcMatchTy tvs1 res1 res2
    mb_subst2 = tcMatchTyX tvs1 (expectJust "checkFieldCompat" mb_subst1) fty1 fty2
-------------------------------
-- | Validity-check a single data constructor: its return type must be
-- an instance of the parent tycon's type, the result type must be a
-- monotype, all argument types must be valid, and UNPACK\/bang
-- annotations, existentials, and GADT-style kind refinement are
-- checked.  The Bool says whether existentials are permitted.
checkValidDataCon :: DynFlags -> Bool -> TyCon -> DataCon -> TcM ()
checkValidDataCon dflags existential_ok tc con
= setSrcSpan (srcLocSpan (getSrcLoc con)) $
addErrCtxt (dataConCtxt con) $
do { -- Check that the return type of the data constructor
-- matches the type constructor; eg reject this:
-- data T a where { MkT :: Bogus a }
-- c.f. Note [Check role annotations in a second pass]
-- and Note [Checking GADT return types]
let tc_tvs = tyConTyVars tc
res_ty_tmpl = mkFamilyTyConApp tc (mkTyVarTys tc_tvs)
orig_res_ty = dataConOrigResTy con
; traceTc "checkValidDataCon" (vcat
[ ppr con, ppr tc, ppr tc_tvs
, ppr res_ty_tmpl <+> dcolon <+> ppr (typeKind res_ty_tmpl)
, ppr orig_res_ty <+> dcolon <+> ppr (typeKind orig_res_ty)])
; checkTc (isJust (tcMatchTy (mkVarSet tc_tvs)
res_ty_tmpl
orig_res_ty))
(badDataConTyCon con res_ty_tmpl orig_res_ty)
-- Check that the result type is a *monotype*
-- e.g. reject this: MkT :: T (forall a. a->a)
-- Reason: it's really the argument of an equality constraint
; checkValidMonoType orig_res_ty
-- Check all argument types for validity
; checkValidType ctxt (dataConUserType con)
-- Extra checks for newtype data constructors
; when (isNewTyCon tc) (checkNewDataCon con)
-- Check that UNPACK pragmas and bangs work out
-- E.g. reject data T = MkT {-# UNPACK #-} Int -- No "!"
-- data T = MkT {-# UNPACK #-} !a -- Can't unpack
; mapM_ check_bang (zip3 (dataConSrcBangs con) (dataConImplBangs con) [1..])
-- Check that existentials are allowed if they are used
; checkTc (existential_ok || isVanillaDataCon con)
(badExistential con)
-- Check that we aren't doing GADT type refinement on kind variables
-- e.g reject data T (a::k) where
-- T1 :: T Int
-- T2 :: T Maybe
; checkTc (not (any (isKindVar . fst) (dataConEqSpec con)))
(badGadtKindCon con)
; traceTc "Done validity of data con" (ppr con <+> ppr (dataConRepType con))
}
where
ctxt = ConArgCtxt (dataConName con)
-- Warn about UNPACK pragmas that will have no effect
check_bang (HsSrcBang _ (Just want_unpack) has_bang, rep_bang, n)
| want_unpack, not has_bang
= addWarnTc (bad_bang n (ptext (sLit "UNPACK pragma lacks '!'")))
| want_unpack
, case rep_bang of { HsUnpack {} -> False; _ -> True }
, not (gopt Opt_OmitInterfacePragmas dflags)
-- If not optimising, we don't unpack, so don't complain!
-- See MkId.dataConArgRep, the (HsBang True) case
= addWarnTc (bad_bang n (ptext (sLit "Ignoring unusable UNPACK pragma")))
check_bang _
= return ()
bad_bang n herald
= hang herald 2 (ptext (sLit "on the") <+> speakNth n
<+> ptext (sLit "argument of") <+> quotes (ppr con))
-------------------------------
checkNewDataCon :: DataCon -> TcM ()
-- Further checks for the data constructor of a newtype:
-- exactly one field, no GADT-style equalities, no context,
-- no existentials, and no strictness annotations.
checkNewDataCon con
= do { checkTc (isSingleton arg_tys) (newtypeFieldErr con (length arg_tys))
-- One argument
; check_con (null eq_spec) $
ptext (sLit "A newtype constructor must have a return type of form T a1 ... an")
-- Return type is (T a b c)
; check_con (null theta) $
ptext (sLit "A newtype constructor cannot have a context in its type")
; check_con (null ex_tvs) $
ptext (sLit "A newtype constructor cannot have existential type variables")
-- No existentials
; checkTc (not (any isBanged (dataConSrcBangs con)))
(newtypeStrictError con)
-- No strictness
}
where
(_univ_tvs, ex_tvs, eq_spec, theta, arg_tys, _res_ty) = dataConFullSig con
-- Report msg, decorated with the constructor's type, if 'what' is False
check_con what msg
= checkTc what (msg $$ ppr con <+> dcolon <+> ppr (dataConUserType con))
-------------------------------
-- | Validity-check a class declaration: arity (w.r.t. the enabled
-- extensions), functional dependencies, superclass context, superclass
-- cycles, the types of the class methods, and any associated-type
-- default equations.
checkValidClass :: Class -> TcM ()
checkValidClass cls
= do { constrained_class_methods <- xoptM Opt_ConstrainedClassMethods
; multi_param_type_classes <- xoptM Opt_MultiParamTypeClasses
; nullary_type_classes <- xoptM Opt_NullaryTypeClasses
; fundep_classes <- xoptM Opt_FunctionalDependencies
-- Check that the class is unary, unless multiparameter type classes
-- are enabled; also recognize deprecated nullary type classes
-- extension (subsumed by multiparameter type classes, Trac #8993)
; checkTc (multi_param_type_classes || cls_arity == 1 ||
(nullary_type_classes && cls_arity == 0))
(classArityErr cls_arity cls)
; checkTc (fundep_classes || null fundeps) (classFunDepsErr cls)
-- Check the super-classes
; checkValidTheta (ClassSCCtxt (className cls)) theta
-- Now check for cyclic superclasses
-- If there are superclass cycles, checkClassCycleErrs bails.
; checkClassCycleErrs cls
-- Check the class operations.
-- But only if there have been no earlier errors
-- See Note [Abort when superclass cycle is detected]
; whenNoErrs $
mapM_ (check_op constrained_class_methods) op_stuff
-- Check the associated type defaults are well-formed and instantiated
; mapM_ check_at_defs at_stuff }
where
(tyvars, fundeps, theta, _, at_stuff, op_stuff) = classExtraBigSig cls
cls_arity = count isTypeVar tyvars -- Ignore kind variables
cls_tv_set = mkVarSet tyvars
mini_env = zipVarEnv tyvars (mkTyVarTys tyvars)
-- Check one class method: its context, its type, and that it
-- mentions at least one class variable
check_op constrained_class_methods (sel_id, dm)
= addErrCtxt (classOpCtxt sel_id tau) $ do
{ checkValidTheta ctxt (tail theta)
-- The 'tail' removes the initial (C a) from the
-- class itself, leaving just the method type
; traceTc "class op type" (ppr op_ty <+> ppr tau)
; checkValidType ctxt tau
-- Check that the method type mentions a class variable
-- But actually check that the variables *reachable from*
-- the method type include a class variable.
-- Example: tc223
-- class Error e => Game b mv e | b -> mv e where
-- newBoard :: MonadState b m => m ()
-- Here, MonadState has a fundep m->b, so newBoard is fine
; check_mentions (growThetaTyVars theta (tyVarsOfType tau))
(ptext (sLit "class method") <+> quotes (ppr sel_id))
; case dm of
GenDefMeth dm_name -> do { dm_id <- tcLookupId dm_name
; checkValidType (FunSigCtxt op_name) (idType dm_id) }
_ -> return ()
}
where
ctxt = FunSigCtxt op_name
op_name = idName sel_id
op_ty = idType sel_id
(_,theta1,tau1) = tcSplitSigmaTy op_ty
(_,theta2,tau2) = tcSplitSigmaTy tau1
(theta,tau) | constrained_class_methods = (theta1 ++ theta2, tau2)
| otherwise = (theta1, mkPhiTy (tail theta1) tau1)
-- Ugh! The function might have a type like
-- op :: forall a. C a => forall b. (Eq b, Eq a) => tau2
-- With -XConstrainedClassMethods, we want to allow this, even though the inner
-- forall has an (Eq a) constraint. Whereas in general, each constraint
-- in the context of a for-all must mention at least one quantified
-- type variable. What a mess!
-- Check one associated-type default equation
check_at_defs (ATI fam_tc m_dflt_rhs)
= do { check_mentions (mkVarSet fam_tvs) $
ptext (sLit "associated type") <+> quotes (ppr fam_tc)
; whenIsJust m_dflt_rhs $ \ (rhs, loc) ->
checkValidTyFamEqn (Just (cls, mini_env)) fam_tc
fam_tvs (mkTyVarTys fam_tvs) rhs loc }
where
fam_tvs = tyConTyVars fam_tc
check_mentions :: TyVarSet -> SDoc -> TcM ()
-- Check that the thing (method or associated type) mentions at least
-- one of the class type variables
-- The check is disabled for nullary type classes,
-- since there is no possible ambiguity (Trac #10020)
check_mentions thing_tvs thing_doc
= checkTc (cls_arity == 0 || thing_tvs `intersectsVarSet` cls_tv_set)
(noClassTyVarErr cls thing_doc)
checkFamFlag :: Name -> TcM ()
-- Check that we don't use families without -XTypeFamilies
-- The parser won't even parse them, but I suppose a GHC API
-- client might have a go!
checkFamFlag tc_name
= do { idx_tys <- xoptM Opt_TypeFamilies
; checkTc idx_tys err_msg }
where
-- Error message names the offending family and the fix
err_msg = hang (ptext (sLit "Illegal family declaration for") <+> quotes (ppr tc_name))
2 (ptext (sLit "Use TypeFamilies to allow indexed type families"))
{-
Note [Abort when superclass cycle is detected]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We must avoid doing the ambiguity check for the methods (in
checkValidClass.check_op) when there are already errors accumulated.
This is because one of the errors may be a superclass cycle, and
superclass cycles cause canonicalization to loop. Here is a
representative example:
class D a => C a where
meth :: D a => ()
class C a => D a
This fixes Trac #9415, #9739
************************************************************************
* *
Checking role validity
* *
************************************************************************
-}
-- | Validity-check any role annotation attached to this 'TyThing'.
-- Only algebraic tycons may carry role annotations; type synonyms and
-- family tycons must not have one.  Anything other than a tycon is
-- ignored.
checkValidRoleAnnots :: RoleAnnots -> TyThing -> TcM ()
checkValidRoleAnnots role_annots thing
= case thing of
{ ATyCon tc
| isTypeSynonymTyCon tc -> check_no_roles
| isFamilyTyCon tc -> check_no_roles
| isAlgTyCon tc -> check_roles
where
name = tyConName tc
-- Role annotations are given only on *type* variables, but a tycon stores
-- roles for all variables. So, we drop the kind roles (which are all
-- Nominal, anyway).
tyvars = tyConTyVars tc
roles = tyConRoles tc
(kind_vars, type_vars) = span isKindVar tyvars
type_roles = dropList kind_vars roles
role_annot_decl_maybe = lookupRoleAnnots role_annots name
-- Annotation present: check the extension is on, the arity
-- matches, each role agrees with the inferred one, and that
-- non-nominal class-parameter roles require IncoherentInstances
check_roles
= whenIsJust role_annot_decl_maybe $
\decl@(L loc (RoleAnnotDecl _ the_role_annots)) ->
addRoleAnnotCtxt name $
setSrcSpan loc $ do
{ role_annots_ok <- xoptM Opt_RoleAnnotations
; checkTc role_annots_ok $ needXRoleAnnotations tc
; checkTc (type_vars `equalLength` the_role_annots)
(wrongNumberOfRoles type_vars decl)
; _ <- zipWith3M checkRoleAnnot type_vars the_role_annots type_roles
-- Representational or phantom roles for class parameters
-- quickly lead to incoherence. So, we require
-- IncoherentInstances to have them. See #8773.
; incoherent_roles_ok <- xoptM Opt_IncoherentInstances
; checkTc ( incoherent_roles_ok
|| (not $ isClassTyCon tc)
|| (all (== Nominal) type_roles))
incoherentRoles
; lint <- goptM Opt_DoCoreLinting
; when lint $ checkValidRoles tc }
-- Annotation must be absent for synonyms and families
check_no_roles
= whenIsJust role_annot_decl_maybe illegalRoleAnnotDecl
; _ -> return () }
-- | Compare one user-written role annotation against the inferred role
-- for the same type variable, reporting an error on mismatch.  A
-- missing annotation ('Nothing') is always accepted.
checkRoleAnnot :: TyVar -> Located (Maybe Role) -> Role -> TcM ()
checkRoleAnnot tv (L _ m_annot) inferred
= case m_annot of
Nothing -> return ()
Just annotated
| annotated == inferred -> return ()
| otherwise -> addErrTc (badRoleAnnot (tyVarName tv) annotated inferred)
-- This is a double-check on the role inference algorithm. It is only run when
-- -dcore-lint is enabled. See Note [Role inference] in TcTyDecls
-- | Sanity-check the inferred roles of a tycon by re-checking every
-- data constructor (or synonym RHS) against the role environment.
-- Only run under -dcore-lint; see Note [Role inference] in TcTyDecls.
checkValidRoles :: TyCon -> TcM ()
-- If you edit this function, you may need to update the GHC formalism
-- See Note [GHC Formalism] in CoreLint
checkValidRoles tc
| isAlgTyCon tc
-- tyConDataCons returns an empty list for data families
= mapM_ check_dc_roles (tyConDataCons tc)
| Just rhs <- synTyConRhs_maybe tc
= check_ty_roles (zipVarEnv (tyConTyVars tc) (tyConRoles tc)) Representational rhs
| otherwise
= return ()
where
-- Check every type in a constructor's signature at role Representational
check_dc_roles datacon
= do { traceTc "check_dc_roles" (ppr datacon <+> ppr (tyConRoles tc))
; mapM_ (check_ty_roles role_env Representational) $
eqSpecPreds eq_spec ++ theta ++ arg_tys }
-- See Note [Role-checking data constructor arguments] in TcTyDecls
where
(univ_tvs, ex_tvs, eq_spec, theta, arg_tys, _res_ty) = dataConFullSig datacon
univ_roles = zipVarEnv univ_tvs (tyConRoles tc)
-- zipVarEnv uses zipEqual, but we don't want that for ex_tvs
ex_roles = mkVarEnv (zip ex_tvs (repeat Nominal))
role_env = univ_roles `plusVarEnv` ex_roles
-- A variable may only be used at its assigned role or a sub-role
check_ty_roles env role (TyVarTy tv)
= case lookupVarEnv env tv of
Just role' -> unless (role' `ltRole` role || role' == role) $
report_error $ ptext (sLit "type variable") <+> quotes (ppr tv) <+>
ptext (sLit "cannot have role") <+> ppr role <+>
ptext (sLit "because it was assigned role") <+> ppr role'
Nothing -> report_error $ ptext (sLit "type variable") <+> quotes (ppr tv) <+>
ptext (sLit "missing in environment")
-- TyConApp at Representational: arguments checked at the tycon's own roles
check_ty_roles env Representational (TyConApp tc tys)
= let roles' = tyConRoles tc in
zipWithM_ (maybe_check_ty_roles env) roles' tys
-- TyConApp at Nominal: every argument must also be Nominal
check_ty_roles env Nominal (TyConApp _ tys)
= mapM_ (check_ty_roles env Nominal) tys
check_ty_roles _ Phantom ty@(TyConApp {})
= pprPanic "check_ty_roles" (ppr ty)
-- AppTy: the argument of an application is always Nominal
check_ty_roles env role (AppTy ty1 ty2)
= check_ty_roles env role ty1
>> check_ty_roles env Nominal ty2
check_ty_roles env role (FunTy ty1 ty2)
= check_ty_roles env role ty1
>> check_ty_roles env role ty2
-- A forall-bound variable is Nominal inside its body
check_ty_roles env role (ForAllTy tv ty)
= check_ty_roles (extendVarEnv env tv Nominal) role ty
check_ty_roles _ _ (LitTy {}) = return ()
-- Phantom arguments need no checking at all
maybe_check_ty_roles env role ty
= when (role == Nominal || role == Representational) $
check_ty_roles env role ty
report_error doc
= addErrTc $ vcat [ptext (sLit "Internal error in role inference:"),
doc,
ptext (sLit "Please report this as a GHC bug: http://www.haskell.org/ghc/reportabug")]
{-
************************************************************************
* *
Building record selectors
* *
************************************************************************
-}
mkDefaultMethodIds :: [TyThing] -> [Id]
-- See Note [Default method Ids and Template Haskell]
-- For every class among 'things' with a default method, build an
-- exported Id for the default method, sharing the selector's type.
mkDefaultMethodIds things
= [ mkExportedLocalId VanillaId dm_name (idType sel_id)
| ATyCon tc <- things
, Just cls <- [tyConClass_maybe tc]
, (sel_id, DefMeth dm_name) <- classOpItems cls ]
{-
Note [Default method Ids and Template Haskell]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this (Trac #4169):
class Numeric a where
fromIntegerNum :: a
fromIntegerNum = ...
ast :: Q [Dec]
ast = [d| instance Numeric Int |]
When we typecheck 'ast' we have done the first pass over the class decl
(in tcTyClDecls), but we have not yet typechecked the default-method
declarations (because they can mention value declarations). So we
must bring the default method Ids into scope first (so they can be seen
when typechecking the [d| .. |] quote), and typecheck them later.
-}
mkRecSelBinds :: [TyThing] -> HsValBinds Name
-- NB We produce *un-typechecked* bindings, rather like 'deriving'
-- This makes life easier, because the later type checking will add
-- all necessary type abstractions and applications
-- One (signature, binding) pair is made per record field of each tycon.
mkRecSelBinds tycons
= ValBindsOut [(NonRecursive, b) | b <- binds] sigs
where
(sigs, binds) = unzip rec_sels
rec_sels = map mkRecSelBind [ (tc,fld)
| ATyCon tc <- tycons
, fld <- tyConFields tc ]
-- | Build the signature and binding for one record selector.
-- \"Naughty\" selectors (whose field type mentions an existential
-- variable) get a dummy () binding; see Note [Naughty record selectors].
mkRecSelBind :: (TyCon, FieldLabel) -> (LSig Name, LHsBinds Name)
mkRecSelBind (tycon, sel_name)
= (L loc (IdSig sel_id), unitBag (L loc sel_bind))
where
loc = getSrcSpan sel_name
sel_id = mkExportedLocalId rec_details sel_name sel_ty
rec_details = RecSelId { sel_tycon = tycon, sel_naughty = is_naughty }
-- Find a representative constructor, con1
all_cons = tyConDataCons tycon
cons_w_field = [ con | con <- all_cons
, sel_name `elem` dataConFieldLabels con ]
con1 = ASSERT( not (null cons_w_field) ) head cons_w_field
-- Selector type; Note [Polymorphic selectors]
field_ty = dataConFieldType con1 sel_name
data_ty = dataConOrigResTy con1
data_tvs = tyVarsOfType data_ty
-- Naughty iff the field type mentions a variable not free in the
-- result type (i.e. an existential would escape)
is_naughty = not (tyVarsOfType field_ty `subVarSet` data_tvs)
(field_tvs, field_theta, field_tau) = tcSplitSigmaTy field_ty
sel_ty | is_naughty = unitTy -- See Note [Naughty record selectors]
| otherwise = mkForAllTys (varSetElemsKvsFirst $
data_tvs `extendVarSetList` field_tvs) $
mkPhiTy (dataConStupidTheta con1) $ -- Urgh!
mkPhiTy field_theta $ -- Urgh!
mkFunTy data_ty field_tau
-- Make the binding: sel (C2 { fld = x }) = x
-- sel (C7 { fld = x }) = x
-- where cons_w_field = [C2,C7]
sel_bind = mkTopFunBind Generated sel_lname alts
where
alts | is_naughty = [mkSimpleMatch [] unit_rhs]
| otherwise = map mk_match cons_w_field ++ deflt
-- One equation per constructor that has the field
mk_match con = mkSimpleMatch [L loc (mk_sel_pat con)]
(L loc (HsVar field_var))
mk_sel_pat con = ConPatIn (L loc (getName con)) (RecCon rec_fields)
rec_fields = HsRecFields { rec_flds = [rec_field], rec_dotdot = Nothing }
rec_field = noLoc (HsRecField { hsRecFieldId = sel_lname
, hsRecFieldArg = L loc (VarPat field_var)
, hsRecPun = False })
sel_lname = L loc sel_name
field_var = mkInternalName (mkBuiltinUnique 1) (getOccName sel_name) loc
-- Add catch-all default case unless the case is exhaustive
-- We do this explicitly so that we get a nice error message that
-- mentions this particular record selector
deflt | all dealt_with all_cons = []
| otherwise = [mkSimpleMatch [L loc (WildPat placeHolderType)]
(mkHsApp (L loc (HsVar (getName rEC_SEL_ERROR_ID)))
(L loc (HsLit msg_lit)))]
-- Do not add a default case unless there are unmatched
-- constructors. We must take account of GADTs, else we
-- get overlap warning messages from the pattern-match checker
-- NB: we need to pass type args for the *representation* TyCon
-- to dataConCannotMatch, hence the calculation of inst_tys
-- This matters in data families
-- data instance T Int a where
-- A :: { fld :: Int } -> T Int Bool
-- B :: { fld :: Int } -> T Int Char
dealt_with con = con `elem` cons_w_field || dataConCannotMatch inst_tys con
inst_tys = substTyVars (mkTopTvSubst (dataConEqSpec con1)) (dataConUnivTyVars con1)
unit_rhs = mkLHsTupleExpr []
msg_lit = HsStringPrim "" $ unsafeMkByteString $
occNameString (getOccName sel_name)
---------------
-- | All record field labels occurring in the constructors of a 'TyCon',
-- without duplicates.  Non-algebraic tycons have no fields.
tyConFields :: TyCon -> [FieldLabel]
tyConFields tc =
if isAlgTyCon tc
then nub [ fld | dc <- tyConDataCons tc, fld <- dataConFieldLabels dc ]
else []
{-
Note [Polymorphic selectors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When a record has a polymorphic field, we pull the foralls out to the front.
data T = MkT { f :: forall a. [a] -> a }
Then f :: forall a. T -> [a] -> a
NOT f :: T -> forall a. [a] -> a
This is horrid. It's only needed in deeply obscure cases, which I hate.
The only case I know is test tc163, which is worth looking at. It's far
from clear that this test should succeed at all!
Note [Naughty record selectors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
A "naughty" field is one for which we can't define a record
selector, because an existential type variable would escape. For example:
data T = forall a. MkT { x,y::a }
We obviously can't define
x (MkT v _) = v
Nevertheless we *do* put a RecSelId into the type environment
so that if the user tries to use 'x' as a selector we can bleat
helpfully, rather than saying unhelpfully that 'x' is not in scope.
Hence the sel_naughty flag, to identify record selectors that don't really exist.
In general, a field is "naughty" if its type mentions a type variable that
isn't in the result type of the constructor. Note that this *allows*
GADT record selectors (Note [GADT record selectors]) whose types may look
like sel :: T [a] -> a
For naughty selectors we make a dummy binding
sel = ()
so that the later type-check will add them to the
environment, and they'll be exported. The function is never called, because
the typechecker spots the sel_naughty field.
Note [GADT record selectors]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For GADTs, we require that all constructors with a common field 'f' have the same
result type (modulo alpha conversion). [Checked in TcTyClsDecls.checkValidTyCon]
E.g.
data T where
T1 { f :: Maybe a } :: T [a]
T2 { f :: Maybe a, y :: b } :: T [a]
T3 :: T Int
and now the selector takes that result type as its argument:
f :: forall a. T [a] -> Maybe a
Details: the "real" types of T1,T2 are:
T1 :: forall r a. (r~[a]) => a -> T r
T2 :: forall r a b. (r~[a]) => a -> b -> T r
So the selector looks like this:
f :: forall a. T [a] -> Maybe a
f (a:*) (t:T [a])
= case t of
T1 c (g:[a]~[c]) (v:Maybe c) -> v `cast` Maybe (right (sym g))
T2 c d (g:[a]~[c]) (v:Maybe c) (w:d) -> v `cast` Maybe (right (sym g))
T3 -> error "T3 does not have field f"
Note the forall'd tyvars of the selector are just the free tyvars
of the result type; there may be other tyvars in the constructor's
type (e.g. 'b' in T2).
Note the need for casts in the result!
Note [Selector running example]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It's OK to combine GADTs and type families. Here's a running example:
data instance T [a] where
T1 { fld :: b } :: T [Maybe b]
The representation type looks like this
data :R7T a where
T1 { fld :: b } :: :R7T (Maybe b)
and there's coercion from the family type to the representation type
:CoR7T a :: T [a] ~ :R7T a
The selector we want for fld looks like this:
fld :: forall b. T [Maybe b] -> b
fld = /\b. \(d::T [Maybe b]).
case d `cast` :CoR7T (Maybe b) of
T1 (x::b) -> x
The scrutinee of the case has type :R7T (Maybe b), which can be
gotten by applying the eq_spec to the univ_tvs of the data con.
************************************************************************
* *
Error messages
* *
************************************************************************
-}
-- | Add \"In the type instance declaration for ...\" to the error context.
tcAddTyFamInstCtxt :: TyFamInstDecl Name -> TcM a -> TcM a
tcAddTyFamInstCtxt decl
= tcAddFamInstCtxt (ptext (sLit "type instance")) (tyFamInstDeclName decl)
-- | Add \"In the data\/newtype instance declaration for ...\" to the error context.
tcAddDataFamInstCtxt :: DataFamInstDecl Name -> TcM a -> TcM a
tcAddDataFamInstCtxt decl
= tcAddFamInstCtxt (pprDataFamInstFlavour decl <+> ptext (sLit "instance"))
(unLoc (dfid_tycon decl))
-- | Shared worker for the two context-adders above.
tcAddFamInstCtxt :: SDoc -> Name -> TcM a -> TcM a
tcAddFamInstCtxt flavour tycon thing_inside
= addErrCtxt ctxt thing_inside
where
ctxt = hsep [ptext (sLit "In the") <+> flavour
<+> ptext (sLit "declaration for"),
quotes (ppr tycon)]
-- | Add \"In the equations for closed type family ...\" to the error context.
tcAddClosedTypeFamilyDeclCtxt :: TyCon -> TcM a -> TcM a
tcAddClosedTypeFamilyDeclCtxt tc
= addErrCtxt ctxt
where
ctxt = ptext (sLit "In the equations for closed type family") <+>
quotes (ppr tc)
-- | Two constructors share a field but have different result types.
resultTypeMisMatch :: Name -> DataCon -> DataCon -> SDoc
resultTypeMisMatch field_name con1 con2
= vcat [sep [ptext (sLit "Constructors") <+> ppr con1 <+> ptext (sLit "and") <+> ppr con2,
ptext (sLit "have a common field") <+> quotes (ppr field_name) <> comma],
nest 2 $ ptext (sLit "but have different result types")]
-- | Two constructors give different types to the same field.
fieldTypeMisMatch :: Name -> DataCon -> DataCon -> SDoc
fieldTypeMisMatch field_name con1 con2
= sep [ptext (sLit "Constructors") <+> ppr con1 <+> ptext (sLit "and") <+> ppr con2,
ptext (sLit "give different types for field"), quotes (ppr field_name)]
-- | Context doc naming one or several data constructors.
dataConCtxtName :: [Located Name] -> SDoc
dataConCtxtName [con]
= ptext (sLit "In the definition of data constructor") <+> quotes (ppr con)
dataConCtxtName con
= ptext (sLit "In the definition of data constructors") <+> interpp'SP con
-- | Context doc naming a single data constructor.
dataConCtxt :: Outputable a => a -> SDoc
dataConCtxt con = ptext (sLit "In the definition of data constructor") <+> quotes (ppr con)
-- | Context doc for checking a class method's type.
classOpCtxt :: Var -> Type -> SDoc
classOpCtxt sel_id tau = sep [ptext (sLit "When checking the class method:"),
nest 2 (pprPrefixOcc sel_id <+> dcolon <+> ppr tau)]
-- | Class has zero or too many parameters for the enabled extensions.
classArityErr :: Int -> Class -> SDoc
classArityErr n cls
| n == 0 = mkErr "No" "no-parameter"
| otherwise = mkErr "Too many" "multi-parameter"
where
mkErr howMany allowWhat =
vcat [ptext (sLit $ howMany ++ " parameters for class") <+> quotes (ppr cls),
parens (ptext (sLit $ "Use MultiParamTypeClasses to allow "
++ allowWhat ++ " classes"))]
-- | Functional dependencies used without -XFunctionalDependencies.
classFunDepsErr :: Class -> SDoc
classFunDepsErr cls
= vcat [ptext (sLit "Fundeps in class") <+> quotes (ppr cls),
parens (ptext (sLit "Use FunctionalDependencies to allow fundeps"))]
-- | A method or associated type mentions none of the class variables.
noClassTyVarErr :: Class -> SDoc -> SDoc
noClassTyVarErr clas what
= sep [ptext (sLit "The") <+> what,
ptext (sLit "mentions none of the type or kind variables of the class") <+>
quotes (ppr clas <+> hsep (map ppr (classTyVars clas)))]
-- | Report a cycle among type synonym declarations, at the first one.
recSynErr :: [LTyClDecl Name] -> TcRn ()
recSynErr syn_decls
= setSrcSpan (getLoc (head sorted_decls)) $
addErr (sep [ptext (sLit "Cycle in type synonym declarations:"),
nest 2 (vcat (map ppr_decl sorted_decls))])
where
sorted_decls = sortLocated syn_decls
ppr_decl (L loc decl) = ppr loc <> colon <+> ppr decl
-- | Report a superclass cycle among class declarations.
recClsErr :: [TyCon] -> TcRn ()
recClsErr cycles
= addErr (sep [ptext (sLit "Cycle in class declaration (via superclasses):"),
nest 2 (hsep (intersperse (text "->") (map ppr cycles)))])
-- | GADT-style constructor whose return type isn't an instance of its tycon.
badDataConTyCon :: DataCon -> Type -> Type -> SDoc
badDataConTyCon data_con res_ty_tmpl actual_res_ty
= hang (ptext (sLit "Data constructor") <+> quotes (ppr data_con) <+>
ptext (sLit "returns type") <+> quotes (ppr actual_res_ty))
2 (ptext (sLit "instead of an instance of its parent type") <+> quotes (ppr res_ty_tmpl))
-- | Constructor does GADT-style refinement on kind variables.
badGadtKindCon :: DataCon -> SDoc
badGadtKindCon data_con
= hang (ptext (sLit "Data constructor") <+> quotes (ppr data_con)
<+> ptext (sLit "cannot be GADT-like in its *kind* arguments"))
2 (ppr data_con <+> dcolon <+> ppr (dataConUserType data_con))
-- | GADT syntax used without -XGADTs.
badGadtDecl :: Name -> SDoc
badGadtDecl tc_name
= vcat [ ptext (sLit "Illegal generalised algebraic data declaration for") <+> quotes (ppr tc_name)
, nest 2 (parens $ ptext (sLit "Use GADTs to allow GADTs")) ]
-- | Existentials\/context\/specialised result without the enabling extension.
badExistential :: DataCon -> SDoc
badExistential con
= hang (ptext (sLit "Data constructor") <+> quotes (ppr con) <+>
ptext (sLit "has existential type variables, a context, or a specialised result type"))
2 (vcat [ ppr con <+> dcolon <+> ppr (dataConUserType con)
, parens $ ptext (sLit "Use ExistentialQuantification or GADTs to allow this") ])
-- | GADT-style declaration with a (disallowed) stupid theta.
badStupidTheta :: Name -> SDoc
badStupidTheta tc_name
= ptext (sLit "A data type declared in GADT style cannot have a context:") <+> quotes (ppr tc_name)
-- | Newtype with a number of constructors other than one.
newtypeConError :: Name -> Int -> SDoc
newtypeConError tycon n
= sep [ptext (sLit "A newtype must have exactly one constructor,"),
nest 2 $ ptext (sLit "but") <+> quotes (ppr tycon) <+> ptext (sLit "has") <+> speakN n ]
-- | Newtype constructor with a strictness annotation.
newtypeStrictError :: DataCon -> SDoc
newtypeStrictError con
= sep [ptext (sLit "A newtype constructor cannot have a strictness annotation,"),
nest 2 $ ptext (sLit "but") <+> quotes (ppr con) <+> ptext (sLit "does")]
-- | Newtype constructor with a number of fields other than one.
newtypeFieldErr :: DataCon -> Int -> SDoc
newtypeFieldErr con_name n_flds
= sep [ptext (sLit "The constructor of a newtype must have exactly one field"),
nest 2 $ ptext (sLit "but") <+> quotes (ppr con_name) <+> ptext (sLit "has") <+> speakN n_flds]
-- | Kind signature used without -XKindSignatures.
badSigTyDecl :: Name -> SDoc
badSigTyDecl tc_name
= vcat [ ptext (sLit "Illegal kind signature") <+>
quotes (ppr tc_name)
, nest 2 (parens $ ptext (sLit "Use KindSignatures to allow kind signatures")) ]
-- | Data type with no constructors, without -XEmptyDataDecls.
emptyConDeclsErr :: Name -> SDoc
emptyConDeclsErr tycon
= sep [quotes (ppr tycon) <+> ptext (sLit "has no constructors"),
nest 2 $ ptext (sLit "(EmptyDataDecls permits this)")]
-- | Family instance declared for the wrong category of family.
wrongKindOfFamily :: TyCon -> SDoc
wrongKindOfFamily family
= ptext (sLit "Wrong category of family instance; declaration was for a")
<+> kindOfFamily
where
kindOfFamily | isTypeFamilyTyCon family = text "type family"
| isDataFamilyTyCon family = text "data family"
| otherwise = pprPanic "wrongKindOfFamily" (ppr family)
-- | Family instance equation has the wrong number of parameters.
wrongNumberOfParmsErr :: Arity -> SDoc
wrongNumberOfParmsErr max_args
= ptext (sLit "Number of parameters must match family declaration; expected")
<+> ppr max_args
-- | Family instance equation names a different family than declared.
wrongTyFamName :: Name -> Name -> SDoc
wrongTyFamName fam_tc_name eqn_tc_name
= hang (ptext (sLit "Mismatched type name in type family instance."))
2 (vcat [ ptext (sLit "Expected:") <+> ppr fam_tc_name
, ptext (sLit " Actual:") <+> ppr eqn_tc_name ])
-- | Closed-family equation subsumed by earlier equations (warning text).
inaccessibleCoAxBranch :: TyCon -> CoAxBranch -> SDoc
inaccessibleCoAxBranch tc fi
= ptext (sLit "Overlapped type family instance equation:") $$
(pprCoAxBranch tc fi)
-- | User-written role disagrees with the inferred role.
badRoleAnnot :: Name -> Role -> Role -> SDoc
badRoleAnnot var annot inferred
= hang (ptext (sLit "Role mismatch on variable") <+> ppr var <> colon)
2 (sep [ ptext (sLit "Annotation says"), ppr annot
, ptext (sLit "but role"), ppr inferred
, ptext (sLit "is required") ])
-- | Role annotation lists the wrong number of roles.
wrongNumberOfRoles :: [a] -> LRoleAnnotDecl Name -> SDoc
wrongNumberOfRoles tyvars d@(L _ (RoleAnnotDecl _ annots))
= hang (ptext (sLit "Wrong number of roles listed in role annotation;") $$
ptext (sLit "Expected") <+> (ppr $ length tyvars) <> comma <+>
ptext (sLit "got") <+> (ppr $ length annots) <> colon)
2 (ppr d)
-- | Role annotation on something other than a datatype or class.
illegalRoleAnnotDecl :: LRoleAnnotDecl Name -> TcM ()
illegalRoleAnnotDecl (L loc (RoleAnnotDecl tycon _))
= setErrCtxt [] $
setSrcSpan loc $
addErrTc (ptext (sLit "Illegal role annotation for") <+> ppr tycon <> char ';' $$
ptext (sLit "they are allowed only for datatypes and classes."))
-- | Role annotation used without -XRoleAnnotations.
needXRoleAnnotations :: TyCon -> SDoc
needXRoleAnnotations tc
= ptext (sLit "Illegal role annotation for") <+> ppr tc <> char ';' $$
ptext (sLit "did you intend to use RoleAnnotations?")
-- | Non-nominal class-parameter roles need -XIncoherentInstances.
incoherentRoles :: SDoc
incoherentRoles = (text "Roles other than" <+> quotes (text "nominal") <+>
text "for class parameters can lead to incoherence.") $$
(text "Use IncoherentInstances to allow this; bad role found")
-- | Add \"In the \<flavour\> declaration for ...\" to the error context,
-- picking the flavour word (class\/type family\/data\/...) from the thing.
addTyThingCtxt :: TyThing -> TcM a -> TcM a
addTyThingCtxt thing
= addErrCtxt ctxt
where
name = getName thing
flav = case thing of
ATyCon tc
| isClassTyCon tc -> ptext (sLit "class")
| isTypeFamilyTyCon tc -> ptext (sLit "type family")
| isDataFamilyTyCon tc -> ptext (sLit "data family")
| isTypeSynonymTyCon tc -> ptext (sLit "type")
| isNewTyCon tc -> ptext (sLit "newtype")
| isDataTyCon tc -> ptext (sLit "data")
_ -> pprTrace "addTyThingCtxt strange" (ppr thing)
Outputable.empty
ctxt = hsep [ ptext (sLit "In the"), flav
, ptext (sLit "declaration for"), quotes (ppr name) ]
-- | Add \"while checking a role annotation for ...\" to the error context.
addRoleAnnotCtxt :: Name -> TcM a -> TcM a
addRoleAnnotCtxt name
= addErrCtxt $
text "while checking a role annotation for" <+> quotes (ppr name)
|
pparkkin/eta
|
compiler/ETA/TypeCheck/TcTyClsDecls.hs
|
Haskell
|
bsd-3-clause
| 100,831
|
{-# OPTIONS -fno-warn-tabs #-}
-- The above warning supression flag is a temporary kludge.
-- While working on this module you are encouraged to remove it and
-- detab the module (please do the detabbing in a separate patch). See
-- http://hackage.haskell.org/trac/ghc/wiki/Commentary/CodingStyle#TabsvsSpaces
-- for details
-- | The assignment of virtual registers to stack slots
-- We have lots of stack slots. Memory-to-memory moves are a pain on most
-- architectures. Therefore, we avoid having to generate memory-to-memory moves
-- by simply giving every virtual register its own stack slot.
-- The StackMap stack map keeps track of virtual register - stack slot
-- associations and of which stack slots are still free. Once it has been
-- associated, a stack slot is never "freed" or removed from the StackMap again,
-- it remains associated until we are done with the current CmmProc.
--
module RegAlloc.Linear.StackMap (
StackSlot,
StackMap(..),
emptyStackMap,
getStackSlotFor
)
where
import RegAlloc.Linear.FreeRegs
import Outputable
import Platform
import UniqFM
import Unique
-- | Identifier for a stack slot.
type StackSlot = Int
-- | Tracks the assignment of virtual registers to stack slots, plus the
-- pool of slots not yet handed out.  Once a vreg gets a slot, the slot
-- is never freed until we are done with the current CmmProc.
data StackMap
= StackMap
{ -- | The slots that are still available to be allocated.
stackMapFreeSlots :: [StackSlot]
-- | Assignment of vregs to stack slots, keyed by the vreg's 'Unique'.
, stackMapAssignment :: UniqFM StackSlot }
-- | An empty stack map, with all slots available.
-- The total number of slots is platform-dependent ('maxSpillSlots').
emptyStackMap :: Platform -> StackMap
emptyStackMap platform = StackMap [0 .. maxSpillSlots platform] emptyUFM
-- | If this vreg unique already has a stack assignment then return the slot number,
-- otherwise allocate a new slot, and update the map.
--
-- Panics if the free-slot pool is exhausted.
getStackSlotFor :: StackMap -> Unique -> (StackMap, Int)
getStackSlotFor (StackMap [] _) _
-- This happens all the time when trying to compile darcs' SHA1.hs, see Trac #1993
-- SHA1.lhs has also been added to the Crypto library on Hackage,
-- so we see this all the time.
--
-- It would be better to automatically invoke the graph allocator, or do something
-- else besides panicking, but that's a job for a different day. -- BL 2009/02
--
= panic $ "RegAllocLinear.getStackSlotFor: out of stack slots\n"
++ " If you are trying to compile SHA1.hs from the crypto library then this\n"
++ " is a known limitation in the linear allocator.\n"
++ "\n"
++ " Try enabling the graph colouring allocator with -fregs-graph instead."
++ " You can still file a bug report if you like.\n"
-- Reuse an existing assignment if there is one; otherwise take the
-- next free slot and record the new association.
getStackSlotFor fs@(StackMap (freeSlot:stack') reserved) reg =
case lookupUFM reserved reg of
Just slot -> (fs, slot)
Nothing -> (StackMap stack' (addToUFM reserved reg freeSlot), freeSlot)
|
nomeata/ghc
|
compiler/nativeGen/RegAlloc/Linear/StackMap.hs
|
Haskell
|
bsd-3-clause
| 2,719
|
-- | Basic operations on graphs.
--
module GraphOps (
addNode, delNode, getNode, lookupNode, modNode,
size,
union,
addConflict, delConflict, addConflicts,
addCoalesce, delCoalesce,
addExclusion, addExclusions,
addPreference,
coalesceNodes, coalesceGraph,
freezeNode, freezeOneInGraph, freezeAllInGraph,
scanGraph,
setColor,
validateGraph,
slurpNodeConflictCount
)
where
import GraphBase
import Outputable
import Unique
import UniqSet
import UniqFM
import Data.List hiding (union)
import Data.Maybe
-- | Lookup a node from the graph.
lookupNode
        :: Uniquable k
        => Graph k cls color
        -> k -> Maybe (Node k cls color)

-- A node lookup is just a lookup in the underlying unique map.
lookupNode = lookupUFM . graphMap
-- | Get a node from the graph, throwing an error if it's not there
getNode
        :: Uniquable k
        => Graph k cls color
        -> k -> Node k cls color

getNode graph k
        = fromMaybe (panic "ColorOps.getNode: not found")
        $ lookupUFM (graphMap graph) k
-- | Add a node to the graph, linking up its edges
--      Every node already in the graph that appears in this node's conflict
--      or coalesce set gets a matching back edge, keeping edges symmetric.
addNode :: Uniquable k
        => k -> Node k cls color
        -> Graph k cls color -> Graph k cls color

addNode k node graph
 = let
        -- add back conflict edges from other nodes to this one
        map_conflict =
          nonDetFoldUFM
            -- It's OK to use nonDetFoldUFM here because the
            -- operation is commutative
            (adjustUFM_C (\n -> n { nodeConflicts =
                                      addOneToUniqSet (nodeConflicts n) k}))
            (graphMap graph)
            (nodeConflicts node)

        -- add back coalesce edges from other nodes to this one
        map_coalesce =
          nonDetFoldUFM
            -- It's OK to use nonDetFoldUFM here because the
            -- operation is commutative
            (adjustUFM_C (\n -> n { nodeCoalesce =
                                      addOneToUniqSet (nodeCoalesce n) k}))
            map_conflict
            (nodeCoalesce node)

   in   graph
        { graphMap      = addToUFM map_coalesce k node}
-- | Delete a node and all its edges from the graph.
--      Returns Nothing if the node isn't in the graph.
delNode :: (Uniquable k)
        => k -> Graph k cls color -> Maybe (Graph k cls color)

delNode k graph
 | Just node    <- lookupNode graph k
 = let  -- delete conflict edges from other nodes to this one.
        --      (the partial 'Just' match relies on back edges being symmetric)
        graph1  = foldl' (\g k1 -> let Just g' = delConflict k1 k g in g') graph
                $ nonDetEltsUFM (nodeConflicts node)

        -- delete coalesce edge from other nodes to this one.
        graph2  = foldl' (\g k1 -> let Just g' = delCoalesce k1 k g in g') graph1
                $ nonDetEltsUFM (nodeCoalesce node)
                -- See Note [Unique Determinism and code generation]

        -- delete the node
        graph3  = graphMapModify (\fm -> delFromUFM fm k) graph2

   in   Just graph3

 | otherwise
 = Nothing
-- | Modify a node in the graph.
--      returns Nothing if the node isn't present.
--
modNode :: Uniquable k
        => (Node k cls color -> Node k cls color)
        -> k -> Graph k cls color -> Maybe (Graph k cls color)

modNode f k graph
 = case lookupNode graph k of
        Nothing         -> Nothing
        Just Node{}     ->
         let update fm =
                -- safe: presence was just checked above
                let Just old = lookupUFM fm k
                in  addToUFM fm k (f old)
         in  Just (graphMapModify update graph)
-- | Get the size of the graph, O(n)
size    :: Graph k cls color -> Int
size    = sizeUFM . graphMap
-- | Union two graphs together.
union   :: Graph k cls color -> Graph k cls color -> Graph k cls color
union ga gb
        = Graph { graphMap = plusUFM (graphMap ga) (graphMap gb) }
-- | Add a conflict between nodes to the graph, creating the nodes required.
--      Conflicts are virtual regs which need to be colored differently.
addConflict
        :: Uniquable k
        => (k, cls) -> (k, cls)
        -> Graph k cls color -> Graph k cls color

addConflict (u1, c1) (u2, c2)
 = let  -- record an edge from (u, c) to u', creating the node with a
        -- singleton conflict set if it isn't in the map yet.
        link u c u'
         = adjustWithDefaultUFM
                (\node -> node { nodeConflicts = addOneToUniqSet (nodeConflicts node) u' })
                (newNode u c)  { nodeConflicts = unitUniqSet u' }
                u

   in   graphMapModify
        ( link u1 c1 u2
        . link u2 c2 u1)
-- | Delete a conflict edge. k1 -> k2
--      returns Nothing if the node isn't in the graph
delConflict
        :: Uniquable k
        => k -> k
        -> Graph k cls color -> Maybe (Graph k cls color)

delConflict k1 k2
        = modNode dropEdge k1
        where dropEdge node
                = node { nodeConflicts = delOneFromUniqSet (nodeConflicts node) k2 }
-- | Add some conflicts to the graph, creating nodes if required.
--      All the nodes in the set are taken to conflict with each other.
addConflicts
        :: Uniquable k
        => UniqSet k -> (k -> cls)
        -> Graph k cls color -> Graph k cls color

addConflicts conflicts getClass

        -- just a single node, but no conflicts, create the node anyway.
        | [u]   <- nonDetEltsUFM conflicts
        = graphMapModify
        $ adjustWithDefaultUFM id (newNode u (getClass u)) u

        | otherwise
        = graphMapModify
        $ \fm -> foldl' (\g u -> addConflictSet1 u getClass conflicts g) fm
               $ nonDetEltsUFM conflicts
               -- See Note [Unique Determinism and code generation]
-- | Register one member of a conflict set against all the others in the set.
addConflictSet1 :: Uniquable k
        => k -> (k -> cls) -> UniqSet k
        -> UniqFM (Node k cls color)
        -> UniqFM (Node k cls color)

addConflictSet1 u getClass set
 = let  -- this node conflicts with everything in the set except itself
        set'    = delOneFromUniqSet set u
   in   adjustWithDefaultUFM
                (\node -> node { nodeConflicts = unionUniqSets set' (nodeConflicts node) } )
                (newNode u (getClass u)) { nodeConflicts = set' }
                u
-- | Add an exclusion to the graph, creating nodes if required.
--      These are extra colors that the node cannot use.
addExclusion
        :: (Uniquable k, Uniquable color)
        => k -> (k -> cls) -> color
        -> Graph k cls color -> Graph k cls color

addExclusion u getClass color
 = graphMapModify
 $ adjustWithDefaultUFM
        (\n -> n { nodeExclusions = addOneToUniqSet (nodeExclusions n) color })
        (newNode u (getClass u)) { nodeExclusions = unitUniqSet color }
        u
-- | Add a list of exclusions to a node, one at a time.
addExclusions
        :: (Uniquable k, Uniquable color)
        => k -> (k -> cls) -> [color]
        -> Graph k cls color -> Graph k cls color

addExclusions u getClass colors graph
        = foldr addOne graph colors
        where addOne = addExclusion u getClass
-- | Add a coalescence edge to the graph, creating nodes if required.
--      It is considered advantageous to assign the same color to nodes in a coalescence.
addCoalesce
        :: Uniquable k
        => (k, cls) -> (k, cls)
        -> Graph k cls color -> Graph k cls color

addCoalesce (u1, c1) (u2, c2)
 = let  -- helper renamed from (shadowing) 'addCoalesce' for clarity:
        -- record a coalesce edge from (u, c) to u', creating the node
        -- with a singleton coalesce set if it isn't there yet.
        link u c u'
         = adjustWithDefaultUFM
                (\node -> node { nodeCoalesce = addOneToUniqSet (nodeCoalesce node) u' })
                (newNode u c)  { nodeCoalesce = unitUniqSet u' }
                u

   in   graphMapModify
        ( link u1 c1 u2
        . link u2 c2 u1)
-- | Delete a coalescence edge (k1 -> k2) from the graph.
delCoalesce
        :: Uniquable k
        => k -> k
        -> Graph k cls color -> Maybe (Graph k cls color)

delCoalesce k1 k2
        = modNode dropEdge k1
        where dropEdge node
                = node { nodeCoalesce = delOneFromUniqSet (nodeCoalesce node) k2 }
-- | Add a color preference to the graph, creating nodes if required.
--      The most recently added preference is the most preferred.
--      The algorithm tries to assign a node its preferred color if possible.
--
addPreference
        :: Uniquable k
        => (k, cls) -> color
        -> Graph k cls color -> Graph k cls color

addPreference (u, c) color
 = graphMapModify
 $ adjustWithDefaultUFM
        (\n -> n { nodePreference = color : nodePreference n })
        (newNode u c)  { nodePreference = [color] }
        u
-- | Do aggressive coalescing on this graph.
--      returns the new graph and the list of pairs of nodes that got coalesced together.
--      for each pair, the resulting node will have the least key and be second in the pair.
--
--      Thin wrapper over 'coalesceGraph'' with an empty accumulator.
coalesceGraph
        :: (Uniquable k, Ord k, Eq cls, Outputable k)
        => Bool                 -- ^ If True, coalesce nodes even if this might make the graph
                                --      less colorable (aggressive coalescing)
        -> Triv k cls color
        -> Graph k cls color
        -> ( Graph k cls color
           , [(k, k)])          -- ^ pairs of nodes that were coalesced, in the order that the
                                --      coalescing was applied.

coalesceGraph aggressive triv graph
        = coalesceGraph' aggressive triv graph []
-- | Worker for 'coalesceGraph': coalesce one pass, then recurse until no
--      more coalescable pairs are found, accumulating pairs (reversed) as we go.
coalesceGraph'
        :: (Uniquable k, Ord k, Eq cls, Outputable k)
        => Bool
        -> Triv k cls color
        -> Graph k cls color
        -> [(k, k)]             -- ^ accumulator of coalesced pairs, most recent first
        -> ( Graph k cls color
           , [(k, k)])

coalesceGraph' aggressive triv graph kkPairsAcc
 = let
        -- find all the nodes that have coalescence edges
        cNodes  = filter (\node -> not $ isEmptyUniqSet (nodeCoalesce node))
                $ nonDetEltsUFM $ graphMap graph
                -- See Note [Unique Determinism and code generation]

        -- build a list of pairs of keys for node's we'll try and coalesce
        --      every pair of nodes will appear twice in this list
        --      ie [(k1, k2), (k2, k1) ... ]
        --      This is ok, GrapOps.coalesceNodes handles this and it's convenient for
        --      build a list of what nodes get coalesced together for later on.
        --
        cList   = [ (nodeId node1, k2)
                  | node1 <- cNodes
                  , k2    <- nonDetEltsUFM $ nodeCoalesce node1 ]
                  -- See Note [Unique Determinism and code generation]

        -- do the coalescing, returning the new graph and a list of pairs of keys
        --      that got coalesced together.
        (graph', mPairs)
                = mapAccumL (coalesceNodes aggressive triv) graph cList

        -- keep running until there are no more coalesces can be found
   in   case catMaybes mPairs of
         []     -> (graph', reverse kkPairsAcc)
         pairs  -> coalesceGraph' aggressive triv graph' (reverse pairs ++ kkPairsAcc)
-- | Coalesce this pair of nodes unconditionally \/ aggressively.
--      The resulting node is the one with the least key.
--
--      returns: Just the pair of keys if the nodes were coalesced
--               the second element of the pair being the least one
--
--               Nothing if either of the nodes weren't in the graph

coalesceNodes
        :: (Uniquable k, Ord k, Eq cls)
        => Bool                 -- ^ If True, coalesce nodes even if this might make the graph
                                --      less colorable (aggressive coalescing)
        -> Triv  k cls color
        -> Graph k cls color
        -> (k, k)               -- ^ keys of the nodes to be coalesced
        -> (Graph k cls color, Maybe (k, k))

coalesceNodes aggressive triv graph (k1, k2)
        | kMin          <- min k1 k2
        , kMax          <- max k1 k2

        -- the nodes being coalesced must be in the graph
        , Just nMin     <- lookupNode graph kMin
        , Just nMax     <- lookupNode graph kMax

        -- can't coalesce conflicting modes
        , not $ elementOfUniqSet kMin (nodeConflicts nMax)
        , not $ elementOfUniqSet kMax (nodeConflicts nMin)

        -- can't coalesce the same node
        , nodeId nMin /= nodeId nMax
        = coalesceNodes_merge aggressive triv graph kMin kMax nMin nMax

        -- don't do the coalescing after all
        | otherwise
        = (graph, Nothing)
-- | Build the merged node for a coalesce, unioning the edge sets of the two
--      components, then hand off to 'coalesceNodes_check' for the triv test.
coalesceNodes_merge
        :: (Uniquable k, Eq cls)
        => Bool
        -> Triv k cls color
        -> Graph k cls color
        -> k -> k
        -> Node k cls color
        -> Node k cls color
        -> (Graph k cls color, Maybe (k, k))

coalesceNodes_merge aggressive triv graph kMin kMax nMin nMax

        -- sanity checks
        | nodeClass nMin /= nodeClass nMax
        = error "GraphOps.coalesceNodes: can't coalesce nodes of different classes."

        | not (isNothing (nodeColor nMin) && isNothing (nodeColor nMax))
        = error "GraphOps.coalesceNodes: can't coalesce colored nodes."

        ---
        | otherwise
        = let
                -- the new node gets all the edges from its two components
                node    =
                 Node   { nodeId        = kMin
                        , nodeClass     = nodeClass nMin
                        , nodeColor     = Nothing

                        -- nodes don't conflict with themselves..
                        , nodeConflicts
                           = (unionUniqSets (nodeConflicts nMin) (nodeConflicts nMax))
                                `delOneFromUniqSet` kMin
                                `delOneFromUniqSet` kMax

                        , nodeExclusions = unionUniqSets (nodeExclusions nMin) (nodeExclusions nMax)
                        , nodePreference = nodePreference nMin ++ nodePreference nMax

                        -- nodes don't coalesce with themselves..
                        , nodeCoalesce
                           = (unionUniqSets (nodeCoalesce nMin) (nodeCoalesce nMax))
                                `delOneFromUniqSet` kMin
                                `delOneFromUniqSet` kMax
                        }

          in    coalesceNodes_check aggressive triv graph kMin kMax node
-- | Accept a merged node only if it stays trivially colorable (or we're
--      coalescing aggressively); if accepted, splice it into the graph in
--      place of the two original nodes.
coalesceNodes_check
        :: Uniquable k
        => Bool
        -> Triv k cls color
        -> Graph k cls color
        -> k -> k
        -> Node k cls color
        -> (Graph k cls color, Maybe (k, k))

coalesceNodes_check aggressive triv graph kMin kMax node

        -- Unless we're coalescing aggressively, if the result node is not trivially
        --      colorable then don't do the coalescing.
        | not aggressive
        , not $ triv (nodeClass node) (nodeConflicts node) (nodeExclusions node)
        = (graph, Nothing)

        | otherwise
        = let -- delete the old nodes from the graph and add the new one
              -- (the partial 'Just' matches are safe: the caller already
              --  looked up both nodes)
              Just graph1       = delNode kMax graph
              Just graph2       = delNode kMin graph1
              graph3            = addNode kMin node graph2

          in  (graph3, Just (kMax, kMin))
-- | Freeze a node
--      This is for the iterative coalescer.
--      By freezing a node we give up on ever coalescing it.
--      Move all its coalesce edges into the frozen set - and update
--      back edges from other nodes.
--
freezeNode
        :: Uniquable k
        => k                    -- ^ key of the node to freeze
        -> Graph k cls color    -- ^ the graph
        -> Graph k cls color    -- ^ graph with that node frozen

freezeNode k
  = graphMapModify
  $ \fm ->
    let -- freeze all the edges in the node to be frozen
        -- (partial match: assumes the node is present in the map)
        Just node = lookupUFM fm k
        node'   = node { nodeCoalesce = emptyUniqSet }
        fm1     = addToUFM fm k node'

        -- update back edges pointing to this node
        -- (note: freezeEdge's 'k' and 'node' shadow the outer bindings)
        freezeEdge k node
          = if elementOfUniqSet k (nodeCoalesce node)
            then node { nodeCoalesce = delOneFromUniqSet (nodeCoalesce node) k }
            else node   -- panic "GraphOps.freezeNode: edge to freeze wasn't in the coalesce set"
                        -- If the edge isn't actually in the coelesce set then just ignore it.

        fm2     = nonDetFoldUFM (adjustUFM_C (freezeEdge k)) fm1
                        -- It's OK to use nonDetFoldUFM here because the operation
                        -- is commutative
                        $ nodeCoalesce node

    in  fm2
-- | Freeze one node in the graph
--      This is for the iterative coalescer.
--      Look for a move related node of low degree and freeze it.
--
--      We probably don't need to scan the whole graph looking for the node of absolute
--      lowest degree. Just sample the first few and choose the one with the lowest
--      degree out of those. Also, we don't make any distinction between conflicts of different
--      classes.. this is just a heuristic, after all.
--
--      IDEA: freezing a node might free it up for Simplify.. would be good to check for triv
--      right here, and add it to a worklist if known triv\/non-move nodes.
--
freezeOneInGraph
        :: (Uniquable k)
        => Graph k cls color
        -> ( Graph k cls color          -- the new graph
           , Bool )                     -- whether we found a node to freeze

freezeOneInGraph graph
 = let  -- move-related candidates: a small sample, ordered by conflict degree
        candidates
         = sortOn (sizeUniqSet . nodeConflicts)
         $ take 5       -- 5 isn't special, it's just a small number.
         $ scanGraph (not . isEmptyUniqSet . nodeCoalesce) graph

   in   case candidates of
         -- there wasn't anything available to freeze
         []      -> (graph, False)

         -- we found something to freeze
         (n : _) -> (freezeNode (nodeId n) graph, True)
-- | Freeze all the nodes in the graph
--      for debugging the iterative allocator.
--
freezeAllInGraph
        :: (Uniquable k)
        => Graph k cls color
        -> Graph k cls color

freezeAllInGraph graph
        = foldr (freezeNode . nodeId) graph
        $ nonDetEltsUFM (graphMap graph)
        -- See Note [Unique Determinism and code generation]
-- | Find all the nodes in the graph that meet some criteria
--
scanGraph
        :: (Node k cls color -> Bool)
        -> Graph k cls color
        -> [Node k cls color]

scanGraph match graph
        = [ node | node <- nonDetEltsUFM (graphMap graph), match node ]
        -- See Note [Unique Determinism and code generation]
-- | validate the internal structure of a graph
--      all its edges should point to valid nodes
--      If they don't then throw an error
--
validateGraph
        :: (Uniquable k, Outputable k, Eq color)
        => SDoc                 -- ^ extra debugging info to display on error
        -> Bool                 -- ^ whether this graph is supposed to be colored.
        -> Graph k cls color    -- ^ graph to validate
        -> Graph k cls color    -- ^ validated graph

validateGraph doc isColored graph

        -- Check that all edges point to valid nodes.
        | edges         <- unionManyUniqSets
                                (  (map nodeConflicts $ nonDetEltsUFM $ graphMap graph)
                                ++ (map nodeCoalesce  $ nonDetEltsUFM $ graphMap graph))

        , nodes         <- mkUniqSet $ map nodeId $ nonDetEltsUFM $ graphMap graph
        , badEdges      <- minusUniqSet edges nodes
        , not $ isEmptyUniqSet badEdges
        = pprPanic "GraphOps.validateGraph"
                -- message typo fixed: was "non-existant"
                ( text "Graph has edges that point to non-existent nodes"
                        $$ text " bad edges: " <> pprUFM badEdges (vcat . map ppr)
                        $$ doc )

        -- Check that no conflicting nodes have the same color
        | badNodes      <- filter (not . (checkNode graph))
                        $ nonDetEltsUFM $ graphMap graph
                        -- See Note [Unique Determinism and code generation]
        , not $ null badNodes
        = pprPanic "GraphOps.validateGraph"
                -- message typo fixed: was "it's conflicts"
                ( text "Node has same color as one of its conflicts"
                        $$ text " bad nodes: " <> hcat (map (ppr . nodeId) badNodes)
                        $$ doc)

        -- If this is supposed to be a colored graph,
        --      check that all nodes have a color.
        | isColored
        , badNodes      <- filter (\n -> isNothing $ nodeColor n)
                        $ nonDetEltsUFM $ graphMap graph
        , not $ null badNodes
        = pprPanic "GraphOps.validateGraph"
                -- message typo fixed: was "Supposably"
                ( text "Supposedly colored graph has uncolored nodes."
                        $$ text " uncolored nodes: " <> hcat (map (ppr . nodeId) badNodes)
                        $$ doc )

        -- graph looks ok
        | otherwise
        = graph
-- | If this node is colored, check that all the nodes which
--      conflict with it have different colors.
checkNode
        :: (Uniquable k, Eq color)
        => Graph k cls color
        -> Node  k cls color
        -> Bool                 -- ^ True if this node is ok

checkNode graph node
        | Just color            <- nodeColor node
        , Just neighbors        <- mapM (lookupNode graph)
                                $ nonDetEltsUFM $ nodeConflicts node
                                -- See Note [Unique Determinism and code generation]

        , neighbourColors       <- mapMaybe nodeColor neighbors
        , color `elem` neighbourColors
        = False

        | otherwise
        = True
-- | Slurp out a map of how many nodes had a certain number of conflict neighbours
slurpNodeConflictCount
        :: Graph k cls color
        -> UniqFM (Int, Int)    -- ^ (conflict neighbours, num nodes with that many conflicts)

slurpNodeConflictCount graph
        = addListToUFM_C
                (\(c1, n1) (_, n2) -> (c1, n1 + n2))
                emptyUFM
        $ [ (count, (count, 1))
          | node <- nonDetEltsUFM (graphMap graph)
          -- See Note [Unique Determinism and code generation]
          , let count = sizeUniqSet (nodeConflicts node) ]
-- | Set the color of a certain node
setColor
        :: Uniquable k
        => k -> color
        -> Graph k cls color -> Graph k cls color

setColor u color
        = graphMapModify
        $ adjustUFM_C
                (\node -> node { nodeColor = Just color })
                u
{-# INLINE adjustWithDefaultUFM #-}
-- | Apply a function to the element at this key, inserting the default
--      value if the key isn't present yet.
adjustWithDefaultUFM
        :: Uniquable k
        => (a -> a) -> a -> k
        -> UniqFM a -> UniqFM a

-- 'fm' instead of 'map' to avoid shadowing Prelude.map.
adjustWithDefaultUFM f def k fm
        = addToUFM_C (\old _ -> f old) fm k def
-- Argument order different from UniqFM's adjustUFM
{-# INLINE adjustUFM_C #-}
-- | Apply a function to the element at this key, leaving the map
--      unchanged if the key isn't present.
adjustUFM_C
        :: Uniquable k
        => (a -> a)
        -> k -> UniqFM a -> UniqFM a

adjustUFM_C f k fm
        = maybe fm (addToUFM fm k . f) (lookupUFM fm k)
|
olsner/ghc
|
compiler/utils/GraphOps.hs
|
Haskell
|
bsd-3-clause
| 23,303
|
{-# LANGUAGE RankNTypes, PolyKinds #-}
-- NB: -fprint-explicit-runtime-reps enabled in all.T
module TypeSkolEscape where
import GHC.Types
import GHC.Exts
type Bad = forall (v :: RuntimeRep) (a :: TYPE v). a
|
sdiehl/ghc
|
testsuite/tests/dependent/should_fail/TypeSkolEscape.hs
|
Haskell
|
bsd-3-clause
| 210
|
module Time {-(
ClockTime,
Month(January,February,March,April,May,June,
July,August,September,October,November,December),
Day(Sunday,Monday,Tuesday,Wednesday,Thursday,Friday,Saturday),
CalendarTime(CalendarTime, ctYear, ctMonth, ctDay, ctHour, ctMin,
ctPicosec, ctWDay, ctYDay, ctTZName, ctTZ, ctIsDST),
TimeDiff(TimeDiff, tdYear, tdMonth, tdDay,
tdHour, tdMin, tdSec, tdPicosec),
getClockTime, addToClockTime, diffClockTimes,
toCalendarTime, toUTCTime, toClockTime,
calendarTimeToString, formatCalendarTime )-} where
import Prelude
import Ix(Ix)
import Locale --(TimeLocale(..),defaultTimeLocale)
import Char ( intToDigit )
-- | An abstract clock time; represented here by a plain Int.
newtype ClockTime
  = ClockTime Int
  deriving (Eq,Ord,Show)
-- The Show instance is a deviation from Haskell 98, provided for
-- compatibility with Hugs and GHC.

-- | Months of the year, in calendar (Enum) order.
data Month = January | February | March | April
           | May | June | July | August
           | September | October | November | December
  deriving (Eq, Ord, Enum, Bounded, Ix, Read, Show)
--instance Enum Month

-- | Days of the week, starting from Sunday (Enum order).
data Day = Sunday | Monday | Tuesday | Wednesday | Thursday
         | Friday | Saturday
  deriving (Eq, Ord, Enum, Bounded, Ix, Read, Show)
--instance Enum Day

-- | A clock time broken down into calendar components.
data CalendarTime = CalendarTime {
    ctYear :: Int,
    ctMonth :: Month,
    ctDay, ctHour, ctMin, ctSec :: Int,
    ctPicosec :: Integer,
    ctWDay :: Day,
    ctYDay :: Int,
    ctTZName :: String,
    ctTZ :: Int,
    ctIsDST :: Bool
  } deriving (Eq, Ord, Read, Show)

-- | A difference between two clock times, broken down into components.
data TimeDiff = TimeDiff {
    tdYear, tdMonth, tdDay, tdHour, tdMin, tdSec :: Int,
    tdPicosec :: Integer
  } deriving (Eq, Ord, Read, Show)
-- | Get the current clock time.  (Stub: implementation-dependent.)
getClockTime :: IO ClockTime
getClockTime = undefined -- Implementation-dependent

-- | Add a 'TimeDiff' to a 'ClockTime'.  (Stub: implementation-dependent.)
addToClockTime :: TimeDiff -> ClockTime -> ClockTime
addToClockTime td ct = undefined -- Implementation-dependent

-- | Difference between two clock times.  (Stub: implementation-dependent.)
diffClockTimes :: ClockTime -> ClockTime -> TimeDiff
diffClockTimes ct1 ct2 = undefined -- Implementation-dependent

-- | Convert to local calendar time.  (Stub: implementation-dependent.)
toCalendarTime :: ClockTime -> IO CalendarTime
toCalendarTime ct = undefined -- Implementation-dependent

-- | Convert to UTC calendar time.  (Stub: implementation-dependent.)
toUTCTime :: ClockTime -> CalendarTime
toUTCTime ct = undefined -- Implementation-dependent

-- | Convert a calendar time back to a clock time.  (Stub: implementation-dependent.)
toClockTime :: CalendarTime -> ClockTime
toClockTime cal = undefined -- Implementation-dependent
-- | Format a 'CalendarTime' using the locale's \"%c\" (date-and-time) format.
calendarTimeToString :: CalendarTime -> String
calendarTimeToString = formatCalendarTime defaultTimeLocale "%c"

-- | Format a 'CalendarTime' according to a strftime-style format string,
--   taking day/month names and composite formats from the given locale.
--   Unrecognised @%@-directives expand to the bare directive character.
formatCalendarTime :: TimeLocale -> String -> CalendarTime -> String
formatCalendarTime l fmt ct@(CalendarTime year mon day hour min sec sdec
                                          wday yday tzname _ _) =
        -- NOTE: the pattern variable 'min' (minutes) shadows Prelude.min here.
        doFmt fmt
  where -- walk the format string, expanding %-directives via 'decode'
        doFmt ('%':c:cs) = decode c ++ doFmt cs
        doFmt (c:cs) = c : doFmt cs
        doFmt "" = ""

        -- convert a 24-hour value to the 12-hour clock (0 maps to 12)
        to12 :: Int -> Int
        to12 h = let h' = h `mod` 12 in if h' == 0 then 12 else h'

        -- expand one directive character
        decode 'A' = fst (wDays l  !! fromEnum wday)
        decode 'a' = snd (wDays l  !! fromEnum wday)
        decode 'B' = fst (months l !! fromEnum mon)
        decode 'b' = snd (months l !! fromEnum mon)
        decode 'h' = snd (months l !! fromEnum mon)
        decode 'C' = show2 (year `quot` 100)
        decode 'c' = doFmt (dateTimeFmt l)
        decode 'D' = doFmt "%m/%d/%y"
        decode 'd' = show2 day
        decode 'e' = show2' day
        decode 'H' = show2 hour
        decode 'I' = show2 (to12 hour)
        decode 'j' = show3 yday
        decode 'k' = show2' hour
        decode 'l' = show2' (to12 hour)
        decode 'M' = show2 min
        decode 'm' = show2 (fromEnum mon+1)
        decode 'n' = "\n"
        decode 'p' = (if hour < 12 then fst else snd) (amPm l)
        decode 'R' = doFmt "%H:%M"
        decode 'r' = doFmt (time12Fmt l)
        decode 'T' = doFmt "%H:%M:%S"
        decode 't' = "\t"
        decode 'S' = show2 sec
        --decode 's' = undefined -- Implementation-dependent
        decode 'U' = show2 ((yday + 7 - fromEnum wday) `div` 7)
        decode 'u' = show (let n = fromEnum wday in
                           if n == 0 then 7 else n)
        decode 'V' =
            let (week, days) =
                   (yday + 7 - if fromEnum wday > 0 then
                               fromEnum wday - 1 else 6) `divMod` 7
            in  show2 (if days >= 4 then
                          week+1
                       else if week == 0 then 53 else week)
        decode 'W' =
            show2 ((yday + 7 - if fromEnum wday > 0 then
                               fromEnum wday - 1 else 6) `div` 7)
        decode 'w' = show (fromEnum wday)
        decode 'X' = doFmt (timeFmt l)
        decode 'x' = doFmt (dateFmt l)
        decode 'Y' = show year
        decode 'y' = show2 (year `rem` 100)
        decode 'Z' = tzname
        decode '%' = "%"
        decode c   = [c]

        -- zero-padded / space-padded two-digit and three-digit rendering
        show2, show2', show3 :: Int -> String
        show2 x = [intToDigit (x `quot` 10), intToDigit (x `rem` 10)]

        show2' x = if x < 10 then [ ' ', intToDigit x] else show2 x

        show3 x = intToDigit (x `quot` 100) : show2 (x `rem` 100)
|
forste/haReFork
|
tools/base/tests/HaskellLibraries/Time.hs
|
Haskell
|
bsd-3-clause
| 5,602
|
{-# OPTIONS_GHC -fno-warn-redundant-constraints #-}
{-# LANGUAGE RankNTypes, ExistentialQuantification #-}
-- An interesting interaction of universals and existentials, prompted by
-- http://www.haskell.org/pipermail/haskell-cafe/2004-October/007160.html
--
-- Note the nested pattern-match in runProg; tc183 checks the
-- non-nested version
-- 3 Sept 2010: with the new typechecker, this one succeeds
module Foo where
import Control.Monad.Trans
-- | Existential: a monad transformer 't' packed with a runner and an action.
data Bar m
  = forall t. (MonadTrans t, Monad (t m))
 => Bar (t m () -> m ()) (t m Int)

-- | A 'Bar' that works for every monad 'm' (universal inside existential).
data Foo = Foo (forall m. Monad m => Bar m)

runProg :: Foo -> IO ()
runProg (Foo (Bar run op)) = run (prog op)
  -- This nested match "ought" to work; because
  --    runProg (Foo b) = case b of
  --       Bar run op -> run (prog op)
  -- does work.  But the interactions with GADTs and
  -- desugaring defeated me, and I removed (in GHC 6.4) the ability
  -- to instantiate functions on the left

-- 'prog' ignores its argument; its body is never meant to be run.
prog :: (MonadTrans t, Monad (t IO)) => a -> t IO ()
prog x = error "urk"
|
urbanslug/ghc
|
testsuite/tests/typecheck/should_fail/tcfail126.hs
|
Haskell
|
bsd-3-clause
| 1,007
|
-- | Check universe constraints.
module Idris.Core.Constraints ( ucheck ) where
import Idris.Core.TT ( TC(..), UExp(..), UConstraint(..), FC(..),
ConstraintFC(..), Err'(..) )
import Control.Applicative
import Control.Monad.State.Strict
import Data.List ( partition )
import qualified Data.Map.Strict as M
import qualified Data.Set as S
-- | Check that a list of universe constraints can be satisfied.
ucheck :: S.Set ConstraintFC -> TC ()
ucheck = void . solve 10 . S.filter (not . ignore)
    where
        -- TODO: remove the first ignore clause once Idris.Core.Binary:598 is dealt with
        ignore (ConstraintFC c _) | Var (-1) `elem` varsIn c = True
        ignore (ConstraintFC (ULE a b) _) = a == b
        ignore _ = False
-- | A universe variable, identified by an Int.
newtype Var = Var Int
    deriving (Eq, Ord, Show)

-- | An inclusive interval of possible universe levels (lower, upper).
data Domain = Domain Int Int
    deriving (Eq, Ord, Show)

-- | Mutable state of the constraint solver.
data SolverState =
    SolverState
        { queue :: Queue
        , domainStore :: M.Map Var ( Domain
                                   , S.Set ConstraintFC  -- constraints that affected this variable
                                   )
        -- constraints indexed by the variable on their left/right side
        , cons_lhs :: M.Map Var (S.Set ConstraintFC)
        , cons_rhs :: M.Map Var (S.Set ConstraintFC)
        }

-- | Work queue of constraints; the set mirrors the list so membership
--   can be tested without scanning the list.
data Queue = Queue [ConstraintFC] (S.Set UConstraint)
-- | Try to find an assignment of universe levels in [0 .. maxUniverseLevel]
--   satisfying all the constraints, by interval-domain propagation over a
--   work queue.  Fails in 'TC' if some domain is wiped out.
solve :: Int -> S.Set ConstraintFC -> TC (M.Map Var Int)
solve maxUniverseLevel ucs =
    evalStateT (propagate >> extractSolution) initSolverState

    where

        inpConstraints = S.toAscList ucs

        -- | initial solver state.
        --   the queue contains all constraints, the domain store contains the initial domains.
        initSolverState :: SolverState
        initSolverState =
            let
                -- process unary constraints first
                (initUnaryQueue, initQueue) = partition (\ c -> length (varsIn (uconstraint c)) == 1) inpConstraints
            in
                SolverState
                    { queue = Queue (initUnaryQueue ++ initQueue) (S.fromList (map uconstraint (initUnaryQueue ++ initQueue)))
                    , domainStore = M.fromList
                        [ (v, (Domain 0 maxUniverseLevel, S.empty))
                        | v <- ordNub [ v
                                      | ConstraintFC c _ <- inpConstraints
                                      , v <- varsIn c
                                      ]
                        ]
                    , cons_lhs = constraintsLHS
                    , cons_rhs = constraintsRHS
                    }

        -- which variable (if any) appears on the left of a constraint
        lhs (ULT (UVar x) _) = Just (Var x)
        lhs (ULE (UVar x) _) = Just (Var x)
        lhs _ = Nothing

        -- which variable (if any) appears on the right of a constraint
        rhs (ULT _ (UVar x)) = Just (Var x)
        rhs (ULE _ (UVar x)) = Just (Var x)
        rhs _ = Nothing

        -- | a map from variables to the list of constraints the variable occurs in. (in the LHS of a constraint)
        constraintsLHS :: M.Map Var (S.Set ConstraintFC)
        constraintsLHS = M.fromListWith S.union
            [ (v, S.singleton (ConstraintFC c fc))
            | (ConstraintFC c fc) <- inpConstraints
            , let vars = varsIn c
            , length vars > 1       -- do not register unary constraints
            , v <- vars
            , lhs c == Just v
            ]

        -- | a map from variables to the list of constraints the variable occurs in. (in the RHS of a constraint)
        constraintsRHS :: M.Map Var (S.Set ConstraintFC)
        constraintsRHS = M.fromListWith S.union
            [ (v, S.singleton (ConstraintFC c fc))
            | (ConstraintFC c fc) <- inpConstraints
            , let vars = varsIn c
            , length vars > 1       -- do not register unary constraints
            , v <- vars
            , rhs c == Just v
            ]

        -- | this is where the actual work is done.
        --   dequeue the first constraint,
        --   filter domains,
        --   update domains (possibly resulting in a domain wipe out),
        --   until the queue is empty.
        propagate :: StateT SolverState TC ()
        propagate = do
            mcons <- nextConstraint
            case mcons of
                Nothing -> return ()
                Just (ConstraintFC cons fc) -> do
                    case cons of
                        ULE a b -> do
                            Domain lowerA upperA <- domainOf a
                            Domain lowerB upperB <- domainOf b
                            when (upperB < upperA) $ updateUpperBoundOf (ConstraintFC cons fc) a upperB
                            when (lowerA > lowerB) $ updateLowerBoundOf (ConstraintFC cons fc) b lowerA
                        ULT a b -> do
                            Domain lowerA upperA <- domainOf a
                            Domain lowerB upperB <- domainOf b
                            -- strict inequality: shrink by one on each side
                            let upperB_pred = pred upperB
                            let lowerA_succ = succ lowerA
                            when (upperB_pred < upperA) $ updateUpperBoundOf (ConstraintFC cons fc) a upperB_pred
                            when (lowerA_succ > lowerB) $ updateLowerBoundOf (ConstraintFC cons fc) b lowerA_succ
                    propagate

        -- | extract a solution from the state.
        extractSolution :: (MonadState SolverState m, Functor m) => m (M.Map Var Int)
        extractSolution = M.map (extractValue . fst) <$> gets domainStore

        -- the lower bound of a (non-wiped) domain is the chosen value
        extractValue :: Domain -> Int
        extractValue (Domain x _) = x

        -- | dequeue the first constraint.
        nextConstraint :: MonadState SolverState m => m (Maybe ConstraintFC)
        nextConstraint = do
            Queue list set <- gets queue
            case list of
                [] -> return Nothing
                (q:qs) -> do
                    modify $ \ st -> st { queue = Queue qs (S.delete (uconstraint q) set) }
                    return (Just q)

        -- | look up the domain of a variable from the state.
        --   for convenience, this function also accepts UVal's and returns a singleton domain for them.
        domainOf :: MonadState SolverState m => UExp -> m Domain
        domainOf (UVar var) = gets (fst . (M.! Var var) . domainStore)
        domainOf (UVal val) = return (Domain val val)

        asPair :: Domain -> (Int, Int)
        asPair (Domain x y) = (x, y)

        -- tighten the upper bound of a variable's domain; a wiped-out domain
        -- raises a universe error in TC
        updateUpperBoundOf :: ConstraintFC -> UExp -> Int -> StateT SolverState TC ()
        updateUpperBoundOf suspect (UVar var) upper = do
            doms <- gets domainStore
            let (oldDom@(Domain lower _), suspects) = doms M.! Var var
            let newDom = Domain lower upper
            when (wipeOut newDom) $
                lift $ Error $
                    UniverseError (ufc suspect) (UVar var)
                                  (asPair oldDom) (asPair newDom)
                                  (suspect : S.toList suspects)
            modify $ \ st -> st { domainStore = M.insert (Var var) (newDom, S.insert suspect suspects) doms }
            addToQueueRHS (uconstraint suspect) (Var var)
        updateUpperBoundOf _ UVal{} _ = return ()

        -- tighten the lower bound of a variable's domain; a wiped-out domain
        -- raises a universe inconsistency error in TC
        updateLowerBoundOf :: ConstraintFC -> UExp -> Int -> StateT SolverState TC ()
        updateLowerBoundOf suspect (UVar var) lower = do
            doms <- gets domainStore
            let (oldDom@(Domain _ upper), suspects) = doms M.! Var var
            let newDom = Domain lower upper
            when (wipeOut newDom) $ lift $ Error $ At (ufc suspect) $ Msg $ unlines
                $ "Universe inconsistency."
                : ("Working on: " ++ show (UVar var))
                : ("Old domain: " ++ show oldDom)
                : ("New domain: " ++ show newDom)
                : "Involved constraints: "
                : map (("\t"++) . show) (suspect : S.toList suspects)
            modify $ \ st -> st { domainStore = M.insert (Var var) (newDom, S.insert suspect suspects) doms }
            addToQueueLHS (uconstraint suspect) (Var var)
        updateLowerBoundOf _ UVal{} _ = return ()

        -- | add all constraints (with the given var on the lhs) to the queue
        addToQueueLHS :: MonadState SolverState m => UConstraint -> Var -> m ()
        addToQueueLHS thisCons var = do
            clhs <- gets cons_lhs
            case M.lookup var clhs of
                Nothing -> return ()
                Just cs -> do
                    Queue list set <- gets queue
                    let set' = S.insert thisCons set
                    -- only enqueue constraints that aren't already queued
                    let newCons = [ c | c <- S.toList cs, uconstraint c `S.notMember` set' ]
                    if null newCons
                        then return ()
                        else modify $ \ st -> st { queue = Queue (list ++ newCons)
                                                         (S.union set (S.fromList (map uconstraint newCons))) }

        -- | add all constraints (with the given var on the rhs) to the queue
        addToQueueRHS :: MonadState SolverState m => UConstraint -> Var -> m ()
        addToQueueRHS thisCons var = do
            crhs <- gets cons_rhs
            case M.lookup var crhs of
                Nothing -> return ()
                Just cs -> do
                    Queue list set <- gets queue
                    let set' = S.insert thisCons set
                    -- only enqueue constraints that aren't already queued
                    let newCons = [ c | c <- S.toList cs, uconstraint c `S.notMember` set' ]
                    if null newCons
                        then return ()
                        else modify $ \ st -> st { queue = Queue (list ++ newCons)
                                                         (insertAll (map uconstraint newCons) set) }

        -- insert a list of elements into a set, one at a time
        insertAll [] s = s
        insertAll (x : xs) s = insertAll xs (S.insert x s)
-- | check if a domain is wiped out (empty interval).
wipeOut :: Domain -> Bool
wipeOut (Domain lo hi) = lo > hi
-- | O(n log n) deduplication via a Set; the result is sorted ascending.
ordNub :: Ord a => [a] -> [a]
ordNub = S.toAscList . S.fromList
-- | variables in a constraint (shared helper for both constraint shapes)
varsIn :: UConstraint -> [Var]
varsIn c = case c of
    ULT a b -> vars a b
    ULE a b -> vars a b
  where
    vars a b = [ Var v | UVar v <- [a, b] ]
|
osa1/Idris-dev
|
src/Idris/Core/Constraints.hs
|
Haskell
|
bsd-3-clause
| 9,942
|
module Rhodium.Context where
import Control.Exception.Base (assert)
type Label = String

-- | Type-level ("up") terms of the object language.
data UpTerm var
    = UpVar var
    | UpPred Label [DnTerm var]
    -- ^ variables and atoms
    | UpPi (UpTerm var) (UpTerm var)
    -- ^ dependent product
    | UpSigma (UpTerm var) (UpTerm var)
    -- ^ dependent sum
    | UpWType (UpTerm var) (UpTerm var)
    -- ^ W-types
    | UpType
    -- ^ the type of Types
    deriving (Eq,Show)

-- | Value-level ("down") terms of the object language.
data DnTerm var
    = DnVar var
    | DnPred Label [DnTerm var]
    -- ^ variables and atoms
    | DnType (UpTerm var)
    -- ^ type reflected as a term
    | DnLambda (DnTerm var)
    | DnApp (DnTerm var) (DnTerm var)
    -- ^ dependent product
    | DnPair (DnTerm var) (DnTerm var)
    | DnSplit (DnTerm var)
    -- ^ dependent sum
    | DnSup (DnTerm var) (DnTerm var)
    | DnWRec (UpTerm var) (DnTerm var)
    deriving (Eq,Show)
-- | Lift a value-level term to the type level.
-- Partial: only variables, atoms and reflected types are liftable; any
-- other constructor is a pattern-match failure at runtime.
liftTerm :: DnTerm var -> UpTerm var
liftTerm (DnVar n) = UpVar n
liftTerm (DnPred s ts) = UpPred s ts
liftTerm (DnType typ) = typ
-- other terms are not liftable
-- | An Object of the Contextual Category: a context in type theory.
-- A context is a list of Terms of type Type, with variables
-- represented by deBruijn indices indicating an *offset* in the list, relative
-- to the current term.  The head of the list is the innermost (most
-- recently bound) type.
data ObC = ObC [UpTerm Int] deriving (Eq)

-- | A Morphism of the Contextual Category: a term in type theory.
-- 'morph' gives, for each type in 'target', a term built over 'source'.
data HomC = HomC {
        source :: [UpTerm Int],
        target :: [UpTerm Int],
        morph :: [DnTerm Int]
    }
    deriving (Eq)
-- | A Pair of *composable* morphisms of the Contextual Category.
-- Such a constructed pair should always respect the condition
--   source lpart == target rpart
data Hom2C = Hom2C {
        lpart :: HomC,
        rpart :: HomC
    }
    deriving (Eq)

-- | Infix composition of morphisms, analogous to '(.)' for functions.
(<.>) :: HomC -> HomC -> HomC
g <.> f = comp $ Hom2C g f

infixr 9 <.>
-----
-- Structural rules
-----
-- | Identity morphism: every entry of the context is mapped to its own
-- deBruijn variable (0 for the head, counting outwards).
unit :: ObC -> HomC
unit (ObC obs) = HomC
    { source = obs
    , target = obs
    , morph  = map DnVar (zipWith const [0 ..] obs)
    }
-- | Composition of morphisms.
-- Each term of 'g' has the terms of 'f' substituted into it, so the
-- composite sends @source f@ through 'f' and then 'g'.
comp :: Hom2C -> HomC
comp (Hom2C g f) =
    -- pre-condition
    assert (target f == source g) $
    -- code
    HomC {
        source = source f,
        target = target g,
        morph = map (substDn $ morph f) (morph g)
    }
---- Dependent projection

-- | Drop the innermost type of a context.
-- Partial: 'tail' fails on the empty context.
ft :: ObC -> ObC
ft (ObC ob) = ObC $ tail ob
-- | Build a canonical projection morphism out of this object.
-- Sends @[A1,..,An]@ to @[A2,..,An]@; every variable is shifted up by
-- one to skip the dropped innermost entry.
-- ## TODO: proj has a special case on 'DnApp'
proj :: ObC -> HomC
proj (ObC obs) =
    -- pre-condition
    assert (not $ null obs) $
    -- code
    HomC {
        source = obs,
        target = tail obs,
        morph = zipWith (\_ i -> DnVar i) (tail obs) (iterate (+ 1) 1)
    }
-- | True if the given morphism is a section of the canonical projection.
-- It must extend its source context by exactly one term and act as the
-- identity on everything else.
isSection :: HomC -> Bool
isSection f =
    source f == tail (target f) &&
    tail (morph f) == morph (unit (ObC (source f)))
-- | Pullback the canonical projection 'proj' from object 'x' along 'f'
-- The pulled-back innermost type is the head of 'obs' with the terms of
-- 'f' substituted in, placed on top of @source f@.
pullback :: HomC -> ObC -> ObC
pullback f (ObC obs) =
    -- pre-condition
    assert (tail obs == target f) $
    -- code
    ObC $ (substUp (morph f) (head obs)) : (source f)
-- | The canonical morphism from the pullback @pullback f ob@ into 'ob':
-- keeps the freshly introduced innermost variable fixed and routes all
-- remaining variables through 'f' (shifted by one for the new binder).
q :: HomC -> ObC -> HomC
q f ob@(ObC obs) =
    assert (tail obs == target f) $
    let ObC fstar = pullback f ob
    in HomC {
        source = fstar,
        target = obs,
        morph = (DnVar 0) : (offset 1 $ morph f)
    }
-- helpers

-- | Apply a substitution (a list of terms indexed by deBruijn variable)
-- to a type-level term.
--
-- Fixes over the original:
--   * the 'UpPi' clause had no right-hand side at all (dangling @=@);
--   * 'UpPred' carries value-level terms, so they must be substituted
--     with 'substDn' (the original passed them to 'substUp', which does
--     not type-check);
--   * 'UpSigma', 'UpWType' and 'UpType' clauses were missing entirely.
-- Under each binder the substitution is extended with @DnVar 0@ and the
-- remaining terms are shifted, mirroring the convention used in 'q'.
substUp :: [DnTerm Int] -> UpTerm Int -> UpTerm Int
substUp s (UpVar i) =
    liftTerm $ s !! i
    -- ^ ## WRONG: 'DnApp' counts for two objects
substUp s (UpPred p vs) = UpPred p (map (substDn s) vs)
substUp s (UpPi a b) =
    UpPi (substUp s a) (substUp (DnVar 0 : incr s) b)
substUp s (UpSigma a b) =
    UpSigma (substUp s a) (substUp (DnVar 0 : incr s) b)
substUp s (UpWType a b) =
    UpWType (substUp s a) (substUp (DnVar 0 : incr s) b)
substUp _ UpType = UpType
-- | Apply a substitution to a value-level term.
--
-- Fixes over the original:
--   * the 'DnLambda' case (marked WRONG by the author) substituted under
--     the binder without extending the substitution; it now keeps
--     variable 0 and shifts the remaining terms with 'incr';
--   * the missing 'DnType', 'DnApp', 'DnPair', 'DnSplit', 'DnSup' and
--     'DnWRec' clauses are added, so the function is total.
substDn :: [DnTerm Int] -> DnTerm Int -> DnTerm Int
substDn s (DnVar i) =
    s !! i
    -- ## WRONG: 'DnApp' counts for two objects
substDn s (DnPred p vs) =
    DnPred p (map (substDn s) vs)
substDn s (DnType t) =
    DnType (substUp s t)
substDn s (DnLambda f) =
    -- under the binder: variable 0 stays, the substitution is shifted
    DnLambda (substDn (DnVar 0 : incr s) f)
substDn s (DnApp f x) =
    DnApp (substDn s f) (substDn s x)
substDn s (DnPair a b) =
    DnPair (substDn s a) (substDn s b)
substDn s (DnSplit f) =
    -- NOTE(review): if 'DnSplit' binds variables, this clause needs the
    -- same shifting treatment as 'DnLambda' — confirm the semantics
    DnSplit (substDn s f)
substDn s (DnSup a b) =
    DnSup (substDn s a) (substDn s b)
substDn s (DnWRec t f) =
    DnWRec (substUp s t) (substDn s f)
-- | Shift every variable occurring in the terms up by one.
incr :: [DnTerm Int] -> [DnTerm Int]
incr = offset 1

-- | Shift every variable occurring in the terms by 'n'.
offset :: Int -> [DnTerm Int] -> [DnTerm Int]
offset n = map $ fmap (+ n)
{-# INLINE[2] offset #-}

-- Fuse consecutive shifts into one traversal.
{-# RULES
"offset/offset" forall n m ts. offset n (offset m ts) = offset (n + m) ts
  #-}
-----
-- Dependent Functions
-----
-- ∏-FORM
-- | Fold the two innermost context entries into a single Pi type.
-- Partial: fails on contexts with fewer than two entries.
pi :: ObC -> ObC
pi (ObC (b:a:os)) = ObC $ (UpPi a b) : os
-- ∏-INTRO
-- b : [G,A] -> [G,A,B]
-- lambda b : [G] -> [G,Pi(A,B)]
-- | Abstract the head term of 'b' into a lambda.
-- Partial: the let patterns fail unless target/morph have the expected
-- shape of at least two entries.
lambda :: HomC -> HomC
lambda b =
    let upB:upA:gamma = target b
        f:_:bs = morph b
    in HomC {
        source = tail (source b),
        target = (UpPi upA upB) : gamma,
        morph = (DnLambda f) : bs
    }
-- ∏-ELIM
-- | Apply the Pi-typed term below the head to the argument at the head.
-- Partial: the let patterns fail unless the target context's second
-- entry is a Pi type.
app :: HomC -> HomC
app g =
    -- pre-condition: a == upA
    let a:(UpPi upA upB):gamma = target g
        x:f:morphs = morph g
    in HomC {
        source = source g,
        target = upB : upA : gamma,
        morph = (DnApp f x) : morphs
    }
-- k : [Г] -> [Г,∏(A,B)] , c : [Г] -> [Г,A]
-- | Binary application: apply the head term of 'k' (of Pi type) to the
-- head term of 'a'.  The asserts check that both morphisms share the
-- same base context and that the argument type matches the Pi domain.
app2 :: HomC -> HomC -> HomC
app2 k a =
    -- pre-conditions
    assert (source k == source a) $
    assert (let (UpPi _ _):g = target k in source k == g) $
    assert (let _:g = target a in source a == g) $
    assert (let (UpPi upA _):_ = target k
                upA':_ = target a
            in upA == upA') $
    -- code
    let (UpPi upA upB):gamma = target k
        f:_ = morph k
        x:morphs = morph a
    in HomC {
        source = gamma,
        target = upB : upA : gamma,
        morph = (DnApp f x) : morphs
    }
-----
-- W-Types
-----
-- W-FORM
-- | Fold the two innermost context entries into a single W-type.
-- Partial: fails on contexts with fewer than two entries.
w :: ObC -> ObC
w (ObC (b:a:os)) = ObC $ (UpWType a b) : os
-- W-INTRO
-- for each object [Г,A,B]
-- a map [Г,A,∏(B,p^*_B p^*_A W(A,B))] -> [Г,W(A,B)]
sup :: ObC -> HomC
sup (ObC (b:a:gamma)) =
    let ppw = UpWType a b -- ## TODO: should be a pullback of p^*_A, P^*_B
    in HomC {
        source = (UpPi b ppw):a:gamma,
        target = (UpWType a b):gamma,
        -- DnVar 1 is the label (of type A), DnVar 0 the subtree function
        morph = (DnSup (DnVar 1) (DnVar 0)):(offset 2 $ morph $ unit (ObC gamma))
    }
-- W-ELIM
-- for each map 'd', a map 'wrec' such that
-- wrec . sup = d . λ(wrec . app(...))
-- NOTE(review): unfinished stub — 'morph' is left empty, so the result
-- is not a valid morphism yet.
wrec :: HomC -> HomC
wrec d =
    let
    in HomC {
        source = tail $ target d,
        target = target d,
        morph = []
    }
-----
-- Validation
-----
-- | Check that the given object is valid in the given environment containing
-- bindings for named values.
-- validOb :: Env -> ObC -> Bool
-- | Check that the given morphism is valid in the given environment containing
-- bindings for named values.
-- validHom :: Env -> HomC -> Bool
-----
-- Instances
-----
-- | Map a function over every variable in a term.
-- The original instance was non-exhaustive: 'DnType', 'DnApp',
-- 'DnSplit', 'DnSup' and 'DnWRec' all crashed at runtime (and 'offset'
-- relies on this 'fmap', so shifting such terms crashed too).  The
-- local 'goUp' helper maps over the embedded 'UpTerm's, since no
-- @Functor UpTerm@ instance exists in this module.
instance Functor DnTerm where
    fmap f = go
        where
            go (DnVar x) = DnVar (f x)
            go (DnPred a xs) = DnPred a (map go xs)
            go (DnType t) = DnType (goUp t)
            go (DnLambda k) = DnLambda (go k)
            go (DnApp k x) = DnApp (go k) (go x)
            go (DnPair a b) = DnPair (go a) (go b)
            go (DnSplit k) = DnSplit (go k)
            go (DnSup a b) = DnSup (go a) (go b)
            go (DnWRec t k) = DnWRec (goUp t) (go k)

            goUp (UpVar x) = UpVar (f x)
            goUp (UpPred a xs) = UpPred a (map go xs)
            goUp (UpPi a b) = UpPi (goUp a) (goUp b)
            goUp (UpSigma a b) = UpSigma (goUp a) (goUp b)
            goUp (UpWType a b) = UpWType (goUp a) (goUp b)
            goUp UpType = UpType
-- | Render an object in quasi-quoter style.  Context entries are stored
-- innermost-first, so they are printed outermost-first.
instance Show ObC where
    show (ObC []) = "[obQ||]"
    show (ObC (o:[])) = "[obQ|" ++ showUpTerm o ++ "|]"
    show (ObC (o:os)) = "[obQ|" ++ showListWith showUpTerm os ++ ", " ++ showUpTerm o ++ "|]"
-- | Render a morphism as a judgement: source context followed by the
-- term : type pairs of the target.
instance Show HomC where
    show f =
        "[homQ|" ++ showListWith showUpTerm (source f) ++
        " :- " ++ showJudgList (zip (morph f) (target f)) ++
        "|]"
-- | Pretty-print a type-level term.
--
-- Fixes over the original:
--   * 'UpPred' arguments are value-level terms ('DnTerm'), so they must
--     be rendered with 'showDnTerm' (the original called 'showUpTerm',
--     which does not type-check);
--   * the missing 'UpWType' and 'UpType' clauses are added (renderings
--     chosen to match the ∏/∑ style — adjust if another notation is
--     preferred).
showUpTerm :: Show var => UpTerm var -> String
showUpTerm (UpVar v) = '$' : (show v)
showUpTerm (UpPred a vs) = case vs of
    [] -> a
    vs -> a ++ "(" ++ showListWith showDnTerm vs ++ ")"
showUpTerm (UpPi a b) =
    "∏[" ++ showUpTerm a ++ "]" ++ showUpTerm b
showUpTerm (UpSigma a b) =
    "∑[" ++ showUpTerm a ++ "]" ++ showUpTerm b
showUpTerm (UpWType a b) =
    "W[" ++ showUpTerm a ++ "]" ++ showUpTerm b
showUpTerm UpType = "Type"
-- | Pretty-print a value-level term.
-- The original was non-exhaustive; clauses for 'DnType', 'DnApp',
-- 'DnSplit', 'DnSup' and 'DnWRec' are added (renderings chosen to match
-- the existing style — adjust if another notation is preferred).
showDnTerm :: Show var => DnTerm var -> String
showDnTerm (DnVar v) = '$' : (show v)
showDnTerm (DnPred a vs) = case vs of
    [] -> a
    vs -> a ++ "(" ++ showListWith showDnTerm vs ++ ")"
showDnTerm (DnType t) = showUpTerm t
showDnTerm (DnLambda k) =
    "λ." ++ showDnTerm k
showDnTerm (DnApp f x) =
    showDnTerm f ++ "(" ++ showDnTerm x ++ ")"
showDnTerm (DnPair a b) =
    "(" ++ showDnTerm a ++ "," ++ showDnTerm b ++ ")"
showDnTerm (DnSplit k) =
    "split " ++ showDnTerm k
showDnTerm (DnSup a b) =
    "sup(" ++ showDnTerm a ++ "," ++ showDnTerm b ++ ")"
showDnTerm (DnWRec t k) =
    "wrec[" ++ showUpTerm t ++ "]" ++ showDnTerm k
-- | Render a list with the given element renderer, comma-separated.
-- Lists here are stored innermost-first, so elements are deliberately
-- emitted in reverse (outermost-first) order.
showListWith :: (v -> String) -> [v] -> String
showListWith s [] = ""
showListWith s (v:[]) = s v
showListWith s (v:vs) = showListWith s vs ++ ", " ++ s v
-- | Render a list of (term, type) pairs as "term:type" judgements,
-- in reverse (outermost-first) order, matching 'showListWith'.
showJudgList :: (Show var) => [(DnTerm var,UpTerm var)] -> String
showJudgList [] = ""
showJudgList ((trm,typ):[]) = showDnTerm trm ++ ":" ++ showUpTerm typ
showJudgList ((trm,typ):js) = showJudgList js ++ ", " ++ showDnTerm trm ++ ":" ++ showUpTerm typ
|
DrNico/rhodium
|
tools/rhc-strap/Rhodium/Context.hs
|
Haskell
|
mit
| 8,149
|
{-
{-
Nested comment
-}
-- Note: still commented
fibs :: [Int]
fibs = 1 : 1 : zipWith (+) fibs (tail fibs)
-}
main :: IO ()
main = print [1 ..]
|
cgag/loc
|
tests/data/nested-comments.hs
|
Haskell
|
mit
| 147
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TemplateHaskell #-}
module Ringo.ArgParser (ProgArgs(..), parseArgs) where
import qualified Data.Text as Text
import qualified Distribution.Package as P
import qualified Distribution.PackageDescription as P
import qualified Distribution.CurrentPackageDescription as P
import qualified Distribution.Text as DText
import Data.List (intercalate)
import Options.Applicative
import Ringo.Types
-- | Parsed command-line arguments of the program.
data ProgArgs = ProgArgs
  { progSettings  :: Settings  -- ^ all tunable generator settings
  , progInputFile :: FilePath  -- ^ input description file
  , progOutputDir :: FilePath  -- ^ directory to write generated output to
  } deriving (Eq, Show)
-- | Parser for all generator settings.  Each option defaults to the
-- corresponding field of 'defSettings' (brought into scope with
-- RecordWildCards) and advertises that default in @--help@.
--
-- Fixes: typos in user-facing help text — "calulations" ->
-- "calculations", "coalease" -> "coalesce", "foriegn" -> "foreign".
settingsParser :: Parser Settings
settingsParser = let Settings {..} = defSettings
  in Settings
     <$> (Text.pack <$> strOption (long "dim-prefix"
                                   <> short 'd'
                                   <> value (Text.unpack settingDimPrefix)
                                   <> showDefault
                                   <> help "Prefix for dimension tables"))
     <*> (Text.pack <$> strOption (long "fact-prefix"
                                   <> short 'f'
                                   <> value (Text.unpack settingFactPrefix)
                                   <> showDefault
                                   <> help "Prefix for fact tables"))
     <*> option auto (let timeunits = map show [Second ..]
                      in long "timeunit"
                         <> short 't'
                         <> value settingTimeUnit
                         <> showDefault
                         <> completeWith timeunits
                         <> help ("Time unit granularity for fact tables. Possible values: "
                                  ++ intercalate ", " timeunits))
     <*> minorOption "avg-count-col-suffix"
                     settingAvgCountColumSuffix
                     "Suffix for average count columns"
     <*> minorOption "avg-sum-col-suffix"
                     settingAvgSumColumnSuffix
                     "Suffix for average sum columns"
     <*> minorOption "dim-id-col-name"
                     settingDimTableIdColumnName
                     "Name of dimension table id columns"
     <*> minorOption "dim-id-col-type"
                     settingDimTableIdColumnType
                     "Type of dimension table id columns"
     <*> minorOption "fact-count-col-type"
                     settingFactCountColumnType
                     "Type of fact table count columns"
     <*> option auto (long "fact-count-distinct-error-rate"
                      <> hidden
                      <> value settingFactCountDistinctErrorRate
                      <> showDefault
                      <> help "Error rate for count distinct calculations")
     <*> minorOption "fact-infix"
                     settingFactInfix
                     "Infix for fact tables"
     <*> minorOption "dependencies-json-file"
                     settingDependenciesJSONFileName
                     "Name of the output dependencies json file"
     <*> minorOption "facts-json-file"
                     settingFactsJSONFileName
                     "Name of the output facts json file"
     <*> minorOption "dimensions-json-file"
                     settingDimensionJSONFileName
                     "Name of the output dimensions json file"
     <*> option auto (long "foreign-key-id-coalesce-val"
                      <> hidden
                      <> value settingForeignKeyIdCoalesceValue
                      <> showDefault
                      <> help "Value to coalesce missing foreign key ids to, in fact tables")
     <*> minorOption "tablename-suffix-template"
                     settingTableNameSuffixTemplate
                     "Suffix template for table names in SQL"
  where
    -- shared shape for the less prominent, hidden Text-valued options
    minorOption longDesc defValue helpTxt =
      Text.pack <$> strOption (long longDesc
                               <> hidden
                               <> value (Text.unpack defValue)
                               <> showDefault
                               <> help helpTxt)
-- | Full argument parser: all settings options followed by the
-- positional input file and output directory.
progArgsParser :: Parser ProgArgs
progArgsParser =
  ProgArgs
  <$> settingsParser
  <*> argument str (metavar "INPUT"
                    <> action "file"
                    <> help "Input file")
  <*> argument str (metavar "OUTPUT"
                    <> action "directory"
                    <> help "Output directory")
-- | Program name, read from the cabal package description at compile
-- time via Template Haskell.
progName :: String
progName = $(P.getField (DText.display . P.pkgName . P.package))

-- | @--version@ flag printing the program name and cabal version.
versionParser :: Parser (a -> a)
versionParser = infoOption (progName ++ " " ++ version)
                  (long "version"
                   <> help "Print version information")
  where
    version = $(P.getField (DText.display . P.pkgVersion . P.package))
-- | Run the command-line parser.  Help text, version flag and all
-- descriptive metadata are pulled from the cabal file at compile time.
parseArgs :: IO ProgArgs
parseArgs = execParser $
  info (helper <*> versionParser <*> progArgsParser)
       (fullDesc
        <> progDesc $(P.getField P.description)
        <> header (progName ++ " - " ++ $(P.getField P.synopsis))
        <> footer ("© " ++ $(P.getField P.copyright) ++ ". " ++ $(P.getField P.homepage)))
|
quintype/ringo
|
app/Ringo/ArgParser.hs
|
Haskell
|
mit
| 5,122
|
{-# LANGUAGE PatternSynonyms #-}
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
{-# OPTIONS_GHC -fno-warn-unused-imports #-}
module JSDOM.Generated.MediaKeySystemAccess
(getConfiguration, getConfiguration_, createMediaKeys,
createMediaKeys_, getKeySystem, MediaKeySystemAccess(..),
gTypeMediaKeySystemAccess)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MediaKeySystemAccess.getConfiguration Mozilla MediaKeySystemAccess.getConfiguration documentation>
getConfiguration ::
                 (MonadDOM m) =>
                   MediaKeySystemAccess -> m MediaKeySystemConfiguration
getConfiguration self =
  liftDOM (fromJSValUnchecked =<< (self ^. jsf "getConfiguration" ()))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MediaKeySystemAccess.getConfiguration Mozilla MediaKeySystemAccess.getConfiguration documentation>
getConfiguration_ :: (MonadDOM m) => MediaKeySystemAccess -> m ()
getConfiguration_ self =
  liftDOM . void $ self ^. jsf "getConfiguration" ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MediaKeySystemAccess.createMediaKeys Mozilla MediaKeySystemAccess.createMediaKeys documentation>
createMediaKeys ::
                (MonadDOM m) => MediaKeySystemAccess -> m MediaKeys
createMediaKeys self =
  liftDOM
    (fromJSValUnchecked =<< readPromise =<< (self ^. jsf "createMediaKeys" ()))
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MediaKeySystemAccess.createMediaKeys Mozilla MediaKeySystemAccess.createMediaKeys documentation>
createMediaKeys_ :: (MonadDOM m) => MediaKeySystemAccess -> m ()
createMediaKeys_ self =
  liftDOM . void $ self ^. jsf "createMediaKeys" ()
-- | <https://developer.mozilla.org/en-US/docs/Web/API/MediaKeySystemAccess.keySystem Mozilla MediaKeySystemAccess.keySystem documentation>
getKeySystem ::
             (MonadDOM m, FromJSString result) =>
               MediaKeySystemAccess -> m result
getKeySystem self =
  liftDOM (fromJSValUnchecked =<< (self ^. js "keySystem"))
|
ghcjs/jsaddle-dom
|
src/JSDOM/Generated/MediaKeySystemAccess.hs
|
Haskell
|
mit
| 2,815
|
{-# LANGUAGE CPP #-}
-- | Thin re-export shim: exposes the generated RTCDataChannel bindings
-- when compiling with the JavaScript FFI (GHCJS) or without WebKit;
-- otherwise the module is intentionally empty.
module GHCJS.DOM.RTCDataChannel (
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
  module GHCJS.DOM.JSFFI.Generated.RTCDataChannel
#else
#endif
  ) where
#if (defined(ghcjs_HOST_OS) && defined(USE_JAVASCRIPTFFI)) || !defined(USE_WEBKIT)
import GHCJS.DOM.JSFFI.Generated.RTCDataChannel
#else
#endif
|
plow-technologies/ghcjs-dom
|
src/GHCJS/DOM/RTCDataChannel.hs
|
Haskell
|
mit
| 355
|
-- | Central alias for the string type used across the project, so the
-- backing implementation can be swapped in one place.
module Str where
import qualified Data.Text as DT
-- type Str = BS.ByteString
type Str = DT.Text
|
dancor/melang
|
src/Str.hs
|
Haskell
|
mit
| 99
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE MultiWayIf #-}
module RWPAS.Level.Type
( Level()
-- * Level construction
, generateLevel
, generateLevelM
, emptyLevel
, roomLevel
, portalOnRightSideLevel
, addPortal
, terrainFeature
, TerrainFeature(..)
, levelName
, levelSize
, impassable
-- * Items
, itemByCoordinates
-- * Decorations
, Decoration(..)
, decorationByCoordinate
-- * Actor handling
--
-- Some of these functions are in RWPAS.World instead that's a bit higher
-- level than these.
, eachActor
, getMemoryAt
, insertActor
, tryMoveActor
, removeActor
, actorById
, actorByCoordinates
, updateActorMemories
-- * Types, coordinates, sizes
, LevelCoordinates
, Size
, LevelID
, diagonalDistance
-- * Decorations
, removeDecorations
-- * Computing field of view
, levelFieldOfView
-- * Stepping
, step
, StepResult(..) )
where
import Control.Lens hiding ( Level )
import Control.Monad.State.Strict
import Data.Data
import Data.Foldable
import Data.IntMap ( IntMap )
import qualified Data.IntMap as IM
import Data.IntSet ( IntSet )
import qualified Data.IntSet as IS
import Data.Map.Strict ( Map )
import qualified Data.Map.Strict as M
import Data.Maybe
import Data.SafeCopy
import Data.Text ( Text )
import GHC.Generics
import Linear.V2
import RWPAS.Actor
import RWPAS.Direction
import RWPAS.FieldOfView
import RWPAS.Item
import RWPAS.SafeCopyOrphanInstances()
import RWPAS.TwoDimensionalVector
import RWPAS.WorldCoordinates
-- | Decoration overlaid on a level tile, possibly oriented.
data Decoration
  = Spikes !Direction8
  | BloodySpikes !Direction8
  | NotDecorated
  deriving ( Eq, Ord, Show, Read, Typeable, Data, Generic )
-- | Manual 'Enum' instance packing a decoration (and its direction)
-- into a small integer: 0 = none, 1-8 = spikes, 9-16 = bloody spikes.
-- 'toEnum' and 'fromEnum' must stay in sync.
instance Enum Decoration where
  toEnum x | x >= 1 && x <= 8 =
    let dir = toEnum (x-1) :: Direction8
     in Spikes dir
  toEnum x | x >= 9 && x <= 16 =
    let dir = toEnum (x-9) :: Direction8
     in BloodySpikes dir
  toEnum 0 = NotDecorated
  toEnum _ = error "toEnum (Decoration): invalid value"
  {-# INLINE toEnum #-}
  fromEnum (Spikes dir) = fromEnum dir + 1
  fromEnum (BloodySpikes dir) = fromEnum dir + 9
  fromEnum NotDecorated = 0
  {-# INLINE fromEnum #-}
-- | One level (map) of the world and everything placed on it.
data Level = Level
  { _terrain :: !Vector2D
    -- ^ terrain features stored as 'fromEnum' values
  , _decorations :: !(Map LevelCoordinates Decoration)
  , _items :: !(Map LevelCoordinates Item)
  , _portals :: !(IntMap Portal)
  , _portalKeys :: !(Map LevelCoordinates IntSet)
    -- ^ ids of the portals touching each coordinate
  , _actorKeys :: !(Map LevelCoordinates ActorID)
    -- ^ reverse index: coordinate -> actor standing there
  , _actors :: !(IntMap Actor)
  , _actorMemories :: !(Map ActorID (Map LevelCoordinates TerrainFeature))
    -- ^ terrain each actor remembers having seen
  , _levelName :: !Text }
  deriving ( Eq, Ord, Show, Typeable, Generic )
-- | Coordinates relative to some `Level`.
type LevelCoordinates = V2 Int

type LevelID = Int

-- | A one-dimensional doorway between two levels, lying along one of
-- the four cardinal axes (see the worked example above 'addPortal').
data Portal = Portal
  { _axis :: !Direction4
  , _targetLevel :: !LevelID
  , _targetLevelAxisTopPosition :: !Int
  , _targetLevelAxisPosition :: !Int
  , _portalLength :: !Int
  , _axisTopPosition :: !Int
  , _axisPosition :: !Int }
  deriving ( Eq, Ord, Show, Read, Typeable, Data, Generic )

-- | Describes the size of something.
type Size = V2 Int
-- | Kinds of terrain a tile can hold; stored in 'Level' via 'fromEnum'.
data TerrainFeature
  = Floor
  | Wall
  | Planks
  | PlanksFloor
  | Tree1
  | Tree2
  | Dirt
  | Grass
  | Rock -- ^ Same as `Wall` but completely black.
  deriving ( Eq, Ord, Show, Read, Typeable, Data, Generic, Enum )

type PortalID = Int
-- Derive lenses here
makeLenses ''Level
makeLenses ''Portal

-- SafeCopy instances for (versioned) serialization
deriveSafeCopy 0 'base ''Level
deriveSafeCopy 0 'base ''Portal
deriveSafeCopy 0 'base ''TerrainFeature
deriveSafeCopy 0 'base ''Decoration

-- | If there's no feature at some coordinate, what we should assume it is?
-- Used as the out-of-bounds/unknown default everywhere in this module.
defaultTerrainFeature :: TerrainFeature
defaultTerrainFeature = Rock
-- | What the given actor remembers about a coordinate, if anything.
getMemoryAt :: ActorID -> Level -> LevelCoordinates -> Maybe TerrainFeature
getMemoryAt aid level coords =
  level ^. actorMemories . at aid >>= M.lookup coords
{-
A portal example:
Portal (axis = DRight, portalLength = 3, axisPosition = 3, axisTopPosition = 2)
x is marked to portal map, y is marked if axis = DLeft. The portal is the line
between xs and ys.
1234
.... 1
.xy. 2
.xy. 3
.xy. 4
.... 5
-}
-- | Register a portal on the level: marks every coordinate the portal
-- touches in 'portalKeys' and stores the portal itself under its id.
addPortal :: Portal -> PortalID -> Level -> Level
addPortal portal portal_id = execState $ do
  case portal^.axis of
    -- for right/down the marked tile sits one step inside the axis position
    DRight -> for_ [0..portal^.portalLength-1] $ \offset ->
      set_key (portal^.axisPosition-1) (offset + portal^.axisTopPosition)
    DLeft -> for_ [0..portal^.portalLength-1] $ \offset ->
      set_key (portal^.axisPosition) (offset + portal^.axisTopPosition)
    DDown -> for_ [0..portal^.portalLength-1] $ \offset ->
      set_key (offset + portal^.axisTopPosition) (portal^.axisPosition-1)
    DUp -> for_ [0..portal^.portalLength-1] $ \offset ->
      set_key (offset + portal^.axisTopPosition) (portal^.axisPosition)
  portals.at portal_id .= Just portal
 where
  -- add this portal's id to the key set at (x, y)
  set_key x y =
    let pos = V2 x y
     in portalKeys.at pos %= Just . \case
          Nothing -> IS.singleton portal_id
          Just set -> IS.insert portal_id set
-- | Lens to the decoration at a coordinate.  Absent entries read as
-- 'NotDecorated'; writing 'NotDecorated' deletes the map entry so the
-- map only ever stores real decorations.
decorationByCoordinate :: LevelCoordinates -> Lens' Level Decoration
decorationByCoordinate coords = lens get_it set_it
 where
  get_it lvl = fromMaybe NotDecorated (lvl^.decorations.at coords)
  set_it lvl NotDecorated = lvl & decorations.at coords .~ Nothing
  set_it lvl x = lvl & decorations.at coords .~ Just x
{-# INLINE decorationByCoordinate #-}
-- | Generate a level with a generator function.
-- The generator is called once per (x, y) tile of the w*h terrain.
generateLevel :: Text -> Int -> Int -> (Int -> Int -> TerrainFeature) -> Level
generateLevel name w h generator = (emptyLevel name)
  { _terrain = generate w h $ \x y -> fromIntegral $ fromEnum $ generator x y }

-- | Monadic variant of 'generateLevel'.
generateLevelM :: Monad m
               => Text
               -> Int
               -> Int
               -> (Int -> Int -> m TerrainFeature)
               -> m Level
generateLevelM name w h generator = do
  generated <- generateM w h $ \x y -> fromIntegral . fromEnum <$> generator x y
  return (emptyLevel name) { _terrain = generated }
-- | A completely empty level.
-- The terrain is a single 'defaultTerrainFeature' tile; every other
-- collection starts out empty.
emptyLevel :: Text -> Level
emptyLevel name = Level { _terrain = generate 1 1 $ \_ _ -> fromIntegral $ fromEnum defaultTerrainFeature
                        , _actors = mempty
                        , _actorMemories = mempty
                        , _actorKeys = mempty
                        , _items = mempty
                        , _portals = mempty
                        , _portalKeys = mempty
                        , _decorations = mempty
                        , _levelName = name }
-- | Merge freshly seen terrain into an actor's memories; newly seen
-- features take precedence over previously remembered ones.
updateActorMemories :: ActorID -> M.Map LevelCoordinates TerrainFeature -> Level -> Level
updateActorMemories aid memories lvl =
  lvl & actorMemories . at aid %~ Just . maybe memories (M.union memories)
-- | Same as `roomLevel` but adds a portal to the right side of the room.
--
-- The portal leads to the left side of the room. Pass the same `LevelID` as
-- the level itself.
-- Two portals are installed: 'pid' on the right wall and its
-- counterpart 'pid2' on the left wall, each targeting the other side.
portalOnRightSideLevel :: Size -> PortalID -> PortalID -> LevelID -> Level
portalOnRightSideLevel sz@(V2 w h) pid pid2 lid =
  let initial_level = roomLevel sz
   in addPortal Portal { _axis = DLeft
                       , _targetLevel = lid
                       , _targetLevelAxisTopPosition = 1
                       , _targetLevelAxisPosition = w-1
                       , _portalLength = h-1
                       , _axisTopPosition = 1
                       , _axisPosition = 1 }
                pid2 $
      addPortal Portal { _axis = DRight
                       , _targetLevel = lid
                       , _targetLevelAxisTopPosition = 1
                       , _targetLevelAxisPosition = 1
                       , _portalLength = h-1
                       , _axisTopPosition = 1
                       , _axisPosition = w-1 }
               pid
               initial_level
-- | A level that just has a single rectangular room. The walkable area is
-- sized according to the given coordinates, with (1, 1) being the top-left
-- corner of the room and (0, 0) is top-left wall.
roomLevel :: Size -> Level
roomLevel (V2 w h) = Level { _terrain = makeOneRoom w h
                           , _actors = mempty
                           , _actorKeys = mempty
                           , _actorMemories = mempty
                           , _items = mempty
                           , _portals = mempty
                           , _portalKeys = mempty
                           , _decorations = mempty
                           , _levelName = "Rectangular Room" }
 where
  -- border walls all around, plus a partial dividing wall down the middle
  makeOneRoom w h = generate (w+1) (h+1) $ \x y ->
    if | x == 0 || y == 0 || x == w || y == h
         -> fromIntegral $ fromEnum Wall
       | x == w `div` 2 && y > 5 && y < h-6
         -> fromIntegral $ fromEnum Wall
       | otherwise -> fromIntegral $ fromEnum Floor
-- | Terrain feature at some location; out-of-bounds reads fall back to
-- 'Rock'.  (Plain accessor, not a lens, despite the original comment.)
terrainFeature :: LevelCoordinates -> Level -> TerrainFeature
terrainFeature coords level =
  toEnum $ fromIntegral $ getAt coords (level^.terrain) (fromIntegral $ fromEnum Rock)
{-# INLINE terrainFeature #-}
-- | Is this terrain feature impossible to walk through?
-- Exactly the four floor-like features are passable.
impassable :: TerrainFeature -> Bool
impassable feature = feature `notElem` [Floor, Dirt, Grass, PlanksFloor]
-- | Sight cost of a terrain feature for field-of-view computation:
-- 0 = fully transparent, 1 = partially blocking (trees), 10000 = opaque.
seeThrough :: TerrainFeature -> Int
seeThrough feature = case feature of
  Floor       -> 0
  Dirt        -> 0
  Grass       -> 0
  PlanksFloor -> 0
  Tree1       -> 1
  Tree2       -> 1
  _           -> 10000
-- | Try to move an actor one step in a direction.  Returns the updated
-- source level and, when the step crossed a portal, the updated
-- destination level as well.  Yields 'Nothing' when the actor is
-- unknown, the target tile is impassable or occupied, or the
-- destination level cannot be loaded.
tryMoveActor :: ActorID -> Direction8 -> LevelID -> Level -> (LevelID -> Maybe Level) -> Maybe (Level, Maybe (LevelID, Level))
tryMoveActor aid dir source_level_id level get_level = do
  actor <- IM.lookup aid (level^.actors)
  let actor_pos = actor^.position
  case step dir actor_pos level of
    SameLevel new_actor_pos ->
      if impassable (terrainFeature new_actor_pos level) ||
         isJust (actorByCoordinates new_actor_pos level)
        then Nothing
        -- move: update position, re-key the reverse index
        else Just (level &
                   (actorKeys.at new_actor_pos .~ Just aid) .
                   (actors.at aid .~ Just (actor & position .~ new_actor_pos)) .
                   (actorKeys.at actor_pos .~ Nothing), Nothing)
    EnterLevel (WorldCoordinates new_actor_pos new_level_id) ->
      -- TODO: check any complications if new level is the same as old one
      -- (that is, portal goes to level itself)
      --
      -- Right now it should be safe because RWPAS.Control sets the latter
      -- level last, overwriting the operation of removing the actor from the
      -- level.
      case get_level new_level_id of
        Nothing -> Nothing
        Just new_level -> if impassable (terrainFeature new_actor_pos new_level) ||
                             isJust (actorByCoordinates new_actor_pos new_level)
          then Nothing
          -- remove from the source level, insert into the target level
          else Just (level & (actors.at aid .~ Nothing) .
                             (actorKeys.at actor_pos .~ Nothing)
                    ,Just (new_level_id
                          ,new_level &
                            (actors.at aid .~ Just (actor & position .~ new_actor_pos)) .
                            (actorKeys.at new_actor_pos .~ Just aid) .
                            (if source_level_id == new_level_id
                               then actorKeys.at actor_pos .~ Nothing
                               else id)))
-- | Outcome of 'step': either a coordinate on the same level, or a
-- world coordinate on the other side of a portal.
data StepResult
  = SameLevel !LevelCoordinates
  | EnterLevel !WorldCoordinates
  deriving ( Eq, Ord, Show, Read, Typeable, Data, Generic )
-- | Could a step in the given 8-way direction cross a portal lying on
-- the given 4-way axis?  A step counts when its direction has a
-- component along the portal's axis.
couldPotentiallyGoThroughPortal :: Direction8 -> Direction4 -> Bool
couldPotentiallyGoThroughPortal dir axis = case axis of
  DUp    -> dir `elem` [D8Up, D8UpLeft, D8UpRight]
  DDown  -> dir `elem` [D8Down, D8DownLeft, D8DownRight]
  DLeft  -> dir `elem` [D8Left, D8DownLeft, D8UpLeft]
  DRight -> dir `elem` [D8Right, D8DownRight, D8UpRight]
-- | Exchange the two components of a 'V2'.
swapV2 :: V2 a -> V2 a
swapV2 (V2 a b) = V2 b a
{-# INLINE swapV2 #-}
-- | Steps one unit to some direction in a level. Returns a `StepResult` that
-- tells if the step moved through a portal or stayed on the same level.
step :: Direction8 -> LevelCoordinates -> Level -> StepResult
step dir coords@(V2 x y) level =
  case level^.portalKeys.at coords of
    Nothing -> SameLevel local_target
    Just set | IS.null set -> SameLevel local_target
    Just set ->
      case findInSet doesItGoThrough set of
        Nothing -> SameLevel local_target
        Just portal ->
          -- translate into the target level's frame, take the step
          -- there, then fix the result up for the portal's orientation
          let initial_position_on_the_other_side =
                V2 (negate (portal^.axisPosition) + portal^.targetLevelAxisPosition)
                   (negate (portal^.axisTopPosition) + portal^.targetLevelAxisTopPosition)
              final_position_on_the_other_side =
                initial_position_on_the_other_side +
                direction8ToDelta dir
              fixed_position_on_the_other_side = case portal^.axis of
                DLeft -> final_position_on_the_other_side + V2 x y
                DRight -> final_position_on_the_other_side + V2 x y
                -- vertical portals swap the coordinate axes
                DUp -> swapV2 final_position_on_the_other_side + V2 y x
                DDown -> swapV2 final_position_on_the_other_side + V2 y x
           in EnterLevel (WorldCoordinates fixed_position_on_the_other_side (portal^.targetLevel))
 where
  local_target = direction8ToDelta dir + coords

  -- first portal in the id set accepted by the predicate
  findInSet fun set =
    let lst = IS.toList set
     in case find (\portal_id ->
                     case level^.portals.at portal_id of
                       Nothing -> False
                       Just ok -> fun ok) lst of
          Nothing -> Nothing
          Just pid -> level^.portals.at pid

  doesItGoThrough portal = couldPotentiallyGoThroughPortal dir (portal^.axis)
-- | A level coordinate paired with its offset in the viewer's own frame.
data AugmentedCoords = AugmentedCoords !LevelCoordinates !(V2 Int)

-- | Compute the field of view from a point, calling 'i_see' for every
-- visible tile.  The scan may walk through portals, so the callback
-- also receives the level (and its id) the tile was seen on; the
-- (level, level id) currently being traversed is threaded as state.
levelFieldOfView :: Monad m
                 => Int
                 -> Int
                 -> LevelCoordinates
                 -> Level
                 -> LevelID
                 -> (LevelID -> Maybe Level)
                 -> (LevelCoordinates -> V2 Int -> Level -> LevelID -> m ())
                 -> m ()
levelFieldOfView x_extent y_extent coords level level_id get_level i_see =
  void $ flip execStateT (level, level_id) $
    computeFieldOfView
      (\(AugmentedCoords coords offset_coords) -> do
          (lvl, lvl_id) <- get
          lift $ i_see coords offset_coords lvl lvl_id)
      (\(AugmentedCoords coords _) -> do
          (lvl, _) <- get
          return $ seeThrough (terrainFeature coords lvl))
      ByDirection
        { _leftD      = goThrough D8Left
        , _rightD     = goThrough D8Right
        , _upD        = goThrough D8Up
        , _downD      = goThrough D8Down
        , _leftupD    = goThrough D8UpLeft
        , _leftdownD  = goThrough D8DownLeft
        , _uprightD   = goThrough D8UpRight
        , _downrightD = goThrough D8DownRight }
      (AugmentedCoords coords coords)
      2
      x_extent
      y_extent
 where
  -- advance one tile, keeping the viewer-frame offset in sync
  goThrough dir8 (AugmentedCoords coords offset_coords) = do
    result <- goThrough' dir8 coords
    case result of
      Nothing -> return Nothing
      Just ok ->
        let new_offset_coords = offset_coords + direction8ToDelta dir8
         in return $ Just $ AugmentedCoords ok new_offset_coords

  -- on a portal crossing, switch the threaded state to the new level
  goThrough' dir8 coords = do
    (lvl, _) <- get
    case step dir8 coords lvl of
      SameLevel new_coords -> return $ Just new_coords
      EnterLevel (WorldCoordinates new_coords new_level_id) ->
        case get_level new_level_id of
          Nothing -> return Nothing
          Just new_level -> do
            put (new_level, new_level_id)
            return $ Just new_coords
{-# INLINE levelFieldOfView #-}
-- | Removes all decorations from a level.
removeDecorations :: Level -> Level
removeDecorations = decorations .~ mempty
-- | Lens to an actor using some actor ID.
actorById :: ActorID -> Lens' Level (Maybe Actor)
actorById aid = actors.at aid

-- | The actor standing at a coordinate, if any (via the reverse index).
actorByCoordinates :: LevelCoordinates -> Level -> Maybe ActorID
actorByCoordinates coords level = level^.actorKeys.at coords

-- | Width and height of the level's terrain.
levelSize :: Level -> V2 Int
levelSize lvl = V2 (viewWidth (lvl^.terrain)) (viewHeight (lvl^.terrain))
{-# INLINE levelSize #-}
-- | Returns the diagonal (Chebyshev) distance between two coordinates:
-- the number of 8-way steps needed to get from one to the other.
diagonalDistance :: V2 Int -> V2 Int -> Int
diagonalDistance (V2 ax ay) (V2 bx by) =
  max (abs (ax - bx)) (abs (ay - by))
{-# INLINE diagonalDistance #-}
-- | Inserts an actor somewhere on the level.
--
-- Actor already at the target position is overwritten, if there was anything
-- there.  Both the actor table and the coordinate reverse index are
-- updated.
insertActor :: ActorID -> Actor -> Level -> Level
insertActor aid actor =
  (actors.at aid .~ Just actor) .
  (actorKeys.at (actor^.position) .~ Just aid)
-- | Removes an actor from the level.
--
-- Does nothing if the actor is not in the level.  Clears both the actor
-- table entry and the coordinate reverse index.
removeActor :: ActorID -> Level -> Level
removeActor aid level =
  case level^.actors.at aid of
    Nothing -> level
    Just actor ->
      level & (actors.at aid .~ Nothing) .
              (actorKeys.at (actor^.position) .~ Nothing)
-- | A fold over all actors in a level, indexed by their `ActorID`.
eachActor :: IndexedFold ActorID Level Actor
eachActor = actors.ifolded

-- | Lens to the item at a coordinate, if any.
itemByCoordinates :: LevelCoordinates -> Lens' Level (Maybe Item)
itemByCoordinates x = items.at x
|
Noeda/rwpas
|
src/RWPAS/Level/Type.hs
|
Haskell
|
mit
| 18,371
|
{-# LANGUAGE FlexibleContexts #-}
module Ch27.SyslogUDPClient
where
import Data.Bits
import Network.Socket hiding (sendTo)
import Network.Socket.ByteString (sendTo)
import Ch27.SyslogTypes
import qualified Data.ByteString.Char8 as Strict
import qualified Data.ByteString.Lazy.Char8 as Lazy (toStrict)
import Data.ByteString.Builder (Builder, toLazyByteString, stringUtf8, charUtf8, byteString)
import Data.Monoid ((<>))
import Control.Monad.Except (MonadError, throwError, catchError, runExceptT)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Exception -- (IOException)
import Data.Typeable (Typeable)
-- import Control.Monad.Catch
-- import Control.Monad (join)
-- | Everything needed to send messages to one syslog server over UDP.
data SyslogHandle =
    SyslogHandle { slSocket :: Socket,
                   slProgram :: Strict.ByteString,
                   slAddress :: SockAddr
                 }

-- port number or name
type Port =
    Strict.ByteString

type ProgName =
    Strict.ByteString
-- | Resolve the host and port and open a UDP socket for syslog delivery.
-- NOTE(review): 'head addrinfos' relies on 'getAddrInfo' throwing
-- (rather than returning an empty list) when resolution fails — confirm.
openlog :: MonadIO m => HostName -> Port -> ProgName -> m SyslogHandle
openlog hostname port progname = do
    addrinfos <- liftIO $ getAddrInfo Nothing (Just hostname) (Just (Strict.unpack port))
    let
      serveraddr =
        head addrinfos
    sock <- liftIO $ socket (addrFamily serveraddr) Datagram defaultProtocol
    return $ SyslogHandle sock progname (addrAddress serveraddr)
-- | Errors specific to this syslog client.
data SyslogException
    = InvalidCode
    | Generic String
    deriving (Show, Typeable)

instance Exception SyslogException
-- Goal: catch both SyslogExceptions and IOExceptions
{-
1st try: use the MonadError type class.
Issue: the context can never be satisfied in the IO monad!
That's because there is a fundep in the MonadError class declaration:
class MonadError e m | m -> e
and the instance for IO is:
instance MonadError IOException IO
so the following won't compile:
foo :: SyslogHandle -> IO ()
foo h = syslog h fac prio "foo"
-}
-- | Encode and send one syslog message over the handle's UDP socket.
-- Fails via 'MonadError' with 'InvalidCode' when the facility/priority
-- pair has no numeric code, or with the wrapped 'IOException' when the
-- send fails.
syslog :: (MonadError SomeException m, MonadIO m)
       => SyslogHandle
       -> Facility
       -> Priority
       -> Strict.ByteString
       -> m ()
syslog syslogh fac pri msg =
  case toSyslogCode fac pri of
    Nothing ->
      throwError (toException InvalidCode)
    Just code -> do
      -- The code is passed explicitly: bindings in a 'where' clause
      -- cannot see names bound inside a case alternative.
      let msgBytes = Lazy.toStrict (toLazyByteString (sendmsgBuilder code))
      eres <- runExceptT $ sendstr msgBytes
      either (throwError . toException) return eres
  where
    -- Render "<code>program: message" as UTF-8.
    sendmsgBuilder :: Int -> Builder
    sendmsgBuilder code =
      charUtf8 '<' <> stringUtf8 (show code) <> charUtf8 '>'
        <> byteString (slProgram syslogh) <> stringUtf8 ": " <> byteString msg

    sendstr :: (MonadError IOException m, MonadIO m)
            => Strict.ByteString
            -> m ()
    sendstr omsg
      | Strict.null omsg = return ()
      | otherwise = do
          -- 'sendTo' may transmit only a prefix of the message; loop on
          -- the remainder instead of silently dropping it.  The
          -- IOException is captured with 'try' and re-raised through
          -- MonadError, so the caller's 'runExceptT' actually observes a
          -- Left; the previous catch-and-rethrow inside IO let the
          -- exception escape ExceptT entirely.
          res <- liftIO $ try (sendTo (slSocket syslogh) omsg (slAddress syslogh))
          case res :: Either IOException Int of
            Left ioe   -> throwError ioe
            Right sent -> sendstr (Strict.drop sent omsg)
{-
2nd try: do without the MonadError type class and manually handle failure
Issue: it's re-doing what MonadError already does under the hood
-}
-- | Variant of 'syslog' that avoids 'MonadError': failures are
-- reported in an explicit @Either SomeException ()@ result instead.
syslog' :: (MonadIO m)
        => Facility
        -> Priority
        -> Strict.ByteString
        -> SyslogHandle
        -> m (Either SomeException ())
syslog' fac pri msg h =
  case toSyslogCode fac pri of
    Nothing ->
      return . Left . toException $ InvalidCode
    Just code -> do
      -- 'catch' runs in IO; the annotated handler converts an
      -- IOException into the Left branch of the result.
      liftIO $ catch
        (Right <$> sendstr (Lazy.toStrict (toLazyByteString sendmsgBuilder)))
        (return . Left . toException :: MonadIO m
                                     => IOException
                                     -> m (Either SomeException ()))
      where
        -- Renders "<code>program: message" as UTF-8.
        sendmsgBuilder :: Builder
        sendmsgBuilder =
          charUtf8 '<' <> stringUtf8 (show code) <> charUtf8 '>'
            <> byteString (slProgram h) <> stringUtf8 ": " <> byteString msg

        -- Sends the whole buffer, looping on partial sends
        -- ('sendTo' returns the number of bytes actually written).
        sendstr :: (MonadIO m) => Strict.ByteString -> m ()
        sendstr omsg
          | Strict.null omsg =
              return ()
          | otherwise = do
              sent <- liftIO $ sendTo (slSocket h) omsg (slAddress h)
              sendstr (Strict.drop sent omsg)
              -- liftIO $ throwIO (userError "Boom") -- caught
              -- liftIO $ throwIO InvalidCode -- uncaught and the type system doesn't help
-- | Close the handle's UDP socket.
closelog :: MonadIO m => SyslogHandle -> m ()
closelog = liftIO . close . slSocket
-- | Combine a facility and a priority into the numeric syslog code:
-- the facility code shifted left three bits, OR'd with the priority.
-- Returns Nothing when the facility has no numeric code.
toSyslogCode :: Facility -> Priority -> Maybe Int
toSyslogCode fac prio =
  fmap combine (codeOfFac fac)
  where
    combine :: Int -> Int
    combine facCode = (facCode `shiftL` 3) .|. fromEnum prio
-- | Open a syslog handle, send a single message, and close the socket,
-- releasing it even when sending fails.
oneshotlog :: HostName -> Port -> ProgName
           -> Facility -> Priority -> Strict.ByteString
           -> IO (Either SomeException ())
oneshotlog host port prog fac prio msg =
  bracket acquire closelog (syslog' fac prio msg)
  where
    acquire = openlog host port prog
-- this doesn't compile: how can I solve it? Maybe ExceptT isn't the right tool for the job
-- bracket (openlog hn p pn) closelog (\h -> syslog h fac prio msg)
|
futtetennista/IntroductionToFunctionalProgramming
|
RWH/src/Ch27/SyslogUDPClient.hs
|
Haskell
|
mit
| 4,975
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE TypeFamilies #-}
module Apollo.Reflection
( Demote'
, Demote
, ReflectS(..)
, Proxiable(..)
, Proxy(..)
, KProxy(..)
) where
import Data.Proxy
-- | Type level function that selects a canonical type constructor for a given
-- kind. Generally, the selected type constructor consists of singletons,
-- taking each type in the kind @k@ to a distinct type.
type family Demote' (p :: KProxy k) :: k -> *

-- | The proxy argument can be inferred if a concrete type of kind @k@ is
-- available.
type Demote (a :: k) = Demote' ('KProxy :: KProxy k)

-- | Types in some kind @k@ that can be reflected into values.
class ReflectS (a :: k) where
  -- | Produce the singleton value corresponding to the type @a@.
  reflectS :: Proxy a -> Demote a a

-- | Class of type constructors that can be demoted to proxies.
--
-- Useful for converting various things into proxies without needing to write
-- explicit type synonyms.
class Proxiable (s :: k -> *) where
  -- | Forget the container's contents, keeping only its index type.
  proxy :: s a -> Proxy a

instance Proxiable [] where
  proxy _ = Proxy

instance Proxiable Maybe where
  proxy _ = Proxy
|
tsani/apollo
|
src/Apollo/Reflection.hs
|
Haskell
|
mit
| 1,071
|
module Util.GL where
import qualified Graphics.Rendering.OpenGL as GL
-- | Convert a 'Float' to OpenGL's 'GL.GLfloat'.  The top-level type
-- signature already fixes the instantiation of 'realToFrac'.
float2gl :: Float -> GL.GLfloat
float2gl = realToFrac
|
kaisellgren/ankka
|
src/Util/GL.hs
|
Haskell
|
mit
| 149
|
{-# LANGUAGE DeriveDataTypeable #-}
{-
Copyright (C) 2012-2017 Jimmy Liang, Michal Antkiewicz <http://gsd.uwaterloo.ca>
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
-}
module Main where
import Language.Clafer.IG.ClaferIG
import Language.Clafer.IG.ClaferModel
import Language.Clafer.IG.CommandLine
import Language.Clafer.IG.Solution
import Language.Clafer.IG.Sugarer
import Language.Clafer.ClaferArgs
import Language.Clafer.JSONMetaData
import Language.ClaferT
import Language.Clafer.Common
import Control.Monad
import Control.Monad.IO.Class
import Data.Either
import qualified Data.Map as Map
import Data.List (partition)
import Data.Maybe
import Data.IORef
import Prelude hiding (all)
import System.Console.CmdArgs
import System.Directory
import System.FilePath
-- | Default command-line arguments for claferIG.  The cmdArgs
-- annotations supply the flag names, defaults, and the help text shown
-- by @--help@.
claferIGArgsDef :: IGArgs
claferIGArgsDef = IGArgs {
    all = def &= opt "1" &= help "Saves all instances or a counterexample. Reads scopes from a `.cfr.scope` file or uses the provided global scope." &= typ "INTEGER",
    saveDir = def &= help "Specify the directory for storing saved files." &= typ "FILE",
    bitwidth = 4 &= help "Set the bitwidth for integers." &= typ "INTEGER", -- Default bitwidth is 4.
    maxInt = 7 &= help "Set the bitwidth for integers based on the largest required number. Overrides --bitwidth argument." &= typ "INTEGER",
    alloySolution = def &= help "Convert Alloy solution to a Clafer solution." &= typ "FILE",
    claferModelFile = def &= argPos 0 &= typ "FILE",
    useUids = False &= help "Use unique clafer names in the Clafer solution.",
    addTypes = False &= help "Add colon/reference types to the Clafer solution.",
    json = False &= help "Render solution as JSON (forces 'addUids').",
    flatten_inheritance_comp = def &= help "Flatten inheritance during compiling ('alloy' mode only)" &= name "i",
    no_layout_comp = def &= help "Don't resolve off-side rule layout during compiling" &= name "l",
    check_duplicates_comp = def &= help "Check duplicated clafer names during compiling" &= name "c",
    skip_resolver_comp = def &= help "Skip name resolution during compiling" &= name "f",
    scope_strategy_comp = Simple &= help "Use scope computation strategy during compiling: none, simple (default), or full." &= name "ss"
  } &= summary claferIGVersion &= program "claferig"
-- | Entry point: parse arguments, reconcile the bitwidth/maxInt
-- settings, and dispatch to Alloy-solution conversion or the
-- instance generator (retrying the latter on error).
main :: IO ()
main =
  do
    args' <- cmdArgs claferIGArgsDef
    let
      bw = bitwidth args'
      mi = maxInt args'
      -- maxInt overrides the bitwidth setting
      args'' = if (mi > allowedMaxInt bw)
                 then args' {bitwidth = requiredBitwidth mi}
                 else args'
    if (not $ null $ alloySolution args'')
      then do
        _ <- runAlloySolution args''
        return ()
      else if (json args'')
        then
          -- JSON output requires unique names (forces 'useUids').
          tryClaferIG (args'' { useUids = True })
        else
          tryClaferIG args''
  where
    -- Run claferIG; on failure print the errors and loop after the
    -- user presses enter, so a fixed model can be retried in place.
    tryClaferIG args3 =
      do
        try <- runClaferIG args3
        case try of
          Right r -> return r
          Left l -> do
            mapM_ putStrLn $ printError l
            putStrLn "Press enter to retry."
            void getLine
            tryClaferIG args3
-- | Compile the model, compute scopes/bitwidth, and either save all
-- instances (when @--all@ was given) or drop into the interactive REPL.
runClaferIG :: IGArgs -> IO (Either ClaferErrs ())
runClaferIG args' =
  runClaferIGT args' $ do
    let claferModelFileName = claferModelFile args'
    cModel <- liftIO $ strictReadFile claferModelFileName
    if null cModel
      then error "Cannot instantiate an empty model."
      else liftIO $ putStrLn "Compiling the Clafer model..."
    oldBw <- getBitwidth
    env <- getClaferEnv
    let ir = fst3 $ fromJust $ cIr env
    scopes <- getScopes
    -- Widen the bitwidth if the computed scopes require it.
    setBitwidth $ findNecessaryBitwidth ir oldBw $ map snd scopes
    solve
    case all args' of
      Just scope -> do
        -- copied from CommandLine LoadScopes command
        qNameMaps' <- getQNameMaps
        maybeUidScopes <- liftIO $ readCfrScopeFile qNameMaps' claferModelFileName
        case maybeUidScopes of
          Nothing -> do
            liftIO $ putStrLn "Using the provided global scope as a `.cfr-scope` file does not exist. Use the command `saveScopes` to create one."
            setGlobalScope scope
          Just uidScopes -> do
            let
              -- Entries with an empty uid are global-scope entries.
              (globalScopes, normalScopes) = partition (\(uid, _) -> null uid) uidScopes
              -- from the globalScopes, take the maximum
              globalScopeVals = map snd globalScopes
              globalScope = maximum globalScopeVals
              -- add the "this/" prefix
              normalScopesAlloy = map (\(uid, scope2) -> ("this/"++uid, scope2)) normalScopes
            setGlobalScope globalScope
            mapM_ (\(uid, val) -> setAlloyScope val uid) normalScopesAlloy
        -- end copied
        solve
        counterRef <- liftIO $ newIORef 1
        let saveDirectory = fromMaybe return $ underDirectory `liftM` saveDir args'
        saveAll (savePath claferModelFileName counterRef >>= saveDirectory)
        quit
      Nothing -> do
        liftIO $ putStrLn "Type 'h' for the list of available REPL commands\n"
        runCommandLine
-- | Convert an Alloy XML file into an instance in Clafer and print it.
-- Requires both the original Clafer model (for name/type information)
-- and the Alloy solution XML given via @--alloy-solution@.
runAlloySolution :: IGArgs -> IO (Either ClaferErrs ())
runAlloySolution args' =
  runClaferIGT args' $ do
    let claferModelFileName = claferModelFile args'
    cModel <- liftIO $ strictReadFile claferModelFileName
    when (cModel == "") $ error $ "Cannot convert Alloy solution without the Clafer model from which the instance was created.\n"
      ++ "Usage: claferIG [OPTIONS] <model.cfr> --alloy-solution=<instance.xml>\n"
    alloyInstance <- liftIO $ strictReadFile $ alloySolution args' -- It's an Alloy XML file in this case
    when (null alloyInstance) $ error $ "Provide an Alloy solution Alloy file name.\n"
      ++ "Usage: claferIG [OPTIONS] <model.cfr> --alloy-solution=<instance.xml>\n"
    env <- getClaferEnv
    let (_, genv', _) = fromJust $ cIr env
    let
      sMap = Map.empty
      uidIClaferMap' = uidClaferMap genv'
    -- Parse the XML, rebuild the Clafer model, sugar it, and print.
    liftIO $ putStrLn $ show $ (sugarClaferModel (useUids args') (addTypes args') uidIClaferMap' $ buildClaferModel $ parseSolution alloyInstance) $ sMap
-- | Produce the next auto-numbered save path for @file'@ (e.g.
-- @model.cfr.1.data@) and advance the counter for the following call.
savePath :: FilePath -> IORef Int -> IO FilePath
savePath file' counterRef = do
  counter <- readIORef counterRef
  writeIORef counterRef (succ counter)
  pure (concat [file', ".", show counter, ".data"])
-- | Ensure @dir@ exists (creating parents as needed) and return the
-- path of @file'@ inside it.
underDirectory :: FilePath -> FilePath -> IO FilePath
underDirectory dir file' =
  createDirectoryIfMissing True dir >> pure (joinPath [dir, file'])
-- | Repeatedly fetch the next instance and write each one to a freshly
-- generated file path, stopping at the first non-instance result
-- (e.g. unsat or no more instances).
saveAll :: IO FilePath -> ClaferIGT IO ()
saveAll nextFile =
  do
    file' <- liftIO nextFile
    liftIO $ createDirectoryIfMissing True $ takeDirectory file'
    solution <- next
    case solution of
      Instance{modelInstance = modelInstance'} -> do
        liftIO $ writeFile file' (show modelInstance')
        saveAll nextFile
      _ -> return ()
|
gsdlab/claferIG
|
src-cmd/Main.hs
|
Haskell
|
mit
| 8,717
|
module Y2016.M07.D21.Solution where
import Data.Graph
import Data.Tree (subForest, rootLabel)
import Data.Maybe (maybeToList, mapMaybe)
import Y2016.M07.D20.Solution
import Y2016.M07.D19.Exercise (figure2)
{--
you can get a Graph-Figure of figure from:
*Y2016.M07.D21.Solution> let (gr,fnPt,fnVertM) = graphit figure2 lineSegments
--}
-- | Compute /a/ path (not necessarily the shortest) between two
-- labelled vertices, returned as the string of visited labels.
-- NOTE(review): partial by design for this exercise — 'head' raises an
-- exception when a label is unknown or no path exists (see the
-- commentary below about the @'b' -> 'h'@ case).
pathing :: FigureC -> Char -> Char -> String
pathing fig@(gr,_,vf) start end = -- no path is "", so:
  let dest = head (maybeToList (vf end))
      branches = subForest (head (dfs gr (maybeToList (vf start))))
      nextnodes = map rootLabel branches
  in start : p' fig nextnodes dest
-- | Follow the first candidate root from which the destination is
-- reachable, emitting its label, until the destination itself is
-- reached.  Partial: 'head' errors when no candidate can reach @dest@.
p' :: FigureC -> [Vertex] -> Vertex -> String
p' fig@(gr,toNodef,toVertf) roots dest =
  let branch = head (filter (flip (path gr) dest) roots)
      (_,label,branches) = toNodef branch
  in label : if branch == dest then ""
             else p' fig (mapMaybe toVertf branches) dest
{--
What is _a_ pathing from 'a' to 'c'?
*Y2016.M07.D21.Solution> let fig = graphit figure2 lineSegments
*Y2016.M07.D21.Solution> pathing fig 'a' 'c' ~> "abgc"
-- What is _a_ path from 'b' to 't'?
*Y2016.M07.D21.Solution> pathing fig 'b' 't' ~> "bgcdt"
Not the shortest path, but it is a path. However:
*Y2016.M07.D21.Solution> pathing fig 'b' 'h' ~>
"b*** Exception: Prelude.head: empty list
Shows us that the above does not treat all edges as bidirectional, as it says
the network does not support a b -> h path, when the figure shows there is such
a path. There are several ways to address this issue. We'll tackle this tomorrow.
--}
|
geophf/1HaskellADay
|
exercises/HAD/Y2016/M07/D21/Solution.hs
|
Haskell
|
mit
| 1,574
|
import Data.List.Split
main = getContents >>= putStr . main'
-- | Apply 'getShell' to every input line.
main' :: String -> String
main' = unlines . map getShell . lines
-- | Return the last colon-separated field of a line (for /etc/passwd
-- rows this is the login shell).  A line without a colon is returned
-- unchanged, matching @last . splitOn ":"@.
getShell :: String -> String
getShell ln = reverse (takeWhile (/= ':') (reverse ln))
|
ryuichiueda/UspMagazineHaskell
|
Study1_Q2/q1_2_1.hs
|
Haskell
|
mit
| 200
|
{- |
== Getting Started
To get started with golden testing and this library, see
<https://ro-che.info/articles/2017-12-04-golden-tests Introduction to golden testing>.
This module provides a simplified interface. If you want more, see
"Test.Tasty.Golden.Advanced".
== Filenames
Filenames are looked up in the usual way; thus relative
names are relative to the process's current working directory.
It is common to run tests from the package's root directory (via @cabal
test@ or @cabal install --enable-tests@), so if your test files are under
the @tests\/@ subdirectory, your relative file names should start with
@tests\/@ (even if your @test.hs@ is itself under @tests\/@, too).
== Line endings
The best way to avoid headaches with line endings
(when running tests both on UNIX and Windows) is to treat your golden files
as binary, even when they are actually textual.
This means:
* When writing output files from Haskell code, open them in binary mode
(see 'openBinaryFile', 'withBinaryFile' and 'hSetBinaryMode'). This will
disable automatic @\\n -> \\r\\n@ conversion on Windows. For convenience, this
module exports 'writeBinaryFile' which is just like `writeFile` but opens
the file in binary mode. When using 'ByteString's note that
"Data.ByteString" and "Data.ByteString.Lazy" use binary mode for
@writeFile@, while "Data.ByteString.Char8" and "Data.ByteString.Lazy.Char8"
use text mode.
* Tell your VCS not to do any newline conversion for golden files. For
git check in a @.gitattributes@ file with the following contents (assuming
your golden files have @.golden@ extension):
>*.golden -text
On its side, tasty-golden reads and writes files in binary mode, too.
Why not let Haskell/git do automatic conversion on Windows? Well, for
instance, @tar@ will not do the conversion for you when unpacking a release
tarball, so when you run @cabal install your-package --enable-tests@, the
tests will be broken.
As a last resort, you can strip all @\\r@s from both arguments in your
comparison function when necessary. But most of the time treating the files
as binary does the job.
== Linking
The test suite should be compiled with @-threaded@ if you want to avoid
blocking any other threads while 'goldenVsFileDiff' and similar functions
wait for the result of the diff command.
== Windows limitations
When using 'goldenVsFileDiff' or 'goldenVsStringDiff' under Windows the exit
code from the diff program that you specify will not be captured correctly
if that program uses @exec@.
More specifically, you will get the exit code of the /original child/
(which always exits with code 0, since it called @exec@), not the exit
code of the process which carried on with execution after @exec@.
This is different from the behavior prescribed by POSIX but is the best
approximation that can be realised under the restrictions of the
Windows process model. See 'System.Process' for further details or
<https://github.com/haskell/process/pull/168> for even more.
-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
module Test.Tasty.Golden
(
-- * Functions to create a golden test
goldenVsFile
, goldenVsString
, goldenVsFileDiff
, goldenVsStringDiff
-- * Options
, SizeCutoff(..)
, DeleteOutputFile(..)
-- * Various utilities
, writeBinaryFile
, findByExtension
, createDirectoriesAndWriteFile
)
where
import Test.Tasty
import Test.Tasty.Golden.Advanced
import Test.Tasty.Golden.Internal
import Text.Printf
import qualified Data.ByteString.Lazy as LBS
import qualified Data.Text.Lazy as LT
import qualified Data.Text.Lazy.Encoding as LT
import System.IO
import System.IO.Temp
import qualified System.Process.Typed as PT
import System.Exit
import System.FilePath
import System.Directory
import System.PosixCompat.Files
import Control.Exception
import Control.Monad
import qualified Data.Set as Set
#if !MIN_VERSION_base(4,11,0)
import Data.Monoid
#endif
-- | Compare the output file's contents against the golden file's contents
-- after the given action has created the output file.
goldenVsFile
  :: TestName -- ^ test name
  -> FilePath -- ^ path to the «golden» file (the file that contains correct output)
  -> FilePath -- ^ path to the output file
  -> IO () -- ^ action that creates the output file
  -> TestTree -- ^ the test verifies that the output file contents is the same as the golden file contents
goldenVsFile name ref new act =
  goldenTest2
    name
    (readFileStrict ref)
    (act >> readFileStrict new)
    cmp
    upd
    del
  where
  -- Report only that the files differ; 'goldenVsFileDiff' shows a diff.
  cmp = simpleCmp $ printf "Files '%s' and '%s' differ" ref new
  -- Accepting a new golden result overwrites the golden file.
  upd = createDirectoriesAndWriteFile ref
  -- Clean up the output file after the test (subject to options).
  del = removeFile new
-- | Compare a given string against the golden file's contents.
goldenVsString
  :: TestName -- ^ test name
  -> FilePath -- ^ path to the «golden» file (the file that contains correct output)
  -> IO LBS.ByteString -- ^ action that returns a string
  -> TestTree -- ^ the test verifies that the returned string is the same as the golden file contents
goldenVsString name ref act =
  askOption $ \sizeCutoff ->
  goldenTest
    name
    (readFileStrict ref)
    act
    (cmp sizeCutoff)
    upd
  where
  cmp sizeCutoff x y = simpleCmp msg x y
    where
    -- Include the (possibly truncated) actual output in the failure
    -- message, limited by the --size-cutoff option.
    msg = printf "Test output was different from '%s'. It was:\n" ref <>
      unpackUtf8 (truncateLargeOutput sizeCutoff y)
  upd = createDirectoriesAndWriteFile ref
-- | Equality comparison for golden tests: 'Nothing' when the values
-- agree, otherwise the supplied error message.
simpleCmp :: Eq a => String -> a -> a -> IO (Maybe String)
simpleCmp msg expected actual
  | expected == actual = pure Nothing
  | otherwise          = pure (Just msg)
-- | Same as 'goldenVsFile', but invokes an external diff command.
--
-- See the notes at the top of this module regarding linking with
-- @-threaded@ and Windows-specific issues.
goldenVsFileDiff
  :: TestName -- ^ test name
  -> (FilePath -> FilePath -> [String])
    -- ^ function that constructs the command line to invoke the diff
    -- command.
    --
    -- E.g.
    --
    -- >\ref new -> ["diff", "-u", ref, new]
  -> FilePath -- ^ path to the golden file
  -> FilePath -- ^ path to the output file
  -> IO () -- ^ action that produces the output file
  -> TestTree
goldenVsFileDiff name cmdf ref new act =
  askOption $ \sizeCutoff ->
  goldenTest2
    name
    (getFileStatus ref >> return ())
    -- Use getFileStatus to check if the golden file exists. If the file
    -- doesn't exist, getFileStatus will throw an isDoesNotExistError that
    -- runGolden will handle by creating the golden file before proceeding.
    -- See #32.
    act
    -- Comparison is delegated entirely to the external diff command.
    (\_ _ -> runDiff (cmdf ref new) sizeCutoff)
    upd
    del
  where
  upd _ = readFileStrict new >>= createDirectoriesAndWriteFile ref
  del = removeFile new
-- | Same as 'goldenVsString', but invokes an external diff command.
--
-- See the notes at the top of this module regarding linking with
-- @-threaded@ and Windows-specific issues.
goldenVsStringDiff
  :: TestName -- ^ test name
  -> (FilePath -> FilePath -> [String])
    -- ^ function that constructs the command line to invoke the diff
    -- command.
    --
    -- E.g.
    --
    -- >\ref new -> ["diff", "-u", ref, new]
  -> FilePath -- ^ path to the golden file
  -> IO LBS.ByteString -- ^ action that returns a string
  -> TestTree
goldenVsStringDiff name cmdf ref act =
  askOption $ \sizeCutoff ->
  goldenTest
    name
    (readFileStrict ref)
    (act)
    (cmp sizeCutoff)
    upd
  where
  -- Name the temp file after the golden file so diff output is readable.
  template = takeBaseName ref <.> "actual"
  cmp sizeCutoff _ actBS = withSystemTempFile template $ \tmpFile tmpHandle -> do
    -- Write act output to temporary ("new") file
    LBS.hPut tmpHandle actBS >> hFlush tmpHandle
    let cmd = cmdf ref tmpFile
    diff_result :: Maybe String <- runDiff cmd sizeCutoff
    return $ flip fmap diff_result $ \diff ->
      printf "Test output was different from '%s'. Output of %s:\n" ref (show cmd) <> diff
  upd = createDirectoriesAndWriteFile ref
-- | Cap test output at the configured size, appending a hint about
-- @--accept@/@--size-cutoff@ when the output was cut.
truncateLargeOutput
  :: SizeCutoff
  -> LBS.ByteString
  -> LBS.ByteString
truncateLargeOutput (SizeCutoff limit) out
  | LBS.length out <= limit = out
  | otherwise =
      LBS.take limit out <> "<truncated>" <>
      "\nUse --accept or increase --size-cutoff to see full output."
-- | Like 'writeFile', but uses binary mode. (Needed only when you work
-- with 'String'.)  Binary mode disables the automatic @\\n -> \\r\\n@
-- conversion on Windows; see the module notes on line endings.
writeBinaryFile :: FilePath -> String -> IO ()
writeBinaryFile f txt = withBinaryFile f WriteMode (\hdl -> hPutStr hdl txt)
-- | Find all files in the given directory and its subdirectories that have
-- the given extensions.
--
-- It is typically used to find all test files and produce a golden test
-- per test file.
--
-- The returned paths use forward slashes to separate path components,
-- even on Windows. Thus if the file name ends up in a golden file, it
-- will not differ when run on another platform.
--
-- The semantics of extensions is the same as in 'takeExtension'. In
-- particular, non-empty extensions should have the form @".ext"@.
--
-- This function may throw any exception that 'getDirectoryContents' may
-- throw.
--
-- It doesn't do anything special to handle symlinks (in particular, it
-- probably won't work on symlink loops).
--
-- Nor is it optimized to work with huge directory trees (you'd probably
-- want to use some form of coroutines for that).
findByExtension
  :: [FilePath] -- ^ extensions
  -> FilePath -- ^ directory
  -> IO [FilePath] -- ^ paths
findByExtension extsList = search
  where
    wanted = Set.fromList extsList

    search dir = do
      allEntries <- getDirectoryContents dir
      let entries = [e | e <- allEntries, e /= ".", e /= ".."]
      fmap concat $ forM entries $ \e -> do
        -- Deliberately "/" rather than </>: forward slashes on every
        -- platform (see the Haddock above).
        let path = dir ++ "/" ++ e
        isDir <- doesDirectoryExist path
        if isDir
          then search path
          else pure [path | takeExtension path `Set.member` wanted]
-- | Like 'LBS.writeFile', but also create parent directories if they are
-- missing.
createDirectoriesAndWriteFile
  :: FilePath
  -> LBS.ByteString
  -> IO ()
createDirectoriesAndWriteFile path bs = do
  -- True: create missing parents as well.
  createDirectoryIfMissing True (takeDirectory path)
  LBS.writeFile path bs
-- | Force the evaluation of a lazily-produced bytestring.
--
-- This is important to close the file handles.
--
-- See <https://ro-che.info/articles/2015-05-28-force-list>.
forceLbs :: LBS.ByteString -> ()
forceLbs = LBS.foldr (\byte rest -> byte `seq` rest) ()
-- | Read a file and force every chunk before returning, so the handle
-- is closed and read errors surface here rather than later.
readFileStrict :: FilePath -> IO LBS.ByteString
readFileStrict path = do
  contents <- LBS.readFile path
  () <- evaluate (forceLbs contents)
  pure contents
-- | Decode a UTF-8 lazy bytestring into a 'String'.
unpackUtf8 :: LBS.ByteString -> String
unpackUtf8 bytes = LT.unpack (LT.decodeUtf8 bytes)
-- | Run an external diff command and return its (truncated) stdout on a
-- non-zero exit code, or 'Nothing' when the files compare equal.
-- stderr is inherited so diagnostics reach the test runner's console.
runDiff
  :: [String] -- ^ the diff command
  -> SizeCutoff
  -> IO (Maybe String)
runDiff cmd sizeCutoff =
  case cmd of
    [] -> throwIO $ ErrorCall "tasty-golden: empty diff command"
    prog : args -> do
      let
        procConf =
          PT.setStdin PT.closed
          . PT.setStderr PT.inherit
          $ PT.proc prog args
      (exitCode, out) <- PT.readProcessStdout procConf
      return $ case exitCode of
        ExitSuccess -> Nothing
        _ -> Just . unpackUtf8 . truncateLargeOutput sizeCutoff $ out
|
feuerbach/tasty-golden
|
Test/Tasty/Golden.hs
|
Haskell
|
mit
| 11,066
|
{-# htermination sin :: Float -> Float #-}
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/full_haskell/Prelude_sin_1.hs
|
Haskell
|
mit
| 43
|
-- Benchmark.hs
-- A set of (micro-) benchmarks for the Haskell
-- programming language
--
-- vim: ft=haskell sw=2 ts=2 et
--
{-# LANGUAGE OverloadedStrings #-}
module Main where
import System.CPUTime
import Fibonacci as Fib
import PerfectNumber as Pn
import qualified Mandelbrot as M
-- a helper fun for timing measurement
--timeIt :: (Fractional c) => (a -> b) -> a -> IO (b, c)
--timeIt action arg =
-- do startTime <- getCPUTime
-- res <- action arg
-- finishTime <- getCPUTime
-- return $ (res, fromIntegral (finishTime - startTime) / 1000000000000)
-- | Convert a CPU-time interval (picoseconds, as returned by
-- 'getCPUTime') into milliseconds.
toMSec :: (Fractional a) => Integer -> Integer -> a
toMSec tic toc = fromIntegral elapsedPs / 1.0e9
  where
    -- 10^9 picoseconds per millisecond
    elapsedPs = toc - tic
-- | Like 'toMSec', rendered as a 'String'.  The explicit 'Double'
-- annotation preserves the type that defaulting chose in the original
-- standalone definition.
toMSecStr :: Integer -> Integer -> String
toMSecStr tic toc =
  -- Reuse toMSec instead of duplicating the picosecond conversion.
  show (toMSec tic toc :: Double)
-- | Print the CPU time elapsed since @tic@ (a 'getCPUTime' reading).
putElapsedSince :: Integer -> IO ()
putElapsedSince tic = do
  toc <- getCPUTime
  putStrLn ("Elapsed: " ++ show (toMSec tic toc) ++ "msec.")
--timeIt' :: (Fractional c) => (a -> b) -> a -> IO (b, c)
--timeIt' f arg =
-- do tic <- getCPUTime
-- res <- f arg
-- toc <- getCPUTime
-- return (res, fromIntegral $ (toc - tic) / 1000000000000)
-- main entry point
-- | Run and time each benchmark, printing elapsed CPU time per case.
main :: IO ()
main =
  do putStrLn "Haskell Benchmarks"
     putStrLn "=================="
     putStrLn ""
     putStrLn "Fibonacci numbers:"
     putStrLn "------------------"
     tic0 <- getCPUTime
     putStrLn $ "fibNaive(35) = " ++ show (fibNaive 35)
     putElapsedSince tic0
     tic1 <- getCPUTime
     putStrLn $ "fib(35) = " ++ show (fib 35)
     putElapsedSince tic1
     tic2 <- getCPUTime
     putStrLn $ "fib(1000) = " ++ show (fib 1000)
     putElapsedSince tic2
     putStrLn ""
     putStrLn "Perfect numbers:"
     putStrLn "----------------"
     tic3 <- getCPUTime
     putStrLn $ "perfectNumbers(10000) = " ++ show (perfectNumbers 10000)
     putElapsedSince tic3
     putStrLn ""
     -- BUG FIX: this section previously repeated the "Perfect numbers:"
     -- heading although it benchmarks the Mandelbrot renderer.
     putStrLn "Mandelbrot set:"
     putStrLn "---------------"
     tic4 <- getCPUTime
     putStrLn $ "mandelbrot(640x480): "
       ++ show (length (M.mandelbrot 640 480 (-0.5) 0.0 (4/640)))
       ++ " pixel calculated"
     putElapsedSince tic4
     putStrLn ""
     putStrLn "Done!"
     putStrLn "Press <ENTER> to continue.."
     _ <- getLine
     return ()
|
kkirstein/proglang-playground
|
Haskell/src/Benchmark/Benchmark.hs
|
Haskell
|
mit
| 2,251
|
{-# LANGUAGE NoImplicitPrelude, TypeSynonymInstances, FlexibleInstances #-}
module IHaskell.Display.Diagrams (diagram) where
import ClassyPrelude
import System.Directory
import qualified Data.ByteString.Char8 as Char
import System.IO.Unsafe
import Diagrams.Prelude
import Diagrams.Backend.Cairo
import IHaskell.Display
-- | Render a Cairo diagram for the IHaskell frontend in both PNG and
-- SVG form, letting the notebook pick the representation it prefers.
instance IHaskellDisplay (Diagram Cairo R2) where
  display renderable = do
    png <- diagramData renderable PNG
    svg <- diagramData renderable SVG
    return $ Display [png, svg]
-- | Render a diagram to a temporary file in the requested format and
-- return it as IHaskell display data (base64 for PNG, inline text for
-- SVG).  The image is scaled to a fixed 300-unit height, preserving
-- the diagram's aspect ratio.
diagramData :: Diagram Cairo R2 -> OutputType -> IO DisplayData
diagramData renderable format = do
  switchToTmpDir
  -- Compute width and height.
  let w = width renderable
      h = height renderable
      aspect = w / h
      imgHeight = 300
      imgWidth = aspect * imgHeight
  -- Write the image.
  let filename = ".ihaskell-diagram." ++ extension format
  renderCairo filename (Height imgHeight) renderable
  -- Convert to base64.
  imgData <- readFile $ fpFromString filename
  let value = case format of
        PNG -> png (floor imgWidth) (floor imgHeight) $ base64 imgData
        SVG -> svg $ Char.unpack imgData
  return value
  where
    extension SVG = "svg"
    extension PNG = "png"
-- Rendering hint: fixes the type of a diagram expression to the Cairo
-- backend without changing the diagram itself.
diagram :: Diagram Cairo R2 -> Diagram Cairo R2
diagram d = d
|
aostiles/LiveHaskell
|
ihaskell-display/ihaskell-diagrams/IHaskell/Display/Diagrams.hs
|
Haskell
|
mit
| 1,295
|
{-# LANGUAGE PatternSynonyms, ForeignFunctionInterface, JavaScriptFFI #-}
module GHCJS.DOM.JSFFI.Generated.SVGTransformList
(js_clear, clear, js_initialize, initialize, js_getItem, getItem,
js_insertItemBefore, insertItemBefore, js_replaceItem, replaceItem,
js_removeItem, removeItem, js_appendItem, appendItem,
js_createSVGTransformFromMatrix, createSVGTransformFromMatrix,
js_consolidate, consolidate, js_getNumberOfItems, getNumberOfItems,
SVGTransformList, castToSVGTransformList, gTypeSVGTransformList)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName)
import GHCJS.DOM.JSFFI.Generated.Enums
-- NOTE(review): machine-generated FFI bindings for the DOM
-- SVGTransformList interface.  Each JS method/property gets a raw
-- @foreign import@ plus a 'MonadIO' wrapper that converts 'Nullable'
-- results to 'Maybe'.  Edit the generator, not this file.
foreign import javascript unsafe "$1[\"clear\"]()" js_clear ::
        SVGTransformList -> IO ()

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGTransformList.clear Mozilla SVGTransformList.clear documentation>
clear :: (MonadIO m) => SVGTransformList -> m ()
clear self = liftIO (js_clear (self))

foreign import javascript unsafe "$1[\"initialize\"]($2)"
        js_initialize ::
        SVGTransformList ->
          Nullable SVGTransform -> IO (Nullable SVGTransform)

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGTransformList.initialize Mozilla SVGTransformList.initialize documentation>
initialize ::
           (MonadIO m) =>
             SVGTransformList -> Maybe SVGTransform -> m (Maybe SVGTransform)
initialize self item
  = liftIO
      (nullableToMaybe <$> (js_initialize (self) (maybeToNullable item)))

foreign import javascript unsafe "$1[\"getItem\"]($2)" js_getItem
        :: SVGTransformList -> Word -> IO (Nullable SVGTransform)

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGTransformList.getItem Mozilla SVGTransformList.getItem documentation>
getItem ::
        (MonadIO m) => SVGTransformList -> Word -> m (Maybe SVGTransform)
getItem self index
  = liftIO (nullableToMaybe <$> (js_getItem (self) index))

foreign import javascript unsafe "$1[\"insertItemBefore\"]($2, $3)"
        js_insertItemBefore ::
        SVGTransformList ->
          Nullable SVGTransform -> Word -> IO (Nullable SVGTransform)

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGTransformList.insertItemBefore Mozilla SVGTransformList.insertItemBefore documentation>
insertItemBefore ::
                 (MonadIO m) =>
                   SVGTransformList ->
                     Maybe SVGTransform -> Word -> m (Maybe SVGTransform)
insertItemBefore self item index
  = liftIO
      (nullableToMaybe <$>
         (js_insertItemBefore (self) (maybeToNullable item) index))

foreign import javascript unsafe "$1[\"replaceItem\"]($2, $3)"
        js_replaceItem ::
        SVGTransformList ->
          Nullable SVGTransform -> Word -> IO (Nullable SVGTransform)

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGTransformList.replaceItem Mozilla SVGTransformList.replaceItem documentation>
replaceItem ::
            (MonadIO m) =>
              SVGTransformList ->
                Maybe SVGTransform -> Word -> m (Maybe SVGTransform)
replaceItem self item index
  = liftIO
      (nullableToMaybe <$>
         (js_replaceItem (self) (maybeToNullable item) index))

foreign import javascript unsafe "$1[\"removeItem\"]($2)"
        js_removeItem ::
        SVGTransformList -> Word -> IO (Nullable SVGTransform)

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGTransformList.removeItem Mozilla SVGTransformList.removeItem documentation>
removeItem ::
           (MonadIO m) => SVGTransformList -> Word -> m (Maybe SVGTransform)
removeItem self index
  = liftIO (nullableToMaybe <$> (js_removeItem (self) index))

foreign import javascript unsafe "$1[\"appendItem\"]($2)"
        js_appendItem ::
        SVGTransformList ->
          Nullable SVGTransform -> IO (Nullable SVGTransform)

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGTransformList.appendItem Mozilla SVGTransformList.appendItem documentation>
appendItem ::
           (MonadIO m) =>
             SVGTransformList -> Maybe SVGTransform -> m (Maybe SVGTransform)
appendItem self item
  = liftIO
      (nullableToMaybe <$> (js_appendItem (self) (maybeToNullable item)))

foreign import javascript unsafe
        "$1[\"createSVGTransformFromMatrix\"]($2)"
        js_createSVGTransformFromMatrix ::
        SVGTransformList ->
          Nullable SVGMatrix -> IO (Nullable SVGTransform)

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGTransformList.createSVGTransformFromMatrix Mozilla SVGTransformList.createSVGTransformFromMatrix documentation>
createSVGTransformFromMatrix ::
                             (MonadIO m) =>
                               SVGTransformList -> Maybe SVGMatrix -> m (Maybe SVGTransform)
createSVGTransformFromMatrix self matrix
  = liftIO
      (nullableToMaybe <$>
         (js_createSVGTransformFromMatrix (self) (maybeToNullable matrix)))

foreign import javascript unsafe "$1[\"consolidate\"]()"
        js_consolidate :: SVGTransformList -> IO (Nullable SVGTransform)

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGTransformList.consolidate Mozilla SVGTransformList.consolidate documentation>
consolidate ::
            (MonadIO m) => SVGTransformList -> m (Maybe SVGTransform)
consolidate self
  = liftIO (nullableToMaybe <$> (js_consolidate (self)))

foreign import javascript unsafe "$1[\"numberOfItems\"]"
        js_getNumberOfItems :: SVGTransformList -> IO Word

-- | <https://developer.mozilla.org/en-US/docs/Web/API/SVGTransformList.numberOfItems Mozilla SVGTransformList.numberOfItems documentation>
getNumberOfItems :: (MonadIO m) => SVGTransformList -> m Word
getNumberOfItems self = liftIO (js_getNumberOfItems (self))
|
manyoo/ghcjs-dom
|
ghcjs-dom-jsffi/src/GHCJS/DOM/JSFFI/Generated/SVGTransformList.hs
|
Haskell
|
mit
| 6,358
|
import Control.Applicative
-- | Read one line of whitespace-separated integers from stdin and
-- print their sum.
main :: IO ()
main = do
  line <- getLine
  let total = sum (map read (words line)) :: Integer
  print total
|
fabianm/olympiad
|
2015-2016/round-1/a0.hs
|
Haskell
|
apache-2.0
| 104
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Kubernetes.V1.NamespaceList where
import GHC.Generics
import Data.Text
import Kubernetes.Unversioned.ListMeta
import Kubernetes.V1.Namespace
import qualified Data.Aeson
-- | NamespaceList is a list of Namespaces.
data NamespaceList = NamespaceList
  { kind :: Maybe Text -- ^ Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#types-kinds
  , apiVersion :: Maybe Text -- ^ APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#resources
  , metadata :: Maybe ListMeta -- ^ Standard list metadata. More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#types-kinds
  , items :: [Namespace] -- ^ Items is the list of Namespace objects in the list. More info: http://releases.k8s.io/HEAD/docs/user-guide/namespaces.md
  } deriving (Show, Eq, Generic)

-- JSON codecs derived via GHC.Generics defaults (record field names
-- become the JSON object keys).
instance Data.Aeson.FromJSON NamespaceList
instance Data.Aeson.ToJSON NamespaceList
|
minhdoboi/deprecated-openshift-haskell-api
|
kubernetes/lib/Kubernetes/V1/NamespaceList.hs
|
Haskell
|
apache-2.0
| 1,441
|
module HelperSequences.A000005Spec (main, spec) where
import Test.Hspec
import HelperSequences.A000005 (a000005)
-- | Run the Hspec suite for sequence A000005.
main :: IO ()
main = hspec spec

-- | Checks 'a000005' against the first 20 reference values of the
-- OEIS entry.
spec :: Spec
spec = describe "A000005" $
  it "correctly computes the first 20 elements" $
    take 20 (map a000005 [1..]) `shouldBe` expectedValue where
      expectedValue = [1,2,2,3,2,4,2,4,3,4,2,6,2,4,4,5,2,6,2,6]
|
peterokagey/haskellOEIS
|
test/HelperSequences/A000005Spec.hs
|
Haskell
|
apache-2.0
| 365
|
-- |This module exports functions for testing the compiler on Flapjax source
-- files.
module Test.FileTests(compileFlapjaxFile,compileFlapjaxFilesIn) where
import Computation(Result(..),runComputation)
import Data.List(isSuffixOf)
import Flapjax.Compiler(compilePage,defaults)
import Html.Parser(parse)
import System.Directory
import System.IO
import Text.ParserCombinators.Parsec(ParseError,parseFromFile)
-- | @suffix \`suffixOf\` string@ is True when @string@ ends with
-- @suffix@. The empty list is a suffix of everything.
-- (Replaces a hand-rolled length/drop comparison with the standard
-- 'Data.List.isSuffixOf'.)
suffixOf:: Eq a => [a] -> [a] -> Bool
suffixOf = isSuffixOf
-- | Run @action@ on @path@ only when the path exists and names a
-- Flapjax (@.fj@) source file; any other path is reported and skipped.
-- (Drops the redundant @>> return ()@ -- 'putStr' already has type
-- @IO ()@ -- and the superfluous parentheses around the @if@.)
withFlapjaxFile:: (FilePath -> IO ()) -> FilePath -> IO ()
withFlapjaxFile action path = do
  exists <- doesFileExist path
  if exists && (".fj" `suffixOf` path)
    then action path
    else putStr ("Ignoring " ++ path ++ "\n")
-- | Parse the Flapjax/HTML file at @path@, compile it with the default
-- compiler options, and write the result beside it as @path.html@.
--
-- NOTE(review): the bind @(Success _ html) <- runComputation ...@ is a
-- partial pattern; a non-Success result raises a pattern-match failure
-- in IO rather than a readable error -- TODO confirm intended.
compileFlapjaxFile:: FilePath -> IO ()
compileFlapjaxFile path = do
  htmlOrError <- parseFromFile parse path
  (case htmlOrError of
     (Left err) -> putStr ("Parse error in " ++ path ++ ":\n"
                           ++ (show err) ++ "\n")
     (Right html) -> do (Success _ html) <- runComputation (compilePage defaults html)
                        writeFile (path ++ ".html") (show html))
-- | Compile every @.fj@ file directly inside directory @path@;
-- non-files and files with other extensions are reported and skipped
-- by 'withFlapjaxFile'.
compileFlapjaxFilesIn:: FilePath -> IO ()
compileFlapjaxFilesIn path = do
  files <- getDirectoryContents path
  putStr $ show (length files) ++ " items in " ++ path ++ "...\n"
  mapM_ (withFlapjaxFile compileFlapjaxFile) (map ((path ++ "/") ++) files)
|
brownplt/ovid
|
src/Test/FileTests.hs
|
Haskell
|
bsd-2-clause
| 1,531
|
-- 1258
import Data.Function(on)
import Data.List(group, permutations, sort, sortBy)
import Data.Ratio(denominator, numerator)
-- | Sentinel representing an invalid/failed arithmetic result.
invnum = -101010101

-- | Lift a binary operation so the 'invnum' sentinel propagates
-- through it (factors out the guard repeated in all four operators).
liftInv f a b
  | a == invnum || b == invnum = invnum
  | otherwise = f a b

-- | Sentinel-aware addition.
myadd a b = liftInv (+) a b

-- | Sentinel-aware subtraction.
mysub a b = liftInv (-) a b

-- | Sentinel-aware multiplication.
mymul a b = liftInv (*) a b

-- | Sentinel-aware division; division by zero also yields the sentinel.
mydiv a b
  | b == 0 = invnum
  | otherwise = liftInv (/) a b
-- | All strictly increasing 4-tuples of digits drawn from 1..9
-- (i.e. the C(9,4) = 126 unordered digit selections).
genDigits =
  [ (w, x, y, z)
  | w <- [1 .. 9]
  , x <- [w + 1 .. 9]
  , y <- [x + 1 .. 9]
  , z <- [y + 1 .. 9]
  ]
-- | All 64 ordered triples drawn from the four sentinel-aware operators.
genOps = [(x,y,z) | x <- ops, y <- ops, z <- ops]
  where ops = [myadd, mysub, mymul, mydiv]
-- The five distinct parenthesisations (binary tree shapes) of four
-- operands a,b,c,d combined with three operators x,y,z:
evalTree1 (x,y,z) (a,b,c,d) = x a (y b (z c d)) -- a . (b . (c . d))
evalTree2 (x,y,z) (a,b,c,d) = x a (y (z b c) d) -- a . ((b . c) . d)
evalTree3 (x,y,z) (a,b,c,d) = x (y (z a b) c) d -- ((a . b) . c) . d
evalTree4 (x,y,z) (a,b,c,d) = x (y a (z b c)) d -- (a . (b . c)) . d
evalTree5 (x,y,z) (a,b,c,d) = x (y a b) (z c d) -- (a . b) . (c . d)
-- | All 24 orderings of the four tuple components, as tuples again.
tuplePerms (a, b, c, d) = map asTuple (permutations [a, b, c, d])
  where
    asTuple [w, x, y, z] = (w, x, y, z)
    asTuple _ = error "tuplePerms: impossible (permutation changed length)"
-- | Every positive integral value obtainable from the four digits,
-- over all operator triples, operand permutations, and tree shapes;
-- only exact (denominator 1) results are kept.
evalAllTrees xs = [v | ops <- genOps, ys <- tuplePerms xs,
                       f <- fs, let r = f ops ys, denominator r == 1,
                       let v = numerator r, v >= 1]
  where fs = [evalTree1, evalTree2, evalTree3, evalTree4, evalTree5]

-- | Length of the run 1,2,3,... of consecutive expressible targets,
-- paired with the digit tuple that produced it.
runLength xs = (runLen, xs)
  where getRun = map head $ group $ sort $ evalAllTrees xs
        runLen = length $ takeWhile id $ zipWith (==) [1..] getRun

-- | The digits a<b<c<d with the longest consecutive run, packed as the
-- four-digit number abcd.
bestRunLength = numerator $ a*1000 + b*100 + c*10 + d
  where getBest = last $ sortBy (compare `on` fst) $ map runLength genDigits
        (a,b,c,d) = snd getBest
-- | Print the winning digit selection. (@putStrLn $ show@ is exactly
-- 'print'.)
main :: IO ()
main = print bestRunLength
|
higgsd/euler
|
hs/93.hs
|
Haskell
|
bsd-2-clause
| 1,601
|
{-# OPTIONS_GHC -fwarn-unused-imports #-}
module Instances () where
import Control.Monad.IO.Control
import Data.Enumerator
import Control.Monad.IO.Class
import Control.Exception.Control
-- | Lift control operations over 'Iteratee'.
--
-- NOTE(review): this targets the old monad-control 'MonadControlIO'
-- API together with the enumerator package, both long deprecated.
-- An iteratee is captured by running it to its step; an 'Error' step
-- is rethrown as an IO exception via 'throwIO', any other step is
-- repackaged unchanged.
instance MonadIO m => MonadControlIO (Iteratee a m) where
  liftControlIO f = liftIO $ f run'
    where
      run' iter = return $ Iteratee $ do
        stp <- runIteratee iter
        case stp of
          Error exc -> throwIO exc
          s -> return s
|
konn/konnfav
|
Instances.hs
|
Haskell
|
bsd-2-clause
| 451
|
-- vim: sw=2: ts=2: set expandtab:
{-# LANGUAGE TemplateHaskell,
ScopedTypeVariables,
FlexibleInstances,
MultiParamTypeClasses,
FlexibleContexts,
UndecidableInstances,
OverloadedStrings,
CPP #-}
-----------------------------------------------------------------------------
--
-- Module : Syntax
-- Copyright : BSD
-- License : AllRightsReserved
--
-- Maintainer : Ki Yung Ahn
-- Stability :
-- Portability :
--
-- |
--
-----------------------------------------------------------------------------
module Syntax
( KiName, Ki(..)
, TyName, Ty(..)
, TmName, Tm(..)
, IxMap
) where
import Unbound.LocallyNameless hiding (Con)
import GHC.Exts( IsString(..) )
-- Names are Unbound 'Name's indexed by the syntactic category they name.
type KiName = Name Ki
type TyName = Name Ty
type TmName = Name Tm

-- | Kinds: kind variables, the base kind, and kind arrows.
data Ki
  = KVar KiName
  | Star
  | KArr Ki Ki

-- | Types: variables, constructors, arrows, applications, and a
-- fixpoint former.
data Ty
  = TVar TyName
  | TCon TyName
  | TArr Ty Ty
  | TApp Ty Ty
  | TFix Ty -- Ty must be TCon or applications headed TCon

-- | Terms, with binders represented via Unbound's 'Bind'/'Embed'.
data Tm
  = Var TmName
  | Con TmName
  | In Integer Tm
  | MPr (Bind (TmName,TmName) Tm) -- Tm must be Alt
  | Lam (Bind TmName Tm)
  | App Tm Tm
  | Let (Bind (TmName, Embed Tm) Tm)
  | Alt (Maybe IxMap) [(TmName, Bind [TmName] Tm)]

-- | An index map binds a list of type variables over a result type.
type IxMap = Bind [TyName] Ty

-- Derive Unbound's generic representation for the syntax types.
$(derive [''Ki, ''Ty, ''Tm])

-- Allow writing names as string literals (with OverloadedStrings).
instance Rep a => IsString (Name a) where
  fromString = string2Name

-- Alpha and Subst instances are in the Parser module
-- in order to avoid mutually recursive module imports,
-- since Show class instances for Ki, Ty, Tm depend on LBNF functions.
|
kyagrd/micronax
|
src/Syntax.hs
|
Haskell
|
bsd-2-clause
| 1,567
|
-- | This module implements various functions that return a probabilistic result,
-- defined as unitary operators, and quantum computations.
module QIO.QIORandom where
import Data.Monoid as Monoid
import QIO.QioSyn
import QIO.Qdata
import QIO.Qio
import Data.Complex
-- | The exponentiated Pauli-X rotation by angle @r@; indexed by the
-- (row, col) booleans of the 2x2 matrix: cosine on the diagonal,
-- @-i sin@ off it.
rX :: RR -> Rotation
rX r (x,y) = if x==y then (cos (r/2):+0) else (0:+ (-(sin (r/2))))

-- | The exponentiated Pauli-Y rotation by angle @r@; the sign of the
-- real off-diagonal entry depends on which off-diagonal element is
-- requested.
rY :: RR -> Rotation
rY r (x,y) = if x==y then (cos (r/2):+0) else (s * sin (r/2):+0) where s = if x then 1 else -1
-- | Apply a Hadamard gate to each qubit in the list, composed in list
-- order (empty list yields the identity 'mempty').
hadamards :: [Qbit] -> U
hadamards = foldr (\qb rest -> uhad qb `mappend` rest) mempty
-- | The highest integer power of 2 that is less than or equal to @x@.
-- For @x < 1@ this degenerates to 1, matching the original behaviour.
pow2 :: Int -> Int
pow2 x = climb 1
  where
    climb p
      | p * 2 > x = p
      | otherwise = climb (p * 2)
-- | A rotation that, given a qubit in state 0, leaves it in a super-position of
-- 0 and 1, such that the probability of measuring as state 0 is \ps\.
--
-- NOTE(review): the guard @sqrt ps <= 1@ also rejects negative @ps@
-- only because @sqrt@ of a negative is NaN and @NaN <= 1@ is False --
-- TODO confirm relying on NaN comparison here is intended.
weightedU :: RR -> Qbit -> U
weightedU ps q | sqrt ps <= 1 = rot q (rX (2*(acos (sqrt ps))))
               | otherwise = error ("weightedU: Invalid Probability: " ++ show ps)

-- | A QIO computation that uses the "weightedU" unitary, to return a Bool that
-- has a probablity of \pf\ of being False.
weightedBool :: RR -> QIO Bool
weightedBool pf = do q <- mkQbit False
                     applyU (weightedU pf q)
                     measQ q
-- | Remove any leading 'False' entries from a list of booleans.
rlf :: [Bool] -> [Bool]
rlf = dropWhile not
-- | Removes any leading Falses from the (big-endian) bit-wise
-- representation of the given Int.
rlf_l :: Int -> [Bool]
rlf_l x = rlf (reverse (int2bits x))

-- | Returns the number of bits left after calling the "rlf_l" function,
-- i.e. the number of significant bits of the argument.
rlf_n :: Int -> Int
rlf_n x = length (rlf_l x)

-- | Given an Int \max\ that is the largest number required to be represented in
-- a quantum register, this function trims the front off the given register, to
-- leave the number of qubits required to represent \max\.
trim :: Int -> [Qbit] -> [Qbit]
trim max qbs = drop ((length qbs)-(rlf_n max)) qbs
-- | Given an Int \max\, and a quantum register in the state \max\, this function
-- defines a unitary operation that will leave the quantum register in state that
-- has equal probability of being measured in any of the states 0 to \max\.
randomU :: Int -> [Qbit] -> U
randomU max qbs = randomU' max (trim max qbs)
  where
    -- Recurse over the (trimmed) register: the head qubit is weighted
    -- by the fraction of the max+1 values lying below the leading
    -- power of two p; the tail is then prepared conditionally on the
    -- head's value (uniform below p, recursive above it).
    randomU' _ [] = mempty
    randomU' 0 _ = mempty
    randomU' max (q:qbs) = weightedU (fromIntegral ((max+1)-p)/fromIntegral (max+1)) q
                           `mappend`
                           condQ q (\x -> if x then (randomU (max-p) qbs)
                                               else (hadamards qbs))
      where p = pow2 max

-- | A quantum computation that will return a quantum integer in a state that
-- has equal probabilities of being measured in any of the state 0 to \max\.
randomQInt :: Int -> QIO QInt
randomQInt max = do
  qbs <- mkQ (reverse (int2bits max))
  applyU (randomU max qbs)
  return (QInt (reverse qbs))

-- | A quantum computation that will return a quantum integer in a state that
-- has equal probabilities of being measured in any of the state \min\ to \max\.
randomQIO :: (Int,Int) -> QIO Int
randomQIO (min,max) = do q <- randomInt (max-min)
                         return (q + min)

-- | A quantum computation that measures the outcome of "randomQInt"
randomInt :: Int -> QIO Int
randomInt max = do
  q <- randomQInt max
  measQ q

-- | A quantum computation that returns an integer that is equally likely to be
-- any number in the range 0 to \x\-1
random :: Int -> QIO Int
random x = randomInt (x-1)

-- | This function uses a Quantum computation to simulate the roll of a
-- (six-sided) dice: 'randomInt 5' yields 0..5, shifted to 1..6.
dice :: IO Int
dice = do
  x <- run (randomInt 5)
  return (x+1)

-- | This function simulates the given number of repetitions of dice rolls
dice_rolls :: Int -> IO [Int]
dice_rolls 0 = return []
dice_rolls y = do
  x <- dice
  xs <- dice_rolls (y-1)
  return (x:xs)
-- | Count the occurrences of each face 1..6 in the given rolls.
occs :: [Int] -> (Int,Int,Int,Int,Int,Int)
occs rolls = (count 1, count 2, count 3, count 4, count 5, count 6)
  where
    count face = length (filter (== face) rolls)
-- | Returns the number of occurences of 1 through 6 in the given number of
-- rolls of the dice.
probs' :: Int -> IO (Int,Int,Int,Int,Int,Int)
probs' x = do
  xs <- dice_rolls x
  return (occs xs)

-- | Returns the relative frequency of occurences of 1 through 6 after
-- the given number of rolls of the dice. (Each value is a fraction in
-- [0,1], not scaled by 100, despite the historical "percentage" name.)
probs :: Int -> IO (RR,RR,RR,RR,RR,RR)
probs x = do
  (a,b,c,d,e,f) <- probs' x
  return (fromIntegral a/x',fromIntegral b/x',fromIntegral c/x',fromIntegral d/x',fromIntegral e/x',fromIntegral f/x')
  where x' = fromIntegral x
|
alexandersgreen/qio-haskell
|
QIO/QIORandom.hs
|
Haskell
|
bsd-2-clause
| 4,890
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# OPTIONS -Wall #-}
module Main (
main
) where
import Control.Monad (foldM)
import Control.Monad.Trans (liftIO)
import qualified Data.ByteString.Char8 as C
import Data.Default
import qualified Data.IntMap as IM
import Data.Time.Clock (getCurrentTime)
import System.Console.GetOpt
import UI.Command
import Data.ZoomCache
import Data.ZoomCache.Dump
import Data.ZoomCache.Multichannel()
------------------------------------------------------------
-- | Runtime configuration assembled from the command-line options.
data Config = Config
  { noRaw :: Bool -- ^ when True, omit raw data from the output
  , channels :: Int -- ^ number of channels to generate
  , wmLevel :: Int -- ^ high-watermark level
  , track :: TrackNo -- ^ track number to write or read
  , intData :: Bool -- ^ generate Int samples instead of Double
  , variable :: Bool -- ^ generate variable-rate data
  , spec :: TrackSpec -- ^ track specification handed to zoom-cache
  }

instance Default Config where
  def = defConfig

-- | Defaults: single channel, track 1, fixed-rate Double data, no
-- delta encoding or zlib compression, track named "gen".
defConfig :: Config
defConfig = Config
  { noRaw = False
  , channels = 1
  , wmLevel = 1024
  , track = 1
  , intData = False
  , variable = False
  , spec = def { specDeltaEncode = False
               , specZlibCompress = False
               , specName = "gen"
               }
  }
-- | One parsed command-line flag; string payloads are converted in
-- 'processConfig'.
data Option = NoRaw
            | Channels String
            | Watermark String
            | Track String
            | Delta
            | ZLib
            | Variable
            | IntData
            | Rate String
            | Label String
  deriving (Eq)

-- | The option table used by getOpt (currently identical to
-- 'genOptions').
options :: [OptDescr Option]
options = genOptions

-- | Flag descriptors for the "gen" command.
genOptions :: [OptDescr Option]
genOptions =
  [ Option ['z'] ["no-raw"] (NoArg NoRaw)
      "Do NOT include raw data in the output"
  , Option ['c'] ["channels"] (ReqArg Channels "channels")
      "Set number of channels"
  , Option ['w'] ["watermark"] (ReqArg Watermark "watermark")
      "Set high-watermark level"
  , Option ['t'] ["track"] (ReqArg Track "trackNo")
      "Set or select track number"
  , Option ['d'] ["delta"] (NoArg Delta)
      "Delta-encode data"
  , Option ['Z'] ["zlib"] (NoArg ZLib)
      "Zlib-compress data"
  , Option ['b'] ["variable"] (NoArg Variable)
      "Generate variable-rate data"
  , Option ['i'] ["integer"] (NoArg IntData)
      "Generate integer data"
  , Option ['r'] ["rate"] (ReqArg Rate "data-rate")
      "Set track rate"
  , Option ['l'] ["label"] (ReqArg Label "label")
      "Set track label"
  ]
-- | Parse command-line arguments into a 'Config' plus the remaining
-- positional arguments.
--
-- NOTE(review): getOpt parse errors are silently discarded -- the
-- default configuration and the untouched argument list are returned
-- instead of reporting the error -- TODO confirm this is intended.
processArgs :: [String] -> IO (Config, [String])
processArgs args = do
  case getOpt RequireOrder options args of
    (opts, args', [] ) -> do
      config <- processConfig def opts
      return (config, args')
    (_, _, _:_) -> return (def, args)
-- | Fold a list of parsed options into a configuration, left to right.
-- (Removes the redundant @do@ blocks that wrapped a single @return@.)
processConfig :: Config -> [Option] -> IO Config
processConfig = foldM processOneOption
  where
    -- Apply one option to the accumulated configuration. String
    -- payloads are converted with 'read' (lazily, as before).
    processOneOption config NoRaw =
      return $ config {noRaw = True}
    processOneOption config (Channels s) =
      return $ config {channels = read s}
    processOneOption config (Watermark s) =
      return $ config {wmLevel = read s}
    processOneOption config (Track s) =
      return $ config {track = read s}
    processOneOption config Delta =
      return $ config { spec = (spec config){specDeltaEncode = True} }
    processOneOption config ZLib =
      return $ config { spec = (spec config){specZlibCompress = True} }
    processOneOption config Variable =
      return $ config { variable = True
                      , spec = (spec config){specSRType = VariableSR}
                      }
    processOneOption config IntData =
      return $ config { intData = True
                      , spec = setCodec (undefined :: Int) (spec config)
                      }
    processOneOption config (Rate s) =
      return $ config { spec = (spec config){specRate = fromInteger $ read s} }
    processOneOption config (Label s) =
      return $ config { spec = (spec config){specName = C.pack s} }
------------------------------------------------------------
-- | The "gen" command: generate zoom-cache data files.
zoomGen :: Command ()
zoomGen = defCmd {
      cmdName = "gen"
    , cmdHandler = zoomGenHandler
    , cmdCategory = "Writing"
    , cmdShortDesc = "Generate zoom-cache data"
    , cmdExamples = [("Generate a file called foo.zoom", "foo.zoom")]
    }

-- | Parse the command's options and write one file per positional
-- argument.
zoomGenHandler :: App () ()
zoomGenHandler = do
  (config, filenames) <- liftIO . processArgs =<< appArgs
  liftIO $ mapM_ (zoomWriteFile config) filenames
-- | Write generated sample data to @path@ according to the
-- configuration: Int or Double samples, single- or multi-channel,
-- fixed- or variable-rate (variable rate uses sample offsets 1,3,5,..).
zoomWriteFile :: Config -> FilePath -> IO ()
zoomWriteFile Config{..} path
  | intData = w ints
  | otherwise = w doubles
  where
    w :: (ZoomReadable a, ZoomWrite a, ZoomWritable a, ZoomWrite (SampleOffset, a))
      => [a] -> IO ()
    w d
      | variable && channels == 1 =
          writeData (sW >> mapM_ (write track) (zip (map SO [1,3..]) d))
      | channels == 1 =
          writeData (sW >> mapM_ (write track) d)
      | variable =
          writeData (sW >> mapM_ (write track)
                              (zip (map SO [1,3..])
                                   (map (replicate channels) d)))
      | otherwise =
          writeData (sW >> mapM_ (write track) (map (replicate channels) d))
    -- Timestamp the write and hand the buffered sample actions to
    -- zoom-cache; raw data is included unless --no-raw was given.
    writeData ds = do
      now <- getCurrentTime
      withFileWrite trackMap (Just now) (not noRaw) ds path
    sW = setWatermark track wmLevel
    trackMap = IM.singleton track spec'
    -- Pick the codec matching the channel count and sample type.
    spec' | channels == 1 && intData = setCodec (undefined :: Int) spec
          | channels == 1 = setCodec (undefined :: Double) spec
          | intData = setCodecMultichannel channels (undefined :: Int) spec
          | otherwise = setCodecMultichannel channels (undefined :: Double) spec
------------------------------------------------------------
-- | Ten million samples of an amplitude-1000 sine wave, sampled at
-- steps of 0.01.
doubles :: [Double]
doubles = take 10000000 [sin t * 1000.0 | t <- [0.0, 0.01 ..]]
-- | The same waveform as 'doubles', rounded to integers.
ints :: [Int]
ints = map round doubles
------------------------------------------------------------
-- | The "info" command: show basic information about a file.
zoomInfo :: Command ()
zoomInfo = defCmd {
      cmdName = "info"
    , cmdHandler = zoomInfoHandler
    , cmdCategory = "Reading"
    , cmdShortDesc = "Display basic info about a zoom-cache file"
    , cmdExamples = [("Display info about foo.zoom", "foo.zoom")]
    }

-- | Print info for every file argument.
zoomInfoHandler :: App () ()
zoomInfoHandler = mapM_ (liftIO . zoomInfoFile standardIdentifiers) =<< appArgs

------------------------------------------------------------
-- | The "dump" command: print raw data from a file.
zoomDump :: Command ()
zoomDump = defCmd {
      cmdName = "dump"
    , cmdHandler = zoomDumpHandler
    , cmdCategory = "Reading"
    , cmdShortDesc = "Read zoom-cache data"
    , cmdExamples = [("Yo", "")]
    }

-- | Dump the configured track of every file argument.
zoomDumpHandler :: App () ()
zoomDumpHandler = do
  (config, filenames) <- liftIO . processArgs =<< appArgs
  mapM_ (liftIO . zoomDumpFile standardIdentifiers (track config)) filenames

------------------------------------------------------------
-- | The "summary" command: print summary data at a given level.
zoomSummary :: Command ()
zoomSummary = defCmd {
      cmdName = "summary"
    , cmdHandler = zoomSummaryHandler
    , cmdCategory = "Reading"
    , cmdShortDesc = "Read zoom-cache summary data"
    , cmdExamples = [("Read summary level 3 from foo.zoom", "3 foo.zoom")]
    }

-- | First positional argument is the summary level, the rest are files.
zoomSummaryHandler :: App () ()
zoomSummaryHandler = do
  (config, filenames) <- liftIO . processArgs =<< appArgs
  liftIO . (f (track config)) $ filenames
  where
    -- NOTE(review): @read lvl@ is partial; a non-numeric level argument
    -- crashes instead of printing the usage message -- TODO confirm.
    f trackNo (lvl:paths) = mapM_ (zoomDumpSummaryLevel (read lvl)
                                     standardIdentifiers trackNo) paths
    f _ _ = putStrLn "Usage: zoom-cache summary n file.zoom"
------------------------------------------------------------
-- The Application
--
-- | Top-level application descriptor tying the four commands together.
zoom :: Application () ()
zoom = def {
      appName = "zoom"
    , appVersion = "0.1"
    , appAuthors = ["Conrad Parker"]
    , appBugEmail = "conrad@metadecks.org"
    , appShortDesc = "Trivial zoom-cache inspection tools"
    , appLongDesc = longDesc
    , appCategories = ["Reading", "Writing"]
    , appSeeAlso = [""]
    , appProject = "Zoom"
    , appCmds = [ zoomGen
                , zoomInfo
                , zoomDump
                , zoomSummary
                ]
    }

-- | Long description shown in the application's help output.
longDesc :: String
longDesc = "Manipulate zoom-cache files"

------------------------------------------------------------
-- Main
--
main :: IO ()
main = appMain zoom
|
kfish/zoom-cache
|
tools/zoom-cache.hs
|
Haskell
|
bsd-2-clause
| 8,530
|
{-# LANGUAGE FlexibleContexts #-}
module CubicleMaze (solve) where
import Data.Bits (popCount)
import Data.Maybe (fromJust)
import Data.List (foldl')
import qualified Data.OrdPSQ as PSQ
import qualified Data.Set as Set
import Text.Parsec.Prim (Stream, ParsecT, parse)
import Text.Parsec.Char (digit, endOfLine)
import Text.Parsec.Combinator (many1, eof)
-- | A maze is a predicate: True means the coordinate is an open space.
type Maze = (Int, Int) -> Bool

-- | Parses the puzzle input: a single integer followed by a newline
-- and end of input ('many1 digit' guarantees 'read' succeeds).
designersNumber :: Stream s m Char => ParsecT s u m Int
designersNumber = read <$> (many1 digit <* endOfLine <* eof)

-- | Open-space test for the designer's favourite number @c@: a cell is
-- open iff the population count of the polynomial below is even.
maze :: Int -> Maze
maze c (x, y) = even . popCount $ x*x + 3*x + 2*x*y + y + y*y + c
-- | The four orthogonal neighbours of a cell, restricted to the
-- non-negative quadrant; order is right, left, up, down.
neighbors :: (Int, Int) -> [(Int, Int)]
neighbors (x, y) =
  [ cell | cell@(a, b) <- candidates, a >= 0, b >= 0 ]
  where
    candidates = [(x + 1, y), (x - 1, y), (x, y + 1), (x, y - 1)]
-- | Neighbours of a cell that are open spaces in the maze.
openSpaceNeighbors :: Maze -> (Int, Int) -> [(Int, Int)]
openSpaceNeighbors m xy = filter m $ neighbors xy

-- | Uniform-cost search: repeatedly pop the cheapest cell from the
-- priority queue until @dst@ is reached, returning its cost.
--
-- NOTE(review): 'fromJust' is partial -- if @dst@ is unreachable the
-- queue can empty and this crashes. Also @visited@ is inserted into
-- but never consulted, so cells may be re-queued/re-expanded -- TODO
-- confirm both are acceptable for this puzzle's inputs.
ucs :: Maze -> (Int, Int) -> PSQ.OrdPSQ (Int, Int) Int () -> Set.Set (Int, Int) -> Int
ucs m dst pq visited | minK == dst = minP
                     | otherwise = ucs m dst newQ (Set.insert minK visited)
  where
    (minK, minP, _) = fromJust . PSQ.findMin $ pq
    newQ = foldl' (\q n -> PSQ.insert n (minP + 1) () q) (PSQ.deleteMin pq) $ openSpaceNeighbors m minK

-- | Cheapest path length from @src@ to @dst@ through open spaces.
shortestPathLength :: Maze -> (Int, Int) -> (Int, Int) -> Int
shortestPathLength m src dst = ucs m dst (PSQ.singleton src 0 ()) Set.empty

-- | All cells reachable from @src@ in at most @n@ steps (each
-- iteration adds the open neighbours of the current frontier set).
possibleDestinations :: Maze -> (Int, Int) -> Int -> Set.Set (Int, Int)
possibleDestinations m src n = iterate stepOnce (Set.singleton src) !! n
  where
    stepOnce xs = Set.foldl' (\acc x -> Set.union acc (Set.fromList . openSpaceNeighbors m $ x)) xs xs
-- | Entry point: parse the favourite number from the input, print the
-- shortest-path length from (1,1) to (31,39), then print how many
-- distinct cells are reachable within 50 steps.
solve :: String -> IO ()
solve input = do
  let parsed = parse designersNumber "" input
  case parsed of
    Left err -> print err
    Right favoriteNumber -> do
      let cubicleMaze = maze favoriteNumber
      let pathLength = shortestPathLength cubicleMaze (1, 1) (31, 39)
      print pathLength
      let uniqDests = possibleDestinations cubicleMaze (1, 1) 50
      print . Set.size $ uniqDests
|
cjlarose/advent-2016
|
src/CubicleMaze.hs
|
Haskell
|
bsd-3-clause
| 2,012
|
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings #-}
module Hassistant.Header where
import qualified GHC
import qualified Exception
import qualified DynFlags
import qualified Util
import qualified Outputable
import qualified GHC.Paths
import Control.Applicative
import qualified Data.Text as T
import qualified Data.Attoparsec.Text as A
import Data.Maybe(catMaybes)
import Data.List(sort,nub)
import Data.Either (rights)
import Data.Int(Int32)
import Hassistant.Common
import Hassistant.Parser
-- | Split a module's source into chunks of import-declaration text,
-- one String per import (indented continuation lines stay with their
-- import; comments are stripped by the dropCommentP parser first).
imports :: T.Text -> [String]
imports = go [] . dropWhile (not . importLine).
          rights . map (A.parseOnly dropCommentP) . T.lines
  where
    -- A line starting with "import" followed by end-of-line or a space.
    importLine ('i':'m':'p':'o':'r':'t':o) = null o || head o == ' '
    importLine _ = False
    go a [] = [unlines $ reverse a]
    go a (l:ls) | null l = go a ls
                | ' ' == head l = go (l:a) ls
                | importLine l = (unlines $ reverse a) : go [l] ls
                | otherwise = [unlines $ reverse a]

-- | Parse one import declaration with the GHC API; Nothing on failure.
parseImports :: String -> GHC.Ghc (Maybe (GHC.ImportDecl GHC.RdrName))
parseImports i = (Just <$> GHC.parseImportDecl i) `Exception.gcatch` handler
  where handler (_::Exception.SomeException) = return Nothing

-- | Hash of the buffer's language pragmas plus its normalised (parsed,
-- pretty-printed, sorted) imports -- presumably used to detect when
-- cached completion data must be rebuilt; TODO confirm against callers.
calcHash :: T.Text -> IO Int32
calcHash cont = GHC.runGhc (Just GHC.Paths.libdir) $ do
  dyn <- GHC.getSessionDynFlags
  imps <- sort . map (Outputable.showPpr dyn) . catMaybes <$> mapM parseImports (imports cont)
  let langs = map T.unpack . nub . sort $ languages cont
  return . Util.hashString $ unlines (langs ++ imps)

-- | Completion candidates for every known language extension and its
-- "No"-prefixed negation. (The misspelt name "LANGAUGE" is kept: it is
-- part of this module's public interface.)
listLANGAUGE :: [Candidate]
listLANGAUGE =
  let obj s = (candidate $ T.pack s) { menu = Just "LANGAUGE" }
  in concatMap (\(s,_,_) -> [obj s, obj $ "No" ++ s]) DynFlags.xFlags

-- | All extension names mentioned in LANGUAGE pragmas in the source
-- (as recognised by the languageP parser).
languages :: T.Text -> [T.Text]
languages = concat . rights . map (A.parseOnly languageP) . T.lines
|
philopon/hassistant.vim
|
src/Hassistant/Header.hs
|
Haskell
|
bsd-3-clause
| 1,841
|
-- Copyright (c) 2014-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is distributed under the terms of a BSD license,
-- found in the LICENSE file.
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE StandaloneDeriving #-}
{-# LANGUAGE TypeFamilies #-}
-- |
-- A generic Haxl datasource for performing arbitrary IO concurrently.
-- Every IO operation will be performed in a separate thread.
-- You can use this with any kind of IO, but each different operation
-- requires an instance of the 'ConcurrentIO' class.
--
-- For example, to make a concurrent sleep operation:
--
-- > sleep :: Int -> GenHaxl u Int
-- > sleep n = dataFetch (Sleep n)
-- >
-- > data Sleep
-- > instance ConcurrentIO Sleep where
-- > data ConcurrentIOReq Sleep a where
-- > Sleep :: Int -> ConcurrentIOReq Sleep Int
-- >
-- > performIO (Sleep n) = threadDelay (n*1000) >> return n
-- >
-- > deriving instance Eq (ConcurrentIOReq Sleep a)
-- > deriving instance Show (ConcurrentIOReq Sleep a)
-- >
-- > instance ShowP (ConcurrentIOReq Sleep) where showp = show
-- >
-- > instance Hashable (ConcurrentIOReq Sleep a) where
-- > hashWithSalt s (Sleep n) = hashWithSalt s n
--
-- Note that you can have any number of constructors in your
-- ConcurrentIOReq GADT, so most of the boilerplate only needs to be
-- written once.
module Haxl.DataSource.ConcurrentIO
( mkConcurrentIOState
, ConcurrentIO(..)
) where
import Control.Concurrent
import Control.Exception as Exception
import Control.Monad
import qualified Data.Text as Text
import Data.Typeable
import Haxl.Core
-- | Implement this class to let a request type be executed as
-- arbitrary concurrent IO (see the module header for a full example).
class ConcurrentIO tag where
  data ConcurrentIOReq tag a
  performIO :: ConcurrentIOReq tag a -> IO a

deriving instance Typeable ConcurrentIOReq -- not needed by GHC 7.10 and later

-- All tags share the same trivial, stateless 'State'.
instance (Typeable tag) => StateKey (ConcurrentIOReq tag) where
  data State (ConcurrentIOReq tag) = ConcurrentIOState
  getStateType _ = typeRep (Proxy :: Proxy ConcurrentIOReq)

-- | Produce the (trivial) state needed to register this data source.
mkConcurrentIOState :: IO (State (ConcurrentIOReq ()))
mkConcurrentIOState = return ConcurrentIOState

-- The data-source name is derived from the tag's type constructor.
instance Typeable tag => DataSourceName (ConcurrentIOReq tag) where
  dataSourceName _ =
    Text.pack (show (typeRepTyCon (typeRep (Proxy :: Proxy tag))))

-- Each blocked fetch runs 'performIO' in its own thread; 'forkFinally'
-- delivers the result (or exception) to the result var, and the
-- surrounding 'mask' keeps the fork itself from being interrupted.
instance
  (Typeable tag, ShowP (ConcurrentIOReq tag), ConcurrentIO tag)
  => DataSource u (ConcurrentIOReq tag)
  where
  fetch _state _flags _u = BackgroundFetch $ \bfs -> do
    forM_ bfs $ \(BlockedFetch req rv) ->
      mask $ \unmask ->
        forkFinally (unmask (performIO req)) (putResultFromChildThread rv)
|
simonmar/Haxl
|
Haxl/DataSource/ConcurrentIO.hs
|
Haskell
|
bsd-3-clause
| 2,689
|
-- | contains a prettyprinter for the
-- Template Haskell datatypes
module Language.Haskell.TH.Ppr where
-- All of the exports from this module should
-- be "public" functions. The main module TH
-- re-exports them all.
import Text.PrettyPrint (render)
import Language.Haskell.TH.PprLib
import Language.Haskell.TH.Syntax
import Data.Word ( Word8 )
import Data.Char ( toLower, chr)
import GHC.Show ( showMultiLineString )
import GHC.Lexeme( startsVarSym )
import Data.Ratio ( numerator, denominator )
import Prelude hiding ((<>))
nestDepth :: Int
nestDepth = 4
type Precedence = Int
appPrec, unopPrec, opPrec, noPrec :: Precedence
appPrec = 3 -- Argument of a function application
opPrec = 2 -- Argument of an infix operator
unopPrec = 1 -- Argument of an unresolved infix operator
noPrec = 0 -- Others
parensIf :: Bool -> Doc -> Doc
parensIf True d = parens d
parensIf False d = d
------------------------------
-- | Render any 'Ppr' value to a 'String'.
pprint :: Ppr a => a -> String
pprint x = render $ to_HPJ_Doc $ ppr x

-- | Things that can be pretty-printed as template-haskell 'Doc's.
class Ppr a where
    ppr :: a -> Doc
    -- | How to print a list of values; defaults to one per line.
    ppr_list :: [a] -> Doc
    ppr_list = vcat . map ppr

instance Ppr a => Ppr [a] where
    ppr x = ppr_list x
------------------------------
instance Ppr Name where
    ppr v = pprName v

------------------------------
-- | Render compiler 'Info' results in a human-readable form.
instance Ppr Info where
    ppr (TyConI d) = ppr d
    ppr (ClassI d is) = ppr d $$ vcat (map ppr is)
    ppr (FamilyI d is) = ppr d $$ vcat (map ppr is)
    ppr (PrimTyConI name arity is_unlifted)
      = text "Primitive"
        <+> (if is_unlifted then text "unlifted" else empty)
        <+> text "type constructor" <+> quotes (ppr name)
        <+> parens (text "arity" <+> int arity)
    ppr (ClassOpI v ty cls)
      = text "Class op from" <+> ppr cls <> colon <+> ppr_sig v ty
    ppr (DataConI v ty tc)
      = text "Constructor from" <+> ppr tc <> colon <+> ppr_sig v ty
    ppr (PatSynI nm ty) = pprPatSynSig nm ty
    ppr (TyVarI v ty)
      = text "Type variable" <+> ppr v <+> equals <+> ppr ty
    ppr (VarI v ty mb_d)
      = vcat [ppr_sig v ty,
              case mb_d of { Nothing -> empty; Just d -> ppr d }]

-- | Print @name :: type@, with the name in prefix form (operators get
-- parenthesised).
ppr_sig :: Name -> Type -> Doc
ppr_sig v ty = pprName' Applied v <+> dcolon <+> ppr ty
-- | Print a fixity declaration.  The default fixity prints as nothing;
-- note the first (guarded) equation must stay first for that to work.
pprFixity :: Name -> Fixity -> Doc
pprFixity _ f | f == defaultFixity = empty
pprFixity v (Fixity i d) = ppr_fix d <+> int i <+> ppr v
    where ppr_fix InfixR = text "infixr"
          ppr_fix InfixL = text "infixl"
          ppr_fix InfixN = text "infix"
-- | Pretty prints a pattern synonym type signature
pprPatSynSig :: Name -> PatSynType -> Doc
pprPatSynSig nm ty
= text "pattern" <+> pprPrefixOcc nm <+> dcolon <+> pprPatSynType ty
-- | Pretty prints a pattern synonym's type; follows the usual
-- conventions to print a pattern synonym type compactly, yet
-- unambiguously. See the note on 'PatSynType' and the section on
-- pattern synonyms in the GHC user's guide for more information.
pprPatSynType :: PatSynType -> Doc
pprPatSynType ty@(ForallT uniTys reqs ty'@(ForallT exTys provs ty''))
| null exTys, null provs = ppr (ForallT uniTys reqs ty'')
| null uniTys, null reqs = noreqs <+> ppr ty'
| null reqs = forall uniTys <+> noreqs <+> ppr ty'
| otherwise = ppr ty
where noreqs = text "() =>"
forall tvs = text "forall" <+> (hsep (map ppr tvs)) <+> text "."
pprPatSynType ty = ppr ty
------------------------------
-- | A module prints as its package string followed by its module string.
instance Ppr Module where
  ppr (Module pkg m) = text (pkgString pkg) <+> text (modString m)

instance Ppr ModuleInfo where
  ppr (ModuleInfo imps) = text "Module" <+> vcat (map ppr imps)

------------------------------
instance Ppr Exp where
    ppr = pprExp noPrec
pprPrefixOcc :: Name -> Doc
-- Print operators with parens around them
pprPrefixOcc n = parensIf (isSymOcc n) (ppr n)

-- | Does this name start with a symbol character (i.e. is it an
-- operator rather than an alphanumeric identifier)?
isSymOcc :: Name -> Bool
isSymOcc n
  = case nameBase n of
      [] -> True  -- Empty name; weird
      (c:_) -> startsVarSym c
                   -- c.f. OccName.startsVarSym in GHC itself

-- | Print an expression in infix position.  Only variables and
-- constructors make sense there; anything else yields a marker string.
pprInfixExp :: Exp -> Doc
pprInfixExp (VarE v) = pprName' Infix v
pprInfixExp (ConE v) = pprName' Infix v
pprInfixExp _        = text "<<Non-variable/constructor in infix context>>"
-- | Print an expression at the given ambient precedence, inserting
-- parentheses only where the context requires them.
pprExp :: Precedence -> Exp -> Doc
pprExp _ (VarE v)     = pprName' Applied v
pprExp _ (ConE c)     = pprName' Applied c
pprExp i (LitE l)     = pprLit i l
pprExp i (AppE e1 e2) = parensIf (i >= appPrec) $ pprExp opPrec e1
                                              <+> pprExp appPrec e2
pprExp i (AppTypeE e t)
 = parensIf (i >= appPrec) $ pprExp opPrec e <+> char '@' <> pprParendType t
pprExp _ (ParensE e)  = parens (pprExp noPrec e)
pprExp i (UInfixE e1 op e2)
 = parensIf (i > unopPrec) $ pprExp unopPrec e1
                         <+> pprInfixExp op
                         <+> pprExp unopPrec e2
pprExp i (InfixE (Just e1) op (Just e2))
 = parensIf (i >= opPrec) $ pprExp opPrec e1
                        <+> pprInfixExp op
                        <+> pprExp opPrec e2
-- Sections (an operator missing one or both operands) always get parens
pprExp _ (InfixE me1 op me2) = parens $ pprMaybeExp noPrec me1
                                    <+> pprInfixExp op
                                    <+> pprMaybeExp noPrec me2
pprExp i (LamE [] e) = pprExp i e -- #13856
pprExp i (LamE ps e) = parensIf (i > noPrec) $ char '\\' <> hsep (map (pprPat appPrec) ps)
                                           <+> text "->" <+> ppr e
pprExp i (LamCaseE ms) = parensIf (i > noPrec)
                       $ text "\\case" $$ nest nestDepth (ppr ms)
pprExp _ (TupE es) = parens (commaSep es)
pprExp _ (UnboxedTupE es) = hashParens (commaSep es)
pprExp _ (UnboxedSumE e alt arity) = unboxedSumBars (ppr e) alt arity
-- Nesting in Cond is to avoid potential problems in do statements
pprExp i (CondE guard true false)
 = parensIf (i > noPrec) $ sep [text "if" <+> ppr guard,
                                nest 1 $ text "then" <+> ppr true,
                                nest 1 $ text "else" <+> ppr false]
pprExp i (MultiIfE alts)
  = parensIf (i > noPrec) $ vcat $
      case alts of
        []            -> [text "if {}"]
        (alt : alts') -> text "if" <+> pprGuarded arrow alt
                         : map (nest 3 . pprGuarded arrow) alts'
pprExp i (LetE ds_ e) = parensIf (i > noPrec) $ text "let" <+> pprDecs ds_
                                             $$ text " in" <+> ppr e
  where
    pprDecs []  = empty
    pprDecs [d] = ppr d
    pprDecs ds  = braces (semiSep ds)
pprExp i (CaseE e ms)
 = parensIf (i > noPrec) $ text "case" <+> ppr e <+> text "of"
                        $$ nest nestDepth (ppr ms)
pprExp i (DoE ss_) = parensIf (i > noPrec) $ text "do" <+> pprStms ss_
  where
    pprStms []  = empty
    pprStms [s] = ppr s
    pprStms ss  = braces (semiSep ss)
pprExp _ (CompE []) = text "<<Empty CompExp>>"
-- This will probably break with fixity declarations - would need a ';'
pprExp _ (CompE ss) =
    if null ss'
       -- If there are no statements in a list comprehension besides the last
       -- one, we simply treat it like a normal list.
       then text "[" <> ppr s <> text "]"
       else text "[" <> ppr s
        <+> bar
        <+> commaSep ss'
         <> text "]"
  where s = last ss
        ss' = init ss
pprExp _ (ArithSeqE d) = ppr d
pprExp _ (ListE es) = brackets (commaSep es)
pprExp i (SigE e t) = parensIf (i > noPrec) $ ppr e <+> dcolon <+> ppr t
pprExp _ (RecConE nm fs) = ppr nm <> braces (pprFields fs)
pprExp _ (RecUpdE e fs) = pprExp appPrec e <> braces (pprFields fs)
pprExp i (StaticE e) = parensIf (i >= appPrec) $
                         text "static"<+> pprExp appPrec e
pprExp _ (UnboundVarE v) = pprName' Applied v
pprExp _ (LabelE s) = text "#" <> text s
-- | Print record fields as comma-separated @field = expr@ pairs.
pprFields :: [(Name,Exp)] -> Doc
pprFields fields =
    sep (punctuate comma [ ppr fname <+> equals <+> ppr fval
                         | (fname, fval) <- fields ])
-- | Print an optional expression at the given precedence; 'Nothing'
-- renders as the empty document.
pprMaybeExp :: Precedence -> Maybe Exp -> Doc
pprMaybeExp prec = maybe empty (pprExp prec)
------------------------------
instance Ppr Stmt where
  ppr (BindS p e) = ppr p <+> text "<-" <+> ppr e
  ppr (LetS ds) = text "let" <+> (braces (semiSep ds))
  ppr (NoBindS e) = ppr e
  -- Parallel comprehension branches are separated by vertical bars
  ppr (ParS sss) = sep $ punctuate bar
                       $ map commaSep sss

------------------------------
instance Ppr Match where
    -- False: the body separator is "->", not "="
    ppr (Match p rhs ds) = ppr p <+> pprBody False rhs
                        $$ where_clause ds
------------------------------
-- | Print one guarded alternative; @eqDoc@ is the separator document
-- (@=@ for declarations, @->@ for case alternatives).
pprGuarded :: Doc -> (Guard, Exp) -> Doc
pprGuarded eqDoc (guard, expr) = case guard of
  NormalG guardExpr -> bar <+> ppr guardExpr <+> eqDoc <+> ppr expr
  PatG    stmts     -> bar <+> vcat (punctuate comma $ map ppr stmts) $$
                         nest nestDepth (eqDoc <+> ppr expr)

------------------------------
-- | Print a right-hand side.  The 'Bool' selects the separator:
-- 'True' gives @=@ (declarations), 'False' gives @->@ (alternatives).
pprBody :: Bool -> Body -> Doc
pprBody eq body = case body of
    GuardedB xs -> nest nestDepth $ vcat $ map (pprGuarded eqDoc) xs
    NormalB  e  -> eqDoc <+> ppr e
  where eqDoc | eq        = equals
              | otherwise = arrow
------------------------------
instance Ppr Lit where
  ppr = pprLit noPrec

-- | Print a literal.  Negative numeric literals are parenthesised
-- whenever the context precedence is above 'noPrec'.
pprLit :: Precedence -> Lit -> Doc
pprLit i (IntPrimL x)    = parensIf (i > noPrec && x < 0)
                                    (integer x <> char '#')
pprLit _ (WordPrimL x)   = integer x <> text "##"
pprLit i (FloatPrimL x)  = parensIf (i > noPrec && x < 0)
                                    (float (fromRational x) <> char '#')
pprLit i (DoublePrimL x) = parensIf (i > noPrec && x < 0)
                                    (double (fromRational x) <> text "##")
pprLit i (IntegerL x)    = parensIf (i > noPrec && x < 0) (integer x)
pprLit _ (CharL c)       = text (show c)
pprLit _ (CharPrimL c)   = text (show c) <> char '#'
pprLit _ (StringL s)     = pprString s
pprLit _ (StringPrimL s) = pprString (bytesToString s) <> char '#'
pprLit i (RationalL rat) = parensIf (i > noPrec) $
                           integer (numerator rat) <+> char '/'
                           <+> integer (denominator rat)
-- | Decode a list of bytes as Latin-1 characters (byte value = code point).
bytesToString :: [Word8] -> String
bytesToString bytes = [ chr (fromIntegral b) | b <- bytes ]
-- | Print a Haskell string literal, one 'text' line per segment.
pprString :: String -> Doc
-- Print newlines as newlines with Haskell string escape notation,
-- not as '\n'. For other non-printables use regular escape notation.
pprString s = vcat (map text (showMultiLineString s))
------------------------------
instance Ppr Pat where
  ppr = pprPat noPrec

-- | Print a pattern at the given ambient precedence, inserting
-- parentheses only where the context requires them.
pprPat :: Precedence -> Pat -> Doc
pprPat i (LitP l)     = pprLit i l
pprPat _ (VarP v)     = pprName' Applied v
pprPat _ (TupP ps)    = parens (commaSep ps)
pprPat _ (UnboxedTupP ps) = hashParens (commaSep ps)
pprPat _ (UnboxedSumP p alt arity) = unboxedSumBars (ppr p) alt arity
pprPat i (ConP s ps)  = parensIf (i >= appPrec) $ pprName' Applied s
                                              <+> sep (map (pprPat appPrec) ps)
pprPat _ (ParensP p)  = parens $ pprPat noPrec p
pprPat i (UInfixP p1 n p2)
 = parensIf (i > unopPrec) (pprPat unopPrec p1 <+>
                            pprName' Infix n   <+>
                            pprPat unopPrec p2)
pprPat i (InfixP p1 n p2)
 = parensIf (i >= opPrec) (pprPat opPrec p1 <+>
                           pprName' Infix n <+>
                           pprPat opPrec p2)
pprPat i (TildeP p)   = parensIf (i > noPrec) $ char '~' <> pprPat appPrec p
pprPat i (BangP p)    = parensIf (i > noPrec) $ char '!' <> pprPat appPrec p
pprPat i (AsP v p)    = parensIf (i > noPrec) $ ppr v <> text "@"
                                                      <> pprPat appPrec p
pprPat _ WildP        = text "_"
pprPat _ (RecP nm fs)
 = parens $ ppr nm
        <+> braces (sep $ punctuate comma $
                    map (\(s,p) -> ppr s <+> equals <+> ppr p) fs)
pprPat _ (ListP ps)   = brackets (commaSep ps)
pprPat i (SigP p t)   = parensIf (i > noPrec) $ ppr p <+> dcolon <+> ppr t
pprPat _ (ViewP e p)  = parens $ pprExp noPrec e <+> text "->" <+> pprPat noPrec p
------------------------------
instance Ppr Dec where
    ppr = ppr_dec True

-- | Print a declaration.  The 'Bool' records whether the declaration is
-- at top level: family and instance declarations print the extra
-- @family@/@instance@ keyword only when at top level.
ppr_dec :: Bool       -- declaration on the toplevel?
        -> Dec
        -> Doc
ppr_dec _ (FunD f cs)   = vcat $ map (\c -> pprPrefixOcc f <+> ppr c) cs
ppr_dec _ (ValD p r ds) = ppr p <+> pprBody True r
                          $$ where_clause ds
ppr_dec _ (TySynD t xs rhs)
  = ppr_tySyn empty t (hsep (map ppr xs)) rhs
ppr_dec _ (DataD ctxt t xs ksig cs decs)
  = ppr_data empty ctxt t (hsep (map ppr xs)) ksig cs decs
ppr_dec _ (NewtypeD ctxt t xs ksig c decs)
  = ppr_newtype empty ctxt t (sep (map ppr xs)) ksig c decs
ppr_dec _ (ClassD ctxt c xs fds ds)
  = text "class" <+> pprCxt ctxt <+> ppr c <+> hsep (map ppr xs) <+> ppr fds
    $$ where_clause ds
ppr_dec _ (InstanceD o ctxt i ds) =
  text "instance" <+> maybe empty ppr_overlap o <+> pprCxt ctxt <+> ppr i
  $$ where_clause ds
ppr_dec _ (SigD f t)    = pprPrefixOcc f <+> dcolon <+> ppr t
ppr_dec _ (ForeignD f)  = ppr f
ppr_dec _ (InfixD fx n) = pprFixity n fx
ppr_dec _ (PragmaD p)   = ppr p
ppr_dec isTop (DataFamilyD tc tvs kind)
  = text "data" <+> maybeFamily <+> ppr tc <+> hsep (map ppr tvs) <+> maybeKind
  where
    maybeFamily | isTop     = text "family"
                | otherwise = empty
    maybeKind | (Just k') <- kind = dcolon <+> ppr k'
              | otherwise         = empty
ppr_dec isTop (DataInstD ctxt tc tys ksig cs decs)
  = ppr_data maybeInst ctxt tc (sep (map pprParendType tys)) ksig cs decs
  where
    maybeInst | isTop     = text "instance"
              | otherwise = empty
ppr_dec isTop (NewtypeInstD ctxt tc tys ksig c decs)
  = ppr_newtype maybeInst ctxt tc (sep (map pprParendType tys)) ksig c decs
  where
    maybeInst | isTop     = text "instance"
              | otherwise = empty
ppr_dec isTop (TySynInstD tc (TySynEqn tys rhs))
  = ppr_tySyn maybeInst tc (sep (map pprParendType tys)) rhs
  where
    maybeInst | isTop     = text "instance"
              | otherwise = empty
ppr_dec isTop (OpenTypeFamilyD tfhead)
  = text "type" <+> maybeFamily <+> ppr_tf_head tfhead
  where
    maybeFamily | isTop     = text "family"
                | otherwise = empty
ppr_dec _ (ClosedTypeFamilyD tfhead@(TypeFamilyHead tc _ _ _) eqns)
  = hang (text "type family" <+> ppr_tf_head tfhead <+> text "where")
      nestDepth (vcat (map ppr_eqn eqns))
  where
    ppr_eqn (TySynEqn lhs rhs)
      = ppr tc <+> sep (map pprParendType lhs) <+> text "=" <+> ppr rhs
ppr_dec _ (RoleAnnotD name roles)
  = hsep [ text "type role", ppr name ] <+> hsep (map ppr roles)
ppr_dec _ (StandaloneDerivD ds cxt ty)
  = hsep [ text "deriving"
         , maybe empty ppr_deriv_strategy ds
         , text "instance"
         , pprCxt cxt
         , ppr ty ]
ppr_dec _ (DefaultSigD n ty)
  = hsep [ text "default", pprPrefixOcc n, dcolon, ppr ty ]
ppr_dec _ (PatSynD name args dir pat)
  = text "pattern" <+> pprNameArgs <+> ppr dir <+> pprPatRHS
  where
    -- Infix pattern synonyms print the name between the two arguments
    pprNameArgs | InfixPatSyn a1 a2 <- args = ppr a1 <+> ppr name <+> ppr a2
                | otherwise                 = ppr name <+> ppr args
    -- Explicitly-bidirectional synonyms also print their builder clauses
    pprPatRHS   | ExplBidir cls <- dir = hang (ppr pat <+> text "where")
                                           nestDepth (ppr name <+> ppr cls)
                | otherwise            = ppr pat
ppr_dec _ (PatSynSigD name ty)
  = pprPatSynSig name ty
-- | Render a @deriving@ strategy keyword.
ppr_deriv_strategy :: DerivStrategy -> Doc
ppr_deriv_strategy StockStrategy    = text "stock"
ppr_deriv_strategy AnyclassStrategy = text "anyclass"
ppr_deriv_strategy NewtypeStrategy  = text "newtype"
-- | Render an instance-overlap pragma.
ppr_overlap :: Overlap -> Doc
ppr_overlap Overlaps     = text "{-# OVERLAPS #-}"
ppr_overlap Overlappable = text "{-# OVERLAPPABLE #-}"
ppr_overlap Overlapping  = text "{-# OVERLAPPING #-}"
ppr_overlap Incoherent   = text "{-# INCOHERENT #-}"
-- | Print a @data@ declaration (or data instance, when @maybeInst@ is
-- @text "instance"@).  Handles both Haskell-98 style (@= C1 | C2@) and
-- GADT style (@where@ block) constructor lists.
ppr_data :: Doc -> Cxt -> Name -> Doc -> Maybe Kind -> [Con] -> [DerivClause]
         -> Doc
ppr_data maybeInst ctxt t argsDoc ksig cs decs
  = sep [text "data" <+> maybeInst
            <+> pprCxt ctxt
            <+> pprName' Applied t <+> argsDoc <+> ksigDoc <+> maybeWhere,
         nest nestDepth (sep (pref $ map ppr cs)),
         if null decs
           then empty
           else nest nestDepth
              $ vcat $ map ppr_deriv_clause decs]
  where
    -- GADT constructors already carry their full signatures, so they
    -- need neither '=' nor '|' separators.
    pref :: [Doc] -> [Doc]
    pref xs | isGadtDecl = xs
    pref []              = []      -- No constructors; can't happen in H98
    pref (d:ds)          = (char '=' <+> d):map (bar <+>) ds

    maybeWhere :: Doc
    maybeWhere | isGadtDecl = text "where"
               | otherwise  = empty

    -- GADT syntax is used only when every constructor is GADT-style.
    isGadtDecl :: Bool
    isGadtDecl = not (null cs) && all isGadtCon cs
        where isGadtCon (GadtC _ _ _ ) = True
              isGadtCon (RecGadtC _ _ _) = True
              isGadtCon (ForallC _ _ x ) = isGadtCon x
              isGadtCon _ = False

    ksigDoc = case ksig of
                Nothing -> empty
                Just k  -> dcolon <+> ppr k
-- | Print a @newtype@ declaration (or newtype instance, when
-- @maybeInst@ is @text "instance"@); a newtype always has exactly one
-- constructor.
ppr_newtype :: Doc -> Cxt -> Name -> Doc -> Maybe Kind -> Con -> [DerivClause]
            -> Doc
ppr_newtype maybeInst ctxt t argsDoc ksig c decs
  = sep [text "newtype" <+> maybeInst
            <+> pprCxt ctxt
            <+> ppr t <+> argsDoc <+> ksigDoc,
         nest 2 (char '=' <+> ppr c),
         if null decs
           then empty
           else nest nestDepth
              $ vcat $ map ppr_deriv_clause decs]
  where
    ksigDoc = case ksig of
                Nothing -> empty
                Just k  -> dcolon <+> ppr k
-- | Print one @deriving@ clause, including its optional strategy.
ppr_deriv_clause :: DerivClause -> Doc
ppr_deriv_clause (DerivClause ds ctxt)
  = text "deriving" <+> maybe empty ppr_deriv_strategy ds
                    <+> ppr_cxt_preds ctxt

-- | Print a type synonym (or type-instance, via @maybeInst@) declaration.
ppr_tySyn :: Doc -> Name -> Doc -> Type -> Doc
ppr_tySyn maybeInst t argsDoc rhs
  = text "type" <+> maybeInst <+> ppr t <+> argsDoc <+> text "=" <+> ppr rhs
-- | Print the head of a type-family declaration: family name, binders,
-- result signature and the optional injectivity annotation.
ppr_tf_head :: TypeFamilyHead -> Doc
ppr_tf_head (TypeFamilyHead tc tvs res inj)
  = ppr tc <+> hsep (map ppr tvs) <+> ppr res <+> maybe empty ppr inj
------------------------------
instance Ppr FunDep where
    ppr (FunDep xs ys) = hsep (map ppr xs) <+> text "->" <+> hsep (map ppr ys)
    -- A non-empty list of fundeps prints after a leading vertical bar
    ppr_list [] = empty
    ppr_list xs = bar <+> commaSep xs

------------------------------
instance Ppr FamFlavour where
    ppr DataFam = text "data"
    ppr TypeFam = text "type"

------------------------------
instance Ppr FamilyResultSig where
    ppr NoSig           = empty
    ppr (KindSig k)     = dcolon <+> ppr k
    ppr (TyVarSig bndr) = text "=" <+> ppr bndr

------------------------------
instance Ppr InjectivityAnn where
    ppr (InjectivityAnn lhs rhs) =
        bar <+> ppr lhs <+> text "->" <+> hsep (map ppr rhs)
------------------------------
-- | Print @foreign import@/@foreign export@ declarations; the calling
-- convention and safety keywords are 'show'n and lower-cased.
instance Ppr Foreign where
    ppr (ImportF callconv safety impent as typ)
       = text "foreign import"
     <+> showtextl callconv
     <+> showtextl safety
     <+> text (show impent)
     <+> ppr as
     <+> dcolon <+> ppr typ
    ppr (ExportF callconv expent as typ)
       = text "foreign export"
     <+> showtextl callconv
     <+> text (show expent)
     <+> ppr as
     <+> dcolon <+> ppr typ
------------------------------
-- | Print compiler pragmas in their @{-# ... #-}@ surface syntax.
instance Ppr Pragma where
    ppr (InlineP n inline rm phases)
       = text "{-#"
     <+> ppr inline
     <+> ppr rm
     <+> ppr phases
     <+> ppr n
     <+> text "#-}"
    ppr (SpecialiseP n ty inline phases)
       = text "{-# SPECIALISE"
     <+> maybe empty ppr inline
     <+> ppr phases
     <+> sep [ ppr n <+> dcolon
             , nest 2 $ ppr ty ]
     <+> text "#-}"
    ppr (SpecialiseInstP inst)
       = text "{-# SPECIALISE instance" <+> ppr inst <+> text "#-}"
    ppr (RuleP n bndrs lhs rhs phases)
       = sep [ text "{-# RULES" <+> pprString n <+> ppr phases
             , nest 4 $ ppr_forall <+> ppr lhs
             , nest 4 $ char '=' <+> ppr rhs <+> text "#-}" ]
      where ppr_forall | null bndrs = empty
                       | otherwise  = text "forall"
                                  <+> fsep (map ppr bndrs)
                                  <+> char '.'
    ppr (AnnP tgt expr)
       = text "{-# ANN" <+> target1 tgt <+> ppr expr <+> text "#-}"
      where target1 ModuleAnnotation    = text "module"
            target1 (TypeAnnotation t)  = text "type" <+> ppr t
            target1 (ValueAnnotation v) = ppr v
    ppr (LineP line file)
       = text "{-# LINE" <+> int line <+> text (show file) <+> text "#-}"
    ppr (CompleteP cls mty)
       = text "{-# COMPLETE" <+> (fsep $ punctuate comma $ map ppr cls)
                <+> maybe empty (\ty -> dcolon <+> ppr ty) mty
------------------------------
instance Ppr Inline where
    ppr NoInline  = text "NOINLINE"
    ppr Inline    = text "INLINE"
    ppr Inlinable = text "INLINABLE"

------------------------------
instance Ppr RuleMatch where
    -- FunLike is the default and so prints nothing
    ppr ConLike = text "CONLIKE"
    ppr FunLike = empty

------------------------------
instance Ppr Phases where
    -- AllPhases is the default and so prints nothing
    ppr AllPhases       = empty
    ppr (FromPhase i)   = brackets $ int i
    ppr (BeforePhase i) = brackets $ char '~' <> int i

------------------------------
instance Ppr RuleBndr where
    ppr (RuleVar n)         = ppr n
    ppr (TypedRuleVar n ty) = parens $ ppr n <+> dcolon <+> ppr ty
------------------------------
instance Ppr Clause where
    -- True: the body separator is "=", as in a function definition
    ppr (Clause ps rhs ds) = hsep (map (pprPat appPrec) ps) <+> pprBody True rhs
                          $$ where_clause ds
------------------------------
-- | Print data constructors, in either H98 or GADT syntax depending on
-- the constructor form.
instance Ppr Con where
    ppr (NormalC c sts) = ppr c <+> sep (map pprBangType sts)
    ppr (RecC c vsts)
        = ppr c <+> braces (sep (punctuate comma $ map pprVarBangType vsts))
    ppr (InfixC st1 c st2) = pprBangType st1
                         <+> pprName' Infix c
                         <+> pprBangType st2
    -- GADT constructors under a forall: "C1, C2 :: forall ... => rhs"
    ppr (ForallC ns ctxt (GadtC c sts ty))
        = commaSepApplied c <+> dcolon <+> pprForall ns ctxt
      <+> pprGadtRHS sts ty
    ppr (ForallC ns ctxt (RecGadtC c vsts ty))
        = commaSepApplied c <+> dcolon <+> pprForall ns ctxt
      <+> pprRecFields vsts ty
    ppr (ForallC ns ctxt con)
        = pprForall ns ctxt <+> ppr con
    ppr (GadtC c sts ty)
        = commaSepApplied c <+> dcolon <+> pprGadtRHS sts ty
    ppr (RecGadtC c vsts ty)
        = commaSepApplied c <+> dcolon <+> pprRecFields vsts ty
instance Ppr PatSynDir where
  ppr Unidir        = text "<-"
  ppr ImplBidir     = text "="
  ppr (ExplBidir _) = text "<-"
    -- the ExplBidir's clauses are pretty printed together with the
    -- entire pattern synonym; so only print the direction here.

instance Ppr PatSynArgs where
  ppr (PrefixPatSyn args) = sep $ map ppr args
  ppr (InfixPatSyn a1 a2) = ppr a1 <+> ppr a2
  ppr (RecordPatSyn sels) = braces $ sep (punctuate comma (map ppr sels))
-- | Print a comma-separated list of names, each in prefix form.
commaSepApplied :: [Name] -> Doc
commaSepApplied = commaSepWith (pprName' Applied)

-- | Print @forall tvs .@ followed by the context (the @forall@ part is
-- omitted when there are no binders).
pprForall :: [TyVarBndr] -> Cxt -> Doc
pprForall tvs cxt
  -- even in the case without any tvs, there could be a non-empty
  -- context cxt (e.g., in the case of pattern synonyms, where there
  -- are multiple forall binders and contexts).
  | [] <- tvs = pprCxt cxt
  | otherwise = text "forall" <+> hsep (map ppr tvs) <+> char '.' <+> pprCxt cxt

-- | Print a record-GADT right-hand side: @{ fields } -> result@.
pprRecFields :: [(Name, Strict, Type)] -> Type -> Doc
pprRecFields vsts ty
    = braces (sep (punctuate comma $ map pprVarBangType vsts))
  <+> arrow <+> ppr ty

-- | Print a GADT right-hand side: @arg1 -> arg2 -> result@.
pprGadtRHS :: [(Strict, Type)] -> Type -> Doc
pprGadtRHS [] ty
    = ppr ty
pprGadtRHS sts ty
    = sep (punctuate (space <> arrow) (map pprBangType sts))
  <+> arrow <+> ppr ty
------------------------------
pprVarBangType :: VarBangType -> Doc
-- Slight infelicity: we print a non-atomic type with parens
pprVarBangType (v, bang, t) = ppr v <+> dcolon <+> pprBangType (bang, t)

------------------------------
pprBangType :: BangType -> Doc
-- Make sure we print
--
-- Con {-# UNPACK #-} a
--
-- rather than
--
-- Con {-# UNPACK #-}a
--
-- when there's no strictness annotation. If there is a strictness annotation,
-- it's okay to not put a space between it and the type.
pprBangType (bt@(Bang _ NoSourceStrictness), t) = ppr bt <+> pprParendType t
pprBangType (bt, t) = ppr bt <> pprParendType t
------------------------------
instance Ppr Bang where
    ppr (Bang su ss) = ppr su <+> ppr ss

------------------------------
instance Ppr SourceUnpackedness where
    -- NoSourceUnpackedness is the default and so prints nothing
    ppr NoSourceUnpackedness = empty
    ppr SourceNoUnpack       = text "{-# NOUNPACK #-}"
    ppr SourceUnpack         = text "{-# UNPACK #-}"

------------------------------
instance Ppr SourceStrictness where
    ppr NoSourceStrictness = empty
    ppr SourceLazy         = char '~'
    ppr SourceStrict       = char '!'

------------------------------
instance Ppr DecidedStrictness where
    ppr DecidedLazy   = empty
    ppr DecidedStrict = char '!'
    ppr DecidedUnpack = text "{-# UNPACK #-} !"

------------------------------
{-# DEPRECATED pprVarStrictType
               "As of @template-haskell-2.11.0.0@, 'VarStrictType' has been replaced by 'VarBangType'. Please use 'pprVarBangType' instead." #-}
-- | Deprecated synonym for 'pprVarBangType'.
pprVarStrictType :: (Name, Strict, Type) -> Doc
pprVarStrictType = pprVarBangType

------------------------------
{-# DEPRECATED pprStrictType
               "As of @template-haskell-2.11.0.0@, 'StrictType' has been replaced by 'BangType'. Please use 'pprBangType' instead." #-}
-- | Deprecated synonym for 'pprBangType'.
pprStrictType :: (Strict, Type) -> Doc
pprStrictType = pprBangType
------------------------------
-- | Print a type, parenthesising anything that is not syntactically atomic.
pprParendType :: Type -> Doc
pprParendType (VarT v)            = pprName' Applied v
-- `Applied` is used here instead of `ppr` because of infix names (#13887)
pprParendType (ConT c)            = pprName' Applied c
pprParendType (TupleT 0)          = text "()"
pprParendType (TupleT n)          = parens (hcat (replicate (n-1) comma))
pprParendType (UnboxedTupleT n)   = hashParens $ hcat $ replicate (n-1) comma
pprParendType (UnboxedSumT arity) = hashParens $ hcat $ replicate (arity-1) bar
pprParendType ArrowT              = parens (text "->")
pprParendType ListT               = text "[]"
pprParendType (LitT l)            = pprTyLit l
pprParendType (PromotedT c)       = text "'" <> pprName' Applied c
pprParendType (PromotedTupleT 0)  = text "'()"
pprParendType (PromotedTupleT n)  = quoteParens (hcat (replicate (n-1) comma))
pprParendType PromotedNilT        = text "'[]"
pprParendType PromotedConsT       = text "'(:)"
pprParendType StarT               = char '*'
pprParendType ConstraintT         = text "Constraint"
pprParendType (SigT ty k)         = parens (ppr ty <+> text "::" <+> ppr k)
pprParendType WildCardT           = char '_'
pprParendType (InfixT x n y)      = parens (ppr x <+> pprName' Infix n <+> ppr y)
pprParendType t@(UInfixT {})      = parens (pprUInfixT t)
pprParendType (ParensT t)         = ppr t
-- A saturated tuple application prints in tuple syntax
pprParendType tuple | (TupleT n, args) <- split tuple
                    , length args == n
                    = parens (commaSep args)
pprParendType other               = parens (ppr other)

-- | Print a chain of unresolved infix applications flat, without
-- parens; fixity resolution happens later.
pprUInfixT :: Type -> Doc
pprUInfixT (UInfixT x n y) = pprUInfixT x <+> pprName' Infix n <+> pprUInfixT y
pprUInfixT t               = ppr t
instance Ppr Type where
    ppr (ForallT tvars ctxt ty) = sep [pprForall tvars ctxt, ppr ty]
    ppr ty = pprTyApp (split ty)
       -- Works, in a degenerate way, for SigT, and puts parens round (ty :: kind)
       -- See Note [Pretty-printing kind signatures]
{- Note [Pretty-printing kind signatures]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
GHC's parser only recognises a kind signature in a type when there are
parens around it.  E.g. the parens are required here:
   f :: (Int :: *)
   type instance F Int = (Bool :: *)
So we always print a SigT with parens (see Trac #10050). -}
-- | Print a type application given its head and argument list, using
-- special surface syntax for arrows, equality, lists and saturated
-- (promoted) tuples; anything else prints as prefix application.
pprTyApp :: (Type, [Type]) -> Doc
pprTyApp (ArrowT, [arg1,arg2]) = sep [pprFunArgType arg1 <+> text "->", ppr arg2]
pprTyApp (EqualityT, [arg1, arg2]) =
    sep [pprFunArgType arg1 <+> text "~", ppr arg2]
pprTyApp (ListT, [arg]) = brackets (ppr arg)
pprTyApp (TupleT n, args)
 | length args == n = parens (commaSep args)
pprTyApp (PromotedTupleT n, args)
 | length args == n = quoteParens (commaSep args)
pprTyApp (fun, args) = pprParendType fun <+> sep (map pprParendType args)

pprFunArgType :: Type -> Doc    -- Should really use a precedence argument
-- Everything except forall and (->) binds more tightly than (->)
pprFunArgType ty@(ForallT {})                 = parens (ppr ty)
pprFunArgType ty@((ArrowT `AppT` _) `AppT` _) = parens (ppr ty)
pprFunArgType ty@(SigT _ _)                   = parens (ppr ty)
pprFunArgType ty                              = ppr ty
-- | Decompose a chain of type applications into head and arguments,
-- e.g. @T a b@ becomes @(T, [a, b])@.
split :: Type -> (Type, [Type]) -- Split into function and args
split t0 = go t0 []
  where
    go (AppT f x) acc = go f (x : acc)
    go other      acc = (other, acc)
-- | Print a type-level literal (natural number or symbol).
pprTyLit :: TyLit -> Doc
pprTyLit (NumTyLit n) = integer n
pprTyLit (StrTyLit s) = text (show s)

instance Ppr TyLit where
  ppr = pprTyLit

------------------------------
instance Ppr TyVarBndr where
    ppr (PlainTV nm)    = ppr nm
    ppr (KindedTV nm k) = parens (ppr nm <+> dcolon <+> ppr k)

instance Ppr Role where
    ppr NominalR          = text "nominal"
    ppr RepresentationalR = text "representational"
    ppr PhantomR          = text "phantom"
    ppr InferR            = text "_"
------------------------------
-- | Print a context followed by @=>@; an empty context prints nothing.
pprCxt :: Cxt -> Doc
pprCxt [] = empty
pprCxt ts = ppr_cxt_preds ts <+> text "=>"

-- | Print just the predicates of a context, parenthesised when there
-- is more than one.
ppr_cxt_preds :: Cxt -> Doc
ppr_cxt_preds []  = empty
ppr_cxt_preds [t] = ppr t
ppr_cxt_preds ts  = parens (commaSep ts)
------------------------------
-- | Arithmetic sequences print in bracketed @[e1,e2..e3]@ notation.
instance Ppr Range where
    ppr = brackets . pprRange
      where pprRange :: Range -> Doc
            pprRange (FromR e) = ppr e <> text ".."
            pprRange (FromThenR e1 e2) = ppr e1 <> text ","
                                      <> ppr e2 <> text ".."
            pprRange (FromToR e1 e2) = ppr e1 <> text ".." <> ppr e2
            pprRange (FromThenToR e1 e2 e3) = ppr e1 <> text ","
                                           <> ppr e2 <> text ".."
                                           <> ppr e3
------------------------------
-- | Print a @where@ clause for the given declarations (nothing if empty).
where_clause :: [Dec] -> Doc
where_clause [] = empty
where_clause ds = nest nestDepth $ text "where" <+> vcat (map (ppr_dec False) ds)

-- | 'show' a value and lower-case the result (used for calling
-- conventions and safety keywords in foreign declarations).
showtextl :: Show a => a -> Doc
showtextl = text . map toLower . show

-- | Wrap a document in unboxed-tuple brackets: @(# ... #)@.
hashParens :: Doc -> Doc
hashParens d = text "(# " <> d <> text " #)"

-- | Wrap a document in promoted-tuple brackets: @'( ... )@.
quoteParens :: Doc -> Doc
quoteParens d = text "'(" <> d <> text ")"
-----------------------------
-- | A source location prints as @package:Module:(line,col)-(line,col)@.
instance Ppr Loc where
  ppr (Loc { loc_module = md
           , loc_package = pkg
           , loc_start = (start_ln, start_col)
           , loc_end = (end_ln, end_col) })
    = hcat [ text pkg, colon, text md, colon
           , parens $ int start_ln <> comma <> int start_col
           , text "-"
           , parens $ int end_ln <> comma <> int end_col ]
-- Takes a list of printable things and prints them separated by commas followed
-- by space.
commaSep :: Ppr a => [a] -> Doc
commaSep = commaSepWith ppr

-- Takes a list of things and prints them with the given pretty-printing
-- function, separated by commas followed by space.
commaSepWith :: (a -> Doc) -> [a] -> Doc
commaSepWith pprFun = sep . punctuate comma . map pprFun

-- Takes a list of printable things and prints them separated by semicolons
-- followed by space.
semiSep :: Ppr a => [a] -> Doc
semiSep = sep . punctuate semi . map ppr

-- Prints out the series of vertical bars that wraps an expression or pattern
-- used in an unboxed sum.
-- 'alt' is 1-based: alt-1 bars precede the payload, arity-alt follow it.
unboxedSumBars :: Doc -> SumAlt -> SumArity -> Doc
unboxedSumBars d alt arity = hashParens $
    bars (alt-1) <> d <> bars (arity - alt)
  where
    bars i = hsep (replicate i bar)

-- Text containing the vertical bar character.
bar :: Doc
bar = char '|'
|
ezyang/ghc
|
libraries/template-haskell/Language/Haskell/TH/Ppr.hs
|
Haskell
|
bsd-3-clause
| 31,558
|
module Main where
import System.Environment (getArgs)
import Network.Factual.API
import Data.Factual.Query.DiffsQuery
import Data.Factual.Response
-- | Fetch a diff of the @canada-stable@ table between two fixed
-- timestamps and print the response status, version and data.
--
-- Expects exactly two command-line arguments: the OAuth key and the
-- OAuth secret.  (The original used the partial functions 'head' and
-- 'last', which crash with an unhelpful "empty list" error when no
-- arguments are supplied; this version fails with a usage message.)
main :: IO()
main = do
  args <- getArgs
  (oauthKey, oauthSecret) <- case args of
    [key, secret] -> return (key, secret)
    _             -> fail "usage: DiffsExample OAUTH_KEY OAUTH_SECRET"
  let options = Options { token = generateToken oauthKey oauthSecret, timeout = Nothing }
  let query = DiffsQuery { table = Custom "canada-stable", start = 1339123455775, end = 1339124455775 }
  result <- executeQuery options query
  putStrLn $ "Status: " ++ status result
  putStrLn $ "Version: " ++ show (version result)
  putStrLn $ "Data: " ++ show (response result)
|
rudyl313/factual-haskell-driver
|
examples/DiffsExample.hs
|
Haskell
|
bsd-3-clause
| 619
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.