code
stringlengths 2
1.05M
| repo_name
stringlengths 5
101
| path
stringlengths 4
991
| language
stringclasses 3
values | license
stringclasses 5
values | size
int64 2
1.05M
|
|---|---|---|---|---|---|
{-# LANGUAGE TemplateHaskell #-}
{-| Unittests for the static lock declaration.
-}
{-
Copyright (C) 2016 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Test.Ganeti.JQueue.LockDecls (testLockDecls) where
import Test.QuickCheck
import Test.HUnit
import Data.List
import Data.Maybe
import Prelude ()
import Ganeti.Prelude
import Test.Ganeti.TestHelper
import Test.Ganeti.Objects
import Test.Ganeti.OpCodes (genOpCodeFromId)
import Test.Ganeti.TestCommon
import qualified Ganeti.Constants as C
import Ganeti.JQueue.LockDecls
import Ganeti.OpCodes
import Ganeti.Objects
-- | The static weight of any opcode must fall within the expected
-- bounds: non-negative, and at most five "sure block" weights plus
-- the base weight.
prop_staticWeight :: ConfigData -> Maybe OpCode -> [OpCode] -> Property
prop_staticWeight cfg op ops =
  let weight = staticWeight cfg op ops
      maxWeight = C.staticLockSureBlockWeight * 5
  -- Comparing a Bool against True via '==?' was redundant; use the
  -- Bool directly as a Property.
  in property (weight >= 0 && weight <= maxWeight + C.staticLockBaseWeight)
-- | Generator for instance opcodes that take an exclusive instance
-- lock.
genExclusiveInstanceOp :: ConfigData -> Gen OpCode
genExclusiveInstanceOp cfg = do
  opId <- elements [ "OP_INSTANCE_STARTUP"
                   , "OP_INSTANCE_SHUTDOWN"
                   , "OP_INSTANCE_REBOOT"
                   , "OP_INSTANCE_RENAME"
                   ]
  genOpCodeFromId opId (Just cfg)
-- | When two exclusive instance opcodes are weighed against the same
-- running opcode, the one touching the same instance as the running
-- opcode must weigh at least as much as the non-conflicting one.
prop_instNameConflictCheck :: Property
prop_instNameConflictCheck =
  -- The 'do' wrapper around the forAll chain was redundant and has
  -- been removed.
  forAll (genConfigDataWithValues 10 50) $ \cfg ->
  forAll (genExclusiveInstanceOp cfg) $ \op1 ->
  forAll (genExclusiveInstanceOp cfg) $ \op2 ->
  forAll (genExclusiveInstanceOp cfg) $ \op3 ->
  let w1 = staticWeight cfg (Just op1) [op3]
      w2 = staticWeight cfg (Just op2) [op3]
      iName1 = opInstanceName op1
      iName2 = opInstanceName op2
      iName3 = opInstanceName op3
      -- A name conflict with the running opcode (op3) must not lower
      -- the weight relative to the non-conflicting opcode.
      testResult
        | iName1 == iName2 = True
        | iName1 == iName3 = w2 <= w1
        | iName2 == iName3 = w1 <= w2
        | otherwise = True
  in testResult
-- | Generator for node opcodes that take an exclusive node lock.
genExclusiveNodeOp :: ConfigData -> Gen OpCode
genExclusiveNodeOp cfg = do
  opId <- elements [ "OP_REPAIR_COMMAND"
                   , "OP_NODE_MODIFY_STORAGE"
                   , "OP_REPAIR_NODE_STORAGE"
                   ]
  genOpCodeFromId opId (Just cfg)
-- | When two exclusive node opcodes are weighed against the same
-- running opcode, the one touching the same node as the running
-- opcode must weigh at least as much as the non-conflicting one.
prop_nodeNameConflictCheck :: Property
prop_nodeNameConflictCheck =
  -- The 'do' wrapper around the forAll chain was redundant and has
  -- been removed.
  forAll (genConfigDataWithValues 10 50) $ \cfg ->
  forAll (genExclusiveNodeOp cfg) $ \op1 ->
  forAll (genExclusiveNodeOp cfg) $ \op2 ->
  forAll (genExclusiveNodeOp cfg) $ \op3 ->
  let w1 = staticWeight cfg (Just op1) [op3]
      w2 = staticWeight cfg (Just op2) [op3]
      nName1 = opNodeName op1
      nName2 = opNodeName op2
      nName3 = opNodeName op3
      -- A name conflict with the running opcode (op3) must not lower
      -- the weight relative to the non-conflicting opcode.
      testResult
        | nName1 == nName2 = True
        | nName1 == nName3 = w2 <= w1
        | nName2 == nName3 = w1 <= w2
        | otherwise = True
  in testResult
-- | Opcodes of increasingly wide locking scope, queued behind an
-- exclusive instance opcode, must be assigned non-decreasing static
-- weights.
case_queueLockOpOrder :: Assertion
case_queueLockOpOrder = do
  cfg <- generate $ genConfigDataWithValues 10 50
  -- Generated in order of (expected) increasing lock scope.
  diagnoseOp <- generate $ genOpCodeFromId "OP_OS_DIAGNOSE" (Just cfg)
  networkAddOp <- generate $ genOpCodeFromId "OP_NETWORK_ADD" (Just cfg)
  groupVerifyOp <- generate $ genOpCodeFromId "OP_GROUP_VERIFY_DISKS" (Just cfg)
  nodeAddOp <- generate $ genOpCodeFromId "OP_NODE_ADD" (Just cfg)
  currentOp <- generate $ genExclusiveInstanceOp cfg
  let w1 = staticWeight cfg (Just diagnoseOp) [currentOp]
      w2 = staticWeight cfg (Just networkAddOp) [currentOp]
      w3 = staticWeight cfg (Just groupVerifyOp) [currentOp]
      w4 = staticWeight cfg (Just nodeAddOp) [currentOp]
      weights = [w1, w2, w3, w4]
  -- The weights listed in generation order should already be sorted.
  assertEqual "weights should be sorted"
              weights
              (sort weights)
-- Template Haskell splice collecting the properties and test cases
-- above into the exported 'testLockDecls' suite.
testSuite "LockDecls" [ 'prop_staticWeight
                      , 'prop_instNameConflictCheck
                      , 'prop_nodeNameConflictCheck
                      , 'case_queueLockOpOrder ]
|
onponomarev/ganeti
|
test/hs/Test/Ganeti/JQueue/LockDecls.hs
|
Haskell
|
bsd-2-clause
| 5,098
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
module Stack.Dot (dot
,listDependencies
,DotOpts(..)
,DotPayload(..)
,ListDepsOpts(..)
,ListDepsFormat(..)
,ListDepsFormatOpts(..)
,resolveDependencies
,printGraph
,pruneGraph
) where
import Data.Aeson
import qualified Data.ByteString.Lazy.Char8 as LBC8
import qualified Data.Foldable as F
import qualified Data.Sequence as Seq
import qualified Data.Set as Set
import qualified Data.Map as Map
import qualified Data.Text as Text
import qualified Data.Text.IO as Text
import qualified Data.Traversable as T
import Distribution.Text (display)
import qualified Distribution.PackageDescription as PD
import qualified Distribution.SPDX.License as SPDX
import Distribution.License (License(BSD3), licenseFromSPDX)
import Distribution.Types.PackageName (mkPackageName)
import qualified Path
import RIO.PrettyPrint (HasTerm (..), HasStylesUpdate (..))
import RIO.Process (HasProcessContext (..))
import Stack.Build (loadPackage)
import Stack.Build.Installed (getInstalled, toInstallMap)
import Stack.Build.Source
import Stack.Constants
import Stack.Package
import Stack.Prelude hiding (Display (..), pkgName, loadPackage)
import qualified Stack.Prelude (pkgName)
import Stack.Runners
import Stack.SourceMap
import Stack.Types.Build
import Stack.Types.Compiler (wantedToActual)
import Stack.Types.Config
import Stack.Types.GhcPkgId
import Stack.Types.SourceMap
import Stack.Build.Target(NeedTargets(..), parseTargets)
-- | Options record for @stack dot@
data DotOpts = DotOpts
  { dotIncludeExternal :: !Bool
  -- ^ Include external dependencies
  , dotIncludeBase :: !Bool
  -- ^ Include dependencies on base
  , dotDependencyDepth :: !(Maybe Int)
  -- ^ Limit the depth of dependency resolution to (Just n) or continue until fixpoint
  , dotPrune :: !(Set PackageName)
  -- ^ Package names to prune from the graph
  , dotTargets :: [Text]
  -- ^ stack TARGETs to trace dependencies for
  , dotFlags :: !(Map ApplyCLIFlag (Map FlagName Bool))
  -- ^ Flags to apply when calculating dependencies
  , dotTestTargets :: Bool
  -- ^ Like the "--test" flag for build, affects the meaning of 'dotTargets'.
  , dotBenchTargets :: Bool
  -- ^ Like the "--bench" flag for build, affects the meaning of 'dotTargets'.
  , dotGlobalHints :: Bool
  -- ^ Use global hints instead of relying on an actual GHC installation.
  }
-- | Formatting options shared by the text and tree list-deps output.
data ListDepsFormatOpts = ListDepsFormatOpts { listDepsSep :: !Text
                                             -- ^ Separator between the package name and details.
                                             , listDepsLicense :: !Bool
                                             -- ^ Print dependency licenses instead of versions.
                                             }
-- | Output format for @stack ls dependencies@: flat text, a tree, or
-- JSON.
data ListDepsFormat = ListDepsText ListDepsFormatOpts
                    | ListDepsTree ListDepsFormatOpts
                    | ListDepsJSON
-- | Full options for listing dependencies.
data ListDepsOpts = ListDepsOpts
  { listDepsFormat :: !ListDepsFormat
  -- ^ Format of printing dependencies
  , listDepsDotOpts :: !DotOpts
  -- ^ The normal dot options.
  }
-- | Visualize the project's dependencies as a graphviz graph
dot :: DotOpts -> RIO Runner ()
dot dotOpts =
  createPrunedDependencyGraph dotOpts >>= \(localNames, prunedGraph) ->
    printGraph dotOpts localNames prunedGraph
-- | Information about a package in the dependency graph, when available.
-- All fields are 'Maybe' because the information may be missing for
-- some dependency sources.
data DotPayload = DotPayload
  { payloadVersion :: Maybe Version
  -- ^ The package version.
  , payloadLicense :: Maybe (Either SPDX.License License)
  -- ^ The license the package was released under.
  , payloadLocation :: Maybe PackageLocation
  -- ^ The location of the package.
  } deriving (Eq, Show)
-- | Create the dependency graph and also prune it as specified in the dot
-- options. Returns a set of local names and a map from package names to
-- dependencies.
createPrunedDependencyGraph :: DotOpts
                            -> RIO Runner
                                 (Set PackageName,
                                  Map PackageName (Set PackageName, DotPayload))
createPrunedDependencyGraph dotOpts = withDotConfig dotOpts $ do
  localNames <- view $ buildConfigL.to (Map.keysSet . smwProject . bcSMWanted)
  logDebug "Creating dependency graph"
  resultGraph <- createDependencyGraph dotOpts
  -- "base" is pruned by default unless explicitly included.
  let pkgsToPrune = if dotIncludeBase dotOpts
                      then dotPrune dotOpts
                      else Set.insert "base" (dotPrune dotOpts)
      prunedGraph = pruneGraph localNames pkgsToPrune resultGraph
  -- Fixed typo in the debug message ("prouned" -> "pruned").
  logDebug "Returning pruned dependency graph"
  return (localNames, prunedGraph)
-- | Create the dependency graph, the result is a map from a package
-- name to a tuple of dependencies and payload if available. This
-- function mainly gathers the required arguments for
-- @resolveDependencies@.
createDependencyGraph
  :: DotOpts
  -> RIO DotConfig (Map PackageName (Set PackageName, DotPayload))
createDependencyGraph dotOpts = do
  sourceMap <- view sourceMapL
  locals <- for (toList $ smProject sourceMap) loadLocalPackage
  -- Seed the graph with the wanted project packages and their direct deps.
  let graph = Map.fromList $ projectPackageDependencies dotOpts (filter lpWanted locals)
  globalDump <- view $ to dcGlobalDump
  -- TODO: Can there be multiple entries for wired-in-packages? If so,
  -- this will choose one arbitrarily..
  let globalDumpMap = Map.fromList $ map (\dp -> (Stack.Prelude.pkgName (dpPackageIdent dp), dp)) globalDump
      globalIdMap = Map.fromList $ map (\dp -> (dpGhcPkgId dp, dpPackageIdent dp)) globalDump
  let depLoader = createDepLoader sourceMap globalDumpMap globalIdMap loadPackageDeps
      -- Loader for immutable (snapshot/Hackage) packages.
      loadPackageDeps name version loc flags ghcOptions cabalConfigOpts
        -- Skip packages that can't be loaded - see
        -- https://github.com/commercialhaskell/stack/issues/2967
        | name `elem` [mkPackageName "rts", mkPackageName "ghc"] =
            return (Set.empty, DotPayload (Just version) (Just $ Right BSD3) Nothing)
        | otherwise =
            fmap (packageAllDeps &&& makePayload loc) (loadPackage loc flags ghcOptions cabalConfigOpts)
  resolveDependencies (dotDependencyDepth dotOpts) graph depLoader
  where makePayload loc pkg = DotPayload (Just $ packageVersion pkg) (Just $ packageLicense pkg) (Just $ PLImmutable loc)
-- | List the project's dependencies in the requested format: flat
-- text, a rendered tree, or JSON.
listDependencies
  :: ListDepsOpts
  -> RIO Runner ()
listDependencies opts = do
  let dotOpts = listDepsDotOpts opts
  (pkgs, resultGraph) <- createPrunedDependencyGraph dotOpts
  liftIO $ case listDepsFormat opts of
    ListDepsTree treeOpts -> Text.putStrLn "Packages" >> printTree treeOpts dotOpts 0 [] (treeRoots opts pkgs) resultGraph
    ListDepsJSON -> printJSON pkgs resultGraph
    -- Text output: one "name<sep>details" line per package.
    ListDepsText textOpts -> void (Map.traverseWithKey go (snd <$> resultGraph))
      where go name payload = Text.putStrLn $ listDepsLine textOpts name payload
-- | The dependency graph paired with the set of root (project)
-- packages. Only the map is serialized to JSON; the roots are ignored.
data DependencyTree = DependencyTree (Set PackageName) (Map PackageName (Set PackageName, DotPayload))

instance ToJSON DependencyTree where
  toJSON (DependencyTree _ dependencyMap) =
    toJSON $ foldToList dependencyToJSON dependencyMap
-- | Fold a map into a list, visiting keys in descending order.
--
-- Previously implemented as a right fold with @bs ++ [x]@, which is
-- O(n^2); a left fold with cons produces the same (descending-key)
-- list in O(n).
foldToList :: (k -> a -> b) -> Map k a -> [b]
foldToList f = Map.foldlWithKey (\bs k a -> f k a : bs) []
-- | Encode one package and its dependency set as a JSON object. The
-- "location" field is emitted only when the payload carries one.
dependencyToJSON :: PackageName -> (Set PackageName, DotPayload) -> Value
dependencyToJSON pkg (deps, payload) =
  let fieldsAlwaysPresent = [ "name" .= packageNameString pkg
                            , "license" .= licenseText payload
                            , "version" .= versionText payload
                            , "dependencies" .= Set.map packageNameString deps
                            ]
      loc = catMaybes [("location" .=) . pkgLocToJSON <$> payloadLocation payload]
  in object $ fieldsAlwaysPresent ++ loc
-- | Encode a package location as a JSON object, one shape per
-- location kind (project package, Hackage, archive, repo).
pkgLocToJSON :: PackageLocation -> Value
pkgLocToJSON (PLMutable (ResolvedPath _ dir)) = object [ "type" .= ("project package" :: Text)
                                                       , "url" .= ("file://" ++ Path.toFilePath dir)]
pkgLocToJSON (PLImmutable (PLIHackage pkgid _ _)) = object [ "type" .= ("hackage" :: Text)
                                                           , "url" .= ("https://hackage.haskell.org/package/" ++ display pkgid)]
pkgLocToJSON (PLImmutable (PLIArchive archive _)) =
  -- Archives may come from an URL or from a local file path.
  let url = case archiveLocation archive of
              ALUrl u -> u
              ALFilePath (ResolvedPath _ path) -> Text.pack $ "file://" ++ Path.toFilePath path
  in object [ "type" .= ("archive" :: Text)
            , "url" .= url
            , "sha256" .= archiveHash archive
            , "size" .= archiveSize archive ]
pkgLocToJSON (PLImmutable (PLIRepo repo _)) = object [ "type" .= case repoType repo of
                                                         RepoGit -> "git" :: Text
                                                         RepoHg -> "hg" :: Text
                                                     , "url" .= repoUrl repo
                                                     , "commit" .= repoCommit repo
                                                     , "subdir" .= repoSubdir repo
                                                     ]
-- | Encode the dependency graph as JSON and write it to stdout.
printJSON :: Set PackageName
          -> Map PackageName (Set PackageName, DotPayload)
          -> IO ()
printJSON pkgs dependencyMap =
  LBC8.putStrLn (encode (DependencyTree pkgs dependencyMap))
-- | Roots of the dependency tree: the explicitly requested targets,
-- or all project packages when no targets were given.
treeRoots :: ListDepsOpts -> Set PackageName -> Set PackageName
treeRoots opts projectPackages'
  | null targets = projectPackages'
  | otherwise    = Set.fromList $ map (mkPackageName . Text.unpack) targets
  where
    targets = dotTargets $ listDepsDotOpts opts
-- | Recursively print the dependency tree rooted at the given set of
-- packages, drawing box-style prefixes to show structure.
printTree :: ListDepsFormatOpts
          -> DotOpts
          -> Int    -- ^ current depth
          -> [Int]  -- ^ per-ancestor counts of siblings still to print
          -> Set PackageName
          -> Map PackageName (Set PackageName, DotPayload)
          -> IO ()
printTree opts dotOpts depth remainingDepsCounts packages dependencyMap =
  F.sequence_ $ Seq.mapWithIndex go (toSeq packages)
  where
    toSeq = Seq.fromList . Set.toList
    go index name =
      -- Record how many siblings follow this node at the current level.
      let newDepsCounts = remainingDepsCounts ++ [Set.size packages - index - 1]
      in
      case Map.lookup name dependencyMap of
        Just (deps, payload) -> do
          printTreeNode opts dotOpts depth newDepsCounts deps payload name
          -- Stop descending once the configured depth limit is reached.
          if Just depth == dotDependencyDepth dotOpts
            then return ()
            else printTree opts dotOpts (depth + 1) newDepsCounts deps dependencyMap
        -- TODO: Define this behaviour, maybe return an error?
        Nothing -> return ()
-- | Print a single tree node: box-drawing prefix, a space, then the
-- "name<sep>details" line.
printTreeNode :: ListDepsFormatOpts
              -> DotOpts
              -> Int
              -> [Int]
              -> Set PackageName
              -> DotPayload
              -> PackageName
              -> IO ()
printTreeNode opts dotOpts depth remainingDepsCounts deps payload name =
  -- 999 stands in for "effectively unlimited" when no depth limit is set.
  let remainingDepth = fromMaybe 999 (dotDependencyDepth dotOpts) - depth
      hasDeps = not $ null deps
  in Text.putStrLn $ treeNodePrefix "" remainingDepsCounts hasDeps remainingDepth <> " " <> listDepsLine opts name payload
-- | Build the box-drawing prefix for a tree node. The '[Int]' holds,
-- per ancestor level, how many siblings remain below it; 0 means this
-- branch is the last at that level, which selects the "└" connectors.
treeNodePrefix :: Text -> [Int] -> Bool -> Int -> Text
treeNodePrefix t [] _ _ = t
treeNodePrefix t [0] True 0 = t <> "└──"
treeNodePrefix t [_] True 0 = t <> "├──"
treeNodePrefix t [0] True _ = t <> "└─┬"
treeNodePrefix t [_] True _ = t <> "├─┬"
treeNodePrefix t [0] False _ = t <> "└──"
treeNodePrefix t [_] False _ = t <> "├──"
-- Ancestor levels: blank continuation when that branch is exhausted,
-- a vertical bar when more siblings follow at that level.
treeNodePrefix t (0:ns) d remainingDepth = treeNodePrefix (t <> " ") ns d remainingDepth
treeNodePrefix t (_:ns) d remainingDepth = treeNodePrefix (t <> "│ ") ns d remainingDepth
-- | Render a single "package<separator>details" line.
listDepsLine :: ListDepsFormatOpts -> PackageName -> DotPayload -> Text
listDepsLine opts name payload =
  Text.concat [Text.pack (packageNameString name), listDepsSep opts, payloadText opts payload]
-- | Choose which payload detail to render: the license or the version.
payloadText :: ListDepsFormatOpts -> DotPayload -> Text
payloadText opts
  | listDepsLicense opts = licenseText
  | otherwise            = versionText
-- | Render the payload's license, converting an SPDX expression to the
-- legacy license type for display; "<unknown>" when absent.
licenseText :: DotPayload -> Text
licenseText payload = maybe "<unknown>" (Text.pack . display . either licenseFromSPDX id) (payloadLicense payload)
-- | Render the payload's version; "<unknown>" when absent.
versionText :: DotPayload -> Text
versionText payload = maybe "<unknown>" (Text.pack . display) (payloadVersion payload)
-- | @pruneGraph dontPrune toPrune graph@ prunes all packages in
-- @graph@ with a name in @toPrune@ and removes resulting orphans
-- unless they are in @dontPrune@
pruneGraph :: (F.Foldable f, F.Foldable g, Eq a)
           => f PackageName
           -> g PackageName
           -> Map PackageName (Set PackageName, a)
           -> Map PackageName (Set PackageName, a)
pruneGraph dontPrune names =
  pruneUnreachable dontPrune . Map.mapMaybeWithKey (\pkg (pkgDeps,x) ->
    if pkg `F.elem` names
      then Nothing
      else let filtered = Set.filter (\n -> n `F.notElem` names) pkgDeps
           -- Drop nodes whose dependencies were all pruned away, but
           -- keep genuine leaves (originally empty dependency sets).
           in if Set.null filtered && not (Set.null pkgDeps)
                then Nothing
                else Just (filtered,x))
-- | Make sure that all unreachable nodes (orphans) are pruned
pruneUnreachable :: (Eq a, F.Foldable f)
                 => f PackageName
                 -> Map PackageName (Set PackageName, a)
                 -> Map PackageName (Set PackageName, a)
pruneUnreachable dontPrune = fixpoint prune
  where fixpoint :: Eq a => (a -> a) -> a -> a
        fixpoint f v = if f v == v then v else fixpoint f (f v)
        -- Keep only nodes that are protected roots (in dontPrune) or
        -- referenced as a dependency of some remaining node.
        prune graph' = Map.filterWithKey (\k _ -> reachable k) graph'
          where reachable k = k `F.elem` dontPrune || k `Set.member` reachables
                reachables = F.fold (fst <$> graph')
-- | Resolve the dependency graph up to (Just depth) or until fixpoint is reached.
-- The graph grows by loading the dependencies of every package that is
-- referenced but not yet a key of the map.
--
-- The 'Applicative m' constraint was redundant ('Monad' implies it
-- since the AMP, GHC 7.10) and has been dropped.
resolveDependencies :: Monad m
                    => Maybe Int
                    -> Map PackageName (Set PackageName, DotPayload)
                    -> (PackageName -> m (Set PackageName, DotPayload))
                    -> m (Map PackageName (Set PackageName, DotPayload))
resolveDependencies (Just 0) graph _ = return graph
resolveDependencies limit graph loadPackageDeps = do
  let values = Set.unions (fst <$> Map.elems graph)
      keys = Map.keysSet graph
      -- Packages referenced as dependencies but not yet resolved.
      next = Set.difference values keys
  if Set.null next
    then return graph
    else do
      x <- T.traverse (\name -> (name,) <$> loadPackageDeps name) (F.toList next)
      resolveDependencies (subtract 1 <$> limit)
                          (Map.unionWith unifier graph (Map.fromList x))
                          loadPackageDeps
  -- Keep the payload of the entry already present in the graph.
  where unifier (pkgs1,v1) (pkgs2,_) = (Set.union pkgs1 pkgs2, v1)
-- | Given a SourceMap and a dependency loader, load the set of dependencies for a package
createDepLoader :: SourceMap
                -> Map PackageName DumpPackage
                -> Map GhcPkgId PackageIdentifier
                -> (PackageName -> Version -> PackageLocationImmutable ->
                    Map FlagName Bool -> [Text] -> [Text] -> RIO DotConfig (Set PackageName, DotPayload))
                -> PackageName
                -> RIO DotConfig (Set PackageName, DotPayload)
createDepLoader sourceMap globalDumpMap globalIdMap loadPackageDeps pkgName = do
  -- Try the dependency sources in order: project packages, declared
  -- deps, then the global package database; error if none match.
  fromMaybe noDepsErr
            (projectPackageDeps <|> dependencyDeps <|> globalDeps)
  where
    projectPackageDeps =
      loadDeps <$> Map.lookup pkgName (smProject sourceMap)
      where
        loadDeps pp = do
          pkg <- loadCommonPackage (ppCommon pp)
          pure (packageAllDeps pkg, payloadFromLocal pkg Nothing)
    dependencyDeps =
      loadDeps <$> Map.lookup pkgName (smDeps sourceMap)
      where
        -- Mutable (local directory) dependency: load it like a project
        -- package.
        loadDeps DepPackage{dpLocation=PLMutable dir} = do
          pp <- mkProjectPackage YesPrintWarnings dir False
          pkg <- loadCommonPackage (ppCommon pp)
          pure (packageAllDeps pkg, payloadFromLocal pkg (Just $ PLMutable dir))
        -- Immutable dependency: defer to the supplied loader.
        loadDeps dp@DepPackage{dpLocation=PLImmutable loc} = do
          let common = dpCommon dp
          gpd <- liftIO $ cpGPD common
          let PackageIdentifier name version = PD.package $ PD.packageDescription gpd
              flags = cpFlags common
              ghcOptions = cpGhcOptions common
              cabalConfigOpts = cpCabalConfigOpts common
          assert (pkgName == name) (loadPackageDeps pkgName version loc flags ghcOptions cabalConfigOpts)
    -- If package is a global package, use info from ghc-pkg (#4324, #3084)
    globalDeps =
      pure . getDepsFromDump <$> Map.lookup pkgName globalDumpMap
      where
        getDepsFromDump dump =
          (Set.fromList deps, payloadFromDump dump)
          where
            deps = map ghcIdToPackageName (dpDepends dump)
            -- ghc-pkg reports dependencies as GhcPkgIds; translate them
            -- back to package names via the global id map.
            ghcIdToPackageName depId =
              let errText = "Invariant violated: Expected to find "
              in maybe (error (errText ++ ghcPkgIdString depId ++ " in global DB"))
                       Stack.Prelude.pkgName
                       (Map.lookup depId globalIdMap)
    noDepsErr = error ("Invariant violated: The '" ++ packageNameString pkgName
                       ++ "' package was not found in any of the dependency sources")
    payloadFromLocal pkg loc = DotPayload (Just $ packageVersion pkg) (Just $ packageLicense pkg) loc
    payloadFromDump dp = DotPayload (Just $ pkgVersion $ dpPackageIdent dp) (Right <$> dpLicense dp) Nothing
-- | Resolve the direct (depth 0) external dependencies of the given local packages (assumed to come from project packages)
projectPackageDependencies :: DotOpts -> [LocalPackage] -> [(PackageName, (Set PackageName, DotPayload))]
projectPackageDependencies dotOpts locals =
  map (\lp -> let pkg = localPackageToPackage lp
                  pkgDir = Path.parent $ lpCabalFile lp
                  loc = PLMutable $ ResolvedPath (RelFilePath "N/A") pkgDir
              in (packageName pkg, (deps pkg, lpPayload pkg loc)))
      locals
  where deps pkg =
          -- Without --external, restrict edges to other project packages.
          if dotIncludeExternal dotOpts
            then Set.delete (packageName pkg) (packageAllDeps pkg)
            else Set.intersection localNames (packageAllDeps pkg)
        localNames = Set.fromList $ map (packageName . lpPackage) locals
        lpPayload pkg loc = DotPayload (Just $ packageVersion pkg) (Just $ packageLicense pkg) (Just loc)
-- | Print a graphviz graph of the edges in the Map and highlight the given local packages
printGraph :: (Applicative m, MonadIO m)
           => DotOpts
           -> Set PackageName -- ^ all locals
           -> Map PackageName (Set PackageName, DotPayload)
           -> m ()
printGraph dotOpts locals graph = do
  liftIO $ Text.putStrLn "strict digraph deps {"
  printLocalNodes dotOpts filteredLocals
  printLeaves graph
  void (Map.traverseWithKey printEdges (fst <$> graph))
  liftIO $ Text.putStrLn "}"
  -- Pruned local packages are not highlighted.
  where filteredLocals = Set.filter (\local' ->
          local' `Set.notMember` dotPrune dotOpts) locals
-- | Print the local (project) nodes, styled dashed when external
-- dependencies are shown and solid otherwise.
printLocalNodes :: (F.Foldable t, MonadIO m)
                => DotOpts
                -> t PackageName
                -> m ()
printLocalNodes dotOpts locals =
  liftIO $ Text.putStrLn (Text.intercalate "\n" styledNodes)
  where
    styledNodes :: [Text]
    styledNodes = [ nodeName pkg <> suffix | pkg <- F.toList locals ]
    suffix :: Text
    suffix
      | dotIncludeExternal dotOpts = " [style=dashed];"
      | otherwise                  = " [style=solid];"
-- | Print nodes without dependencies
-- (keys of the graph whose dependency set is empty).
printLeaves :: MonadIO m
            => Map PackageName (Set PackageName, DotPayload)
            -> m ()
printLeaves = F.mapM_ printLeaf . Map.keysSet . Map.filter Set.null . fmap fst
-- | Print an edge from the given package to each of its dependencies.
printEdges :: MonadIO m => PackageName -> Set PackageName -> m ()
printEdges package deps = F.mapM_ (printEdge package) deps
-- | Print a single graphviz edge between two package nodes.
printEdge :: MonadIO m => PackageName -> PackageName -> m ()
printEdge from to' =
  liftIO $ Text.putStrLn (nodeName from <> " -> " <> nodeName to' <> ";")
-- | Convert a package name to a double-quoted graphviz node name.
nodeName :: PackageName -> Text
nodeName name = Text.concat ["\"", Text.pack (packageNameString name), "\""]
-- | Print a node with no dependencies, ranked at the bottom of the
-- graph; wired-in (GHC-shipped) packages get a box shape.
printLeaf :: MonadIO m => PackageName -> m ()
printLeaf package = liftIO . Text.putStrLn . Text.concat $ parts
  where
    parts
      | isWiredIn package = ["{rank=max; ", nodeName package, " [shape=box]; };"]
      | otherwise         = ["{rank=max; ", nodeName package, "; };"]
-- | Check if the package is wired in (shipped with) ghc
isWiredIn :: PackageName -> Bool
isWiredIn pkg = pkg `Set.member` wiredInPackages
-- | Prefer the test/bench-enabled variant of the package when present,
-- falling back to the plain package otherwise.
localPackageToPackage :: LocalPackage -> Package
localPackageToPackage lp =
  case lpTestBench lp of
    Just pkg -> pkg
    Nothing  -> lpPackage lp
-- Plumbing for --test and --bench flags

-- | Run an action against a 'DotConfig' built either from global
-- hints (no GHC installation required) or from a real environment
-- config with an actual installed-package dump.
withDotConfig
  :: DotOpts
  -> RIO DotConfig a
  -> RIO Runner a
withDotConfig opts inner =
  local (over globalOptsL modifyGO) $
    if dotGlobalHints opts
      then withConfig NoReexec $ withBuildConfig withGlobalHints
      else withConfig YesReexec withReal
  where
    -- Build the source map from global hints, faking the global
    -- package dump so no GHC is needed.
    withGlobalHints = do
      bconfig <- view buildConfigL
      globals <- globalsFromHints $ smwCompiler $ bcSMWanted bconfig
      fakeGhcPkgId <- parseGhcPkgId "ignored"
      actual <- either throwIO pure $
                wantedToActual $ smwCompiler $
                bcSMWanted bconfig
      let smActual = SMActual
            { smaCompiler = actual
            , smaProject = smwProject $ bcSMWanted bconfig
            , smaDeps = smwDeps $ bcSMWanted bconfig
            , smaGlobal = Map.mapWithKey toDump globals
            }
          -- Fake dump entry for a hinted global package; only the
          -- identifier fields carry real information.
          toDump :: PackageName -> Version -> DumpPackage
          toDump name version = DumpPackage
            { dpGhcPkgId = fakeGhcPkgId
            , dpPackageIdent = PackageIdentifier name version
            , dpParentLibIdent = Nothing
            , dpLicense = Nothing
            , dpLibDirs = []
            , dpLibraries = []
            , dpHasExposedModules = True
            , dpExposedModules = mempty
            , dpDepends = []
            , dpHaddockInterfaces = []
            , dpHaddockHtml = Nothing
            , dpIsExposed = True
            }
          actualPkgs = Map.keysSet (smaDeps smActual) <>
                       Map.keysSet (smaProject smActual)
          prunedActual = smActual { smaGlobal = pruneGlobals (smaGlobal smActual) actualPkgs }
      targets <- parseTargets NeedTargets False boptsCLI prunedActual
      logDebug "Loading source map"
      sourceMap <- loadSourceMap targets boptsCLI smActual
      let dc = DotConfig
            { dcBuildConfig = bconfig
            , dcSourceMap = sourceMap
            , dcGlobalDump = toList $ smaGlobal smActual
            }
      logDebug "DotConfig fully loaded"
      runRIO dc inner
    -- Use a real environment config and query the installed packages.
    withReal = withEnvConfig NeedTargets boptsCLI $ do
      envConfig <- ask
      let sourceMap = envConfigSourceMap envConfig
      installMap <- toInstallMap sourceMap
      (_, globalDump, _, _) <- getInstalled installMap
      let dc = DotConfig
            { dcBuildConfig = envConfigBuildConfig envConfig
            , dcSourceMap = sourceMap
            , dcGlobalDump = globalDump
            }
      runRIO dc inner
    boptsCLI = defaultBuildOptsCLI
      { boptsCLITargets = dotTargets opts
      , boptsCLIFlags = dotFlags opts
      }
    -- Honour --test/--bench by enabling the corresponding build
    -- options in the global options.
    modifyGO =
      (if dotTestTargets opts then set (globalOptsBuildOptsMonoidL.buildOptsMonoidTestsL) (Just True) else id) .
      (if dotBenchTargets opts then set (globalOptsBuildOptsMonoidL.buildOptsMonoidBenchmarksL) (Just True) else id)
-- | Environment for the dependency-graph computations: build config,
-- resolved source map, and the global package dump.
data DotConfig = DotConfig
  { dcBuildConfig :: !BuildConfig
  , dcSourceMap :: !SourceMap
  , dcGlobalDump :: ![DumpPackage]
  }
-- Environment instances for 'DotConfig', mostly delegating to the
-- wrapped 'BuildConfig' via the 'Runner'/'Config' lenses.
instance HasLogFunc DotConfig where
  logFuncL = runnerL.logFuncL

instance HasPantryConfig DotConfig where
  pantryConfigL = configL.pantryConfigL

instance HasTerm DotConfig where
  useColorL = runnerL.useColorL
  termWidthL = runnerL.termWidthL

instance HasStylesUpdate DotConfig where
  stylesUpdateL = runnerL.stylesUpdateL

instance HasGHCVariant DotConfig
instance HasPlatform DotConfig

instance HasRunner DotConfig where
  runnerL = configL.runnerL

instance HasProcessContext DotConfig where
  processContextL = runnerL.processContextL

instance HasConfig DotConfig

instance HasBuildConfig DotConfig where
  buildConfigL = lens dcBuildConfig (\x y -> x { dcBuildConfig = y })

instance HasSourceMap DotConfig where
  sourceMapL = lens dcSourceMap (\x y -> x { dcSourceMap = y })
|
juhp/stack
|
src/Stack/Dot.hs
|
Haskell
|
bsd-3-clause
| 25,549
|
module Sql.Utils where
import Database.HDBC
import Database.HDBC.Sqlite3
import Data.Maybe
import Types
-- | Open a connection to the application's SQLite database.
-- NOTE(review): "hunter.db" is resolved relative to the current
-- working directory — confirm callers run from the project root.
conn :: IO Connection
conn = connectSqlite3 "hunter.db"
-- | Prepare and execute the statement @s@ with parameters @vs@ on
-- connection @c@, returning the first result row, if any.
fetch :: Connection -> String -> [SqlValue] -> IO (Maybe [SqlValue])
fetch c s vs = do
  com <- prepare c s
  -- The execute result (affected row count) was previously discarded
  -- via an implicit do-bind; make the discard explicit.
  _ <- execute com vs
  fetchRow com
-- | Fetch the rowid generated by the last INSERT on this connection.
lastRowId :: Connection -> IO (Maybe Int)
lastRowId c =
  extract <$> fetch c "SELECT last_insert_rowid()" []
  where
    extract row = fromSql <$> (listToMaybe =<< row)
|
hherman1/CatanServ
|
src/Sql/Utils.hs
|
Haskell
|
bsd-3-clause
| 467
|
-- | Item and treasure definitions.
module Content.ItemKind ( cdefs ) where
import qualified Data.EnumMap.Strict as EM
import Data.List
import Content.ItemKindActor
import Content.ItemKindBlast
import Content.ItemKindOrgan
import Content.ItemKindTemporary
import Game.LambdaHack.Common.Ability
import Game.LambdaHack.Common.Color
import Game.LambdaHack.Common.ContentDef
import Game.LambdaHack.Common.Dice
import Game.LambdaHack.Common.Flavour
import Game.LambdaHack.Common.Misc
import Game.LambdaHack.Content.ItemKind
cdefs :: ContentDef ItemKind
cdefs = ContentDef
{ getSymbol = isymbol
, getName = iname
, getFreq = ifreq
, validateSingle = validateSingleItemKind
, validateAll = validateAllItemKind
, content = items ++ organs ++ blasts ++ actors ++ temporaries
}
items :: [ItemKind]
items =
[dart, dart200, paralizingProj, harpoon, net, jumpingPole, sharpeningTool, seeingItem, light1, light2, light3, gorget, necklace1, necklace2, necklace3, necklace4, necklace5, necklace6, necklace7, necklace8, necklace9, sightSharpening, ring1, ring2, ring3, ring4, ring5, ring6, ring7, ring8, potion1, potion2, potion3, potion4, potion5, potion6, potion7, potion8, potion9, flask1, flask2, flask3, flask4, flask5, flask6, flask7, flask8, flask9, flask10, flask11, flask12, flask13, flask14, scroll1, scroll2, scroll3, scroll4, scroll5, scroll6, scroll7, scroll8, scroll9, scroll10, scroll11, armorLeather, armorMail, gloveFencing, gloveGauntlet, gloveJousting, buckler, shield, dagger, daggerDropBestWeapon, hammer, hammerParalyze, hammerSpark, sword, swordImpress, swordNullify, halberd, halberdPushActor, wand1, wand2, gem1, gem2, gem3, gem4, currency]
dart, dart200, paralizingProj, harpoon, net, jumpingPole, sharpeningTool, seeingItem, light1, light2, light3, gorget, necklace1, necklace2, necklace3, necklace4, necklace5, necklace6, necklace7, necklace8, necklace9, sightSharpening, ring1, ring2, ring3, ring4, ring5, ring6, ring7, ring8, potion1, potion2, potion3, potion4, potion5, potion6, potion7, potion8, potion9, flask1, flask2, flask3, flask4, flask5, flask6, flask7, flask8, flask9, flask10, flask11, flask12, flask13, flask14, scroll1, scroll2, scroll3, scroll4, scroll5, scroll6, scroll7, scroll8, scroll9, scroll10, scroll11, armorLeather, armorMail, gloveFencing, gloveGauntlet, gloveJousting, buckler, shield, dagger, daggerDropBestWeapon, hammer, hammerParalyze, hammerSpark, sword, swordImpress, swordNullify, halberd, halberdPushActor, wand1, wand2, gem1, gem2, gem3, gem4, currency :: ItemKind
necklace, ring, potion, flask, scroll, wand, gem :: ItemKind -- generic templates
-- * Item group symbols, partially from Nethack
symbolProjectile, _symbolLauncher, symbolLight, symbolTool, symbolGem, symbolGold, symbolNecklace, symbolRing, symbolPotion, symbolFlask, symbolScroll, symbolTorsoArmor, symbolMiscArmor, _symbolClothes, symbolShield, symbolPolearm, symbolEdged, symbolHafted, symbolWand, _symbolStaff, _symbolFood :: Char
symbolProjectile = '|'
_symbolLauncher = '}'
symbolLight = '('
symbolTool = '('
symbolGem = '*'
symbolGold = '$'
symbolNecklace = '"'
symbolRing = '='
symbolPotion = '!' -- concoction, bottle, jar, vial, canister
symbolFlask = '!'
symbolScroll = '?' -- book, note, tablet, remote
symbolTorsoArmor = '['
symbolMiscArmor = '['
_symbolClothes = '('
symbolShield = '['
symbolPolearm = ')'
symbolEdged = ')'
symbolHafted = ')'
symbolWand = '/' -- magical rod, transmitter, pistol, rifle
_symbolStaff = '_' -- scanner
_symbolFood = ',' -- too easy to miss?
-- * Thrown weapons
dart = ItemKind
{ isymbol = symbolProjectile
, iname = "dart"
, ifreq = [("useful", 100), ("any arrow", 100)]
, iflavour = zipPlain [Cyan]
, icount = 4 * d 3
, irarity = [(1, 10), (10, 20)]
, iverbHit = "nick"
, iweight = 50
, iaspects = [AddHurtRanged (d 3 + dl 6 |*| 20)]
, ieffects = [Hurt (2 * d 1)]
, ifeature = [Identified]
, idesc = "Little, but sharp and sturdy." -- "Much inferior to arrows though, especially given the contravariance problems." --- funny, but destroy the suspension of disbelief; this is supposed to be a Lovecraftian horror and any hilarity must ensue from the failures in making it so and not from actively trying to be funny; also, mundane objects are not supposed to be scary or transcendental; the scare is in horrors from the abstract dimension visiting our ordinary reality; without the contrast there's no horror and no wonder, so also the magical items must be contrasted with ordinary XIX century and antique items
, ikit = []
}
dart200 = ItemKind
{ isymbol = symbolProjectile
, iname = "fine dart"
, ifreq = [("useful", 100), ("any arrow", 50)] -- TODO: until arrows added
, iflavour = zipPlain [BrRed]
, icount = 4 * d 3
, irarity = [(1, 20), (10, 10)]
, iverbHit = "prick"
, iweight = 50
, iaspects = [AddHurtRanged (d 3 + dl 6 |*| 20)]
, ieffects = [Hurt (1 * d 1)]
, ifeature = [toVelocity 200, Identified]
, idesc = "Finely balanced for throws of great speed."
, ikit = []
}
-- * Exotic thrown weapons
paralizingProj = ItemKind
{ isymbol = symbolProjectile
, iname = "bolas set"
, ifreq = [("useful", 100)]
, iflavour = zipPlain [BrYellow]
, icount = dl 4
, irarity = [(5, 5), (10, 5)]
, iverbHit = "entangle"
, iweight = 500
, iaspects = []
, ieffects = [Hurt (2 * d 1), Paralyze (5 + d 5), DropBestWeapon]
, ifeature = [Identified]
, idesc = "Wood balls tied with hemp rope. The target enemy is tripped and bound to drop the main weapon, while fighting for balance."
, ikit = []
}
harpoon = ItemKind
{ isymbol = symbolProjectile
, iname = "harpoon"
, ifreq = [("useful", 100)]
, iflavour = zipPlain [Brown]
, icount = dl 5
, irarity = [(10, 10)]
, iverbHit = "hook"
, iweight = 4000
, iaspects = [AddHurtRanged (d 2 + dl 5 |*| 20)]
, ieffects = [Hurt (4 * d 1), PullActor (ThrowMod 200 50)]
, ifeature = [Identified]
, idesc = "The cruel, barbed head lodges in its victim so painfully that the weakest tug of the thin line sends the victim flying."
, ikit = []
}
net = ItemKind
{ isymbol = symbolProjectile
, iname = "net"
, ifreq = [("useful", 100)]
, iflavour = zipPlain [White]
, icount = dl 3
, irarity = [(3, 5), (10, 4)]
, iverbHit = "entangle"
, iweight = 1000
, iaspects = []
, ieffects = [ toOrganGameTurn "slow 10" (3 + d 3)
, DropItem CEqp "torso armor" False ]
, ifeature = [Identified]
, idesc = "A wide net with weights along the edges. Entangles armor and restricts movement."
, ikit = []
}
-- * Assorted tools
jumpingPole = ItemKind
{ isymbol = symbolTool
, iname = "jumping pole"
, ifreq = [("useful", 100)]
, iflavour = zipPlain [White]
, icount = 1
, irarity = [(1, 2)]
, iverbHit = "prod"
, iweight = 10000
, iaspects = [Timeout $ d 2 + 2 - dl 2 |*| 10]
, ieffects = [Recharging (toOrganActorTurn "fast 20" 1)]
, ifeature = [Durable, Applicable, Identified]
, idesc = "Makes you vulnerable at take-off, but then you are free like a bird."
, ikit = []
}
sharpeningTool = ItemKind
{ isymbol = symbolTool
, iname = "whetstone"
, ifreq = [("useful", 100)]
, iflavour = zipPlain [Blue]
, icount = 1
, irarity = [(10, 10)]
, iverbHit = "smack"
, iweight = 400
, iaspects = [AddHurtMelee $ d 10 |*| 3]
, ieffects = []
, ifeature = [EqpSlot EqpSlotAddHurtMelee "", Identified]
, idesc = "A portable sharpening stone that lets you fix your weapons between or even during fights, without the need to set up camp, fish out tools and assemble a proper sharpening workshop."
, ikit = []
}
seeingItem = ItemKind
{ isymbol = '%'
, iname = "pupil"
, ifreq = [("useful", 100)]
, iflavour = zipPlain [Red]
, icount = 1
, irarity = [(1, 1)]
, iverbHit = "gaze at"
, iweight = 100
, iaspects = [ AddSight 10, AddMaxCalm 60, AddLight 2
, Periodic, Timeout $ 1 + d 2 ]
, ieffects = [ Recharging (toOrganNone "poisoned")
, Recharging (Summon [("mobile monster", 1)] 1) ]
, ifeature = [Identified]
, idesc = "A slimy, dilated green pupil torn out from some giant eye. Clear and focused, as if still alive."
, ikit = []
}
-- * Lights
light1 = ItemKind
{ isymbol = symbolLight
, iname = "wooden torch"
, ifreq = [("useful", 100), ("light source", 100)]
, iflavour = zipPlain [Brown]
, icount = d 2
, irarity = [(1, 10)]
, iverbHit = "scorch"
, iweight = 1200
, iaspects = [ AddLight 3 -- not only flashes, but also sparks
, AddSight (-2) ] -- unused by AI due to the mixed blessing
, ieffects = [Burn 2]
, ifeature = [EqpSlot EqpSlotAddLight "", Identified]
, idesc = "A smoking, heavy wooden torch, burning in an unsteady glow."
, ikit = []
}
light2 = ItemKind
{ isymbol = symbolLight
, iname = "oil lamp"
, ifreq = [("useful", 100), ("light source", 100)]
, iflavour = zipPlain [BrYellow]
, icount = 1
, irarity = [(6, 7)]
, iverbHit = "burn"
, iweight = 1000
, iaspects = [AddLight 3, AddSight (-1)]
, ieffects = [Burn 3, Paralyze 3, OnSmash (Explode "burning oil 3")]
, ifeature = [ toVelocity 70 -- hard not to spill the oil while throwing
, Fragile, EqpSlot EqpSlotAddLight "", Identified ]
, idesc = "A clay lamp filled with plant oil feeding a tiny wick."
, ikit = []
}
light3 = ItemKind
{ isymbol = symbolLight
, iname = "brass lantern"
, ifreq = [("useful", 100), ("light source", 100)]
, iflavour = zipPlain [BrWhite]
, icount = 1
, irarity = [(10, 5)]
, iverbHit = "burn"
, iweight = 2400
, iaspects = [AddLight 4, AddSight (-1)]
, ieffects = [Burn 4, Paralyze 4, OnSmash (Explode "burning oil 4")]
, ifeature = [ toVelocity 70 -- hard to throw so that it opens and burns
, Fragile, EqpSlot EqpSlotAddLight "", Identified ]
, idesc = "Very bright and very heavy brass lantern."
, ikit = []
}
-- * Periodic jewelry
gorget = ItemKind
{ isymbol = symbolNecklace
, iname = "Old Gorget"
, ifreq = [("useful", 100)]
, iflavour = zipFancy [BrCyan]
, icount = 1
, irarity = [(4, 3), (10, 3)] -- weak, shallow
, iverbHit = "whip"
, iweight = 30
, iaspects = [ Unique
, Periodic
, Timeout $ 1 + d 2
, AddArmorMelee $ 2 + d 3
, AddArmorRanged $ 2 + d 3 ]
, ieffects = [Recharging (RefillCalm 1)]
, ifeature = [ Durable, Precious, EqpSlot EqpSlotPeriodic ""
, Identified, toVelocity 50 ] -- not dense enough
, idesc = "Highly ornamental, cold, large, steel medallion on a chain. Unlikely to offer much protection as an armor piece, but the old, worn engraving reassures you."
, ikit = []
}
necklace = ItemKind
{ isymbol = symbolNecklace
, iname = "necklace"
, ifreq = [("useful", 100)]
, iflavour = zipFancy stdCol ++ zipPlain brightCol
, icount = 1
, irarity = [(10, 2)]
, iverbHit = "whip"
, iweight = 30
, iaspects = [Periodic]
, ieffects = []
, ifeature = [ Precious, EqpSlot EqpSlotPeriodic ""
, toVelocity 50 ] -- not dense enough
, idesc = "Menacing Greek symbols shimmer with increasing speeds along a chain of fine encrusted links. After a tense build-up, a prismatic arc shoots towards the ground and the iridescence subdues, becomes ordered and resembles a harmless ornament again, for a time."
, ikit = []
}
necklace1 = necklace
{ ifreq = [("treasure", 100)]
, iaspects = [Unique, Timeout $ d 3 + 4 - dl 3 |*| 10]
++ iaspects necklace
, ieffects = [NoEffect "of Aromata", Recharging (RefillHP 1)]
, ifeature = Durable : ifeature necklace
, idesc = "A cord of freshly dried herbs and healing berries."
}
necklace2 = necklace
{ ifreq = [("treasure", 100)] -- just too nasty to call it useful
, irarity = [(1, 1)]
, iaspects = (Timeout $ d 3 + 3 - dl 3 |*| 10) : iaspects necklace
, ieffects = [ Recharging Impress
, Recharging (DropItem COrgan "temporary conditions" True)
, Recharging (Summon [("mobile animal", 1)] $ 1 + dl 2)
, Recharging (Explode "waste") ]
}
necklace3 = necklace
{ iaspects = (Timeout $ d 3 + 3 - dl 3 |*| 10) : iaspects necklace
, ieffects = [Recharging (Paralyze $ 5 + d 5 + dl 5)]
}
necklace4 = necklace
{ iaspects = (Timeout $ d 4 + 4 - dl 4 |*| 2) : iaspects necklace
, ieffects = [Recharging (Teleport $ d 2 * 3)]
}
necklace5 = necklace
{ iaspects = (Timeout $ d 3 + 4 - dl 3 |*| 10) : iaspects necklace
, ieffects = [Recharging (Teleport $ 14 + d 3 * 3)]
}
necklace6 = necklace
{ iaspects = (Timeout $ d 4 |*| 10) : iaspects necklace
, ieffects = [Recharging (PushActor (ThrowMod 100 50))]
}
necklace7 = necklace -- TODO: teach AI to wear only for fight
{ ifreq = [("treasure", 100)]
, iaspects = [ Unique, AddMaxHP $ 10 + d 10
, AddArmorMelee 20, AddArmorRanged 20
, Timeout $ d 2 + 5 - dl 3 ]
++ iaspects necklace
, ieffects = [ NoEffect "of Overdrive"
, Recharging (InsertMove $ 1 + d 2)
, Recharging (RefillHP (-1))
, Recharging (RefillCalm (-1)) ]
, ifeature = Durable : ifeature necklace
}
necklace8 = necklace
{ iaspects = (Timeout $ d 3 + 3 - dl 3 |*| 5) : iaspects necklace
, ieffects = [Recharging $ Explode "spark"]
}
necklace9 = necklace
{ iaspects = (Timeout $ d 3 + 3 - dl 3 |*| 5) : iaspects necklace
, ieffects = [Recharging $ Explode "fragrance"]
}
-- * Non-periodic jewelry
sightSharpening = ItemKind
{ isymbol = symbolRing
, iname = "Sharp Monocle"
, ifreq = [("treasure", 100)]
, iflavour = zipPlain [White]
, icount = 1
, irarity = [(7, 3), (10, 3)] -- medium weak, medium shallow
, iverbHit = "rap"
, iweight = 50
, iaspects = [Unique, AddSight $ 1 + d 2, AddHurtMelee $ d 2 |*| 3]
, ieffects = []
, ifeature = [ Precious, Identified, Durable
, EqpSlot EqpSlotAddSight "" ]
, idesc = "Let's you better focus your weaker eye."
, ikit = []
}
-- Don't add standard effects to rings, because they go in and out
-- of eqp and so activating them would require UI tedium: looking for
-- them in eqp and inv or even activating a wrong item via letter by mistake.
ring = ItemKind
{ isymbol = symbolRing
, iname = "ring"
, ifreq = [("useful", 100)]
, iflavour = zipPlain stdCol ++ zipFancy darkCol
, icount = 1
, irarity = [(10, 3)]
, iverbHit = "knock"
, iweight = 15
, iaspects = []
, ieffects = [Explode "blast 20"]
, ifeature = [Precious, Identified]
, idesc = "It looks like an ordinary object, but it's in fact a generator of exceptional effects: adding to some of your natural abilities and subtracting from others. You'd profit enormously if you could find a way to multiply such generators."
, ikit = []
}
ring1 = ring
{ irarity = [(10, 2)]
, iaspects = [AddSpeed $ 1 + d 2, AddMaxHP $ dl 7 - 7 - d 7]
, ieffects = [Explode "distortion"] -- strong magic
, ifeature = ifeature ring ++ [EqpSlot EqpSlotAddSpeed ""]
}
ring2 = ring
{ irarity = [(10, 5)]
, iaspects = [AddMaxHP $ 10 + dl 10, AddMaxCalm $ dl 5 - 20 - d 5]
, ifeature = ifeature ring ++ [EqpSlot EqpSlotAddMaxHP ""]
}
ring3 = ring
{ irarity = [(10, 5)]
, iaspects = [AddMaxCalm $ 29 + dl 10]
, ifeature = ifeature ring ++ [EqpSlot EqpSlotAddMaxCalm ""]
, idesc = "Cold, solid to the touch, perfectly round, engraved with solemn, strangely comforting, worn out words."
}
ring4 = ring
{ irarity = [(3, 3), (10, 5)]
, iaspects = [AddHurtMelee $ d 5 + dl 5 |*| 3, AddMaxHP $ dl 3 - 5 - d 3]
, ifeature = ifeature ring ++ [EqpSlot EqpSlotAddHurtMelee ""]
}
ring5 = ring -- by the time it's found, probably no space in eqp
{ irarity = [(5, 0), (10, 2)]
, iaspects = [AddLight $ d 2]
, ieffects = [Explode "distortion"] -- strong magic
, ifeature = ifeature ring ++ [EqpSlot EqpSlotAddLight ""]
, idesc = "A sturdy ring with a large, shining stone."
}
ring6 = ring
{ ifreq = [("treasure", 100)]
, irarity = [(10, 2)]
, iaspects = [ Unique, AddSpeed $ 3 + d 4
, AddMaxCalm $ - 20 - d 20, AddMaxHP $ - 20 - d 20 ]
, ieffects = [NoEffect "of Rush"] -- no explosion, because Durable
, ifeature = ifeature ring ++ [Durable, EqpSlot EqpSlotAddSpeed ""]
}
ring7 = ring
{ ifreq = [("useful", 100), ("ring of opportunity sniper", 1) ]
, irarity = [(1, 1)]
, iaspects = [AddSkills $ EM.fromList [(AbProject, 8)]]
, ieffects = [ NoEffect "of opportunity sniper"
, Explode "distortion" ] -- strong magic
, ifeature = ifeature ring ++ [EqpSlot (EqpSlotAddSkills AbProject) ""]
}
ring8 = ring
{ ifreq = [("useful", 1), ("ring of opportunity grenadier", 1) ]
, irarity = [(1, 1)]
, iaspects = [AddSkills $ EM.fromList [(AbProject, 11)]]
, ieffects = [ NoEffect "of opportunity grenadier"
, Explode "distortion" ] -- strong magic
, ifeature = ifeature ring ++ [EqpSlot (EqpSlotAddSkills AbProject) ""]
}
-- * Ordinary exploding consumables, often intended to be thrown
potion = ItemKind
{ isymbol = symbolPotion
, iname = "potion"
, ifreq = [("useful", 100)]
, iflavour = zipLiquid brightCol ++ zipPlain brightCol ++ zipFancy brightCol
, icount = 1
, irarity = [(1, 12), (10, 9)]
, iverbHit = "splash"
, iweight = 200
, iaspects = []
, ieffects = []
, ifeature = [ toVelocity 50 -- oily, bad grip
, Applicable, Fragile ]
, idesc = "A vial of bright, frothing concoction." -- purely natural; no maths, no magic
, ikit = []
}
potion1 = potion
{ ieffects = [ NoEffect "of rose water", Impress, RefillCalm (-3)
, OnSmash ApplyPerfume, OnSmash (Explode "fragrance") ]
}
potion2 = potion
{ ifreq = [("treasure", 100)]
, irarity = [(6, 10), (10, 10)]
, iaspects = [Unique]
, ieffects = [ NoEffect "of Attraction", Impress, OverfillCalm (-20)
, OnSmash (Explode "pheromone") ]
}
potion3 = potion
{ irarity = [(1, 10)]
, ieffects = [ RefillHP 5, DropItem COrgan "poisoned" True
, OnSmash (Explode "healing mist") ]
}
potion4 = potion
{ irarity = [(10, 10)]
, ieffects = [ RefillHP 10, DropItem COrgan "poisoned" True
, OnSmash (Explode "healing mist 2") ]
}
potion5 = potion
{ ieffects = [ OneOf [ OverfillHP 10, OverfillHP 5, Burn 5
, toOrganActorTurn "strengthened" (20 + d 5) ]
, OnSmash (OneOf [ Explode "healing mist"
, Explode "wounding mist"
, Explode "fragrance"
, Explode "smelly droplet"
, Explode "blast 10" ]) ]
}
potion6 = potion
{ irarity = [(3, 3), (10, 6)]
, ieffects = [ Impress
, OneOf [ OverfillCalm (-60)
, OverfillHP 20, OverfillHP 10, Burn 10
, toOrganActorTurn "fast 20" (20 + d 5) ]
, OnSmash (OneOf [ Explode "healing mist 2"
, Explode "calming mist"
, Explode "distressing odor"
, Explode "eye drop"
, Explode "blast 20" ]) ]
}
potion7 = potion
{ irarity = [(1, 15), (10, 5)]
, ieffects = [ DropItem COrgan "poisoned" True
, OnSmash (Explode "antidote mist") ]
}
potion8 = potion
{ irarity = [(1, 5), (10, 15)]
, ieffects = [ DropItem COrgan "temporary conditions" True
, OnSmash (Explode "blast 10") ]
}
potion9 = potion
{ ifreq = [("treasure", 100)]
, irarity = [(10, 5)]
, iaspects = [Unique]
, ieffects = [ NoEffect "of Love", OverfillHP 60
, Impress, OverfillCalm (-60)
, OnSmash (Explode "healing mist 2")
, OnSmash (Explode "pheromone") ]
}
-- * Exploding consumables with temporary aspects, can be thrown
-- TODO: dip projectiles in those
-- TODO: add flavour and realism as in, e.g., "flask of whiskey",
-- which is more flavourful and believable than "flask of strength"
flask = ItemKind
{ isymbol = symbolFlask
, iname = "flask"
, ifreq = [("useful", 100), ("flask", 100)]
, iflavour = zipLiquid darkCol ++ zipPlain darkCol ++ zipFancy darkCol
, icount = 1
, irarity = [(1, 9), (10, 6)]
, iverbHit = "splash"
, iweight = 500
, iaspects = []
, ieffects = []
, ifeature = [ toVelocity 50 -- oily, bad grip
, Applicable, Fragile ]
, idesc = "A flask of oily liquid of a suspect color."
, ikit = []
}
flask1 = flask
{ irarity = [(10, 5)]
, ieffects = [ NoEffect "of strength brew"
, toOrganActorTurn "strengthened" (20 + d 5)
, toOrganNone "regenerating"
, OnSmash (Explode "strength mist") ]
}
flask2 = flask
{ ieffects = [ NoEffect "of weakness brew"
, toOrganGameTurn "weakened" (20 + d 5)
, OnSmash (Explode "weakness mist") ]
}
flask3 = flask
{ ieffects = [ NoEffect "of protecting balm"
, toOrganActorTurn "protected" (20 + d 5)
, OnSmash (Explode "protecting balm") ]
}
flask4 = flask
{ ieffects = [ NoEffect "of PhD defense questions"
, toOrganGameTurn "defenseless" (20 + d 5)
, OnSmash (Explode "PhD defense question") ]
}
flask5 = flask
{ irarity = [(10, 5)]
, ieffects = [ NoEffect "of haste brew"
, toOrganActorTurn "fast 20" (20 + d 5)
, OnSmash (Explode "haste spray") ]
}
flask6 = flask
{ ieffects = [ NoEffect "of lethargy brew"
, toOrganGameTurn "slow 10" (20 + d 5)
, toOrganNone "regenerating"
, RefillCalm 3
, OnSmash (Explode "slowness spray") ]
}
flask7 = flask -- sight can be reduced from Calm, drunk, etc.
{ irarity = [(10, 7)]
, ieffects = [ NoEffect "of eye drops"
, toOrganActorTurn "far-sighted" (20 + d 5)
, OnSmash (Explode "blast 10") ]
}
flask8 = flask
{ irarity = [(10, 3)]
, ieffects = [ NoEffect "of smelly concoction"
, toOrganActorTurn "keen-smelling" (20 + d 5)
, OnSmash (Explode "blast 10") ]
}
flask9 = flask
{ ieffects = [ NoEffect "of bait cocktail"
, toOrganActorTurn "drunk" (5 + d 5)
, OnSmash (Summon [("mobile animal", 1)] $ 1 + dl 2)
, OnSmash (Explode "waste") ]
}
flask10 = flask
{ ieffects = [ NoEffect "of whiskey"
, toOrganActorTurn "drunk" (20 + d 5)
, Impress, Burn 2, RefillHP 4
, OnSmash (Explode "whiskey spray") ]
}
flask11 = flask
{ irarity = [(1, 20), (10, 10)]
, ieffects = [ NoEffect "of regeneration brew"
, toOrganNone "regenerating"
, OnSmash (Explode "healing mist") ]
}
flask12 = flask -- but not flask of Calm depletion, since Calm reduced often
{ ieffects = [ NoEffect "of poison"
, toOrganNone "poisoned"
, OnSmash (Explode "wounding mist") ]
}
flask13 = flask
{ irarity = [(10, 5)]
, ieffects = [ NoEffect "of slow resistance"
, toOrganNone "slow resistant"
, OnSmash (Explode "anti-slow mist") ]
}
flask14 = flask
{ irarity = [(10, 5)]
, ieffects = [ NoEffect "of poison resistance"
, toOrganNone "poison resistant"
, OnSmash (Explode "antidote mist") ]
}
-- * Non-exploding consumables, not specifically designed for throwing
scroll = ItemKind
{ isymbol = symbolScroll
, iname = "scroll"
, ifreq = [("useful", 100), ("any scroll", 100)]
, iflavour = zipFancy stdCol ++ zipPlain darkCol -- arcane and old
, icount = 1
, irarity = [(1, 15), (10, 12)]
, iverbHit = "thump"
, iweight = 50
, iaspects = []
, ieffects = []
, ifeature = [ toVelocity 25 -- bad shape, even rolled up
, Applicable ]
, idesc = "Scraps of haphazardly scribbled mysteries from beyond. Is this equation an alchemical recipe? Is this diagram an extradimensional map? Is this formula a secret call sign?"
, ikit = []
}
scroll1 = scroll
{ ifreq = [("treasure", 100)]
, irarity = [(5, 10), (10, 10)] -- mixed blessing, so available early
, iaspects = [Unique]
, ieffects = [ NoEffect "of Reckless Beacon"
, CallFriend 1, Summon standardSummon (2 + d 2) ]
}
scroll2 = scroll
{ irarity = []
, ieffects = []
}
scroll3 = scroll
{ irarity = [(1, 5), (10, 3)]
, ieffects = [Ascend (-1)]
}
scroll4 = scroll
{ ieffects = [OneOf [ Teleport 5, RefillCalm 5, RefillCalm (-5)
, InsertMove 5, Paralyze 10 ]]
}
scroll5 = scroll
{ irarity = [(10, 15)]
, ieffects = [ Impress
, OneOf [ Teleport 20, Ascend (-1), Ascend 1
, Summon standardSummon 2, CallFriend 1
, RefillCalm 5, OverfillCalm (-60)
, CreateItem CGround "useful" TimerNone ] ]
}
scroll6 = scroll
{ ieffects = [Teleport 5]
}
scroll7 = scroll
{ ieffects = [Teleport 20]
}
scroll8 = scroll
{ irarity = [(10, 3)]
, ieffects = [InsertMove $ 1 + d 2 + dl 2]
}
scroll9 = scroll -- TODO: remove Calm when server can tell if anything IDed
{ irarity = [(1, 15), (10, 10)]
, ieffects = [ NoEffect "of scientific explanation"
, Identify, OverfillCalm 3 ]
}
scroll10 = scroll -- TODO: firecracker only if an item really polymorphed?
-- But currently server can't tell.
{ irarity = [(10, 10)]
, ieffects = [ NoEffect "transfiguration"
, PolyItem, Explode "firecracker 7" ]
}
scroll11 = scroll
{ ifreq = [("treasure", 100)]
, irarity = [(6, 10), (10, 10)]
, iaspects = [Unique]
, ieffects = [NoEffect "of Prisoner Release", CallFriend 1]
}
standardSummon :: Freqs ItemKind
standardSummon = [("mobile monster", 30), ("mobile animal", 70)]
-- * Armor
armorLeather = ItemKind
{ isymbol = symbolTorsoArmor
, iname = "leather armor"
, ifreq = [("useful", 100), ("torso armor", 1)]
, iflavour = zipPlain [Brown]
, icount = 1
, irarity = [(1, 9), (10, 3)]
, iverbHit = "thud"
, iweight = 7000
, iaspects = [ AddHurtMelee (-3)
, AddArmorMelee $ 1 + d 2 + dl 2 |*| 5
, AddArmorRanged $ 1 + d 2 + dl 2 |*| 5 ]
, ieffects = []
, ifeature = [ toVelocity 30 -- unwieldy to throw and blunt
, Durable, EqpSlot EqpSlotAddArmorMelee "", Identified ]
, idesc = "A stiff jacket formed from leather boiled in bee wax. Smells much better than the rest of your garment."
, ikit = []
}
armorMail = armorLeather
{ iname = "mail armor"
, iflavour = zipPlain [Cyan]
, irarity = [(6, 9), (10, 3)]
, iweight = 12000
, iaspects = [ AddHurtMelee (-3)
, AddArmorMelee $ 2 + d 2 + dl 3 |*| 5
, AddArmorRanged $ 2 + d 2 + dl 3 |*| 5 ]
, idesc = "A long shirt woven from iron rings. Discourages foes from attacking your torso, making it harder for them to land a blow."
}
gloveFencing = ItemKind
{ isymbol = symbolMiscArmor
, iname = "leather gauntlet"
, ifreq = [("useful", 100)]
, iflavour = zipPlain [BrYellow]
, icount = 1
, irarity = [(5, 9), (10, 9)]
, iverbHit = "flap"
, iweight = 100
, iaspects = [ AddHurtMelee $ (d 2 + dl 10) |*| 3
, AddArmorRanged $ d 2 |*| 5 ]
, ieffects = []
, ifeature = [ toVelocity 30 -- flaps and flutters
, Durable, EqpSlot EqpSlotAddArmorRanged "", Identified ]
, idesc = "A fencing glove from rough leather ensuring a good grip. Also quite effective in deflecting or even catching slow projectiles."
, ikit = []
}
gloveGauntlet = gloveFencing
{ iname = "steel gauntlet"
, iflavour = zipPlain [BrCyan]
, irarity = [(1, 9), (10, 3)]
, iweight = 300
, iaspects = [ AddArmorMelee $ 1 + dl 2 |*| 5
, AddArmorRanged $ 1 + dl 2 |*| 5 ]
, idesc = "Long leather gauntlet covered in overlapping steel plates."
}
gloveJousting = gloveFencing
{ iname = "Tournament Gauntlet"
, iflavour = zipFancy [BrRed]
, irarity = [(1, 3), (10, 3)]
, iweight = 500
, iaspects = [ Unique
, AddHurtMelee $ dl 4 - 6 |*| 3
, AddArmorMelee $ 2 + dl 2 |*| 5
, AddArmorRanged $ 2 + dl 2 |*| 5 ]
, idesc = "Rigid, steel, jousting handgear. If only you had a lance. And a horse."
}
-- * Shields
-- Shield doesn't protect against ranged attacks to prevent
-- micromanagement: walking with shield, melee without.
buckler = ItemKind
{ isymbol = symbolShield
, iname = "buckler"
, ifreq = [("useful", 100)]
, iflavour = zipPlain [Blue]
, icount = 1
, irarity = [(4, 6)]
, iverbHit = "bash"
, iweight = 2000
, iaspects = [ AddArmorMelee 40
, AddHurtMelee (-30)
, Timeout $ d 3 + 3 - dl 3 |*| 2 ]
, ieffects = [ Hurt (1 * d 1) -- to display xdy everywhre in Hurt
, Recharging (PushActor (ThrowMod 200 50)) ]
, ifeature = [ toVelocity 40 -- unwieldy to throw
, Durable, EqpSlot EqpSlotAddArmorMelee "", Identified ]
, idesc = "Heavy and unwieldy. Absorbs a percentage of melee damage, both dealt and sustained. Too small to intercept projectiles with."
, ikit = []
}
shield = buckler
{ iname = "shield"
, irarity = [(8, 3)]
, iflavour = zipPlain [Green]
, iweight = 3000
, iaspects = [ AddArmorMelee 80
, AddHurtMelee (-70)
, Timeout $ d 6 + 6 - dl 6 |*| 2 ]
, ieffects = [Hurt (1 * d 1), Recharging (PushActor (ThrowMod 400 50))]
, ifeature = [ toVelocity 30 -- unwieldy to throw
, Durable, EqpSlot EqpSlotAddArmorMelee "", Identified ]
, idesc = "Large and unwieldy. Absorbs a percentage of melee damage, both dealt and sustained. Too heavy to intercept projectiles with."
}
-- * Weapons
dagger = ItemKind
{ isymbol = symbolEdged
, iname = "dagger"
, ifreq = [("useful", 100), ("starting weapon", 100)]
, iflavour = zipPlain [BrCyan]
, icount = 1
, irarity = [(1, 20)]
, iverbHit = "stab"
, iweight = 1000
, iaspects = [ AddHurtMelee $ d 3 + dl 3 |*| 3
, AddArmorMelee $ d 2 |*| 5
, AddHurtRanged (-60) ] -- as powerful as a dart
, ieffects = [Hurt (6 * d 1)]
, ifeature = [ toVelocity 40 -- ensuring it hits with the tip costs speed
, Durable, EqpSlot EqpSlotWeapon "", Identified ]
, idesc = "A short dagger for thrusting and parrying blows. Does not penetrate deeply, but is hard to block. Especially useful in conjunction with a larger weapon."
, ikit = []
}
daggerDropBestWeapon = dagger
{ iname = "Double Dagger"
, ifreq = [("treasure", 20)]
, irarity = [(1, 2), (10, 4)]
-- The timeout has to be small, so that the player can count on the effect
-- occuring consistently in any longer fight. Otherwise, the effect will be
-- absent in some important fights, leading to the feeling of bad luck,
-- but will manifest sometimes in fights where it doesn't matter,
-- leading to the feeling of wasted power.
-- If the effect is very powerful and so the timeout has to be significant,
-- let's make it really large, for the effect to occur only once in a fight:
-- as soon as the item is equipped, or just on the first strike.
, iaspects = [Unique, Timeout $ d 3 + 4 - dl 3 |*| 2]
, ieffects = ieffects dagger
++ [Recharging DropBestWeapon, Recharging $ RefillCalm (-3)]
, idesc = "A double dagger that a focused fencer can use to catch and twist an opponent's blade occasionally."
}
hammer = ItemKind
{ isymbol = symbolHafted
, iname = "war hammer"
, ifreq = [("useful", 100), ("starting weapon", 100)]
, iflavour = zipPlain [BrMagenta]
, icount = 1
, irarity = [(5, 15)]
, iverbHit = "club"
, iweight = 1500
, iaspects = [ AddHurtMelee $ d 2 + dl 2 |*| 3
, AddHurtRanged (-80) ] -- as powerful as a dart
, ieffects = [Hurt (8 * d 1)]
, ifeature = [ toVelocity 20 -- ensuring it hits with the sharp tip costs
, Durable, EqpSlot EqpSlotWeapon "", Identified ]
, idesc = "It may not cause grave wounds, but neither does it glance off nor ricochet. Great sidearm for opportunistic blows against armored foes."
, ikit = []
}
hammerParalyze = hammer
{ iname = "Concussion Hammer"
, ifreq = [("treasure", 20)]
, irarity = [(5, 2), (10, 4)]
, iaspects = [Unique, Timeout $ d 2 + 3 - dl 2 |*| 2]
, ieffects = ieffects hammer ++ [Recharging $ Paralyze 5]
}
hammerSpark = hammer
{ iname = "Grand Smithhammer"
, ifreq = [("treasure", 20)]
, irarity = [(5, 2), (10, 4)]
, iaspects = [Unique, Timeout $ d 4 + 4 - dl 4 |*| 2]
, ieffects = ieffects hammer ++ [Recharging $ Explode "spark"]
}
sword = ItemKind
{ isymbol = symbolEdged
, iname = "sword"
, ifreq = [("useful", 100), ("starting weapon", 100)]
, iflavour = zipPlain [BrBlue]
, icount = 1
, irarity = [(4, 1), (5, 15)]
, iverbHit = "slash"
, iweight = 2000
, iaspects = []
, ieffects = [Hurt (10 * d 1)]
, ifeature = [ toVelocity 5 -- ensuring it hits with the tip costs speed
, Durable, EqpSlot EqpSlotWeapon "", Identified ]
, idesc = "Difficult to master; deadly when used effectively. The steel is particularly hard and keen, but rusts quickly without regular maintenance."
, ikit = []
}
swordImpress = sword
{ iname = "Master's Sword"
, ifreq = [("treasure", 20)]
, irarity = [(5, 1), (10, 4)]
, iaspects = [Unique, Timeout $ d 4 + 5 - dl 4 |*| 2]
, ieffects = ieffects sword ++ [Recharging Impress]
, idesc = "A particularly well-balance blade, lending itself to impressive shows of fencing skill."
}
swordNullify = sword
{ iname = "Gutting Sword"
, ifreq = [("treasure", 20)]
, irarity = [(5, 1), (10, 4)]
, iaspects = [Unique, Timeout $ d 4 + 5 - dl 4 |*| 2]
, ieffects = ieffects sword
++ [ Recharging $ DropItem COrgan "temporary conditions" True
, Recharging $ RefillHP (-2) ]
, idesc = "Cold, thin blade that pierces deeply and sends its victim into abrupt, sobering shock."
}
halberd = ItemKind
{ isymbol = symbolPolearm
, iname = "war scythe"
, ifreq = [("useful", 100), ("starting weapon", 1)]
, iflavour = zipPlain [BrYellow]
, icount = 1
, irarity = [(7, 1), (10, 10)]
, iverbHit = "impale"
, iweight = 3000
, iaspects = [AddArmorMelee $ 1 + dl 3 |*| 5]
, ieffects = [Hurt (12 * d 1)]
, ifeature = [ toVelocity 5 -- not balanced
, Durable, EqpSlot EqpSlotWeapon "", Identified ]
, idesc = "An improvised but deadly weapon made of a blade from a scythe attached to a long pole."
, ikit = []
}
halberdPushActor = halberd
{ iname = "Swiss Halberd"
, ifreq = [("treasure", 20)]
, irarity = [(7, 1), (10, 4)]
, iaspects = [Unique, Timeout $ d 5 + 5 - dl 5 |*| 2]
, ieffects = ieffects halberd ++ [Recharging (PushActor (ThrowMod 400 25))]
, idesc = "A versatile polearm, with great reach and leverage. Foes are held at a distance."
}
-- * Wands
wand = ItemKind
{ isymbol = symbolWand
, iname = "wand"
, ifreq = [("useful", 100)]
, iflavour = zipFancy brightCol
, icount = 1
, irarity = [] -- TODO: add charges, etc.
, iverbHit = "club"
, iweight = 300
, iaspects = [AddLight 1, AddSpeed (-1)] -- pulsing with power, distracts
, ieffects = []
, ifeature = [ toVelocity 125 -- magic
, Applicable, Durable ]
, idesc = "Buzzing with dazzling light that shines even through appendages that handle it." -- TODO: add math flavour
, ikit = []
}
wand1 = wand
{ ieffects = [] -- TODO: emit a cone of sound shrapnel that makes enemy cover his ears and so drop '|' and '{'
}
wand2 = wand
{ ieffects = []
}
-- * Treasure
gem = ItemKind
{ isymbol = symbolGem
, iname = "gem"
, ifreq = [("treasure", 100), ("gem", 100)]
, iflavour = zipPlain $ delete BrYellow brightCol -- natural, so not fancy
, icount = 1
, irarity = []
, iverbHit = "tap"
, iweight = 50
, iaspects = [AddLight 1, AddSpeed (-1)]
-- reflects strongly, distracts; so it glows in the dark,
-- is visible on dark floor, but not too tempting to wear
, ieffects = []
, ifeature = [Precious]
, idesc = "Useless, and still worth around 100 gold each. Would gems of thought and pearls of artful design be valued that much in our age of Science and Progress!"
, ikit = []
}
gem1 = gem
{ irarity = [(2, 0), (10, 12)]
}
gem2 = gem
{ irarity = [(4, 0), (10, 14)]
}
gem3 = gem
{ irarity = [(6, 0), (10, 16)]
}
gem4 = gem
{ iname = "elixir"
, iflavour = zipPlain [BrYellow]
, irarity = [(1, 40), (10, 40)]
, iaspects = []
, ieffects = [NoEffect "of youth", OverfillCalm 5, OverfillHP 15]
, ifeature = [Identified, Applicable, Precious] -- TODO: only heal humans
, idesc = "A crystal vial of amber liquid, supposedly granting eternal youth and fetching 100 gold per piece. The main effect seems to be mild euphoria, but it admittedly heals minor ailments rather well."
}
currency = ItemKind
{ isymbol = symbolGold
, iname = "gold piece"
, ifreq = [("treasure", 100), ("currency", 100)]
, iflavour = zipPlain [BrYellow]
, icount = 10 + d 20 + dl 20
, irarity = [(1, 25), (10, 10)]
, iverbHit = "tap"
, iweight = 31
, iaspects = []
, ieffects = []
, ifeature = [Identified, Precious]
, idesc = "Reliably valuable in every civilized plane of existence."
, ikit = []
}
|
Concomitant/LambdaHack
|
GameDefinition/Content/ItemKind.hs
|
Haskell
|
bsd-3-clause
| 38,245
|
-- | Fay test module: round-trips 'Nullable' and 'Defined' values through
-- the JavaScript FFI and logs the marshalled representation via
-- @console.log@.  The expected output is checked by the test harness, so
-- none of the runtime strings below may change.
module Nullable where

import FFI

-- | A record with one nullable slot; marshalled to/from a plain JS object.
data R = R (Nullable Double)

main :: Fay ()
main = do
  printD $ Nullable (1 :: Double)
  printNS $ Nullable "Hello, World!"
  printSS $ Defined ["Hello,","World!"]
  printD $ (Null :: Nullable Double)
  print' $ R (Nullable 1)
  print' $ R Null
  print' $ r1
  print' $ r2
  print' $ parseInt "3"
  print' $ parseInt "x"
  return ()

printD :: Nullable Double -> Fay ()
printD = ffi "console.log(%1)"

printNS :: Nullable String -> Fay ()
printNS = ffi "console.log(%1)"

-- NOTE(review): 'printS' is not called from 'main'; it may exist only to
-- exercise compilation of @Defined String@.
printS :: Defined String -> Fay ()
printS = ffi "console.log(%1)"

printSS :: Defined [String] -> Fay ()
printSS = ffi "console.log(%1)"

print' :: Automatic f -> Fay ()
print' = ffi "console.log(%1)"

-- Records constructed directly in JS, exercising decoding of slot values.
r1 :: R
r1 = ffi "{ instance: 'R', slot1 : 1 }"

r2 :: R
r2 = ffi "{ instance : 'R', slot1 : null }"

-- | Wraps JS @parseInt@; yields 'Null' when parsing fails (NaN).
parseInt :: String -> Nullable Int
parseInt = ffi "(function () { var n = global.parseInt(%1, 10); if (isNaN(n)) return null; return n; })()"
|
beni55/fay
|
tests/Nullable.hs
|
Haskell
|
bsd-3-clause
| 965
|
{-# LANGUAGE Haskell98, MultiParamTypeClasses, FunctionalDependencies, FlexibleInstances #-}
{-# LINE 1 "Control/Monad/List.hs" #-}
-----------------------------------------------------------------------------
-- |
-- Module : Control.Monad.List
-- Copyright : (c) Andy Gill 2001,
-- (c) Oregon Graduate Institute of Science and Technology, 2001
-- License : BSD-style (see the file LICENSE)
--
-- Maintainer : libraries@haskell.org
-- Stability : experimental
-- Portability : portable
--
-- The List monad.
--
-----------------------------------------------------------------------------
module Control.Monad.List (
ListT(..),
mapListT,
module Control.Monad,
module Control.Monad.Trans,
) where
import Control.Monad
import Control.Monad.Trans
import Control.Monad.Trans.List
|
phischu/fragnix
|
tests/packages/scotty/Control.Monad.List.hs
|
Haskell
|
bsd-3-clause
| 832
|
{-# Language PatternGuards #-}
module Blub
( blub
, foo
, bar
) where
import Control.Applicative
(r, t, z)
import Control.Foo (foo)
import Ugah.Blub
( a
, b
, c
)
f :: Int -> Int
f = (+ 3)
r :: Int -> Int
r =
|
jystic/hsimport
|
tests/goldenFiles/SymbolTest32.hs
|
Haskell
|
bsd-3-clause
| 237
|
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- | Extra functions for optparse-applicative.
module Options.Applicative.Builder.Extra
(boolFlags
,boolFlagsNoDefault
,firstBoolFlagsNoDefault
,firstBoolFlagsTrue
,firstBoolFlagsFalse
,enableDisableFlags
,enableDisableFlagsNoDefault
,extraHelpOption
,execExtraHelp
,textOption
,textArgument
,optionalFirst
,optionalFirstTrue
,optionalFirstFalse
,absFileOption
,relFileOption
,absDirOption
,relDirOption
,eitherReader'
,fileCompleter
,fileExtCompleter
,dirCompleter
,PathCompleterOpts(..)
,defaultPathCompleterOpts
,pathCompleterWith
,unescapeBashArg
) where
import Data.List (isPrefixOf)
import Data.Maybe
import Data.Monoid hiding ((<>))
import qualified Data.Text as T
import Options.Applicative
import Options.Applicative.Types (readerAsk)
import Path hiding ((</>))
import Stack.Prelude
import System.Directory (getCurrentDirectory, getDirectoryContents, doesDirectoryExist)
import System.Environment (withArgs)
import System.FilePath (takeBaseName, (</>), splitFileName, isRelative, takeExtension)
-- | Enable/disable flags for a 'Bool', falling back to the given default
-- when neither @--name@ nor @--no-name@ is present.  The help text is
-- suffixed with a description of that default.
boolFlags :: Bool -- ^ Default value
          -> String -- ^ Flag name
          -> String -- ^ Help suffix
          -> Mod FlagFields Bool
          -> Parser Bool
boolFlags defaultValue name helpSuffix =
  enableDisableFlags defaultValue True False name fullHelp
  where
    defaultDesc = if defaultValue then "enabled" else "disabled"
    fullHelp = helpSuffix ++ " (default: " ++ defaultDesc ++ ")"
-- | Enable/disable flags for a 'Bool', without a default case (so the
-- result can be chained with '<|>').
boolFlagsNoDefault :: String -- ^ Flag name
                   -> String -- ^ Help suffix
                   -> Mod FlagFields Bool
                   -> Parser Bool
boolFlagsNoDefault name helpSuffix mods =
  enableDisableFlagsNoDefault True False name helpSuffix mods
-- | Flag with no default of True or False; the result is wrapped in
-- 'First' so multiple occurrences combine monoidally (first one wins).
firstBoolFlagsNoDefault :: String -> String -> Mod FlagFields (Maybe Bool) -> Parser (First Bool)
firstBoolFlagsNoDefault name helpSuffix mod' =
  fmap First
       (enableDisableFlags Nothing (Just True) (Just False) name helpSuffix mod')
-- | Flag with a Semigroup instance and a default of True.
firstBoolFlagsTrue :: String -> String -> Mod FlagFields FirstTrue -> Parser FirstTrue
firstBoolFlagsTrue name helpSuffix =
  enableDisableFlags mempty onValue offValue name (helpSuffix ++ " (default: enabled)")
  where
    onValue = FirstTrue (Just True)
    offValue = FirstTrue (Just False)
-- | Flag with a Semigroup instance and a default of False.
firstBoolFlagsFalse :: String -> String -> Mod FlagFields FirstFalse -> Parser FirstFalse
firstBoolFlagsFalse name helpSuffix =
  enableDisableFlags mempty onValue offValue name (helpSuffix ++ " (default: disabled)")
  where
    onValue = FirstFalse (Just True)
    offValue = FirstFalse (Just False)
-- | Enable/disable flags for any type: try the explicit
-- @--name@\/@--no-name@ parser first, and fall back to the default value
-- when neither flag appears.
enableDisableFlags :: a -- ^ Default value
                   -> a -- ^ Enabled value
                   -> a -- ^ Disabled value
                   -> String -- ^ Name
                   -> String -- ^ Help suffix
                   -> Mod FlagFields a
                   -> Parser a
enableDisableFlags defaultValue enabledValue disabledValue name helpSuffix mods =
  explicitFlags <|> pure defaultValue
  where
    explicitFlags =
      enableDisableFlagsNoDefault enabledValue disabledValue name helpSuffix mods
-- | Enable/disable flags for any type, without a default (to allow chaining with '<|>')
--
-- Accepts @--name@, @--no-name@ (both hidden from --help) and a visible
-- @--[no-]name@ pseudo-flag whose purpose is to document both forms in
-- one help line.  'some' collects repeated occurrences; the last one
-- given on the command line wins.
enableDisableFlagsNoDefault :: a -- ^ Enabled value
                            -> a -- ^ Disabled value
                            -> String -- ^ Name
                            -> String -- ^ Help suffix
                            -> Mod FlagFields a
                            -> Parser a
enableDisableFlagsNoDefault enabledValue disabledValue name helpSuffix mods =
  last <$> some
    ((flag'
        enabledValue
        (hidden <>
         internal <>
         long name <>
         help helpSuffix <>
         mods) <|>
      flag'
        disabledValue
        (hidden <>
         internal <>
         long ("no-" ++ name) <>
         help helpSuffix <>
         mods)) <|>
     flag'
       disabledValue
       -- NOTE(review): if a user literally types "--[no-]name" this
       -- selects the disabled value -- presumably it exists only for the
       -- help output; confirm before changing.
       (long ("[no-]" ++ name) <>
        help ("Enable/disable " ++ helpSuffix) <>
        mods))
  where
    -- 'some' guarantees a non-empty list, so the [] branch is defensive.
    last xs =
      case reverse xs of
        [] -> impureThrow $ stringException "enableDisableFlagsNoDefault.last"
        x:_ -> x
-- | Show an extra help option (e.g. @--docker-help@ shows help for all @--docker*@ args).
--
-- To actually have that help appear, use 'execExtraHelp' before executing the main parser.
--
-- Two 'infoOption's (each of type @Parser (a -> a)@) are composed with
-- '<*>': a hidden one bound to the real help flag, and a visible
-- pseudo-option bound to the glob name that only advertises it in --help.
extraHelpOption :: Bool -- ^ Hide from the brief description?
                -> String -- ^ Program name, e.g. @"stack"@
                -> String -- ^ Option glob expression, e.g. @"docker*"@
                -> String -- ^ Help option name, e.g. @"docker-help"@
                -> Parser (a -> a)
extraHelpOption hide progName fakeName helpName =
  infoOption (optDesc' ++ ".") (long helpName <> hidden <> internal) <*>
  infoOption (optDesc' ++ ".") (long fakeName <>
                                help optDesc' <>
                                (if hide then hidden <> internal else idm))
  where optDesc' = concat ["Run '", takeBaseName progName, " --", helpName, "' for details"]
-- | Display extra help if extra help option passed in arguments.
--
-- Since optparse-applicative doesn't allow an arbitrary IO action for an 'abortOption', this
-- was the best way I found that doesn't require manually formatting the help.
execExtraHelp :: [String] -- ^ Command line arguments
              -> String -- ^ Extra help option name, e.g. @"docker-help"@
              -> Parser a -- ^ Option parser for the relevant command
              -> String -- ^ Option description
              -> IO ()
execExtraHelp args helpOpt parser pd =
  -- Only fires when the help flag is the *sole* argument; re-runs the
  -- parser under a synthetic ["--help"] so optparse renders the help
  -- text itself and aborts.
  when (args == ["--" ++ helpOpt]) $
    withArgs ["--help"] $ do
      _ <- execParser (info (hiddenHelper <*>
                             ((,) <$>
                              parser <*>
                              some (strArgument (metavar "OTHER ARGUMENTS") :: Parser String)))
                      (fullDesc <> progDesc pd))
      return ()
  where hiddenHelper = abortOption ShowHelpText (long "help" <> hidden <> internal)
-- | 'option', specialized to 'Text'.
textOption :: Mod OptionFields Text -> Parser Text
textOption = option (fmap T.pack readerAsk)

-- | 'argument', specialized to 'Text'.
textArgument :: Mod ArgumentFields Text -> Parser Text
textArgument = argument (fmap T.pack readerAsk)

-- | Like 'optional', but returning a 'First'.
optionalFirst :: Alternative f => f a -> f (First a)
optionalFirst fa = First <$> optional fa

-- | Like 'optional', but returning a 'FirstTrue'.
optionalFirstTrue :: Alternative f => f Bool -> f FirstTrue
optionalFirstTrue fb = FirstTrue <$> optional fb

-- | Like 'optional', but returning a 'FirstFalse'.
optionalFirstFalse :: Alternative f => f Bool -> f FirstFalse
optionalFirstFalse fb = FirstFalse <$> optional fb
-- | 'option' for an absolute file path, with completion restricted to
-- absolute suggestions.
absFileOption :: Mod OptionFields (Path Abs File) -> Parser (Path Abs File)
absFileOption mods = option (eitherReader' parseAbsFile) $
  completer (pathCompleterWith defaultPathCompleterOpts { pcoRelative = False }) <> mods

-- | 'option' for a relative file path, with completion restricted to
-- relative suggestions.
relFileOption :: Mod OptionFields (Path Rel File) -> Parser (Path Rel File)
relFileOption mods = option (eitherReader' parseRelFile) $
  completer (pathCompleterWith defaultPathCompleterOpts { pcoAbsolute = False }) <> mods

-- | 'option' for an absolute directory path; completion offers only
-- absolute directories (files are filtered out).
absDirOption :: Mod OptionFields (Path Abs Dir) -> Parser (Path Abs Dir)
absDirOption mods = option (eitherReader' parseAbsDir) $
  completer (pathCompleterWith defaultPathCompleterOpts { pcoRelative = False, pcoFileFilter = const False }) <> mods

-- | 'option' for a relative directory path; completion offers only
-- relative directories (files are filtered out).
relDirOption :: Mod OptionFields (Path Rel Dir) -> Parser (Path Rel Dir)
relDirOption mods = option (eitherReader' parseRelDir) $
  completer (pathCompleterWith defaultPathCompleterOpts { pcoAbsolute = False, pcoFileFilter = const False }) <> mods
-- | Like 'eitherReader', but accepting any @'Show' e@ on the 'Left':
-- the error value is rendered with 'show' before being handed to
-- optparse-applicative.
eitherReader' :: Show e => (String -> Either e a) -> ReadM a
eitherReader' f = eitherReader (\s -> either (Left . show) Right (f s))
-- | Configuration for 'pathCompleterWith'.
data PathCompleterOpts = PathCompleterOpts
  { -- | Offer absolute-path completions?
    pcoAbsolute :: Bool
    -- | Offer relative-path completions?
  , pcoRelative :: Bool
    -- | Root for resolving relative input; 'Nothing' means current directory.
  , pcoRootDir :: Maybe FilePath
    -- | Keep this file in the completion results?
  , pcoFileFilter :: FilePath -> Bool
    -- | Keep this directory in the completion results?
  , pcoDirFilter :: FilePath -> Bool
  }

-- | Permissive defaults: absolute and relative completion from the
-- current directory, with no files or directories filtered out.
defaultPathCompleterOpts :: PathCompleterOpts
defaultPathCompleterOpts = PathCompleterOpts
  { pcoAbsolute = True
  , pcoRelative = True
  , pcoRootDir = Nothing
  , pcoFileFilter = const True
  , pcoDirFilter = const True
  }
-- | Complete any file or directory path.
fileCompleter :: Completer
fileCompleter = pathCompleterWith defaultPathCompleterOpts

-- | Complete only files whose extension is in the given list
-- (directories still complete, so one can descend into them).
fileExtCompleter :: [String] -> Completer
fileExtCompleter exts = pathCompleterWith defaultPathCompleterOpts { pcoFileFilter = (`elem` exts) . takeExtension }

-- | Complete only directories (all files are filtered out).
dirCompleter :: Completer
dirCompleter = pathCompleterWith defaultPathCompleterOpts { pcoFileFilter = const False }
-- | Build a bash 'Completer' for file-system paths, honouring the given
-- 'PathCompleterOpts' (absolute/relative policy and file/dir filters).
pathCompleterWith :: PathCompleterOpts -> Completer
pathCompleterWith PathCompleterOpts {..} = mkCompleter $ \inputRaw -> do
  -- Unescape input, to handle single and double quotes. Note that the
  -- results do not need to be re-escaped, due to some fiddly bash
  -- magic.
  let input = unescapeBashArg inputRaw
  let (inputSearchDir0, searchPrefix) = splitFileName input
      inputSearchDir = if inputSearchDir0 == "./" then "" else inputSearchDir0
  -- Decide which directory to enumerate (if any), based on whether the
  -- typed prefix is relative and which completion modes are enabled.
  msearchDir <-
    case (isRelative inputSearchDir, pcoAbsolute, pcoRelative) of
      (True, _, True) -> do
        rootDir <- maybe getCurrentDirectory return pcoRootDir
        return $ Just (rootDir </> inputSearchDir)
      (False, True, _) -> return $ Just inputSearchDir
      _ -> return Nothing
  case msearchDir of
    Nothing
      | input == "" && pcoAbsolute -> return ["/"]
      | otherwise -> return []
    Just searchDir -> do
      -- Unreadable directories yield no completions rather than an error.
      entries <- getDirectoryContents searchDir `catch` \(_ :: IOException) -> return []
      fmap catMaybes $ forM entries $ \entry ->
        -- Skip . and .. unless user is typing . or ..
        if entry `elem` ["..", "."] && searchPrefix `notElem` ["..", "."] then return Nothing else
          if searchPrefix `isPrefixOf` entry
            then do
              let path = searchDir </> entry
              case (pcoFileFilter path, pcoDirFilter path) of
                (True, True) -> return $ Just (inputSearchDir </> entry)
                (fileAllowed, dirAllowed) -> do
                  -- Only stat the entry when the two filters disagree.
                  isDir <- doesDirectoryExist path
                  if (if isDir then dirAllowed else fileAllowed)
                    then return $ Just (inputSearchDir </> entry)
                    else return Nothing
            else return Nothing
-- | Undo bash quoting/escaping of a single completion argument.
--
-- * Single-quoted input: drop the opening quote; no escapes inside.
-- * Double-quoted input: a backslash escapes only the characters bash
--   treats specially inside double quotes (@$ ` \" \\@ and newline);
--   before any other character the backslash is kept literally.
-- * Unquoted input: a backslash escapes any following character.
unescapeBashArg :: String -> String
unescapeBashArg ('\'' : rest) = rest
unescapeBashArg ('\"' : rest) = go rest
  where
    pattern = "$`\"\\\n" :: String
    go [] = []
    go ('\\' : x : xs)
      -- BUG FIX: recurse on the tail (was @x : xs@, which returned the
      -- remainder unprocessed after the first escaped special character).
      | x `elem` pattern = x : go xs
      | otherwise = '\\' : x : go xs
    go (x : xs) = x : go xs
unescapeBashArg input = go input
  where
    go [] = []
    go ('\\' : x : xs) = x : go xs
    go (x : xs) = x : go xs
|
juhp/stack
|
src/Options/Applicative/Builder/Extra.hs
|
Haskell
|
bsd-3-clause
| 11,661
|
module Hint.Type(module Hint.Type, module Idea, module HSE.All, module Refact) where
import Data.Monoid
import HSE.All
import Idea
import Prelude
import Refact
-- | Hint over a single declaration, given its module and scope information.
type DeclHint = Scope -> Module_ -> Decl_ -> [Idea]
-- | Hint over a whole module.
type ModuHint = Scope -> Module_ -> [Idea]
-- | Hint across several modules at once.
type CrossHint = [(Scope, Module_)] -> [Idea]

-- | Functions to generate hints, combined using the 'Monoid' instance.
data Hint = Hint
    {hintModules :: [(Scope, Module SrcSpanInfo)] -> [Idea] -- ^ Given a list of modules (and their scope information) generate some 'Idea's.
    ,hintModule :: Scope -> Module SrcSpanInfo -> [Idea] -- ^ Given a single module and its scope information generate some 'Idea's.
    ,hintDecl :: Scope -> Module SrcSpanInfo -> Decl SrcSpanInfo -> [Idea]
        -- ^ Given a declaration (with a module and scope) generate some 'Idea's.
        --   This function will be partially applied with one module/scope, then used on multiple 'Decl' values.
    ,hintComment :: Comment -> [Idea] -- ^ Given a comment generate some 'Idea's.
    }
-- | 'Hint's combine pointwise: each component runs both hints on the same
-- input and concatenates the resulting 'Idea' lists.
-- NOTE(review): this is a pre-Semigroup 'Monoid' instance ('mappend'
-- defined directly); GHC >= 8.4 also requires a 'Semigroup' instance --
-- confirm the supported compiler range before modernising.
instance Monoid Hint where
    mempty = Hint (const []) (\_ _ -> []) (\_ _ _ -> []) (const [])
    mappend (Hint x1 x2 x3 x4) (Hint y1 y2 y3 y4) =
        Hint (\a -> x1 a ++ y1 a) (\a b -> x2 a b ++ y2 a b) (\a b c -> x3 a b c ++ y3 a b c) (\a -> x4 a ++ y4 a)
|
mpickering/hlint
|
src/Hint/Type.hs
|
Haskell
|
bsd-3-clause
| 1,300
|
{-# OPTIONS_GHC -XGADTs -XRankNTypes -O1 #-}
-- #2018
-- | GHC testsuite regression: matching on the GADT constructor 'MkA'
-- refines the method's result type (the equation @f MkA = Just ()@ is
-- well-typed because 'MkA' fixes @a ~ ()@).  Must compile.
module Bug1 where

data A a where
  MkA :: A ()

class C w where
  f :: forall a . w a -> Maybe a

instance C A where
  f MkA = Just ()
|
sdiehl/ghc
|
testsuite/tests/typecheck/should_compile/tc241.hs
|
Haskell
|
bsd-3-clause
| 198
|
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
{-| Unittests for the 'Ganeti.Common' module.
-}
{-
Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-}
module Test.Ganeti.Common
( testCommon
, checkOpt
, passFailOpt
, checkEarlyExit
) where
import Test.QuickCheck hiding (Result)
import Test.HUnit
import qualified System.Console.GetOpt as GetOpt
import System.Exit
import Test.Ganeti.TestHelper
import Test.Ganeti.TestCommon
import Ganeti.BasicTypes
import Ganeti.Common
import Ganeti.HTools.Program.Main (personalities)
{-# ANN module "HLint: ignore Use camelCase" #-}
-- | Helper to check for correct parsing of an option.
--
-- Renders the value via @repr@ into a @--long=value@ command line, parses
-- it with 'parseOptsInner', and compares the value extracted from the
-- parsed options against the expected one.
checkOpt :: (StandardOptions b) =>
            (a -> Maybe String) -- ^ Converts the value into a cmdline form
         -> b                   -- ^ The default options
         -> (String -> c)       -- ^ Fail test function
         -> (String -> d -> d -> c) -- ^ Check for equality function
         -> (a -> d)            -- ^ Transforms the value to a compare val
         -> (a, GenericOptType b, b -> d) -- ^ Triple of value, the
                                          -- option, function to
                                          -- extract the set value
                                          -- from the options
         -> c
checkOpt repr defaults failfn eqcheck valfn
         (val, opt@(GetOpt.Option _ longs _ _, _), fn) =
  case longs of
    [] -> failfn "no long options?"
    cmdarg:_ ->
      case parseOptsInner defaults
             ["--" ++ cmdarg ++ maybe "" ("=" ++) (repr val)]
             "prog" [opt] [] of
        -- FIX: close the quote around the option name (was "'...: ").
        Left e -> failfn $ "Failed to parse option '" ++ cmdarg ++ "': " ++
                  show e
        Right (options, _) -> eqcheck ("Wrong value in option " ++
                                       cmdarg ++ "?") (valfn val) (fn options)
-- | Helper to check for correct and incorrect parsing of an option.
--
-- Builds @--opt=bad@ and @--opt=good@ command lines and checks that the
-- first is rejected while the second is accepted, reporting precisely
-- which of the two expectations failed.
passFailOpt :: (StandardOptions b) =>
               b -- ^ The default options
            -> (String -> c) -- ^ Fail test function
            -> c -- ^ Pass function
            -> (GenericOptType b, String, String)
            -- ^ The list of enabled options, fail value and pass value
            -> c
passFailOpt defaults failfn passfn
            (opt@(GetOpt.Option _ longs _ _, _), bad, good) =
  let first_opt = case longs of
                    [] -> error "no long options?"
                    x:_ -> x
      prefix = "--" ++ first_opt ++ "="
      good_cmd = prefix ++ good
      bad_cmd = prefix ++ bad in
  case (parseOptsInner defaults [bad_cmd] "prog" [opt] [],
        parseOptsInner defaults [good_cmd] "prog" [opt] []) of
    -- Expected outcome: bad rejected, good accepted.
    (Left _, Right _) -> passfn
    (Right _, Right _) -> failfn $ "Command line '" ++ bad_cmd ++
                          "' succeeded when it shouldn't"
    (Left _, Left _) -> failfn $ "Command line '" ++ good_cmd ++
                        "' failed when it shouldn't"
    (Right _, Left _) ->
      failfn $ "Command line '" ++ bad_cmd ++
               "' succeeded when it shouldn't, while command line '" ++
               good_cmd ++ "' failed when it shouldn't"
-- | Helper to test that a given option is accepted OK with quick exit.
--
-- Runs the parser once for each standard early-exit flag and asserts
-- that parsing stops early ('Left') with 'ExitSuccess'.
checkEarlyExit :: (StandardOptions a) =>
                  a -> String -> [GenericOptType a] -> [ArgCompletion]
               -> Assertion
checkEarlyExit defaults name options arguments =
  mapM_ (\param ->
           case parseOptsInner defaults [param] name options arguments of
             Left (code, _) ->
               assertEqual ("Program " ++ name ++
                            " returns invalid code " ++ show code ++
                            " for option " ++ param) ExitSuccess code
             _ -> assertFailure $ "Program " ++ name ++
                  " doesn't consider option " ++
                  param ++ " as early exit one"
        ) ["-h", "--help", "-V", "--version"]
-- | Test parseYesNo.
--
-- NOTE(review): when @testval@ is True the generated @actual_val@ is
-- unused -- only the @Nothing@/default path is exercised on that branch.
prop_parse_yes_no :: Bool -> Bool -> String -> Property
prop_parse_yes_no def testval val =
  forAll (elements [val, "yes", "no"]) $ \actual_val ->
    if testval
      then parseYesNo def Nothing ==? Ok def
      else let result = parseYesNo def (Just actual_val)
           in if actual_val `elem` ["yes", "no"]
                then result ==? Ok (actual_val == "yes")
                else property $ isBad result
-- | Check that formatCmdUsage works similar to Python _FormatUsage.
--
-- Golden test: the wrapped description lines below must match the output
-- of 'formatCommands' for the HTools personalities byte-for-byte.
case_formatCommands :: Assertion
case_formatCommands =
  assertEqual "proper wrap for HTools Main"
    resCmdTest (formatCommands personalities)
  where resCmdTest :: [String]
        resCmdTest =
          [ " hail - Ganeti IAllocator plugin that implements the instance\
            \ placement and"
          , " movement using the same algorithm as hbal(1)"
          , " harep - auto-repair tool that detects certain kind of problems\
            \ with"
          , " instances and applies the allowed set of solutions"
          , " hbal - cluster balancer that looks at the current state of\
            \ the cluster and"
          , " computes a series of steps designed to bring the\
            \ cluster into a"
          , " better state"
          , " hcheck - cluster checker; prints information about cluster's\
            \ health and"
          , " checks whether a rebalance done using hbal would help"
          , " hinfo - cluster information printer; it prints information\
            \ about the current"
          , " cluster state and its residing nodes/instances"
          , " hroller - cluster rolling maintenance helper; it helps\
            \ scheduling node reboots"
          , " in a manner that doesn't conflict with the instances'\
            \ topology"
          , " hscan - tool for scanning clusters via RAPI and saving their\
            \ data in the"
          , " input format used by hbal(1) and hspace(1)"
          , " hspace - computes how many additional instances can be fit on a\
            \ cluster,"
          , " while maintaining N+1 status."
          , " hsqueeze - cluster dynamic power management; it powers up and\
            \ down nodes to"
          , " keep the amount of free online resources in a given\
            \ range"
          ]
-- Register the tests above via Template Haskell; the quoted names must be
-- top-level definitions in this module.
testSuite "Common"
  [ 'prop_parse_yes_no
  , 'case_formatCommands
  ]
|
grnet/snf-ganeti
|
test/hs/Test/Ganeti/Common.hs
|
Haskell
|
bsd-2-clause
| 7,765
|
{-# LANGUAGE RebindableSyntax, NPlusKPatterns #-}

-- GHC testsuite: with RebindableSyntax, do-notation, numeric literals and
-- n+k patterns desugar via the *locally defined* return/(>>=)/(>>)/fail/
-- fromInteger/fromRational/negate/(-) below, not the Prelude versions.
-- The local definitions are deliberately "wrong" (fromInteger multiplies
-- by five, etc.) so the program's traced output proves which definitions
-- GHC picked up.  Do not change any runtime string: the stdout is golden.
module Main where
{
-- import Prelude;
import qualified Prelude;
import Prelude(String,undefined,Maybe(..),IO,putStrLn,
               Integer,(++),Rational, (==), (>=) );

import Prelude(Monad(..),Applicative(..),Functor(..));
import Control.Monad(ap, liftM);

-- Log entry/exit around an IO action, making monadic plumbing visible.
debugFunc :: String -> IO a -> IO a;
debugFunc s ioa = (putStrLn ("++ " ++ s)) Prelude.>>
                  (ioa Prelude.>>= (\a ->
                   (putStrLn ("-- " ++ s)) Prelude.>> (Prelude.return a)
                  ));

-- A tracing wrapper around IO whose Monad methods log their own names.
newtype TM a = MkTM {unTM :: IO a};

instance (Functor TM) where
  {
  fmap = liftM;
  };

instance (Applicative TM) where
  {
  pure = return;
  (<*>) = ap;
  };

instance (Monad TM) where
  {
  return a = MkTM (debugFunc "return" (Prelude.return a));
  (>>=) ma amb = MkTM (debugFunc ">>=" ((Prelude.>>=) (unTM ma) (\a -> unTM (amb a))));
  (>>) ma mb = MkTM (debugFunc ">>" ((Prelude.>>) (unTM ma) (unTM mb)));
  fail s = MkTM (debugFunc "fail" (Prelude.return undefined));
  };

-- Plain (non-logging) injection into TM.
preturn a = MkTM (Prelude.return a);

-- Deliberately skewed literal desugarings; see module comment.
fromInteger :: Integer -> Integer;
fromInteger a = a Prelude.+ a Prelude.+ a Prelude.+ a Prelude.+ a; -- five times

fromRational :: Rational -> Rational;
fromRational a = a Prelude.+ a Prelude.+ a; -- three times

negate :: a -> a;
negate a = a; -- don't actually negate

(-) :: a -> a -> a;
(-) x y = y; -- changed function

-- This do-block desugars through TM's logging (>>), (>>=), fail, return.
test_do f g = do
  {
  f; -- >>
  Just a <- g; -- >>= (and fail if g returns Nothing)
  return a; -- return
  };

test_fromInteger = 27;
test_fromRational = 31.5;
test_negate a = - a;

-- Literal and n+k patterns also go through the rebindable definitions.
test_fromInteger_pattern a@1 = "1=" ++ (Prelude.show a);
test_fromInteger_pattern a@(-2) = "(-2)=" ++ (Prelude.show a);
test_fromInteger_pattern (a + 7) = "(a + 7)=" ++ Prelude.show a;

test_fromRational_pattern a@0.5 = "0.5=" ++ (Prelude.show a);
test_fromRational_pattern a@(-0.7) = "(-0.7)=" ++ (Prelude.show a);
test_fromRational_pattern a = "_=" ++ (Prelude.show a);

tmPutStrLn s = MkTM (putStrLn s);

-- Run one TM action between start/end markers, using Prelude's (>>).
doTest :: String -> TM a -> IO ();
doTest s ioa =
  (putStrLn ("start test " ++ s))
  Prelude.>>
  (unTM ioa)
  Prelude.>>
  (putStrLn ("end test " ++ s));

main :: IO ();
main =
  (doTest "test_do failure"
    (test_do (preturn ()) (preturn Nothing))
  )
  Prelude.>>
  (doTest "test_do success"
    (test_do (preturn ()) (preturn (Just ())))
  )
  Prelude.>>
  (doTest "test_fromInteger"
    (tmPutStrLn (Prelude.show test_fromInteger)) -- 27 * 5 = 135
  )
  Prelude.>>
  (doTest "test_fromRational"
    (tmPutStrLn (Prelude.show test_fromRational)) -- 31.5 * 3 = 189%2
  )
  Prelude.>>
  (doTest "test_negate"
    (tmPutStrLn (Prelude.show (test_negate 3))) -- 3 * 5 = 15, non-negate
  )
  Prelude.>>
  (doTest "test_fromInteger_pattern 1"
    (tmPutStrLn (test_fromInteger_pattern 1)) -- 1 * 5 = 5, matches "1"
  )
  Prelude.>>
  (doTest "test_fromInteger_pattern (-2)"
    (tmPutStrLn (test_fromInteger_pattern (-2))) -- "-2" = 2 * 5 = 10
  )
  Prelude.>>
  (doTest "test_fromInteger_pattern 9"
    (tmPutStrLn (test_fromInteger_pattern 9)) -- "9" = 45, 45 "-" "7" = "7" = 35
  )
  Prelude.>>
  (doTest "test_fromRational_pattern 0.5"
    (tmPutStrLn (test_fromRational_pattern 0.5)) -- "0.5" = 3%2
  )
  Prelude.>>
  (doTest "test_fromRational_pattern (-0.7)"
    (tmPutStrLn (test_fromRational_pattern (-0.7))) -- "-0.7" = "0.7" = 21%10
  )
  Prelude.>>
  (doTest "test_fromRational_pattern 1.7"
    (tmPutStrLn (test_fromRational_pattern 1.7)) -- "1.7" = 51%10
  );
}
|
ezyang/ghc
|
testsuite/tests/rebindable/rebindable2.hs
|
Haskell
|
bsd-3-clause
| 4,854
|
{-# LANGUAGE DataKinds, KindSignatures, TypeFamilies #-}
-- | GHC testsuite #9263: an empty instance of the 'PEq' class (defined in
-- "T9263b", not shown here) at a kind-level proxy of an uninhabited type.
-- The test is that this instance is accepted by the typechecker.
module T9263a where
import T9263b
import Data.Proxy

-- Uninhabited type used only at the kind level via 'KProxy'.
data Void

instance PEq ('KProxy :: KProxy Void)
|
urbanslug/ghc
|
testsuite/tests/polykinds/T9263a.hs
|
Haskell
|
bsd-3-clause
| 160
|
-- | Tasty test-suite for the bound-based LambdaPi type checker.
module Main where

import LambdaPi.Bound
import Test.Tasty
import Test.Tasty.HUnit

-- | Build a test case asserting that expression @e@ checks against type
-- @t@ via 'hasType'; @s@ is the failure message, @n@ the test name.
assertType :: String -> TestName -> (Expr Int, Expr Int) -> TestTree
assertType s n (e, t) = testCase n $ assertBool s (hasType e t)

-- | Typing of the constants True/False/Bool.
consts :: TestTree
consts = testGroup "Constant Tests"
  [ assertType "ETrue is wrong" "True" (ETrue, Bool)
  , assertType "EFalse is wrong" "False" (EFalse, Bool)
  , assertType "Bool is wrong" "Bool" (Bool, Star) ]

-- | The identity lambda checks against Bool -> Bool (as a Pi type).
boolId :: TestTree
boolId = assertType "Simple lambdas failed" "Bool identity"
  (lam 0 (Var 0), pit 0 Bool Bool)

-- | Applying the annotated identity to True yields a Bool.
app :: TestTree
app = assertType "Application fails" "Application"
  (App (Annot (lam 0 $ Var 0)
              (pit 0 Bool Bool))
       ETrue
  , Bool)

main :: IO ()
main = defaultMain . testGroup "bound Tests"
     $ [consts, boolId, app]
|
jozefg/cooked-pi
|
test/Bound.hs
|
Haskell
|
mit
| 838
|
{-# LANGUAGE RecordWildCards, NamedFieldPuns #-}
module Language.Plover.Simplify
(simplify, Expr(..))
where
import qualified Data.Map.Strict as M
import Control.Monad (foldM)
import Data.Maybe (mapMaybe)
-- TODO add rebuild to atom
-- | Symbolic expression over atoms @e@ and numeric literals @num@.
data Expr e num
  = Sum [(Expr e num)]
  | Mul [(Expr e num)]
  | Atom e
  | Prim num
  | Zero
  | One
  deriving (Show, Eq, Ord)

-- Monomial expression
-- | One monomial: a coefficient times a product of atoms with integer
-- exponents; 'Z' is the zero term (absorbing under multiplication).
data Term e n
  = Term n (M.Map e Integer)
  | Z
  deriving (Show, Eq)

-- | The unit monomial: coefficient 1, no atoms.
term1 :: Num n => Term atom n
term1 = Term 1 M.empty
-- Main simplification function
-- | Multiply a partially built monomial by an expression, distributing
-- over sums in the list monad: every element of the result is one fully
-- distributed monomial.  The zero term absorbs everything.
reduce :: (Ord e, Num num) => Term e num -> Expr e num
       -> [Term e num]
reduce Z _ = return Z
reduce term x = step x
  where
    -- Distribute
    step (Sum as) = concatMap (reduce term) as
    -- Sequence
    step (Mul as) = foldM reduce term as
    -- Increment
    step (Atom e) =
      let Term coefficient m = term in
      return $ Term coefficient (M.insertWith (+) e 1 m)
    -- Numeric simplify
    step (Prim n) =
      let Term coefficient m = term in
      return $ Term (n * coefficient) m
    step Zero = return $ Z
    step One = return $ term
-- | Rebuild a monomial as a left-associated product of @base ^ exponent@
-- factors.  The empty factor list yields the literal 1; a singleton yields
-- just its factor (no spurious leading 1, which matters for symbolic
-- 'Num' instances).
rebuildTerm :: Num expr => [(expr, Integer)] -> expr
rebuildTerm factors =
  case map (uncurry (^)) factors of
    []     -> 1
    f : fs -> foldl (*) f fs
-- | Sum that uses the first element as the fold seed: the empty list is 0,
-- but non-empty lists get no extra leading 0 (relevant for symbolic 'Num'
-- instances where @0 + x@ is not structurally @x@).
sum' :: (Num a) => [a] -> a
sum' []       = 0
sum' (x : xs) = foldl (+) x xs
-- | Turn a polynomial back into an expression: every monomial with a
-- nonzero coefficient is rebuilt and scaled, then everything is summed.
rebuild :: (Num expr, Eq num, Num num) => (num -> expr -> expr) -> Polynomial expr num -> expr
rebuild scale poly = sum' (mapMaybe toExpr (M.toList poly))
  where
    -- Zero-coefficient terms vanish from the result entirely.
    toExpr (term, coef)
      | coef == 0 = Nothing
      | otherwise = Just (scale coef (rebuildTerm term))
-- | A polynomial: map from monomial key (atom/exponent association list)
-- to its collected coefficient.
type Polynomial expr num = M.Map [(expr, Integer)] num

-- | The zero polynomial (no terms).
poly0 :: Polynomial expr num
poly0 = M.empty
-- | Fold one monomial into a polynomial, adding coefficients of equal
-- atom/exponent keys; the zero term 'Z' leaves the polynomial unchanged.
addTerm :: (Ord expr, Eq num, Num num)
        => Term expr num -> Polynomial expr num -> Polynomial expr num
addTerm t poly =
  case t of
    Z                  -> poly
    Term coefficient m -> M.insertWith (+) (M.toList m) coefficient poly
-- | Left-to-right function composition: @(f .> g) x = g (f x)@.
(.>) :: (a -> b) -> (b -> c) -> a -> c
(f .> g) x = g (f x)
-- | Simplify an expression: distribute it into a list of monomials,
-- collect coefficients of equal monomials into a polynomial, then rebuild
-- an expression, using @scale@ to attach numeric coefficients.
simplify :: (Ord expr, Num expr, Eq num, Num num)
         => (num -> expr -> expr) -> Expr expr num -> expr
simplify scale = reduce term1 .> foldr addTerm poly0 .> rebuild scale
|
swift-nav/plover
|
src/Language/Plover/Simplify.hs
|
Haskell
|
mit
| 2,146
|
-- | Implementation of an execution environment that uses /systemdNspawn/.
module B9.SystemdNspawn
( SystemdNspawn (..),
)
where
import B9.B9Config
( getB9Config,
systemdNspawnConfigs,
)
import B9.B9Config.SystemdNspawn as X
import B9.B9Error
import B9.B9Exec
import B9.B9Logging
import B9.BuildInfo
import B9.Container
import B9.DiskImages
import B9.ExecEnv
import B9.ShellScript
import Control.Eff
import Control.Lens (view)
import Control.Monad (when)
import Control.Monad.IO.Class
( liftIO,
)
import Data.Foldable (traverse_)
import Data.List (intercalate, partition)
import Data.Maybe (fromMaybe, maybe)
import System.Directory
import System.FilePath
import Text.Printf (printf)
-- | Execution environment backed by the @systemd-nspawn@ container runner.
newtype SystemdNspawn = SystemdNspawn SystemdNspawnConfig

-- | Prepends @sudo @ to a shell command when the config asks for it
-- (otherwise the identity function).
type SudoPrepender = String -> String
instance Backend SystemdNspawn where
  getBackendConfig _ =
    fmap SystemdNspawn . view systemdNspawnConfigs <$> getB9Config

  -- Only raw images are supported; they are loopback-mounted below.
  supportedImageTypes _ = [Raw]

  -- Mount the environment's images, run the script in a container, and
  -- unmount/clean up regardless of the outcome ('finallyB9').
  runInEnvironment (SystemdNspawn dCfg) env scriptIn =
    if emptyScript scriptIn
      then return True -- nothing to run counts as success
      else do
        let sudo = if _systemdNspawnUseSudo dCfg then ("sudo " ++) else id
        containerBuildDirs <- createContainerBuildRootDir
        containerMounts <- mountLoopbackImages sudo env containerBuildDirs
        finallyB9
          ( do
              bootScript <- prepareBootScript containerBuildDirs scriptIn
              execBuild sudo containerMounts (envSharedDirectories env) bootScript dCfg
          )
          ( do
              umountLoopbackImages sudo containerMounts
              removeContainerBuildRootDir sudo containerBuildDirs
          )
-- | Create the per-build container root directory and its
-- @loopback_mounts@ subdirectory underneath the B9 build directory.
createContainerBuildRootDir ::
  (Member BuildInfoReader e, Member ExcB9 e, CommandIO e) => Eff e ContainerBuildDirectories
createContainerBuildRootDir = do
  buildD <- getBuildDir
  let loopbackMountDir = root </> "loopback_mounts"
      root = buildD </> "container_build_root"
  liftIO $ do
    createDirectoryIfMissing True root
    createDirectoryIfMissing True loopbackMountDir
  let res = ContainerBuildDirectories {containerBuildRoot = root, containerLoopbackMountRoot = loopbackMountDir}
  traceL ("Created container build directories: " ++ show res)
  return res
-- | Locations of the temporary directories used by one container build.
data ContainerBuildDirectories
  = ContainerBuildDirectories
      { -- | Root of all per-build container state.
        containerBuildRoot :: FilePath,
        -- | Parent directory holding one mount point per loopback image.
        containerLoopbackMountRoot :: FilePath
      }
  deriving (Show)
-- | Loopback-mount every raw disk image of the build environment and
-- split the result into the root ("/") mount and the remaining mounts.
-- Fails when a non-raw image slipped through, when no "/" image is
-- configured, or when several are.
mountLoopbackImages ::
  (Member BuildInfoReader e, Member ExcB9 e, CommandIO e) =>
  SudoPrepender ->
  ExecEnv ->
  ContainerBuildDirectories ->
  Eff e ContainerMounts
mountLoopbackImages sudo e containerDirs = do
  let imgMounts0 = [(img, mountPoint) | (img, MountPoint mountPoint) <- envImageMounts e]
      imgMounts = [(imgPath, mountPoint) | (Image imgPath _ _, mountPoint) <- imgMounts0]
      invalidImages = [x | x@(Image _ t _, _) <- imgMounts0, t /= Raw]
  when
    (not (null invalidImages))
    (throwB9Error ("Internal Error: Only 'raw' disk images can be used for container builds, and these images were supposed to be automatically converted: " ++ show invalidImages))
  case partition ((== "/") . snd) imgMounts of
    ([rootImg], otherImgs) -> do
      rootMount <- mountLoopback rootImg
      otherMounts <- traverse mountLoopback otherImgs
      return (ContainerMounts (Right rootMount) otherMounts)
    ([], _) ->
      throwB9Error "A containerized build requires that a disk image for the root-, i.e. the '/' directory is configured."
    (rootImgs, _) ->
      throwB9Error ("A containerized build requires that only one disk image for the root-, i.e. the '/' directory, instead these were given: " ++ show rootImgs)
  where
    -- Mount one image under the loopback root; the mount-point name is
    -- derived via 'printHash' (presumably a hash of image + mount point,
    -- making it unique per pair -- confirm in B9's hashing helper).
    mountLoopback (imgPath, containerMountPoint) = do
      let hostMountPoint =
            containerLoopbackMountRoot containerDirs
              </> printHash (imgPath, containerMountPoint)
      liftIO $ createDirectoryIfMissing True hostMountPoint
      hostCmd (sudo (printf "mount -o loop '%s' '%s'" imgPath hostMountPoint)) timeoutFastCmd
      return
        ( LoopbackMount
            { loopbackHost = hostMountPoint,
              loopbackContainer = containerMountPoint
            }
        )
-- | Path to a raw image used directly as the container root.
newtype ContainerRootImage
  = ContainerRootImage FilePath
  deriving (Show)

-- | All mounts of a container build: the root image (either handed to
-- systemd-nspawn as an image via @-i@, or loopback-mounted and used as a
-- directory via @-D@; see 'execBuild') plus the non-root loopback mounts.
data ContainerMounts
  = ContainerMounts
      { containerRootImage :: Either ContainerRootImage LoopbackMount,
        containerLoopbackMounts :: [LoopbackMount]
      }
  deriving (Show)

-- | A host-side mount point paired with its path inside the container.
data LoopbackMount = LoopbackMount {loopbackHost :: FilePath, loopbackContainer :: FilePath}
  deriving (Show)
-- | Write the build script (prefixed by a small environment preamble) to
-- a host directory that 'execBuild' bind-mounts read-only into the
-- container at @/mnt/boot_script@.
prepareBootScript ::
  (Member ExcB9 e, CommandIO e) =>
  ContainerBuildDirectories ->
  Script ->
  Eff e BootScript
prepareBootScript containerDirs script = do
  let bs =
        BootScript
          { bootScriptHostDir = containerBuildRoot containerDirs </> "boot_script",
            bootScriptContainerDir = "/mnt/boot_script",
            -- Self-referential record: the command path is built from the
            -- container dir defined just above (lazy binding).
            bootScriptContainerCommand = bootScriptContainerDir bs </> scriptFile
          }
      scriptFile = "run.sh"
      -- Preamble: make sure HOME/USER are set for root before the payload.
      scriptEnv =
        Begin
          [ Run "export" ["HOME=/root"],
            Run "export" ["USER=root"],
            -- IgnoreErrors True [Run "source" ["/etc/profile"]],
            script
          ]
  liftIO $ do
    createDirectoryIfMissing True (bootScriptHostDir bs)
    writeSh (bootScriptHostDir bs </> scriptFile) scriptEnv
  traceL ("wrote script: \n" ++ show scriptEnv)
  traceL ("created boot-script: " ++ show bs)
  return bs
-- | Locations of the generated boot script on the host and inside the
-- container, plus the command to execute it.
data BootScript
  = BootScript
      { -- | Host directory containing the generated @run.sh@.
        bootScriptHostDir :: FilePath,
        -- | Directory where that host dir appears inside the container.
        bootScriptContainerDir :: FilePath,
        -- | Container-side path of the script to execute.
        bootScriptContainerCommand :: String
      }
  deriving (Show)
-- | Assemble and run the @systemd-nspawn@ command line: root image,
-- capabilities, bind mounts for loopback images, shared directories and
-- the boot script, then the script invocation itself.  Runs interactively
-- (inheriting stdin, no timeout) when either the console config or the
-- build is interactive.
execBuild ::
  (Member ExcB9 e, Member BuildInfoReader e, CommandIO e) =>
  SudoPrepender ->
  ContainerMounts ->
  [SharedDirectory] ->
  BootScript ->
  SystemdNspawnConfig ->
  Eff e Bool
execBuild sudo containerMounts sharedDirs bootScript dCfg = do
  let systemdCmd =
        unwords
          ( systemdNspawnExe
              ++ consoleOptions
              ++ rootImageOptions
              ++ capabilityOptions
              ++ bindMounts
              ++ extraArgs
              ++ execOptions
          )
      systemdNspawnExe =
        [fromMaybe "systemd-nspawn" (_systemdNspawnExecutable dCfg)]
      consoleOptions =
        ["--console=" ++ show (_systemdNspawnConsole dCfg)]
      -- Root either as an image (-i) or as a mounted directory tree (-D).
      rootImageOptions =
        case containerRootImage containerMounts of
          Left (ContainerRootImage imgPath) ->
            ["-i", imgPath]
          Right loopbackMounted ->
            ["-D", loopbackHost loopbackMounted]
      capabilityOptions =
        case _systemdNspawnCapabilities dCfg of
          [] -> []
          caps -> ["--capability=" ++ intercalate "," (map show caps)]
      -- Loopback and shared mounts read-write; RO shares and the boot
      -- script directory read-only.
      bindMounts =
        map mkBind loopbackMounts
          ++ map mkBind sharedDirMounts
          ++ map mkBindRo sharedDirMountsRo
          ++ [mkBindRo (bootScriptHostDir bootScript, bootScriptContainerDir bootScript)]
        where
          mkBind (hostDir, containerDir) = "--bind=" ++ hostDir ++ ":" ++ containerDir
          mkBindRo (hostDir, containerDir) = "--bind-ro=" ++ hostDir ++ ":" ++ containerDir
          loopbackMounts =
            [ (h, c)
              | LoopbackMount {loopbackHost = h, loopbackContainer = c} <-
                  containerLoopbackMounts containerMounts
            ]
          sharedDirMounts = [(h, c) | SharedDirectory h (MountPoint c) <- sharedDirs]
          sharedDirMountsRo = [(h, c) | SharedDirectoryRO h (MountPoint c) <- sharedDirs]
      extraArgs = maybe [] (: []) (_systemdNspawnExtraArgs dCfg)
      execOptions = ["/bin/sh", bootScriptContainerCommand bootScript]
      -- Optional lifetime limit, converted from seconds to microseconds.
      timeout = (TimeoutMicros . (* 1000000)) <$> _systemdNspawnMaxLifetimeSeconds dCfg
  traceL ("executing systemd-nspawn container build")
  interactiveAction <- isInteractive
  let
    runInteractively =
      case _systemdNspawnConsole dCfg of
        SystemdNspawnInteractive ->
          True
        _ ->
          interactiveAction
  if runInteractively
    then
      hostCmdStdIn HostCommandInheritStdin (sudo systemdCmd) Nothing
    else
      hostCmd (sudo systemdCmd) timeout
-- | Unmount every loopback mount belonging to a container build: the
-- root image (only if it was loopback-mounted) plus all additional
-- images.
umountLoopbackImages ::
  forall e.
  (Member ExcB9 e, CommandIO e) =>
  SudoPrepender ->
  ContainerMounts ->
  Eff e ()
umountLoopbackImages sudo c = do
  case containerRootImage c of
    -- Root image was passed to nspawn directly; nothing was mounted.
    Left _ -> return ()
    Right r -> umount r
  traverse_ umount (containerLoopbackMounts c)
  where
    umount :: LoopbackMount -> Eff e ()
    umount l = do
      traceL $ "unmounting: " ++ show l
      res <- hostCmd (sudo (printf "umount '%s'" (loopbackHost l))) timeoutFastCmd
      -- NOTE(review): failure is reported via 'errorL'; presumably this
      -- logs rather than aborts, so later unmounts still run — confirm.
      when (not res) (errorL ("failed to unmount: " ++ show l))
-- | Recursively delete the host-side build root directory of a container
-- build, reporting via 'errorL' when removal fails.
removeContainerBuildRootDir ::
  forall e.
  (Member ExcB9 e, CommandIO e) =>
  SudoPrepender ->
  ContainerBuildDirectories ->
  Eff e ()
removeContainerBuildRootDir sudo containerBuildDirs = do
  let target = containerBuildRoot containerBuildDirs
  traceL $ "removing: " ++ target
  -- 'rm -rf' runs under sudo since files created by the container may be
  -- root-owned.
  res <- hostCmd (sudo (printf "rm -rf '%s'" target)) timeoutFastCmd
  when (not res) (errorL ("failed to remove: " ++ target))
-- | Timeout for quick administrative host commands (mount, umount, rm):
-- 10 seconds, expressed in microseconds.
timeoutFastCmd :: Maybe Timeout
timeoutFastCmd = Just (TimeoutMicros 10000000)
|
sheyll/b9-vm-image-builder
|
src/lib/B9/SystemdNspawn.hs
|
Haskell
|
mit
| 9,163
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE TypeSynonymInstances #-}
module IHaskell.Display.Widgets.Bool.ToggleButton (
-- * The ToggleButton Widget
ToggleButton,
-- * Constructor
mkToggleButton) where
-- To keep `cabal repl` happy when running from the ihaskell repo
import Prelude
import Control.Monad (when, join, void)
import Data.Aeson
import Data.HashMap.Strict as HM
import Data.IORef (newIORef)
import Data.Text (Text)
import Data.Vinyl (Rec(..), (<+>))
import IHaskell.Display
import IHaskell.Eval.Widgets
import IHaskell.IPython.Message.UUID as U
import IHaskell.Display.Widgets.Types
import IHaskell.Display.Widgets.Common
-- | A 'ToggleButton' represents a ToggleButton widget from IPython.html.widgets.
type ToggleButton = IPythonWidget ToggleButtonType
-- | Create a new toggle button widget with default properties and a
-- fresh random UUID, and open a comm for it with the kernel.
mkToggleButton :: IO ToggleButton
mkToggleButton = do
  -- Default properties, with a random uuid
  uuid <- U.random
  let boolState = defaultBoolWidget "ToggleButtonView"
      -- ToggleButton-specific state on top of the generic bool widget.
      toggleState = (Tooltip =:: "")
                 :& (Icon =:: "")
                 :& (ButtonStyle =:: DefaultButton)
                 :& RNil
      widgetState = WidgetState (boolState <+> toggleState)
  stateIO <- newIORef widgetState
  let widget = IPythonWidget uuid stateIO
  -- Open a comm for this widget, and store it in the kernel state
  widgetSendOpen widget $ toJSON widgetState
  -- Return the toggle button widget
  return widget
-- | Displaying the widget just sends a view request to the frontend;
-- there is no textual/plot output, hence the empty 'Display'.
instance IHaskellDisplay ToggleButton where
  display b = do
    widgetSendView b
    return $ Display []
instance IHaskellWidget ToggleButton where
  getCommUUID = uuid
  -- Handle a comm message from the frontend: when the payload carries
  -- @sync_data.value@, update the widget's boolean value and fire the
  -- change handler.
  --
  -- BUG FIX: the original bound @Just (Object dict2)@ / @Just (Bool value)@
  -- with irrefutable let-patterns (and matched only 'Object' payloads in
  -- the function head), so any comm message without exactly that shape
  -- threw a pattern-match exception in the kernel. Malformed messages are
  -- now ignored instead.
  comm widget val _ =
    case val of
      Object dict1
        | Just (Object dict2) <- HM.lookup ("sync_data" :: Text) dict1
        , Just (Bool value) <- HM.lookup ("value" :: Text) dict2 -> do
            setField' widget BoolValue value
            triggerChange widget
      _ -> return ()
|
beni55/IHaskell
|
ihaskell-display/ihaskell-widgets/src/IHaskell/Display/Widgets/Bool/ToggleButton.hs
|
Haskell
|
mit
| 2,085
|
{-# OPTIONS_GHC -O0 #-}
{-# LANGUAGE TypeOperators, OverloadedStrings, DeriveGeneric #-}
{-# LANGUAGE ScopedTypeVariables, GeneralizedNewtypeDeriving, CPP #-}
-- | Tests that modify the database.
module Tests.Mutable (mutableTests) where
import Control.Concurrent
import Control.Monad.Catch
import Data.ByteString (ByteString)
import qualified Data.ByteString.Lazy as Lazy (ByteString)
import Data.List hiding (groupBy, insert)
import Data.Proxy
import Data.Time
import Database.Selda
import Database.Selda.Backend hiding (disableForeignKeys)
import Database.Selda.Migrations
import Database.Selda.MakeSelectors
import Database.Selda.Validation (validateTable)
import Database.Selda.Unsafe (unsafeSelector, rawStm)
import Test.HUnit
import Utils
import Tables
#if !MIN_VERSION_base(4, 11, 0)
import Data.Semigroup
#endif
-- | All tests that mutate the database. Each entry runs inside a fresh
-- environment supplied by the caller.
mutableTests :: (SeldaM b () -> IO ()) -> Test
mutableTests freshEnv = test
  [ "tryDrop never fails"          ~: freshEnv tryDropNeverFails
  , "tryCreate never fails"        ~: freshEnv tryCreateNeverFails
  , "drop fails on missing"        ~: freshEnv dropFailsOnMissing
  , "create fails on duplicate"    ~: freshEnv createFailsOnDuplicate
  , "auto primary increments"      ~: freshEnv (autoPrimaryIncrements comments)
  , "insert returns number of rows" ~: freshEnv insertReturnsNumRows
  , "update updates table"         ~: freshEnv updateUpdates
  , "update nothing"               ~: freshEnv updateNothing
  , "insert time values"           ~: freshEnv insertTime
  , "transaction completes"        ~: freshEnv transactionCompletes
  , "transaction rolls back"       ~: freshEnv transactionRollsBack
  , "queries are consistent"       ~: freshEnv consistentQueries
  , "delete deletes"               ~: freshEnv deleteDeletes
  , "delete everything"            ~: freshEnv deleteEverything
  , "override auto-increment"      ~: freshEnv (overrideAutoIncrement comments)
  , "insert all defaults"          ~: freshEnv insertAllDefaults
  , "insert some defaults"         ~: freshEnv insertSomeDefaults
  , "quoted weird names"           ~: freshEnv weirdNames
  , "dupe insert throws SeldaError" ~: freshEnv dupeInsertThrowsSeldaError
  , "dupe insert 2 throws SeldaError"~: freshEnv dupeInsert2ThrowsSeldaError
  , "dupe update throws SeldaError" ~: freshEnv dupeUpdateThrowsSeldaError
  , "nul queries don't fail"       ~: freshEnv nulQueries
  , "fk violation fails"           ~: freshEnv fkViolationFails
  , "table with multiple FKs"      ~: freshEnv multipleFKs
  , "uniqueness violation fails"   ~: freshEnv uniqueViolation
  , "upsert inserts/updates right" ~: freshEnv insertOrUpdate
  , "tryInsert doesn't fail"       ~: freshEnv tryInsertDoesntFail
  , "isIn list gives right result" ~: freshEnv isInList
  , "isIn query gives right result" ~: freshEnv isInQuery
  , "strict blob column"           ~: freshEnv blobColumn
  , "lazy blob column"             ~: freshEnv lazyBlobColumn
  , "insertWhen/Unless"            ~: freshEnv whenUnless
  , "insert >999 parameters"       ~: freshEnv manyParameters
  , "empty insertion"              ~: freshEnv emptyInsert
  , "correct boolean representation" ~: freshEnv boolTable
  , "optional foreign keys"        ~: freshEnv optionalFK
  , "custom enum type"             ~: freshEnv customEnum
  , "disable foreign key checks"   ~: freshEnv disableForeignKeys
  , "mod fk violation fails"       ~: freshEnv genModFkViolationFails
  , "mod fk insertion ok"          ~: freshEnv genModFkInsertSucceeds
  , "migrate into self"            ~: freshEnv (migrationTest migrateIntoSelf)
  , "drop column migration"        ~: freshEnv (migrationTest dropColumn)
  , "auto-migrate one step"        ~: freshEnv (migrationTest autoMigrateOneStep)
  , "auto-migrate no-op"           ~: freshEnv (migrationTest autoMigrateNoOp)
  , "migrate aggregate"            ~: freshEnv (migrationTest migrateAggregate)
    -- BUG FIX: this entry previously ran 'autoMigrateOneStep' (duplicating
    -- the entry above), so 'autoMigrateMultiStep' was never exercised.
  , "auto-migrate multi-step"      ~: freshEnv (migrationTest autoMigrateMultiStep)
  , "multi-unique insert"          ~: freshEnv multiUnique
  , "uuid inserts"                 ~: freshEnv uuidInserts
  , "uuid queries"                 ~: freshEnv uuidQueries
  , "migrate table with index"     ~: freshEnv migrateIndex
  , "weak auto primary increments" ~: freshEnv (autoPrimaryIncrements weakComments)
  , "override weak auto-increment" ~: freshEnv (overrideAutoIncrement weakComments)
  , "disable FKs with rawStm"      ~: freshEnv disableFKsWithRawStm
  , "overwrite row on update"      ~: freshEnv overwriteRow
  ]
tryDropNeverFails :: SeldaM b ()
tryDropNeverFails = teardown
tryCreateNeverFails :: SeldaM b ()
tryCreateNeverFails = tryCreateTable comments >> tryCreateTable comments
dropFailsOnMissing = assertFail $ dropTable comments
createFailsOnDuplicate = createTable people >> assertFail (createTable people)
autoPrimaryIncrements c = do
setup
k <- untyped <$> insertWithPK c [(def, Just "Kobayashi", "チョロゴン")]
k' <- untyped <$> insertWithPK c [(def, Nothing, "more anonymous spam")]
[name] <- query $ do
t <- select c
restrict (t!cId .== literal k)
return (t!cName)
assEq "inserted key refers to wrong value" name (Just "Kobayashi")
let k0 = read (show k) :: Int
k1 = read (show k') :: Int
ass "primary key doesn't increment properly" (k1 == k0+1)
insertReturnsNumRows = do
setup
rows <- insert comments
[ (def, Just "Kobayashi", "チョロゴン")
, (def, Nothing, "more anonymous spam")
, (def, Nothing, "even more spam")
]
assEq "insert returns wrong number of inserted rows" 3 rows
updateUpdates = do
setup
insert_ comments
[ (def, Just "Kobayashi", "チョロゴン")
, (def, Nothing, "more anonymous spam")
, (def, Nothing, "even more spam")
]
rows <- update comments (isNull . (!cName))
(`with` [cName := just "anon"])
[upd] <- query $ aggregate $ do
t <- select comments
restrict (not_ $ isNull (t!cName))
restrict (t!cName .== just "anon")
return (count (t!cName))
assEq "update returns wrong number of updated rows" 3 rows
assEq "rows were not updated" 3 upd
updateNothing = do
setup
a <- query $ select people
n <- update people (const true) id
b <- query $ select people
assEq "identity update didn't happen" (length a) n
assEq "identity update did something weird" a b
insertTime = do
tryDropTable times
createTable times
let Just t = parseTimeM True defaultTimeLocale "%F %H:%M:%S%Q" "2011-11-11 11:11:11.11111"
Just d = parseTimeM True defaultTimeLocale "%F" "2011-11-11"
Just lt = parseTimeM True defaultTimeLocale "%H:%M:%S%Q" "11:11:11.11111"
insert_ times [("now", t, d, lt)]
[("now", t', d', lt')] <- query $ select times
assEq "time not properly inserted" (t, d, lt) (t', d', lt')
dropTable times
where
times :: Table (Text, UTCTime, Day, TimeOfDay)
times = table "times" []
transactionCompletes = do
setup
transaction $ do
insert_ comments [(def, Just "Kobayashi", c1)]
insert_ comments
[ (def, Nothing, "more anonymous spam")
, (def, Just "Kobayashi", c2)
]
cs <- query $ do
t <- select comments
restrict (t!cName .== just "Kobayashi")
return (t!cComment)
ass "some inserts were not performed"
(c1 `elem` cs && c2 `elem` cs && length cs == 2)
where
c1 = "チョロゴン"
c2 = "メイド最高!"
transactionRollsBack :: SeldaM b ()
transactionRollsBack = do
setup
res <- try $ transaction $ do
insert_ comments [(def, Just "Kobayashi", c1)]
insert_ comments
[ (def, Nothing, "more anonymous spam")
, (def, Just "Kobayashi", c2)
]
fail "nope"
case res of
Right _ ->
liftIO $ assertFailure "exception didn't propagate"
Left (SomeException _) -> do
cs <- query $ do
t <- select comments
restrict (t!cName .== just "Kobayashi")
return (t!cComment)
assEq "commit was not rolled back" [] cs
where
c1 = "チョロゴン"
c2 = "メイド最高!"
consistentQueries = do
setup
a <- query q
b <- query q
assEq "query result changed on its own" a b
where
q = do
t <- select people
restrict (round_ (t!pCash) .> (t!pAge))
return (t!pName)
deleteDeletes = do
setup
a <- query q
deleteFrom_ people (\t -> t!pName .== "Velvet")
b <- query q
ass "rows not deleted" (a /= b && length b < length a)
where
q = do
t <- select people
restrict (round_ (t!pCash) .< (t!pAge))
return (t!pName)
deleteEverything = do
tryDropTable people
createTable people
insert_ people peopleItems
a <- query q
deleteFrom_ people (const true)
b <- query q
ass "table empty before delete" (a /= [])
assEq "rows not deleted" [] b
where
q = do
t <- select people
restrict (round_ (t!pCash) .> (t!pAge))
return (t!pName)
overrideAutoIncrement c = do
setup
insert_ c [(toRowId 123, Nothing, "hello")]
num <- query $ aggregate $ do
t <- select c
restrict (t!cId .== literal (toRowId 123))
return (count (t!cId))
assEq "failed to override auto-incrementing column" [1] num
insertAllDefaults = do
setup
pk <- untyped <$> insertWithPK comments [(def, def, def)]
res <- query $ do
comment <- select comments
restrict (comment!cId .== literal pk)
return comment
assEq "wrong default values inserted" [(pk, Nothing, "")] res
insertSomeDefaults = do
setup
insert_ people [Person "Celes" def (Just "chocobo") def]
res <- query $ do
person <- select people
restrict (person!pPet .== just "chocobo")
return person
assEq "wrong values inserted" [Person "Celes" 0 (Just "chocobo") 0] res
weirdNames = do
tryDropTable tableWithWeirdNames
createTable tableWithWeirdNames
i1 <- insert tableWithWeirdNames [(42, Nothing)]
assEq "first insert failed" 1 i1
i2 <- insert tableWithWeirdNames [(123, Just 321)]
assEq "second insert failed" 1 i2
up <- update tableWithWeirdNames (\c -> c ! weird1 .== 42)
(\c -> c `with` [weird2 := just 11])
assEq "update failed" 1 up
res <- query $ do
t <- select tableWithWeirdNames
restrict (t ! weird1 .== 42)
return (t ! weird2)
assEq "select failed" [Just 11] res
dropTable tableWithWeirdNames
where
tableWithWeirdNames :: Table (Int, Maybe Int)
tableWithWeirdNames =
tableFieldMod "DROP TABLE comments" []
(<> "one \" quote \1\2\3\DEL\n two \"quotes\"")
weird1 :*: weird2 = selectors tableWithWeirdNames
dupeInsertThrowsSeldaError = do
tryDropTable comments'
createTable comments'
assertFail $ do
insert_ comments'
[ (0, Just "Kobayashi", "チョロゴン")
, (0, Nothing, "some spam")
]
dropTable comments'
where
comments' :: Table (Int, Maybe Text, Text)
comments' = table "comments" [Single cId :- primary]
cId :*: cName :*: cComment = selectors comments'
dupeInsert2ThrowsSeldaError :: SeldaM b ()
dupeInsert2ThrowsSeldaError = do
setup
insert_ comments [(def, Just "Kobayashi", "チョロゴン")]
[(ident, _, _)] <- query $ limit 0 1 $ select comments
e <- try $ insert_ comments [(ident, Nothing, "Spam, spam, spaaaaaam!")]
case e :: Either SeldaError () of
Left _ -> return ()
_ -> liftIO $ assertFailure "SeldaError not thrown"
dupeUpdateThrowsSeldaError :: SeldaM b ()
dupeUpdateThrowsSeldaError = do
setup
insert_ comments
[ (def, Just "Kobayashi", "チョロゴン")
, (def, Just "spammer" , "some spam")
]
[(ident, _, _)] <- query $ limit 0 1 $ select comments
e <- try $ do
update_ comments
(\c -> c ! cName .== just "spammer")
(\c -> c `with` [cId := literal ident])
case e :: Either SeldaError () of
Left _ -> return ()
_ -> liftIO $ assertFailure "SeldaError not thrown"
nulQueries = do
setup
insert_ comments
[ (def, Just "Kobayashi", "チョロゴン")
, (def, Nothing , "more \0 spam")
, (def, Nothing , "even more spam")
]
rows <- update comments (isNull . (!cName))
(`with` [cName := just "\0"])
[upd] <- query $ aggregate $ do
t <- select comments
restrict (not_ $ isNull (t!cName))
restrict (t!cName .== just "\0")
return (count (t!cName))
assEq "update returns wrong number of updated rows" 3 rows
assEq "rows were not updated" 3 upd
fkViolationFails = do
-- Note that this is intended to test that FKs are in place and enabled.
-- If we get an FK violation here, we assume that the database does the
-- right thing in other situations, since FKs behavior is determined by
-- the DB, not by Selda, except when creating tables.
setup
createTable addressesWithFK
assertFail $ insert_ addressesWithFK [("Nobody", "Nowhere")]
dropTable addressesWithFK
where
addressesWithFK :: Table (Text, Text)
addressesWithFK = table "addressesWithFK" [one :- foreignKey people pName]
one :*: two = selectors addressesWithFK
data FKAddrs = FKAddrs
{ fkaName :: Text
, fkaCity :: Text
} deriving Generic
instance SqlRow FKAddrs
genModFkViolationFails = do
setup
createTable addressesWithFK
assertFail $ insert_ addressesWithFK [FKAddrs "Nobody" "Nowhere"]
dropTable addressesWithFK
where
addressesWithFK :: Table FKAddrs
addressesWithFK = tableFieldMod "addressesWithFK"
[aName :- foreignKey people pName]
("test_" <>)
aName :*: aCity = selectors addressesWithFK
genModFkInsertSucceeds = do
setup
createTable addressesWithFK
insert_ addressesWithFK [FKAddrs "Link" "Nowhere"]
res <- query $ do
t <- select addressesWithFK
person <- select people
restrict (t!aName .== "Link" .&& t!aName .== person ! pName)
return (person!pName :*: t!aCity)
assEq "wrong state after insert" ["Link" :*: "Nowhere"] res
dropTable addressesWithFK
where
addressesWithFK :: Table FKAddrs
addressesWithFK = tableFieldMod "addressesWithFK"
[aName :- foreignKey people pName]
("test_" <>)
aName :*: aCity = selectors addressesWithFK
multipleFKs = do
setup
createTable addressesWithFK
assertFail $ insert_ addressesWithFK [("Nobody", "Nowhere")]
dropTable addressesWithFK
where
addressesWithFK :: Table (Text, Text)
addressesWithFK = table "addressesWithFK"
[ one :- foreignKey people pName
, two :- foreignKey people pName
]
one :*: two = selectors addressesWithFK
uniqueViolation = do
tryDropTable uniquePeople
createTable uniquePeople
assertFail $ insert_ uniquePeople
[ ("Link", Nothing)
, ("Link", Nothing)
]
r1 <- query $ select uniquePeople
assertFail $ do
insert_ uniquePeople [("Link", Nothing)]
insert_ uniquePeople [("Link", Nothing)]
r2 <- query $ select uniquePeople
assEq "inserted rows despite constraint violation" [] r1
assEq "row disappeared after violation" [("Link", Nothing)] r2
dropTable uniquePeople
where
uniquePeople :: Table (Text, Maybe Text)
(uniquePeople, upName :*: upPet) =
tableWithSelectors "uniquePeople" [Single upName :- unique]
insertOrUpdate = do
tryDropTable counters
createTable counters
r1 <- fmap untyped <$> upsert counters
(\t -> t!c .== 0)
(\t -> t `with` [v += 1])
[(0, 1)]
assEq "wrong return value from inserting upsert" (Just invalidRowId) r1
r2 <- fmap untyped <$> upsert counters
(\t -> t!c .== 0)
(\t -> t `with` [v $= (+1)])
[(0, 1)]
assEq "wrong return value from updating upsert" Nothing r2
res <- query $ select counters
assEq "wrong value for counter" [(0, 2)] res
r3 <- fmap untyped <$> upsert counters
(\t -> t ! c .== 15)
(\t -> t `with` [v := t!v + 1])
[(15, 1)]
assEq "wrong return value from second inserting upsert" (Just invalidRowId) r3
dropTable counters
where
counters :: Table (Int, Int)
counters = table "counters" [Single c :- primary]
c :*: v = selectors counters
tryInsertDoesntFail = do
createTable uniquePeople
res1 <- tryInsert uniquePeople [("Link", Nothing)]
r1 <- query $ select uniquePeople
res2 <- tryInsert uniquePeople [("Link", Nothing)]
r2 <- query $ select uniquePeople
assEq "wrong return value from successful tryInsert" True res1
assEq "row not inserted" [("Link", Nothing)] r1
assEq "wrong return value from failed tryInsert" False res2
assEq "row inserted despite violation" [("Link", Nothing)] r2
dropTable uniquePeople
where
uniquePeople :: Table (Text, Maybe Text)
(uniquePeople, upName :*: upPet) =
tableWithSelectors "uniquePeople" [Single upName :- unique]
isInList = do
setup
res <- query $ do
p <- select people
restrict (p ! pName .== "Link")
return ( "Link" `isIn` [p ! pName, "blah"]
:*: 0 `isIn` [p ! pAge, 42, 19]
:*: 1 `isIn` ([] :: [Col () Int])
)
assEq "wrong result from isIn" [True :*: False :*: False] res
isInQuery = do
setup
res <- query $ do
return ( "Link" `isIn` pName `from` select people
:*: "Zelda" `isIn` pName `from` select people
)
assEq "wrong result from isIn" [True :*: False] res
blobColumn = do
tryDropTable blobs
createTable blobs
n <- insert blobs [("b1", someBlob), ("b2", otherBlob)]
assEq "wrong number of rows inserted" 2 n
[(k, v)] <- query $ do
t <- select blobs
restrict (t ! ks .== "b1")
return t
assEq "wrong key for blob" "b1" k
assEq "got wrong blob back" someBlob v
dropTable blobs
where
blobs :: Table (Text, ByteString)
blobs = table "blobs" []
ks :*: vs = selectors blobs
someBlob = "\0\1\2\3hello!漢字"
otherBlob = "blah"
lazyBlobColumn = do
tryDropTable blobs
createTable blobs
n <- insert blobs [("b1", someBlob), ("b2", otherBlob)]
assEq "wrong number of rows inserted" 2 n
[(k, v)] <- query $ do
t <- select blobs
restrict (t ! ks .== "b1")
return t
assEq "wrong key for blob" "b1" k
assEq "got wrong blob back" someBlob v
dropTable blobs
where
blobs :: Table (Text, Lazy.ByteString)
blobs = table "blobs" []
ks :*: vs = selectors blobs
someBlob = "\0\1\2\3hello!漢字"
otherBlob = "blah"
whenUnless = do
setup
insertUnless people (\t -> t ! pName .== "Lord Buckethead") theBucket
oneBucket <- query $ select people `suchThat` ((.== "Lord Buckethead") . (! pName))
assEq "Lord Buckethead wasn't inserted" theBucket (oneBucket)
insertWhen people (\t -> t ! pName .== "Lord Buckethead") theSara
oneSara <- query $ select people `suchThat` ((.== "Sara") . (! pName))
assEq "Sara wasn't inserted" theSara (oneSara)
insertUnless people (\t -> t ! pName .== "Lord Buckethead")
[Person "Jessie" 16 Nothing (10^6)]
noJessie <- query $ select people `suchThat` ((.== "Jessie") . (! pName))
assEq "Jessie was wrongly inserted" [] (noJessie :: [Person])
insertWhen people (\t -> t ! pName .== "Jessie")
[Person "Lavinia" 16 Nothing (10^8)]
noLavinia <- query $ select people `suchThat` ((.== "Lavinia") . (! pName))
assEq "Lavinia was wrongly inserted" [] (noLavinia :: [Person])
teardown
where
theBucket = [Person "Lord Buckethead" 30 Nothing 0]
theSara = [Person "Sara" 14 Nothing 0]
manyParameters = do
tryDropTable things
createTable things
inserted <- insert things [0..1000]
actuallyInserted <- query $ aggregate $ count . the <$> select things
dropTable things
assEq "insert returned wrong insertion count" 1001 inserted
assEq "wrong number of items inserted" [1001] actuallyInserted
where
things :: Table (Only Int)
things = table "things" []
emptyInsert = do
setup
inserted <- insert people []
assEq "wrong insertion count reported" 0 inserted
teardown
-- | Booleans round-trip through the backend's boolean representation,
-- and 'def' for Bool is False.
boolTable = do
  tryDropTable tbl
  createTable tbl
  insert tbl [(def, True), (def, False), (def, def)]
  bs <- query $ (! two) <$> select tbl
  assEq "wrong values inserted into table" [True, False, False] bs
  dropTable tbl
  where
    tbl :: Table (RowID, Bool)
    tbl = table "booltable" [one :- untypedAutoPrimary]
    one :*: two = selectors tbl
-- | A nullable self-referencing foreign key column accepts both NULL and
-- a valid reference into its own table.
optionalFK = do
  tryDropTable tbl
  createTable tbl
  pk <- untyped <$> insertWithPK tbl [(def, Nothing)]
  insert tbl [(def, Just pk)]
  vs <- query $ (! mrid) <$> select tbl
  assEq "wrong value for nullable FK" [Nothing, Just pk] vs
  dropTable tbl
  where
    tbl :: Table (RowID, Maybe RowID)
    -- BUG FIX: this table was named "booltable", colliding with the table
    -- used by 'boolTable' (copy-paste); give it its own name so the two
    -- tests cannot interfere with each other's schema.
    tbl = table "optfktable" [rid :- untypedAutoPrimary, mrid :- foreignKey tbl rid]
    (rid :*: mrid) = selectors tbl
-- | For genericAutoPrimary.
data AutoPrimaryUser = AutoPrimaryUser
{ uid :: ID AutoPrimaryUser
, admin :: Bool
, username :: Text
, password :: Text
, dateCreated :: UTCTime
, dateModified :: UTCTime
} deriving ( Eq, Show, Generic )
-- | For customEnum
data Foo = A | B | C | D
deriving (Show, Read, Eq, Ord, Enum, Bounded)
instance SqlType Foo
customEnum = do
tryDropTable tbl
createTable tbl
inserted <- insert tbl [(def, A), (def, C), (def, C), (def, B)]
assEq "wrong # of rows inserted" 4 inserted
res <- query $ do
t <- select tbl
order (t ! two) descending
return (t ! two)
assEq "wrong pre-delete result list" [C, C, B, A] res
deleted <- deleteFrom tbl ((.== literal C) . (! two))
assEq "wrong # of rows deleted" 2 deleted
res2 <- query $ do
t <- select tbl
order (t ! two) ascending
return (t ! two)
assEq "wrong post-delete result list" [A, B] res2
dropTable tbl
where
tbl :: Table (RowID, Foo)
tbl = table "enums" [one :- untypedAutoPrimary]
one :*: two = selectors tbl
-- | 'withoutForeignKeyEnforcement' allows dropping a table that is still
-- referenced by a foreign key; enforcement must be active again afterwards.
disableForeignKeys = do
  -- Run the test twice, to check that FK checking gets turned back on again
  -- properly.
  go ; go
  where
    go = do
      tryDropTable tbl2
      tryDropTable tbl1
      createTable tbl1
      createTable tbl2
      pk <- untyped <$> insertWithPK tbl1 [Only def]
      insert tbl2 [(def, pk)]
      -- With FKs enforced, dropping the referenced table must fail.
      assertFail $ dropTable tbl1
      withoutForeignKeyEnforcement $ dropTable tbl1 >> dropTable tbl2
      tryDropTable tbl2
      tryDropTable tbl1
    tbl1 :: Table (Only RowID)
    tbl1 = table "table1" [id1 :- untypedAutoPrimary]
    id1 = selectors tbl1
    tbl2 :: Table (RowID, RowID)
    tbl2 = table "table2" [s_fst :- untypedAutoPrimary, s_snd :- foreignKey tbl1 id1]
    s_fst :*: s_snd = selectors tbl2
-- | Run a migration test against a fresh 'migrationTable1' pre-populated
-- with the rows [1,2,3], dropping all migration tables afterwards.
migrationTest test = do
  tryDropTable migrationTable1
  createTable migrationTable1
  insert_ migrationTable1 [1,2,3]
  test
  tryDropTable migrationTable1
  tryDropTable migrationTable2
  tryDropTable migrationTable3
migrationTable1 :: Table (Only Int)
migrationTable1 = table "table1" [Single mt1_1 :- primary]
mt1_1 = selectors migrationTable1
migrationTable2 :: Table (Text, Int)
migrationTable2 = table "table1" [Single mt2_1 :- primary]
mt2_1 :*: mt2_2 = selectors migrationTable2
migrationTable3 :: Table (Only Int)
migrationTable3 = table "table3" [Single mt3_1 :- primary]
mt3_1 = selectors migrationTable3
-- | Migration steps used by the auto-migration tests:
-- table1 -> table1 (no-op), table1 -> table2 (add a text column),
-- table2 -> table3 (drop it again), table3 -> table1 (back to the start).
steps =
  [ [Migration migrationTable1 migrationTable1 pure]
  , [Migration migrationTable1 migrationTable2 $ \foo -> pure $ new
      [ mt2_1 := toString (the foo)
      , mt2_2 := the foo
      ]
    ]
  , [Migration migrationTable2 migrationTable3 $ \t -> pure (only (t ! mt2_2))]
  , [Migration migrationTable3 migrationTable1 pure]
  ]
migrateIntoSelf = do
migrate migrationTable1 migrationTable1 id
res <- query $ do
x <- select migrationTable1
order (the x) ascending
return x
assEq "migrating into self went wrong" [1,2,3] res
addColumn = do
migrate migrationTable1 migrationTable2 $ \foo -> new
[ mt2_1 := toString (the foo)
, mt2_2 := the foo
]
res <- query $ do
t <- select migrationTable2
order (t ! mt2_1) ascending
return t
assEq "adding column went wrong" [("1",1),("2",2),("3",3)] res
dropColumn = do
migrate migrationTable1 migrationTable2 $ \foo -> new
[ mt2_1 := toString (the foo)
, mt2_2 := the foo
]
migrate migrationTable2 migrationTable3 $ \tbl -> only (tbl ! mt2_2)
assertFail $ query $ select migrationTable2
res <- query $ do
x <- select migrationTable3
order (the x) ascending
return x
assEq "migrating back went wrong" [1,2,3] res
autoMigrateOneStep = do
migrate migrationTable1 migrationTable3 id
autoMigrate False steps
res <- query $ do
x <- select migrationTable1
order (the x) ascending
return x
assEq "automigration failed" [1,2,3] res
autoMigrateNoOp = do
autoMigrate True steps
res <- query $ do
x <- select migrationTable1
order (the x) ascending
return x
assEq "no-op automigration failed" [1,2,3] res
migrateAggregate = do
setup
migrateM migrationTable1 migrationTable2 $ \foo -> do
age <- aggregate $ do
person <- select people
return $ ifNull 0 .<$> min_ (person ! pAge)
return $ new [mt2_1 := toString (the foo), mt2_2 := age]
res <- query $ do
t <- select migrationTable2
order (t ! mt2_2) ascending
return t
assEq "query migration failed" [("1",10),("2",10),("3",10)] res
-- | Auto-migrating through all of 'steps' from the initial schema should
-- end back on 'migrationTable1' with the data intact.
autoMigrateMultiStep = do
  autoMigrate True steps
  res <- query $ do
    x <- select migrationTable1
    order (the x) ascending
    return x
  assEq "multi-step automigration failed" [1,2,3] res
multiUnique = do
tryDropTable uniques
createTable uniques
insert_ uniques [(1,1), (1,2), (2,1), (2,2)]
expectFalse1 <- tryInsert uniques [(1,1)]
expectFalse2 <- tryInsert uniques [(1,2)]
expectTrue1 <- tryInsert uniques [(1,3)]
expectTrue2 <- tryInsert uniques [(3,3)]
assEq "uniqueness violation" False expectFalse1
assEq "uniqueness violation" False expectFalse2
assEq "overly strict uniqueness constraint" True expectTrue1
assEq "overly strict uniqueness constraint" True expectTrue2
dropTable uniques
where
uniques :: Table (Int, Int)
(uniques, ua :*: ub) =
tableWithSelectors "uniques" [(ua :+ Single ub) :- unique]
-- | Table keyed by a UUID primary key. The selector for the constraint is
-- built with 'unsafeSelector' since the table is declared without
-- 'tableWithSelectors'.
uuidTable :: Table (UUID, Int)
uuidTable = table "uuidTable"
  [ Single (unsafeSelector 0 :: Selector (UUID, Int) UUID) :- primary
  ]
-- | Create a fresh 'uuidTable', check that duplicate UUID keys are
-- rejected, insert two rows under two fresh UUIDs, and return those UUIDs.
uuidSetup = do
  tryDropTable uuidTable
  createTable uuidTable
  uuid <- newUuid
  assertFail $ insert_ uuidTable [(uuid, 1), (uuid, 2)]
  uuid2 <- newUuid
  insert_ uuidTable [(uuid, 1), (uuid2, 2)]
  return (uuid, uuid2)
-- | UUID insertion works; all assertions live in 'uuidSetup'.
uuidInserts = do
  _ <- uuidSetup
  dropTable uuidTable
-- | Querying by a UUID literal returns the row inserted under that key.
-- (The unused bindings of the original — the second UUID and the Int
-- column — are now wildcards, silencing -Wall warnings.)
uuidQueries = do
  (a, _) <- uuidSetup
  [(a', _)] <- query $ do
    x <- select uuidTable
    restrict (x ! unsafeSelector 0 .== literal a)
    return x
  dropTable uuidTable
  assEq "wrong uuid returned" a a'
-- | Tables carrying an index can be migrated back and forth, and the
-- resulting schema validates after each step.
migrateIndex :: SeldaM b ()
migrateIndex = do
  tryDropTable tbl1
  createTable tbl1
  migrate tbl1 tbl2 (\x -> new [a2 := x ! a1, b := 0])
  validateTable tbl2
  migrate tbl2 tbl1 (\x -> new [a1 := x ! a2])
  validateTable tbl1
  dropTable tbl1
  where
    -- Both tables share the name "foo": each migration replaces the other.
    tbl1 :: Table (Only Int)
    (tbl1, a1) = tableWithSelectors "foo" [Single a1 :- index]
    tbl2 :: Table (Int, Int)
    (tbl2, a2 :*: b) = tableWithSelectors "foo" [Single a2 :- index]
-- | Foreign key enforcement can be switched off for the current
-- connection with a raw, backend-specific statement, after which rows
-- violating an FK constraint can be inserted.
disableFKsWithRawStm :: SeldaM b ()
disableFKsWithRawStm = do
  createTable people
  createTable tbl
  -- With FKs enforced, referencing a missing person must fail.
  assertFail $ insert_ tbl [("nonexistent person", "asdas")]
#ifdef SQLITE
  rawStm "PRAGMA foreign_keys = OFF"
#endif
#ifdef POSTGRES
  rawStm "ALTER TABLE fkaddrs DISABLE TRIGGER ALL"
#endif
  n <- insert tbl [("nonexistent person", "asdas")]
  assEq "failed to insert bad person" 1 n
  dropTable tbl
  dropTable people
#ifdef SQLITE
  -- BUG FIX: re-enable FK checks. The original issued
  -- "PRAGMA foreign_keys = OFF" here a second time, leaving enforcement
  -- disabled for anything else run on this connection. (No Postgres
  -- counterpart is needed: the disabled triggers vanish with the dropped
  -- table.)
  rawStm "PRAGMA foreign_keys = ON"
#endif
  where
    tbl = table "fkaddrs" [aName :- foreignKey people pName]
-- | A whole row can be overwritten by updating with a 'row' literal.
overwriteRow :: SeldaM b ()
overwriteRow = do
  createTable people
  insert people [p1]
  update people (\p -> p!pName .== "Testingway") (const (row p2))
  ps <- query $ select people
  assEq "row not overwritten" [p2] ps
  where
    p1 = Person "Testingway" 101 Nothing 0.2
    p2 = Person "Changingway" 99 (Just "Pet Rock") 100
|
valderman/selda
|
selda-tests/test/Tests/Mutable.hs
|
Haskell
|
mit
| 28,481
|
module Icarus.Bezier (Point(..),
Despair(..),
pointToList,
pointToTuple, tupleToPoint,
bezier,
line1d',
cubic, cubicSeq, trange) where
import Control.Monad (zipWithM)
-------------------------------------------------------------------------------
-- http://mathfaculty.fullerton.edu/mathews/n2003/BezierCurveMod.html
-------------------------------------------------------------------------------
-- | A pair of two values of the same type, wrapped so both components can
-- be mapped over at once with 'fmap'. Appears unused by the Bezier code
-- below; kept as an experiment.
newtype Despair a = Despair { getDespair :: (a, a) } deriving (Show, Eq, Ord)
-- -- To apply a function per coord the Despair signature should be `Despair a`
-- -- where `a` would be a tuple.
instance Functor Despair where
  fmap f (Despair (x, y)) = Despair (f x, f y)
-- instance Applicative Despair where
--   pure = Despair
--   Despair (f, g) <*> Despair (x, y) = Despair ((f x), (g y))
-- TODO: Should be restricted to numbers.

-- | A point in 2D space.
data Point a = Point { getCoordX :: a,
                       getCoordY :: a } deriving (Show, Eq, Ord)

-- Let's operate on Points without manual unwrapping.
--
-- fmap (* 2) (Point 3 4) -- => Point 6 8
instance Functor Point where
  fmap f (Point x y) = Point (f x) (f y)

-- Applicative functors too
--
-- (Point (* 2) (* 3)) <*> (Point 2 4)  -- => Point 4 12
-- pure (+) <*> Point 1 1 <*> (Point 1 2) -- => Point 2 3
-- (+) <$> Point 1 1 <*> (Point 1 2) -- => Point 2 3
instance Applicative Point where
  pure a = Point a a
  Point f g <*> Point x y = Point (f x) (g y)

-- Points form a monoid under componentwise addition with identity
-- 'Point 0 0'.
--
-- BUG FIX: since GHC 8.4 (base 4.11) 'Semigroup' is a superclass of
-- 'Monoid', so defining 'mappend' directly in a Monoid instance without a
-- Semigroup instance no longer compiles. Define '(<>)' in Semigroup and
-- let 'mappend' take its default.
instance Num a => Semigroup (Point a) where
  Point x0 y0 <> Point x1 y1 = Point (x0 + x1) (y0 + y1)

instance Num a => Monoid (Point a) where
  mempty = Point 0 0
-- Notes/Ideas
--
-- Is it possible to create a function with `sequenceA` that gathers all points
-- and applies the interpolation function? is it idiotic?
-- maybe it is a matter of using `map` or `zipWith`.
-- | Build a 'Point' from an (x, y) pair.
tupleToPoint :: (a, a) -> Point a
tupleToPoint (px, py) = Point px py

-- | Flatten a 'Point' into its two coordinates, x first.
pointToList :: Point a -> [a]
pointToList (Point px py) = [px, py]

-- | Inverse of 'tupleToPoint'.
pointToTuple :: Point a -> (a, a)
pointToTuple (Point px py) = (px, py)
-- | Sample the cubic Bezier curve defined by four control points at every
-- parameter value of the fixed grid restricted to [t0, t1] (see 'trange').
cubicSeq :: Point Float -> Point Float -> Point Float -> Point Float
         -> Float -> Float
         -> [Point Float]
cubicSeq c0 c1 c2 c3 t0 t1 = cubic c0 c1 c2 c3 <$> trange t0 t1
-- | Helper for 'cubicSeq': the fixed sample grid 0.0, 0.1, .. 1.0
-- restricted to the closed interval [lo, hi].
trange :: Float -> Float -> [Float]
trange lo hi = filter within ticks
  where
    within x = x >= lo && x <= hi
    ticks    = [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0]
-- p = P0 B1(t) + P1 B2(t) + P2 B3(t) + P3 B4(t)
-- where:
-- Pi are the control points
-- Bi are the Beziér functions
-- t is a percentage of the distance along the curve (between 0 and 1)
-- p is the point in 2D space
-- | Evaluate the cubic Bezier curve through four control points at
-- parameter @t@, componentwise via 'coord'.
cubic :: Point Float -> Point Float -> Point Float -> Point Float
      -> Float
      -> Point Float
cubic (Point ax ay) (Point bx by) (Point cx cy) (Point dx dy) t =
  Point (coord ax bx cx dx t) (coord ay by cy dy t)
-- | The cubic Bernstein blend of four scalar control coordinates:
-- p0*B1(t) + p1*B2(t) + p2*B3(t) + p3*B4(t).
coord :: Float -> Float -> Float -> Float -> Float -> Float
coord p0 p1 p2 p3 t =
  p0 * b1 t + p1 * b2 t + p2 * b3 t + p3 * b4 t
-- | First cubic Bernstein basis polynomial: (1-t)^3.
b1 :: Float -> Float
b1 s = (1 - s) ** 3

-- | Second cubic Bernstein basis polynomial: 3(1-t)^2 t.
b2 :: Float -> Float
b2 s = 3 * (1 - s) ** 2 * s

-- | Third cubic Bernstein basis polynomial: 3(1-t) t^2.
b3 :: Float -> Float
b3 s = 3 * (1 - s) * s ** 2

-- | Fourth cubic Bernstein basis polynomial: t^3.
b4 :: Float -> Float
b4 s = s ** 3
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
-- REVIEW Functors, Applicatives and Monads
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
-- bezier' :: [Point] -> Point
-- bezier' [p] = const p
-- bezier' ps = do l <- bezier' (init ps)
-- r <- bezier' (tail ps)
-- line' l r
-- Line between two points:
-- line' :: Point -> Point -> Point
-- line' (Point x1 y1)
-- (Point x2 y2) = toPoint $ zipWithM line1d' [x1, y1] [x2, y2]
-- toPoint :: [Float] -> Point
-- toPoint [x, y] = Point x y
-- | Linear interpolation between two numbers: @(1-t)*x + t*y@.
line1d' :: Float -> Float -> Float -> Float
line1d' start end t = (1 - t) * start + t * end
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
-- https://github.com/hrldcpr/Bezier.hs/blob/master/Bezier.hs
-------------------------------------------------------------------------------
-- bezier of one point is fixed at that point, and bezier of N points is the
-- linear interpolation between bezier of first N-1 points and bezier of last
-- N-1 points.
-- | A point as a list of coordinates (works in any dimension).
type BPoint = [Float]

-- | A value parameterised by the curve parameter t.
type Parametric a = Float -> a

-- | De Casteljau evaluation: the Bezier of a single point is fixed at that
-- point; the Bezier of n points interpolates between the Bezier of the
-- first n-1 points and that of the last n-1 points.
-- (Partial: undefined for an empty control-point list, as in the original.)
bezier :: [BPoint] -> Parametric BPoint
bezier [p] = const p
bezier ps = \t -> line (bezier (init ps) t) (bezier (tail ps) t) t

-- | Straight line between two points, interpolated coordinate by coordinate.
line :: BPoint -> BPoint -> Parametric BPoint
line p q t = zipWith (\a b -> line1d a b t) p q

-- | Linear interpolation between two scalars.
line1d :: Float -> Float -> Parametric Float
line1d a b t = (1 - t) * a + t * b
|
arnau/icarus
|
src/Icarus/Bezier.hs
|
Haskell
|
mit
| 5,419
|
-- | Upload to Stackage and Hackage
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE FlexibleContexts #-}
module Stackage.Upload (uploadHackageDistro) where
import Network.HTTP.Client
import Stackage.Prelude
import Stackage.ServerBundle (bpAllPackages)
-- | Upload the package set of a build plan to Hackage's distro listing via
-- @PUT https://hackage.haskell.org/distro/<name>/packages.csv@ with HTTP
-- basic auth. Returns the raw server response.
uploadHackageDistro
    :: Text -- ^ distro name
    -> BuildPlan
    -> ByteString -- ^ Hackage username
    -> ByteString -- ^ Hackage password
    -> Manager
    -> IO (Response LByteString)
uploadHackageDistro name bp username password manager = do
    req1 <- parseRequest $ concat
        [ "https://hackage.haskell.org/distro/"
        , unpack name
        , "/packages.csv"
        ]
    let req2 = req1
            { requestHeaders = [("Content-Type", "text/csv")]
            , requestBody = RequestBodyLBS csv
            , method = "PUT"
            }
    httpLbs (applyBasicAuth username password req2) manager
  where
    -- One CSV line per package: "name","version","stackage package url".
    -- Fields are double-quoted inline; package names and versions cannot
    -- contain quotes, so no escaping is performed.
    csv = encodeUtf8
        $ builderToLazy
        $ mconcat
        $ intersperse "\n"
        $ map go
        $ mapToList
        $ bpAllPackages bp
    go (name', version) =
        "\"" ++
        (toBuilder $ display name') ++
        "\",\"" ++
        (toBuilder $ display version) ++
        "\",\"https://www.stackage.org/package/" ++
        (toBuilder $ display name') ++
        "\""
|
fpco/stackage-curator
|
src/Stackage/Upload.hs
|
Haskell
|
mit
| 1,513
|
{-# LANGUAGE NoDeriveAnyClass #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module TokenCSFR ( TokenCSFR()
, csfrChallenge
, correctCSFR
) where
import Database.Persist
import Lucid
import Orphan.UUID
import Protolude hiding(show)
import Web.HttpApiData
import Web.PathPieces
import Prelude(Show(..))
import Data.Aeson
-- | An anti-CSRF token, rendered into forms and compared on submission.
newtype TokenCSFR = TokenCSFR UUID
          deriving (Read,Eq,Ord,FromHttpApiData,ToJSON,FromJSON)

-- Shows as the bare UUID (no constructor wrapper).
instance Show TokenCSFR where
  show (TokenCSFR t) = show t

-- TODO: implement
-- NOTE(review): the secret argument is ignored, so the "challenge" is just
-- the session UUID itself and 'correctCSFR' degenerates to comparing UUIDs.
-- A real implementation should derive the token from both arguments.
csfrChallenge :: Text -> UUID -> TokenCSFR
csfrChallenge _ = TokenCSFR

-- | Check a submitted token against the expected UUID (secret currently
-- unused; see the TODO on 'csfrChallenge').
correctCSFR :: Text -> UUID -> TokenCSFR -> Bool
correctCSFR _ x (TokenCSFR y) = x == y

-- Renders as the UUID's textual form.
instance ToHtml TokenCSFR where
  toHtmlRaw = toHtml
  toHtml (TokenCSFR t) = toHtml (toText t)
|
mreider/kinda-might-work
|
src/TokenCSFR.hs
|
Haskell
|
mit
| 948
|
{-# OPTIONS -Wall #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE LambdaCase #-}
import Control.Monad (foldM)
import Data.Bool (bool)
import Data.Functor
import qualified Data.List as List
import Data.Ord (comparing)
import Data.Text (Text)
import Data.Vector (Vector, (//))
import qualified Data.Vector as Vector
import Helpers.Parse
import System.Environment (getArgs)
import Text.Parsec hiding (Error, State)
import Prelude hiding (lookup)
-- | Model numbers for the ALU puzzle are 14 digits long.
modelNumberLength :: Int
modelNumberLength = 14
-- Search seed: every digit set to 1.
startingModelNumber :: Input
startingModelNumber = Vector.replicate modelNumberLength 1
-- | The four ALU registers.
data Var = W | X | Y | Z
  deriving (Show)
-- | An operand: a register or an integer literal.
data Value = Var Var | Num Int
instance Show Value where
  show (Var var) = show var
  show (Num num) = show num
-- | The register file, in W X Y Z order.
data State = State Int Int Int Int
  deriving (Show)
-- | One ALU instruction.
data Instruction
  = Inp Var
  | Add Var Value
  | Mul Var Value
  | Div Var Value
  | Mod Var Value
  | Eql Var Value
  deriving (Show)
type Program = [Instruction]
type Input = Vector Int
-- | Arithmetic faults the ALU can hit at run time.
data Error = DivisionByZero | NegativeModulus
  deriving (Show)
-- | Evaluation outcome: success, input exhausted mid-program, or a fault.
data Result a = Done a | NoInputLeft State | Error Error
  deriving (Functor, Show)
instance Applicative Result where
  pure = Done
  -- Clause order matters: an 'Error' on either side takes precedence over
  -- 'NoInputLeft', which in turn short-circuits 'Done'.
  Error failure <*> _ = Error failure
  _ <*> Error failure = Error failure
  NoInputLeft state <*> _ = NoInputLeft state
  _ <*> NoInputLeft state = NoInputLeft state
  Done f <*> Done x = Done (f x)
instance Monad Result where
  Error failure >>= _ = Error failure
  NoInputLeft state >>= _ = NoInputLeft state
  Done x >>= f = f x
-- | With no CLI arguments: search for an input the program accepts
-- (final Z register 0) and print its digits. With arguments: treat them as
-- the input digits and print the raw run result.
main :: IO ()
main = do
  program <- parseLinesIO parser
  -- NOTE(review): 'read' is partial; a non-numeric argument crashes here.
  argsInput <- Vector.fromList . map read <$> getArgs
  if null argsInput
    then putStrLn $ concatMap show $ Vector.toList $ findInput program startingModelNumber
    else print $ run program argsInput
-- | Greedy local search: if the current input is not accepted, try every
-- single-digit change (each position, each value 1..9), keep the candidate
-- with the smallest final Z, and recurse.
-- NOTE(review): no visited set -- may loop forever if the search plateaus.
findInput :: Program -> Input -> Input
findInput program input =
  case run program input of
    Done (State _ _ _ 0) -> input
    _ ->
      let attempts = do
            position <- [0 .. modelNumberLength - 1]
            value <- [1 .. 9]
            let trial = input // [(position, value)]
            return (trial, run program trial)
       in findInput program $ selectBest attempts
-- | Pick the candidate whose run completed with the smallest Z register.
-- NOTE(review): partial -- 'List.minimumBy' throws if no attempt produced
-- a 'Done' result.
selectBest :: [(a, Result State)] -> a
selectBest results =
  fst $
    List.minimumBy (comparing (\(_, State _ _ _ z) -> z)) $
      results >>= \case (updated, Done state) -> [(updated, state)]; _ -> []
-- | Run a whole program from the zeroed register file, threading the
-- remaining input through each instruction.
run :: Program -> Input -> Result State
run program input = snd <$> foldM eval (input, State 0 0 0 0) program
-- | Execute one instruction against (remaining input, registers).
-- 'Inp' consumes one input value; the arithmetic forms fault on division
-- by zero and on the modulus restrictions below.
eval :: (Input, State) -> Instruction -> Result (Input, State)
eval (input, state) (Inp a) =
  case Vector.uncons input of
    Nothing -> NoInputLeft state
    Just (i, rest) -> Done (rest, update a i state)
eval (input, state) (Add a b) =
  let c = lookup (Var a) state + lookup b state
   in Done (input, update a c state)
eval (input, state) (Mul a b) =
  let c = lookup (Var a) state * lookup b state
   in Done (input, update a c state)
eval (input, state) (Div a b) =
  case lookup b state of
    0 -> Error DivisionByZero
    b' ->
      -- 'quot' truncates toward zero
      let c = lookup (Var a) state `quot` b'
       in Done (input, update a c state)
eval (input, state) (Mod a b) =
  -- mod faults unless a >= 0 and b > 0 (zero divisor is its own error)
  case (lookup (Var a) state, lookup b state) of
    (_, 0) -> Error DivisionByZero
    (a', _) | a' < 0 -> Error NegativeModulus
    (_, b') | b' < 0 -> Error NegativeModulus
    (a', b') ->
      let c = a' `mod` b'
       in Done (input, update a c state)
eval (input, state) (Eql a b) =
  -- 1 if equal, 0 otherwise
  let c = bool 0 1 $ lookup (Var a) state == lookup b state
   in Done (input, update a c state)
-- | Store a value into the named register.
update :: Var -> Int -> State -> State
update W w (State _ x y z) = State w x y z
update X x (State w _ y z) = State w x y z
update Y y (State w x _ z) = State w x y z
update Z z (State w x y _) = State w x y z
-- | Read an operand: register contents or a literal.
lookup :: Value -> State -> Int
lookup (Var W) (State w _ _ _) = w
lookup (Var X) (State _ x _ _) = x
lookup (Var Y) (State _ _ y _) = y
lookup (Var Z) (State _ _ _ z) = z
lookup (Num n) _ = n
-- | Parse one instruction line, e.g. @add x -3@ or @inp w@.
parser :: Parsec Text () Instruction
parser =
  choice
    [ try (string "inp") *> (Inp <$> var),
      try (string "add") *> (Add <$> var <*> value),
      try (string "mul") *> (Mul <$> var <*> value),
      try (string "div") *> (Div <$> var <*> value),
      try (string "mod") *> (Mod <$> var <*> value),
      try (string "eql") *> (Eql <$> var <*> value)
    ]
  where
    -- an operand is a register name if possible, otherwise an integer
    value = try (Var <$> var) <|> (Num <$> (spaces *> int))
    var =
      spaces
        *> choice
          [ char 'w' $> W,
            char 'x' $> X,
            char 'y' $> Y,
            char 'z' $> Z
          ]
|
SamirTalwar/advent-of-code
|
2021/AOC_24_2.hs
|
Haskell
|
mit
| 4,543
|
{-# htermination (maxOrdering :: Ordering -> Ordering -> Ordering) #-}
import qualified Prelude
data MyBool = MyTrue | MyFalse
data List a = Cons a (List a) | Nil
data Ordering = LT | EQ | GT ;
ltEsOrdering :: Ordering -> Ordering -> MyBool
ltEsOrdering LT LT = MyTrue;
ltEsOrdering LT EQ = MyTrue;
ltEsOrdering LT GT = MyTrue;
ltEsOrdering EQ LT = MyFalse;
ltEsOrdering EQ EQ = MyTrue;
ltEsOrdering EQ GT = MyTrue;
ltEsOrdering GT LT = MyFalse;
ltEsOrdering GT EQ = MyFalse;
ltEsOrdering GT GT = MyTrue;
max0 x y MyTrue = x;
otherwise :: MyBool;
otherwise = MyTrue;
max1 x y MyTrue = y;
max1 x y MyFalse = max0 x y otherwise;
max2 x y = max1 x y (ltEsOrdering x y);
maxOrdering :: Ordering -> Ordering -> Ordering
maxOrdering x y = max2 x y;
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/basic_haskell/max_6.hs
|
Haskell
|
mit
| 768
|
-- | Evaluators for Language.TaPL.Arith.
module Language.TaPL.Arith.Eval (eval, eval') where
import Control.Applicative ((<$>))
import Language.TaPL.Arith.Syntax (Term(..), isVal, isNumericVal)
-- | Small-step evaluator: repeatedly apply 'eval1' until a value is
-- reached; 'Nothing' means evaluation got stuck.
eval :: Term -> Maybe Term
eval t
  | isVal t = Just t
  | otherwise = eval1 t >>= eval
-- | A single step.
-- Clause order is significant: the value clause fires first, and each
-- congruence clause is guarded so subterms are reduced before the outer
-- redex fires.
eval1 :: Term -> Maybe Term
eval1 t | isVal t = Just t
eval1 (TmIf TmTrue t2 _) = Just t2
eval1 (TmIf TmFalse _ t3) = Just t3
eval1 (TmIf t1 t2 t3) | not $ isVal t1 = (\t1' -> TmIf t1' t2 t3) <$> eval1 t1
-- pred 0 = 0 by convention
eval1 (TmPred TmZero) = Just TmZero
eval1 (TmPred (TmSucc t)) | isNumericVal t = Just t
eval1 (TmPred t) | not $ isVal t = TmPred <$> eval1 t
eval1 (TmIsZero TmZero) = Just TmTrue
eval1 (TmIsZero (TmSucc t)) | isNumericVal t = Just TmFalse
eval1 (TmIsZero t) | not (isVal t) = TmIsZero <$> eval1 t
eval1 (TmSucc t) | not $ isVal t = TmSucc <$> eval1 t
-- no rule applies: the term is stuck
eval1 _ = Nothing
-- | Big step evaluator. 'Nothing' marks a stuck term; the final clause
-- returns values unchanged.
eval' :: Term -> Maybe Term
eval' (TmIf t1 t2 t3) =
  case eval' t1 of
    Just TmTrue -> eval' t2
    Just TmFalse -> eval' t3
    _ -> Nothing
eval' (TmPred t) =
  -- pred 0 = 0 by convention
  case eval' t of
    Just TmZero -> Just TmZero
    Just (TmSucc t') -> Just t'
    _ -> Nothing
eval' (TmSucc t) =
  case eval' t of
    Just TmZero -> Just $ TmSucc TmZero
    -- NOTE(review): the inner 't' shadows the outer binder (harmless here)
    Just (TmSucc t) -> Just $ TmSucc $ TmSucc t
    _ -> Nothing
eval' (TmIsZero t) =
  case eval' t of
    Just TmZero -> Just TmTrue
    Just (TmSucc t') -> Just TmFalse
    _ -> Nothing
eval' t | isVal t = Just t
        | otherwise = Nothing
|
zeckalpha/TaPL
|
src/Language/TaPL/Arith/Eval.hs
|
Haskell
|
mit
| 1,613
|
-- TODO: remove mkApp, unsafeUnApp
{-# LANGUAGE TemplateHaskell, TypeOperators #-}
module Language.CL.C.HOAS.Naming (function, cl) where
import Language.CL.C.HOAS.AST
import Language.CL.C.Types.Classes
import Language.Haskell.TH
import Control.Monad (join)
import Data.Monoid (mempty)
-- | 'function' should be used for user-defined functions.
-- UNSAFE! should be used only inside cl splicing!
-- (The function name is an 'error' placeholder until 'naming' rewrites the
-- declaration and 'mkNamed' sets the real name.)
function :: (Parameters a, RetType b) => (a -> Language.CL.C.HOAS.AST.Body b) -> (a -> Expression b)
function def = mkApp $ mkFun (error "function") mempty def
-- | Rewrite every declaration in the quoted group through 'naming'.
cl :: Q [Dec] -> Q [Dec]
cl quote = join (mapM naming `fmap` quote)
-- | Attach the Haskell-level identifier (and, for function clauses, the
-- formal-argument list) to the HOAS value the declaration builds.
naming :: Dec -> Q Dec
naming (FunD name [Clause funargs (NormalB expr) []]) = do
  flist <- mkFunargList funargs
  -- wrap the body: mkFunarged <flist> (mkNamed "<name>" <expr>)
  let newBody = AppE (AppE (VarE 'mkFunarged) flist) (AppE (mkEntry name) expr)
  return $ FunD name [Clause funargs (NormalB newBody) []]
naming (ValD (VarP name) (NormalB expr) _) =
  return $ ValD (VarP name) (NormalB (AppE (mkEntry name) expr)) []
naming (FunD name _) = fail (show name ++ " -- clause should be only one!")
naming a = return a -- leave other declarations unchanged
-- | Build a 'Funargs' list expression from simple variable patterns.
mkFunargList :: [Pat] -> Q Exp
mkFunargList args = do fars <- mapM f args
                       return $ AppE (ConE 'Funargs) (ListE fars)
  where f (VarP v) = return $ AppE (VarE 'mkFunarg) (VarE v)
        f _ = fail "Pattern matching is not allowed!"
-- | Record the formal-argument list on a HOAS function.
mkFunarged :: (Parameters a, RetType b) => Funargs -> (a -> Expression b) -> (a -> Expression b)
mkFunarged funargs fun = mkApp (addFunargs funargs $ unsafeUnApp fun)
-- | TH expression for @mkNamed "<identifier>"@.
mkEntry :: Name -> Exp
mkEntry identificator = AppE (VarE 'mkNamed) (LitE (StringL (show identificator)))
-- | Set the generated C name of a HOAS function.
mkNamed :: (Parameters a, RetType b) => String -> (a -> Expression b) -> (a -> Expression b)
mkNamed ident fun = mkApp (setName ident $ unsafeUnApp fun)
|
pxqr/language-cl-c
|
Language/CL/C/HOAS/Naming.hs
|
Haskell
|
mit
| 1,848
|
module Javelin.Lib.ByteCode.FieldMethod where
import qualified Data.Binary.Get as Get
import qualified Data.Map.Lazy as Map
import qualified Data.Word as Word
import qualified Javelin.Lib.ByteCode.Attribute as Attribute
import qualified Javelin.Lib.ByteCode.Data as ByteCode
import qualified Javelin.Lib.ByteCode.Utils as Utils
-- | JVM class-file field access_flags bit masks (JVM spec, table 4.5-A).
fieldInfoAF :: Map.Map Word.Word16 ByteCode.FieldInfoAccessFlag
fieldInfoAF =
  Map.fromList
    [ (0x0001, ByteCode.FieldPublic)
    , (0x0002, ByteCode.FieldPrivate)
    , (0x0004, ByteCode.FieldProtected)
    , (0x0008, ByteCode.FieldStatic)
    , (0x0010, ByteCode.FieldFinal)
    , (0x0040, ByteCode.FieldVolatile)
    , (0x0080, ByteCode.FieldTransient)
    , (0x1000, ByteCode.FieldSynthetic)
    , (0x4000, ByteCode.FieldEnum)
    ]
-- | JVM class-file method access_flags bit masks (JVM spec, table 4.6-A).
-- FIX: add the missing top-level type signature, matching the sibling
-- 'fieldInfoAF' (flagged by -Wall as missing-signatures).
methodInfoAF :: Map.Map Word.Word16 ByteCode.MethodInfoAccessFlag
methodInfoAF =
  Map.fromList
    [ (0x0001, ByteCode.MethodPublic)
    , (0x0002, ByteCode.MethodPrivate)
    , (0x0004, ByteCode.MethodProtected)
    , (0x0008, ByteCode.MethodStatic)
    , (0x0010, ByteCode.MethodFinal)
    , (0x0020, ByteCode.MethodSynchronized)
    , (0x0040, ByteCode.MethodBridge)
    , (0x0080, ByteCode.MethodVarargs)
    , (0x0100, ByteCode.MethodNative)
    , (0x0400, ByteCode.MethodAbstract)
    , (0x0800, ByteCode.MethodStrict)
    , (0x1000, ByteCode.MethodSynthetic)
    ]
-- | Shared binary parser for field_info and method_info structures: both
-- are (access_flags, name_index, descriptor_index, attributes).
getFieldMethod ::
     Map.Map Word.Word16 flag
  -> ([flag] -> Word.Word16 -> Word.Word16 -> [ByteCode.AttrInfo] -> x)
  -> [ByteCode.Constant]
  -> Get.Get x
getFieldMethod accessFlags constr pool = do
  maskBytes <- Get.getWord16be
  -- decode the set bits into the corresponding flag values
  let mask = Utils.foldMask accessFlags maskBytes
  nameIndex <- Get.getWord16be
  descriptorIndex <- Get.getWord16be
  attrsCount <- Get.getWord16be
  attributes <- Utils.times (Attribute.getAttr pool) attrsCount
  return $ constr mask nameIndex descriptorIndex attributes
-- | Parse one field_info entry.
getField :: [ByteCode.Constant] -> Get.Get ByteCode.FieldInfo
getField = getFieldMethod fieldInfoAF ByteCode.FieldInfo
-- | Parse one method_info entry.
getMethod :: [ByteCode.Constant] -> Get.Get ByteCode.MethodInfo
getMethod = getFieldMethod methodInfoAF ByteCode.MethodInfo
|
antonlogvinenko/javelin
|
src/Javelin/Lib/ByteCode/FieldMethod.hs
|
Haskell
|
mit
| 2,035
|
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FlexibleInstances #-}
module Spark.IO.Internal.InputGeneric(
generic',
genericWithSchema',
genericWithSchema,
extractResourcePath,
updateResourceStamp
) where
import Spark.Core.Types
import Spark.Core.Try
import Spark.Core.Dataset
import Spark.Core.Internal.Utilities(forceRight)
import Spark.Core.Internal.DatasetFunctions(asDF, emptyDataset, emptyLocalData)
import Spark.Core.Internal.TypesStructures(SQLType(..))
import Spark.Core.Internal.ContextStructures(SparkState)
import Spark.Core.Internal.OpStructures
import Spark.Core.Internal.OpFunctions(convertToExtra')
import Spark.Core.Internal.ContextIOInternal(executeCommand1)
import Spark.IO.Internal.InputStructures
{-| Generates a dataframe from a source description.
This may trigger some calculations on the Spark side if schema inference is
required.
-}
generic' :: SourceDescription -> SparkState DataFrame
generic' sd = do
  dtt <- _inferSchema sd
  -- dtt :: Try DataType; thread the failure into the resulting DataFrame
  return $ dtt >>= \dt -> genericWithSchema' dt sd
{-| Generates a dataframe from a source description, and assumes a given schema.
This schema overrides whatever may have been given in the source description. If
the source description specified that the schema must be checked or inferred,
this instruction is overridden.
While this is convenient, it may lead to runtime errors that are hard to
understand if the data does not follow the given schema.
-}
genericWithSchema' :: DataType -> SourceDescription -> DataFrame
genericWithSchema' dt sd = asDF $ emptyDataset no (SQLType dt) where
  -- pin the schema so the backend performs no checking or inference
  sd' = sd { inputSchema = UseSchema dt }
  so = StandardOperator {
      soName = "org.spark.GenericDatasource",
      soOutputType = dt,
      soExtra = convertToExtra' sd'
    }
  no = NodeDistributedOp so
{-| Generates a dataframe from a source description, and assumes a certain
schema on the source.
-}
genericWithSchema :: forall a. (SQLTypeable a) => SourceDescription -> Dataset a
genericWithSchema sd =
  let sqlt = buildType :: SQLType a
      dt = unSQLType sqlt in
  -- forceRight: fails hard at runtime if the cast is impossible
  forceRight $ castType sqlt =<< genericWithSchema' dt sd
-- Wraps the action of inferring the schema.
-- This is not particularly efficient here: it does a first pass to get the
-- schema, and then will do a second pass in order to read the data.
_inferSchema :: SourceDescription -> SparkState (Try DataType)
_inferSchema = executeCommand1 . _inferSchemaCmd
-- TODO: this is a monoidal operation, it could be turned into a universal
-- aggregator.
-- Builds the local computation node that asks the backend for the schema.
_inferSchemaCmd :: SourceDescription -> LocalData DataType
_inferSchemaCmd sd = emptyLocalData no sqlt where
  sqlt = buildType :: SQLType DataType
  dt = unSQLType sqlt
  so = StandardOperator {
      soName = "org.spark.InferSchema",
      soOutputType = dt,
      soExtra = convertToExtra' sd
    }
  no = NodeOpaqueAggregator so
|
tjhunter/karps
|
haskell/src/Spark/IO/Internal/InputGeneric.hs
|
Haskell
|
apache-2.0
| 2,906
|
-----------------------------------------------------------------------------
-- |
-- Module : Finance.Hqfl.Pricer.Asay
-- Copyright : (C) 2016 Mika'il Khan
-- License : (see the file LICENSE)
-- Maintainer : Mika'il Khan <co.kleisli@gmail.com>
-- Stability : stable
-- Portability : portable
--
----------------------------------------------------------------------------
{-# LANGUAGE FlexibleInstances #-}
module Finance.Hqfl.Pricer.Asay where
import Finance.Hqfl.Instrument
import Statistics.Distribution.Normal
import Data.Random
-- Pricer presumably implementing Asay's model for options on margined
-- futures (Black-76 without discounting) -- consistent with the rate
-- argument being unused below; TODO confirm against the referenced model.
class Asay a where
  price :: a -> Double -> Double -> Double
instance Asay (Option Future) where
  -- NOTE(review): only European exercise is matched here; pricing an
  -- option with any other maturity style is a runtime pattern-match
  -- failure. The rate parameter 'r' is ignored by the formula.
  price (Option (Future f) m European k t) r v =
    case m of
      Call -> f * cdf normal d1 - k * cdf normal d2
      Put -> k * cdf normal (-d2) - f * cdf normal (-d1)
    where d1 = (log (f / k) + ((v * v) / 2) * t) / (v * sqrt t)
          d2 = d1 - v * sqrt t
          normal = Normal (0 :: Double) 1
|
cokleisli/hqfl
|
src/Finance/Hqfl/Pricer/Asay.hs
|
Haskell
|
apache-2.0
| 971
|
{-# LANGUAGE BangPatterns #-}
{-# OPTIONS -Wall #-}
----------------------------------------------------------------------
-- |
-- Module : Data.ZoomCache.Multichannel.Internal
-- Copyright : Conrad Parker
-- License : BSD3-style (see LICENSE)
--
-- Maintainer : Conrad Parker <conrad@metadecks.org>
-- Stability : unstable
-- Portability : unknown
--
-- ZoomCache multichannel API
----------------------------------------------------------------------
module Data.ZoomCache.Multichannel.Internal (
supportMultichannel
, identifyCodecMultichannel
, oneTrackMultichannel
, mkTrackSpecMultichannel
) where
import Data.ByteString (ByteString)
import Data.Functor.Identity
import qualified Data.IntMap as IM
import qualified Data.Iteratee as I
import Data.TypeLevel.Num hiding ((==))
import qualified Data.Iteratee.Offset as OffI
import Data.Iteratee.ZoomCache.Utils
import Data.Offset
import Data.ZoomCache.Common
import Data.ZoomCache.Multichannel.Common
import Data.ZoomCache.Multichannel.NList()
import Data.ZoomCache.NList
import Data.ZoomCache.Types
----------------------------------------------------------------------
-- | Extend a list of codec identifiers so it can also recognise
-- multichannel streams. Note the knot: the multichannel identifier is
-- built over the already-extended list, so nested multichannel codecs are
-- recognised recursively.
supportMultichannel :: [IdentifyCodec] -> [IdentifyCodec]
supportMultichannel = f
  where f x = x ++ [identifyCodecMultichannel (f x)]
-- Run a pure iteratee to completion and extract its result.
runner1 :: Identity (I.Iteratee s Identity c) -> c
runner1 = runIdentity . I.run . runIdentity
-- | Recognise the multichannel track type: an 8-byte magic, then the
-- channel count, then the identifier of the per-channel sub-codec.
identifyCodecMultichannel :: [IdentifyCodec] -> IdentifyCodec
identifyCodecMultichannel identifiers bs = runner1 $ I.enumPure1Chunk (Offset 0 bs) identifyMulti
  where
    identifyMulti :: (Functor m, Monad m) => I.Iteratee (Offset ByteString) m (Maybe Codec)
    identifyMulti = do
      mIdent <- OffI.takeBS 8
      if mIdent == trackTypeMultichannel
        then do
          channels <- readInt32be
          subIdentLength <- readInt32be
          subCodec <- readCodec identifiers subIdentLength
          return (fmap (foo channels) subCodec)
        else return Nothing
    -- lift the sub-codec to an n-channel codec at the type level
    foo :: Int -> Codec -> Codec
    foo channels (Codec a) = reifyIntegral channels (\n -> Codec (NList n [a]))
----------------------------------------------------------------------
-- | Create a track map for a stream of a given type, as track no. 1
oneTrackMultichannel :: (ZoomReadable a)
                     => Int -> a -> Bool -> Bool -> SampleRateType -> Rational -> ByteString -> TrackMap
oneTrackMultichannel channels a delta zlib !drType !rate !name =
    IM.singleton 1 (mkTrackSpecMultichannel channels a delta zlib drType rate name)
{-# INLINABLE oneTrackMultichannel #-}
{-# DEPRECATED oneTrackMultichannel "Use setCodecMultichannel instead" #-}
-- | Build a 'TrackSpec' whose codec is an n-channel list of the given
-- per-channel codec (channel count reified to the type level).
mkTrackSpecMultichannel :: (ZoomReadable a)
                        => Int -> a -> Bool -> Bool -> SampleRateType -> Rational -> ByteString
                        -> TrackSpec
mkTrackSpecMultichannel channels a = reifyIntegral channels
    (\n -> TrackSpec (Codec (NList n [a])))
{-# DEPRECATED mkTrackSpecMultichannel "Use setCodecMultichannel instead" #-}
|
kfish/zoom-cache
|
Data/ZoomCache/Multichannel/Internal.hs
|
Haskell
|
bsd-2-clause
| 3,084
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DoAndIfThenElse #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Web.Spock.Shared
(-- * Helpers for running Spock
runSpock, spockAsApp
-- * Action types
, SpockAction, SpockActionCtx, ActionT, W.ActionCtxT
-- * Handling requests
, request, header, rawHeader, cookie, reqMethod
, preferredFormat, ClientPreferredFormat(..)
, body, jsonBody, jsonBody'
, files, UploadedFile (..)
, params, param, param'
-- * Working with context
, getContext, runInContext
-- * Sending responses
, setStatus, setHeader, redirect, jumpNext, CookieSettings(..), defaultCookieSettings, CookieEOL(..), setCookie, deleteCookie, bytes, lazyBytes
, text, html, file, json, stream, response
-- * Middleware helpers
, middlewarePass, modifyVault, queryVault
-- * Configuration
, SpockCfg (..), defaultSpockCfg
-- * Database
, PoolOrConn (..), ConnBuilder (..), PoolCfg (..)
-- * Accessing Database and State
, HasSpock (runQuery, getState), SpockConn, SpockState, SpockSession
-- * Basic HTTP-Auth
, requireBasicAuth
-- * Sessions
, defaultSessionCfg, SessionCfg (..)
, defaultSessionHooks, SessionHooks (..)
, SessionPersistCfg(..), readShowSessionPersist
, SessionId
, getSessionId, readSession, writeSession
, modifySession, modifySession', modifyReadSession, mapAllSessions, clearAllSessions
-- * Internals for extending Spock
, getSpockHeart, runSpockIO, WebStateM, WebState
)
where
import Web.Spock.Internal.Monad
import Web.Spock.Internal.SessionManager
import Web.Spock.Internal.Types
import Web.Spock.Internal.CoreAction
import Control.Monad
import Control.Concurrent.STM (STM)
import System.Directory
import qualified Web.Spock.Internal.Wire as W
import qualified Network.Wai as Wai
import qualified Network.Wai.Handler.Warp as Warp
-- | Run a Spock application. Basically just a wrapper around @Warp.run@.
runSpock :: Warp.Port -> IO Wai.Middleware -> IO ()
runSpock port mw =
    do putStrLn ("Spock is running on port " ++ show port)
       app <- spockAsApp mw
       Warp.run port app
-- | Convert a middleware to an application. All failing requests will
-- result in a 404 page
spockAsApp :: IO Wai.Middleware -> IO Wai.Application
spockAsApp = liftM W.middlewareToApp
-- | Get the current user's sessionId. Note that this ID should only be
-- shown to its owner as otherwise sessions can be hijacked.
getSessionId :: SpockActionCtx ctx conn sess st SessionId
getSessionId =
    getSessMgr >>= sm_getSessionId
-- | Write to the current session. Note that all data is stored on the server.
-- The user only receives a sessionId to be identified.
writeSession :: sess -> SpockActionCtx ctx conn sess st ()
writeSession d =
    do mgr <- getSessMgr
       sm_writeSession mgr d
-- | Modify the stored session
modifySession :: (sess -> sess) -> SpockActionCtx ctx conn sess st ()
modifySession f =
    modifySession' $ \sess -> (f sess, ())
-- | Modify the stored session and return a value
modifySession' :: (sess -> (sess, a)) -> SpockActionCtx ctx conn sess st a
modifySession' f =
    do mgr <- getSessMgr
       sm_modifySession mgr f
-- | Modify the stored session and return the new value after modification
modifyReadSession :: (sess -> sess) -> SpockActionCtx ctx conn sess st sess
modifyReadSession f =
    modifySession' $ \sess ->
    let x = f sess
    in (x, x)
-- | Read the stored session
readSession :: SpockActionCtx ctx conn sess st sess
readSession =
    do mgr <- getSessMgr
       sm_readSession mgr
-- | Globally delete all existing sessions. This is useful for example if you want
-- to require all users to relogin
clearAllSessions :: SpockActionCtx ctx conn sess st ()
clearAllSessions =
    do mgr <- getSessMgr
       sm_clearAllSessions mgr
-- | Apply a transformation to all sessions. Be careful with this, as this
-- may cause many STM transaction retries.
mapAllSessions :: (sess -> STM sess) -> SpockActionCtx ctx conn sess st ()
mapAllSessions f =
    do mgr <- getSessMgr
       sm_mapSessions mgr f
-- | Simple session persisting configuration. DO NOT USE IN PRODUCTION
--
-- Serialises the session list with 'show' into a single file and loads it
-- back with 'read'.
readShowSessionPersist :: (Read a, Show a) => FilePath -> SessionPersistCfg a
readShowSessionPersist fp =
    SessionPersistCfg
    { spc_load =
         do isThere <- doesFileExist fp
            if isThere
            then do str <- readFile fp
                    -- BUG FIX: force the lazy 'readFile' to completion so
                    -- the handle is closed before 'spc_store' later tries
                    -- to rewrite the same file (lazy IO would otherwise
                    -- keep it open and the write could fail).
                    -- NOTE(review): 'read' still throws on a corrupt file;
                    -- acceptable for this debug-only helper.
                    length str `seq` return (read str)
            else return []
    , spc_store = writeFile fp . show
    }
|
nmk/Spock
|
src/Web/Spock/Shared.hs
|
Haskell
|
bsd-3-clause
| 4,684
|
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE TypeFamilies #-}
module Futhark.Optimise.Fusion.LoopKernel
( FusedKer(..)
, newKernel
, inputs
, setInputs
, arrInputs
, kernelType
, transformOutput
, attemptFusion
, SOAC
, MapNest
, toNestedSeqStream --not used!
)
where
import Control.Applicative
import Control.Arrow (first)
import Control.Monad
import qualified Data.HashSet as HS
import qualified Data.HashMap.Lazy as HM
import Data.Maybe
import Data.Monoid
import Data.List
import Prelude
import Futhark.Representation.SOACS hiding (SOAC(..))
import qualified Futhark.Representation.SOACS as Futhark
import Futhark.Transform.Rename (renameLambda)
import Futhark.Transform.Substitute
import Futhark.MonadFreshNames
import qualified Futhark.Analysis.HORepresentation.SOAC as SOAC
import qualified Futhark.Analysis.HORepresentation.MapNest as MapNest
import Futhark.Pass.ExtractKernels.ISRWIM (rwimPossible)
import Futhark.Optimise.Fusion.TryFusion
import Futhark.Optimise.Fusion.Composing
import Futhark.Construct
type SOAC = SOAC.SOAC SOACS
type MapNest = MapNest.MapNest SOACS
-- XXX: This function is very gross.
-- | Bind the SOAC's results to fresh idents, then replay each queued
-- array transform on every result, finally binding the transformed values
-- to the requested output names.
transformOutput :: SOAC.ArrayTransforms -> [VName] -> SOAC
                -> Binder SOACS ()
transformOutput ts names soac = do
  validents <- zipWithM newIdent (map baseString names) $ SOAC.typeOf soac
  e <- SOAC.toExp soac
  letBind_ (basicPattern' [] validents) e
  descend ts validents
  where descend ts' validents =
          case SOAC.viewf ts' of
            SOAC.EmptyF ->
              -- no transforms left: copy the idents to the output names
              forM_ (zip names validents) $ \(k, valident) ->
                letBindNames' [k] $ PrimOp $ SubExp $ Var $ identName valident
            t SOAC.:< ts'' -> do
              -- apply the next transform to every ident and recurse
              let es = map (applyTransform t) validents
                  mkPat (Ident nm tp) = Pattern [] [PatElem nm BindVar tp]
              opts <- concat <$> mapM primOpType es
              newIds <- forM (zip names opts) $ \(k, opt) ->
                newIdent (baseString k) opt
              zipWithM_ letBind (map mkPat newIds) $ map PrimOp es
              descend ts'' newIds
-- | Turn one array transform into the corresponding PrimOp on an ident.
applyTransform :: SOAC.ArrayTransform -> Ident -> PrimOp
applyTransform (SOAC.Rearrange cs perm) v =
  Rearrange cs perm $ identName v
applyTransform (SOAC.Reshape cs shape) v =
  Reshape cs shape $ identName v
applyTransform (SOAC.ReshapeOuter cs shape) v =
  let shapes = reshapeOuter shape 1 $ arrayShape $ identType v
  in Reshape cs shapes $ identName v
applyTransform (SOAC.ReshapeInner cs shape) v =
  let shapes = reshapeInner shape 1 $ arrayShape $ identType v
  in Reshape cs shapes $ identName v
applyTransform (SOAC.Replicate n) v =
  Replicate n $ Var $ identName v
-- | Peel the outermost transform off an input, if any, so it can be moved
-- to the kernel's output side.
inputToOutput :: SOAC.Input -> Maybe (SOAC.ArrayTransform, SOAC.Input)
inputToOutput (SOAC.Input ts ia iat) =
  case SOAC.viewf ts of
    t SOAC.:< ts' -> Just (t, SOAC.Input ts' ia iat)
    SOAC.EmptyF   -> Nothing
-- | A kernel under construction during fusion: the current SOAC plus the
-- bookkeeping needed to decide whether more producers can be fused in.
data FusedKer = FusedKer {
    fsoac :: SOAC
  -- ^ the SOAC expression, e.g., mapT( f(a,b), x, y )
  , inplace :: Names
  -- ^ every kernel maintains a set of variables
  -- that alias vars used in in-place updates,
  -- such that fusion is prevented to move
  -- a use of an
  , fusedVars :: [VName]
  -- ^ whether at least a fusion has been performed.
  , kernelScope :: Scope SOACS
  -- ^ The names in scope at the kernel.
  , outputTransform :: SOAC.ArrayTransforms
  , outNames :: [VName]
  }
                deriving (Show)

-- | A fresh kernel around a single SOAC: no fused producers, no output
-- transforms yet.
newKernel :: SOAC -> [VName] -> Scope SOACS -> FusedKer
newKernel soac out_nms scope =
  FusedKer { fsoac = soac
           , inplace = HS.empty
           , fusedVars = []
           , outputTransform = SOAC.noTransforms
           , outNames = out_nms
           , kernelScope = scope
           }
-- | The set of array names consumed by the kernel's SOAC inputs.
arrInputs :: FusedKer -> HS.HashSet VName
arrInputs ker = HS.fromList (map SOAC.inputArray (inputs ker))
-- | All inputs of the kernel's SOAC.
inputs :: FusedKer -> [SOAC.Input]
inputs ker = SOAC.inputs (fsoac ker)
-- | Replace the inputs of the kernel's SOAC.
setInputs :: [SOAC.Input] -> FusedKer -> FusedKer
setInputs inps ker = ker { fsoac = SOAC.setInputs inps (fsoac ker) }
-- | Result types of the kernel's SOAC.
kernelType :: FusedKer -> [Type]
kernelType ker = SOAC.typeOf (fsoac ker)
tryOptimizeSOAC :: Names -> [VName] -> SOAC -> FusedKer
-> TryFusion FusedKer
tryOptimizeSOAC unfus_nms outVars soac ker = do
(soac', ots) <- optimizeSOAC Nothing soac mempty
let ker' = map (SOAC.addTransforms ots) (inputs ker) `setInputs` ker
outIdents = zipWith Ident outVars $ SOAC.typeOf soac'
ker'' = fixInputTypes outIdents ker'
applyFusionRules unfus_nms outVars soac' ker''
-- | First optimise the consumer kernel, then retry the fusion rules.
tryOptimizeKernel :: Names -> [VName] -> SOAC -> FusedKer
                  -> TryFusion FusedKer
tryOptimizeKernel unfus_nms outVars soac ker = do
  ker' <- optimizeKernel (Just outVars) ker
  applyFusionRules unfus_nms outVars soac ker'

-- | Try to make the producer's outputs directly visible as untransformed
-- inputs of the consumer, then fuse.
tryExposeInputs :: Names -> [VName] -> SOAC -> FusedKer
                -> TryFusion FusedKer
tryExposeInputs unfus_nms outVars soac ker = do
  (ker', ots) <- exposeInputs outVars ker
  if SOAC.nullTransforms ots
  then fuseSOACwithKer unfus_nms outVars soac ker'
  else do
    -- The exposed transforms must be pulled into the producer first.
    (soac', ots') <- pullOutputTransforms soac ots
    let outIdents = zipWith Ident outVars $ SOAC.typeOf soac'
        ker'' = fixInputTypes outIdents ker'
    if SOAC.nullTransforms ots'
    then applyFusionRules unfus_nms outVars soac' ker''
    else fail "tryExposeInputs could not pull SOAC transforms"

-- | Update the types of the kernel's inputs that correspond to the
-- given (possibly retyped) producer outputs.
fixInputTypes :: [Ident] -> FusedKer -> FusedKer
fixInputTypes outIdents ker =
  ker { fsoac = fixInputTypes' $ fsoac ker }
  where fixInputTypes' soac =
          map fixInputType (SOAC.inputs soac) `SOAC.setInputs` soac
        fixInputType (SOAC.Input ts v _)
          | Just v' <- find ((==v) . identName) outIdents =
              SOAC.Input ts v $ identType v'
        fixInputType inp = inp

-- | Try each fusion strategy in turn; the direct rule is the fallback.
applyFusionRules :: Names -> [VName] -> SOAC -> FusedKer
                 -> TryFusion FusedKer
applyFusionRules unfus_nms outVars soac ker =
  tryOptimizeSOAC unfus_nms outVars soac ker <|>
  tryOptimizeKernel unfus_nms outVars soac ker <|>
  tryExposeInputs unfus_nms outVars soac ker <|>
  fuseSOACwithKer unfus_nms outVars soac ker

-- | Public entry point: attempt to fuse the producer SOAC into the
-- kernel, returning 'Nothing' if no rule succeeds.
attemptFusion :: MonadFreshNames m =>
                 Names -> [VName] -> SOAC -> FusedKer
              -> m (Maybe FusedKer)
attemptFusion unfus_nms outVars soac ker =
  fmap removeUnusedParamsFromKer <$>
  tryFusion (applyFusionRules unfus_nms outVars soac ker)
            (kernelScope ker)
-- | Drop lambda parameters (and their inputs) that the fused kernel's
-- lambda body never references.  Only applies to map-like SOACs.
removeUnusedParamsFromKer :: FusedKer -> FusedKer
removeUnusedParamsFromKer ker =
  case soac of
    SOAC.Map {}      -> ker { fsoac = soac' }
    SOAC.Redomap {}  -> ker { fsoac = soac' }
    SOAC.Scanomap {} -> ker { fsoac = soac' }
    _                -> ker
  where soac = fsoac ker
        l = SOAC.lambda soac
        inps = SOAC.inputs soac
        (l', inps') = removeUnusedParams l inps
        soac' = l' `SOAC.setLambda`
                (inps' `SOAC.setInputs` soac)

-- | Remove unused array parameters from a lambda together with the
-- corresponding inputs; accumulator parameters are always kept.
removeUnusedParams :: Lambda -> [SOAC.Input] -> (Lambda, [SOAC.Input])
removeUnusedParams l inps =
  (l { lambdaParams = accParams ++ ps' }, inps')
  where allParams = lambdaParams l
        -- Leading params are accumulators; the trailing ones pair up
        -- with the array inputs.
        (accParams, arrParams) =
          splitAt (length allParams - length inps) allParams
        pInps = zip arrParams inps
        -- Keep at least one param/input pair even if nothing is used,
        -- so the SOAC still has a width-defining input.
        (ps', inps') = case (unzip $ filter (used . fst) pInps, pInps) of
                         (([], []), (p,inp):_) -> ([p], [inp])
                         ((ps_, inps_), _)     -> (ps_, inps_)
        used p = paramName p `HS.member` freeVars
        freeVars = freeInBody $ lambdaBody l
-- | Check that the consumer uses at least one output of the producer
-- unmodified (i.e. as a transform-free input).
mapFusionOK :: [VName] -> FusedKer -> Bool
mapFusionOK outVars ker = any (`elem` inpIds) outVars
  where inpIds = mapMaybe SOAC.isVarishInput (inputs ker)

-- | Check that the consumer uses all the outputs of the producer unmodified.
mapWriteFusionOK :: [VName] -> FusedKer -> Bool
mapWriteFusionOK outVars ker = all (`elem` inpIds) outVars
  where inpIds = mapMaybe SOAC.isVarishInput (inputs ker)
-- | The brain of this module: Fusing a SOAC with a Kernel.
-- @soac1@ is the producer whose outputs @outVars@ feed the consumer
-- kernel @ker@; @unfus_set@ names producer outputs that are also used
-- elsewhere and must therefore survive fusion.  Fails in 'TryFusion'
-- when no rule matches.
fuseSOACwithKer :: Names -> [VName] -> SOAC -> FusedKer
                -> TryFusion FusedKer
fuseSOACwithKer unfus_set outVars soac1 ker = do
  -- We are fusing soac1 into soac2, i.e, the output of soac1 is going
  -- into soac2.
  let soac2 = fsoac ker
      cs1 = SOAC.certificates soac1
      cs2 = SOAC.certificates soac2
      inp1_arr = SOAC.inputs soac1
      -- Horizontal fusion requires surviving outputs and equal widths.
      horizFuse= not (HS.null unfus_set) &&
                 SOAC.width soac1 == SOAC.width soac2
      inp2_arr = SOAC.inputs soac2
      lam1 = SOAC.lambda soac1
      lam2 = SOAC.lambda soac2
      w = SOAC.width soac1
      returned_outvars = filter (`HS.member` unfus_set) outVars
      success res_outnms res_soac = do
        let fusedVars_new = fusedVars ker++outVars
        -- Avoid name duplication, because the producer lambda is not
        -- removed from the program until much later.
        uniq_lam <- renameLambda $ SOAC.lambda res_soac
        return $ ker { fsoac = uniq_lam `SOAC.setLambda` res_soac
                     , fusedVars = fusedVars_new
                     , outNames = res_outnms
                     }

  -- Fresh element-level names for each producer output.
  outPairs <- forM (zip outVars $ SOAC.typeOf soac1) $ \(outVar, t) -> do
    outVar' <- newVName $ baseString outVar ++ "_elem"
    return (outVar, Ident outVar' t)

  let mapLikeFusionCheck =
        let (res_lam, new_inp) = fuseMaps unfus_set lam1 inp1_arr outPairs lam2 inp2_arr
            (extra_nms,extra_rtps) = unzip $ filter ((`HS.member` unfus_set) . fst) $
                                     zip outVars $ map (stripArray 1) $ SOAC.typeOf soac1
            res_lam' = res_lam { lambdaReturnType = lambdaReturnType res_lam ++ extra_rtps }
        in (extra_nms, res_lam', new_inp)

  case (soac2, soac1) of
    ---------------------------------
    -- Map/Redomap fusion rules:   --
    ---------------------------------
    (SOAC.Map {}, SOAC.Map {})
      | mapFusionOK outVars ker || horizFuse -> do
      let (extra_nms, res_lam', new_inp) = mapLikeFusionCheck
      success (outNames ker ++ extra_nms) $
              SOAC.Map (cs1++cs2) w res_lam' new_inp

    (SOAC.Map {}, SOAC.Redomap _ _ comm1 lam11 _ nes _)
      | mapFusionOK (drop (length nes) outVars) ker || horizFuse -> do
      let (res_lam', new_inp) = fuseRedomap unfus_set outVars nes lam1 inp1_arr
                                            outPairs lam2 inp2_arr
          unfus_accs = take (length nes) outVars
          unfus_arrs = returned_outvars \\ unfus_accs
      success (unfus_accs ++ outNames ker ++ unfus_arrs) $
              SOAC.Redomap (cs1++cs2) w comm1 lam11 res_lam' nes new_inp

    (SOAC.Redomap _ _ comm2 lam2r _ nes2 _, SOAC.Redomap _ _ comm1 lam1r _ nes1 _)
      | mapFusionOK (drop (length nes1) outVars) ker || horizFuse -> do
      let (res_lam', new_inp) = fuseRedomap unfus_set outVars nes1 lam1 inp1_arr
                                            outPairs lam2 inp2_arr
          unfus_accs = take (length nes1) outVars
          unfus_arrs = returned_outvars \\ unfus_accs
          lamr = mergeReduceOps lam1r lam2r
      success (unfus_accs ++ outNames ker ++ unfus_arrs) $
              SOAC.Redomap (cs1++cs2) w (comm1<>comm2) lamr res_lam' (nes1++nes2) new_inp

    (SOAC.Redomap _ _ comm2 lam21 _ nes _, SOAC.Map {})
      | mapFusionOK outVars ker || horizFuse -> do
      let (res_lam, new_inp) = fuseMaps unfus_set lam1 inp1_arr outPairs lam2 inp2_arr
          (_,extra_rtps) = unzip $ filter ((`HS.member` unfus_set) . fst) $
                           zip outVars $ map (stripArray 1) $ SOAC.typeOf soac1
          res_lam' = res_lam { lambdaReturnType = lambdaReturnType res_lam ++ extra_rtps }
      success (outNames ker ++ returned_outvars) $
              SOAC.Redomap (cs1++cs2) w comm2 lam21 res_lam' nes new_inp

    ----------------------------
    -- Scanomap Fusions:      --
    ----------------------------
    (SOAC.Scanomap _ _ lam2r _ nes2 _, SOAC.Scanomap _ _ lam1r _ nes1 _)
      | horizFuse -> do
      let (res_lam', new_inp) = fuseRedomap unfus_set outVars nes1 lam1 inp1_arr outPairs lam2 inp2_arr
          lamr = mergeReduceOps lam1r lam2r
          unfus_arrs = returned_outvars \\ unfus_accs
          unfus_accs = take (length nes1) outVars
      success (unfus_accs ++ outNames ker ++ unfus_arrs) $
              SOAC.Scanomap (cs1++cs2) w lamr res_lam' (nes1++nes2) new_inp

    -- Map -> Scanomap Fusion
    (SOAC.Scanomap _ _ lam21 _ nes _, SOAC.Map {})
      | mapFusionOK outVars ker || horizFuse -> do
      -- Create new inner reduction function
      let (res_lam, new_inp) = fuseMaps unfus_set lam1 inp1_arr outPairs lam2 inp2_arr
          -- Get the lists from soac1 that still need to be returned
          (_,extra_rtps) = unzip $ filter (\(nm,_)->nm `HS.member` unfus_set) $
                           zip outVars $ map (stripArray 1) $ SOAC.typeOf soac1
          res_lam' = res_lam { lambdaReturnType = lambdaReturnType res_lam ++ extra_rtps }
      success (outNames ker ++ returned_outvars) $
              SOAC.Scanomap (cs1++cs2) w lam21 res_lam' nes new_inp

    ------------------
    -- Write fusion --
    ------------------
    -- Map-write fusion.
    (SOAC.Write _cs _len _lam _ivs as,
     SOAC.Map {})
      | mapWriteFusionOK (outVars ++ map snd as) ker -> do
      let (extra_nms, res_lam', new_inp) = mapLikeFusionCheck
      success (outNames ker ++ extra_nms) $
              SOAC.Write (cs1++cs2) w res_lam' new_inp as

    -- Write-write fusion.
    (SOAC.Write _cs2 _len2 _lam2 ivs2 as2,
     SOAC.Write _cs1 _len1 _lam1 ivs1 as1)
      | horizFuse -> do
      -- Interleave the (index, value) result halves of the two lambdas.
      let zipW xs ys = ys1 ++ xs1 ++ ys2 ++ xs2
            where len = length xs `div` 2 -- same as with ys
                  xs1 = take len xs
                  xs2 = drop len xs
                  ys1 = take len ys
                  ys2 = drop len ys
      let (body1, body2) = (lambdaBody lam1, lambdaBody lam2)
      let body' = Body { bodyLore = bodyLore body1 -- body1 and body2 have the same lores
                       , bodyBindings = bodyBindings body1 ++ bodyBindings body2
                       , bodyResult = zipW (bodyResult body1) (bodyResult body2)
                       }
      let lam' = Lambda { lambdaParams = lambdaParams lam1 ++ lambdaParams lam2
                        , lambdaBody = body'
                        , lambdaReturnType = zipW (lambdaReturnType lam1) (lambdaReturnType lam2)
                        }
      success (outNames ker ++ returned_outvars) $
              SOAC.Write (cs1 ++ cs2) w lam' (ivs1 ++ ivs2) (as2 ++ as1)

    (SOAC.Write {}, _) ->
      fail "Cannot fuse a write with anything else than a write or a map"
    (_, SOAC.Write {}) ->
      fail "Cannot fuse a write with anything else than a write or a map"

    ----------------------------
    -- Stream-Stream Fusions: --
    ----------------------------
    (SOAC.Stream _ _ Sequential{} _ _, SOAC.Stream _ _ form1@Sequential{} _ _)
      | mapFusionOK (drop (length $ getStreamAccums form1) outVars) ker || horizFuse -> do
      -- fuse two SEQUENTIAL streams
      (res_nms, res_stream) <- fuseStreamHelper (outNames ker) unfus_set outVars outPairs soac2 soac1
      success res_nms res_stream

    (SOAC.Stream _ _ Sequential{} _ _, SOAC.Stream _ _ Sequential{} _ _) ->
      fail "Fusion conditions not met for two SEQ streams!"

    (SOAC.Stream _ _ Sequential{} _ _, SOAC.Stream{}) ->
      fail "Cannot fuse a parallel with a sequential Stream!"

    (SOAC.Stream{}, SOAC.Stream _ _ Sequential{} _ _) ->
      fail "Cannot fuse a parallel with a sequential Stream!"

    (SOAC.Stream{}, SOAC.Stream _ _ form1 _ _)
      | mapFusionOK (drop (length $ getStreamAccums form1) outVars) ker || horizFuse -> do
      -- fuse two PARALLEL streams
      (res_nms, res_stream) <- fuseStreamHelper (outNames ker) unfus_set outVars outPairs soac2 soac1
      success res_nms res_stream

    (SOAC.Stream{}, SOAC.Stream {}) ->
      fail "Fusion conditions not met for two PAR streams!"

    -------------------------------------------------------------------
    --- If one is a stream, translate the other to a stream as well.---
    --- This does not get in trouble (infinite computation) because ---
    --- scan's translation to Stream introduces a hindrance to ---
    --- (horizontal fusion), hence repeated application is for the---
    --- moment impossible. However, if with a dependence-graph rep---
    --- we could run in an infinite recursion, i.e., repeatedly ---
    --- fusing map o scan into an infinity of Stream levels! ---
    -------------------------------------------------------------------
    (SOAC.Stream _ _ form2 _ _, _) -> do
      -- If this rule is matched then soac1 is NOT a stream.
      -- To fuse a stream kernel, we transform soac1 to a stream, which
      -- borrows the sequential/parallel property of the soac2 Stream,
      -- and recursively perform stream-stream fusion.
      (soac1', newacc_ids) <- SOAC.soacToStream soac1
      soac1'' <- case form2 of
                   Sequential{} -> toSeqStream soac1'
                   _            -> return soac1'
      fuseSOACwithKer unfus_set (map identName newacc_ids++outVars) soac1'' ker

    (_, SOAC.Scan {}) -> do
      -- A Scan soac can be currently only fused as a (sequential) stream,
      -- hence it is first translated to a (sequential) Stream and then
      -- fusion with a kernel is attempted.
      (soac1', newacc_ids) <- SOAC.soacToStream soac1
      fuseSOACwithKer unfus_set (map identName newacc_ids++outVars) soac1' ker

    (_, SOAC.Stream _ _ form1 _ _) -> do
      -- If it reached this case then soac2 is NOT a Stream kernel,
      -- hence transform the kernel's soac to a stream and attempt
      -- stream-stream fusion recursivelly.
      -- The newly created stream corresponding to soac2 borrows the
      -- sequential/parallel property of the soac1 stream.
      (soac2', newacc_ids) <- SOAC.soacToStream soac2
      soac2'' <- case form1 of
                   Sequential _ -> toSeqStream soac2'
                   _            -> return soac2'
      fuseSOACwithKer unfus_set outVars soac1 $
        ker { fsoac = soac2'', outNames = map identName newacc_ids ++ outNames ker }

    ---------------------------------
    --- DEFAULT, CANNOT FUSE CASE ---
    ---------------------------------
    _ -> fail "Cannot fuse"
-- | Fuse two streams of compatible order and form, merging their
-- lambdas (redomap-style) after unifying the chunk parameter.
fuseStreamHelper :: [VName] -> Names -> [VName] -> [(VName,Ident)]
                 -> SOAC -> SOAC -> TryFusion ([VName], SOAC)
fuseStreamHelper out_kernms unfus_set outVars outPairs
                 (SOAC.Stream cs2 w2 form2 lam2 inp2_arr)
                 (SOAC.Stream cs1 _ form1 lam1 inp1_arr) =
  if getStreamOrder form2 /= getStreamOrder form1
  then fail "fusion conditions not met!"
  else do -- very similar to redomap o redomap composition,
          -- but need to remove first the `chunk' and `i'
          -- parameters of streams' lambdas and put them
          -- back in the resulting stream lambda.
          let nes1 = getStreamAccums form1
              chunk1 = head $ lambdaParams lam1
              chunk2 = head $ lambdaParams lam2
              -- Both lambdas must refer to the same chunk variable.
              hmnms = HM.fromList [(paramName chunk2, paramName chunk1)]
              lam20 = substituteNames hmnms lam2
              lam1' = lam1 { lambdaParams = tail $ lambdaParams lam1 }
              lam2' = lam20 { lambdaParams = tail $ lambdaParams lam20 }
              (res_lam', new_inp) = fuseRedomap unfus_set outVars nes1 lam1'
                                               inp1_arr outPairs lam2' inp2_arr
              res_lam'' = res_lam' { lambdaParams = chunk1 : lambdaParams res_lam' }
              unfus_accs = take (length nes1) outVars
              unfus_arrs = filter (`HS.member` unfus_set) outVars
          res_form <- mergeForms form2 form1
          return ( unfus_accs ++ out_kernms ++ unfus_arrs,
                   SOAC.Stream (cs1++cs2) w2 res_form res_lam'' new_inp )
  where mergeForms (MapLike _) (MapLike o ) = return $ MapLike o
        mergeForms (MapLike _) (RedLike o comm lam0 acc0) = return $ RedLike o comm lam0 acc0
        mergeForms (RedLike o comm lam0 acc0) (MapLike _) = return $ RedLike o comm lam0 acc0
        mergeForms (Sequential acc2) (Sequential acc1) = return $ Sequential (acc1++acc2)
        mergeForms (RedLike _ comm2 lam2r acc2) (RedLike o1 comm1 lam1r acc1) =
          return $ RedLike o1 (comm1<>comm2) (mergeReduceOps lam1r lam2r) (acc1++acc2)
        mergeForms _ _ = fail "Fusing sequential to parallel stream disallowed!"
fuseStreamHelper _ _ _ _ _ _ = fail "Cannot Fuse Streams!"
-- | If a Stream is passed as argument then it converts it to a
-- Sequential Stream; otherwise it fails.
--
-- A 'MapLike' stream becomes sequential with no accumulators; a
-- 'RedLike' stream keeps its accumulators; an already-sequential
-- stream is returned unchanged.
toSeqStream :: SOAC -> TryFusion SOAC
toSeqStream s@(SOAC.Stream _ _ (Sequential _) _ _) = return s
toSeqStream (SOAC.Stream cs w (MapLike _) l inps) =
  return $ SOAC.Stream cs w (Sequential []) l inps
toSeqStream (SOAC.Stream cs w (RedLike _ _ _ acc) l inps) =
  return $ SOAC.Stream cs w (Sequential acc) l inps
-- Fixed error message: previously said "expects a string" instead of
-- "expects a stream".
toSeqStream _ = fail "toSeqStream expects a stream, but given a non-stream SOAC."
-- | This is not currently used, but it might be useful in the future,
-- so I am going to export it in order not to complain about it.
--
-- Wraps a stream's lambda in a fresh inner sequential stream, so the
-- outer stream's body is itself a (nested) stream expression.
toNestedSeqStream :: SOAC -> TryFusion SOAC
--toNestedSeqStream s@(SOAC.Stream _ (Sequential _) _ _ _) = return s
toNestedSeqStream (SOAC.Stream cs w form lam arrs) = do
  -- Rename to avoid duplicating names between outer and inner lambdas.
  innerlam <- renameLambda lam
  instrm_resids <- mapM (newIdent "res_instream") $ lambdaReturnType lam
  let inner_extlam = ExtLambda (lambdaParams innerlam)
                               (lambdaBody innerlam)
                               (staticShapes $ lambdaReturnType innerlam)
      nes = getStreamAccums form
      -- Skip the chunk parameter and the accumulator parameters.
      instrm_inarrs = drop (1 + length nes) $ map paramName $ lambdaParams lam
      insoac = Futhark.Stream cs w form inner_extlam instrm_inarrs
      lam_bind = mkLet' [] instrm_resids $ Op insoac
      lam_body = mkBody [lam_bind] $ map (Futhark.Var . identName) instrm_resids
      lam' = lam { lambdaBody = lam_body }
  return $ SOAC.Stream cs w (Sequential nes) lam' arrs
-- Fixed typo in error message ("paramter" -> "parameter").
toNestedSeqStream _ = fail "In toNestedSeqStream: Input parameter not a stream"
-- Here follows optimizations and transforms to expose fusability.

-- | Run the SOAC optimizations on the kernel's SOAC, accumulating any
-- new output transforms onto the kernel's existing ones.
optimizeKernel :: Maybe [VName] -> FusedKer -> TryFusion FusedKer
optimizeKernel inp ker = do
  (soac, resTrans) <- optimizeSOAC inp (fsoac ker) startTrans
  return $ ker { fsoac = soac
               , outputTransform = resTrans
               }
  where startTrans = outputTransform ker
-- | Fold all 'optimizations' over the SOAC, threading the accumulated
-- output transforms through.  Fails if no optimization applied.
optimizeSOAC :: Maybe [VName] -> SOAC -> SOAC.ArrayTransforms
             -> TryFusion (SOAC, SOAC.ArrayTransforms)
optimizeSOAC inp soac os = do
  res <- foldM comb (False, soac, os) optimizations
  case res of
    (False, _, _)      -> fail "No optimisation applied"
    (True, soac', os') -> return (soac', os')
  -- BUG FIX: previously each optimization was given the original
  -- transforms `os` instead of the accumulated `os'`, silently
  -- discarding transforms produced by earlier optimizations in the fold.
  where comb (changed, soac', os') f =
          do (soac'', os'') <- f inp soac' os'
             return (True, soac'', os'')
          <|> return (changed, soac', os')
-- | An optimization takes the (optional) interesting output names, a
-- SOAC and its current output transforms, and either fails or returns
-- a rewritten SOAC plus updated transforms.
type Optimization = Maybe [VName]
                    -> SOAC
                    -> SOAC.ArrayTransforms
                    -> TryFusion (SOAC, SOAC.ArrayTransforms)

-- | The optimizations attempted, in order.
optimizations :: [Optimization]
optimizations = [iswim, scanToScanomap]
-- | Interchange Scan With Inner Map: rewrites scan-of-map into
-- map-of-scan (with a transpose pushed to the output transforms),
-- exposing more fusion opportunities.
iswim :: Maybe [VName] -> SOAC -> SOAC.ArrayTransforms
      -> TryFusion (SOAC, SOAC.ArrayTransforms)
iswim _ (SOAC.Scan cs w scan_fun scan_input) ots
  | Just (map_pat, map_cs, map_w, map_fun) <- rwimPossible scan_fun,
    (nes, arrs) <- unzip scan_input,
    Just nes_names <- mapM subExpVar nes = do
      let nes_idents = zipWith Ident nes_names $ lambdaReturnType scan_fun
          nes' = map SOAC.identInput nes_idents
          -- The array inputs are transposed so the scanned dimension
          -- becomes innermost.
          map_arrs' = nes' ++ map (SOAC.transposeInput 0 1) arrs
          (scan_acc_params, scan_elem_params) =
            splitAt (length arrs) $ lambdaParams scan_fun
          map_params = map removeParamOuterDim scan_acc_params ++
                       map (setParamOuterDimTo w) scan_elem_params
          map_rettype = map (setOuterDimTo w) $ lambdaReturnType scan_fun
          map_fun' = Lambda map_params map_body map_rettype

          scan_params = lambdaParams map_fun
          scan_body = lambdaBody map_fun
          scan_rettype = lambdaReturnType map_fun
          scan_fun' = Lambda scan_params scan_body scan_rettype
          scan_input' = map (first Var) $
                        uncurry zip $ splitAt (length nes') $ map paramName map_params

          map_body = mkBody [Let (setPatternOuterDimTo w map_pat) () $
                             Op $ Futhark.Scan cs w scan_fun' scan_input'] $
                            map Var $ patternNames map_pat
      -- Record the inverse transpose as an output transform.
      let perm = case lambdaReturnType map_fun of
                   []  -> []
                   t:_ -> 1 : 0 : [2..arrayRank t]
      return (SOAC.Map map_cs map_w map_fun' map_arrs',
              ots SOAC.|> SOAC.Rearrange map_cs perm)
iswim _ _ _ =
  fail "ISWIM does not apply."

-- | Rewrite a plain scan as a scanomap with an identity map part.
scanToScanomap :: Maybe [VName] -> SOAC -> SOAC.ArrayTransforms
               -> TryFusion (SOAC, SOAC.ArrayTransforms)
scanToScanomap _ (SOAC.Scan cs w scan_fun scan_input) ots = do
  let (nes, array_inputs) = unzip scan_input
  return (SOAC.Scanomap cs w scan_fun scan_fun nes array_inputs, ots)
scanToScanomap _ _ _ =
  fail "Only turn scan into scanomaps"
-- | Strip the outer array dimension from a lambda parameter's type.
removeParamOuterDim :: LParam -> LParam
removeParamOuterDim param =
  let t = rowType $ paramType param
  in param { paramAttr = t }

-- | Replace the outer dimension of a lambda parameter's type with @w@.
setParamOuterDimTo :: SubExp -> LParam -> LParam
setParamOuterDimTo w param =
  let t = setOuterDimTo w $ paramType param
  in param { paramAttr = t }

-- | Replace the outer dimension of an identifier's type with @w@.
setIdentOuterDimTo :: SubExp -> Ident -> Ident
setIdentOuterDimTo w ident =
  let t = setOuterDimTo w $ identType ident
  in ident { identType = t }

-- | Replace the outer dimension of an array type with @w@.
setOuterDimTo :: SubExp -> Type -> Type
setOuterDimTo w t =
  arrayOfRow (rowType t) w

-- | Replace the outer dimension of every value bound by a pattern.
setPatternOuterDimTo :: SubExp -> Pattern -> Pattern
setPatternOuterDimTo w pat =
  basicPattern' [] $ map (setIdentOuterDimTo w) $ patternValueIdents pat
-- Now for fiddling with transpositions...

-- | Factor out transforms shared by all the "interesting" inputs,
-- returning them as output transforms plus the stripped inputs.
commonTransforms :: [VName] -> [SOAC.Input]
                 -> (SOAC.ArrayTransforms, [SOAC.Input])
commonTransforms interesting inps = commonTransforms' inps'
  where inps' = [ (SOAC.inputArray inp `elem` interesting, inp)
                | inp <- inps ]

commonTransforms' :: [(Bool, SOAC.Input)] -> (SOAC.ArrayTransforms, [SOAC.Input])
commonTransforms' inps =
  case foldM inspect (Nothing, []) inps of
    -- All interesting inputs shared transform `mot`: strip it and recurse.
    Just (Just mot, inps') -> first (mot SOAC.<|) $ commonTransforms' $ reverse inps'
    _                      -> (SOAC.noTransforms, map snd inps)
  where inspect (mot, prev) (True, inp) =
          case (mot, inputToOutput inp) of
            (Nothing,  Just (ot, inp')) -> Just (Just ot, (True, inp') : prev)
            (Just ot1, Just (ot2, inp'))
              | ot1 == ot2 -> Just (Just ot2, (True, inp') : prev)
            _ -> Nothing
        inspect (mot, prev) inp = Just (mot,inp:prev)

-- | The depth to which a map nest can absorb a transform: bounded both
-- by the nesting levels and by the rank of the results.
mapDepth :: MapNest -> Int
mapDepth (MapNest.MapNest _ _ lam levels _) =
  min resDims (length levels) + 1
  where resDims = minDim $ case levels of
                    []     -> lambdaReturnType lam
                    nest:_ -> MapNest.nestingReturnType nest
        minDim []     = 0
        minDim (t:ts) = foldl min (arrayRank t) $ map arrayRank ts
-- | Move a 'Rearrange' from the output transforms into the map nest's
-- inputs (possible when the permutation stays within the nest depth).
pullRearrange :: SOAC -> SOAC.ArrayTransforms
              -> TryFusion (SOAC, SOAC.ArrayTransforms)
pullRearrange soac ots = do
  nest <- join $ liftMaybe <$> MapNest.fromSOAC soac
  SOAC.Rearrange cs perm SOAC.:< ots' <- return $ SOAC.viewf ots
  if rearrangeReach perm <= mapDepth nest then do
    let -- Expand perm to cover the full extent of the input dimensionality
        perm' inp = perm ++ [length perm..SOAC.inputRank inp-1]
        addPerm inp = SOAC.addTransform (SOAC.Rearrange cs $ perm' inp) inp
        inputs' = map addPerm $ MapNest.inputs nest
    soac' <- MapNest.toSOAC $
      inputs' `MapNest.setInputs` rearrangeReturnTypes nest perm
    return (soac', ots')
  else fail "Cannot pull transpose"

-- | Move a 'Rearrange' found on the (interesting) inputs of a map nest
-- to the output transforms instead.
pushRearrange :: [VName] -> SOAC -> SOAC.ArrayTransforms
              -> TryFusion (SOAC, SOAC.ArrayTransforms)
pushRearrange inpIds soac ots = do
  nest <- join $ liftMaybe <$> MapNest.fromSOAC soac
  (perm, inputs') <- liftMaybe $ fixupInputs inpIds $ MapNest.inputs nest
  if rearrangeReach perm <= mapDepth nest then do
    let invertRearrange = SOAC.Rearrange [] $ rearrangeInverse perm
    soac' <- MapNest.toSOAC $
             inputs' `MapNest.setInputs`
             rearrangeReturnTypes nest perm
    return (soac', ots SOAC.|> invertRearrange)
  else fail "Cannot push transpose"

-- | Actually also rearranges indices.
rearrangeReturnTypes :: MapNest -> [Int] -> MapNest
rearrangeReturnTypes nest@(MapNest.MapNest cs w body nestings inps) perm =
  MapNest.MapNest cs w
  body
  (zipWith setReturnType
   nestings $
   drop 1 $ iterate (map rowType) ts)
  inps
  where origts = MapNest.typeOf nest
        ts = map (rearrangeType perm) origts
        setReturnType nesting t' =
          nesting { MapNest.nestingReturnType = t' }

-- | Find a 'Rearrange' on some exposable input and cancel it on every
-- input by appending its inverse; 'Nothing' if no rearrange is found
-- or some input has too small a rank.
fixupInputs :: [VName] -> [SOAC.Input] -> Maybe ([Int], [SOAC.Input])
fixupInputs inpIds inps =
  case mapMaybe inputRearrange $ filter exposable inps of
    perm:_ -> do inps' <- mapM (fixupInput (rearrangeReach perm) perm) inps
                 return (perm, inps')
    _      -> Nothing
  where exposable = (`elem` inpIds) . SOAC.inputArray

        inputRearrange (SOAC.Input ts _ _)
          | _ SOAC.:> SOAC.Rearrange _ perm <- SOAC.viewl ts = Just perm
        inputRearrange _ = Nothing

        fixupInput d perm inp
          | SOAC.inputRank inp >= d =
              Just $ SOAC.addTransform (SOAC.Rearrange [] $ rearrangeInverse perm) inp
          | otherwise = Nothing
-- | Absorb a 'Reshape' output transform into a map by wrapping the map
-- in one additional map per new outer dimension.
pullReshape :: SOAC -> SOAC.ArrayTransforms -> TryFusion (SOAC, SOAC.ArrayTransforms)
pullReshape (SOAC.Map mapcs _ maplam inps) ots
  | SOAC.Reshape cs shape SOAC.:< ots' <- SOAC.viewf ots,
    all primType $ lambdaReturnType maplam = do
  let mapw' = case reverse $ newDims shape of
        []  -> intConst Int32 0
        d:_ -> d
      inputs' = map (SOAC.addTransform $ SOAC.ReshapeOuter cs shape) inps
      inputTypes = map SOAC.inputType inputs'

  let outersoac :: ([SOAC.Input] -> SOAC) -> (SubExp, [SubExp])
                -> TryFusion ([SOAC.Input] -> SOAC)
      outersoac inner (w, outershape) = do
        let addDims t = arrayOf t (Shape outershape) NoUniqueness
            retTypes = map addDims $ lambdaReturnType maplam

        ps <- forM inputTypes $ \inpt ->
          newParam "pullReshape_param" $
            stripArray (length shape-length outershape) inpt

        inner_body <- runBodyBinder $
          eBody [SOAC.toExp $ inner $ map (SOAC.identInput . paramIdent) ps]
        let inner_fun = Lambda { lambdaParams = ps
                               , lambdaReturnType = retTypes
                               , lambdaBody = inner_body
                               }
        return $ SOAC.Map [] w inner_fun

  -- Build the nest outside-in, one wrapper per added dimension.
  op' <- foldM outersoac (SOAC.Map mapcs mapw' maplam) $
         zip (drop 1 $ reverse $ newDims shape) $
         drop 1 $ reverse $ drop 1 $ tails $ newDims shape
  return (op' inputs', ots')
pullReshape _ _ = fail "Cannot pull reshape"

-- We can make a Replicate output-transform part of a map SOAC simply
-- by adding another dimension to the SOAC.
pullReplicate :: SOAC -> SOAC.ArrayTransforms -> TryFusion (SOAC, SOAC.ArrayTransforms)
pullReplicate soac@SOAC.Map{} ots
  | SOAC.Replicate n SOAC.:< ots' <- SOAC.viewf ots = do
      let rettype = SOAC.typeOf soac
      body <- runBodyBinder $ do
        names <- letTupExp "pull_replicate" =<< SOAC.toExp soac
        resultBodyM $ map Var names
      let lam = Lambda { lambdaReturnType = rettype
                       , lambdaBody = body
                       , lambdaParams = []
                       }
      return (SOAC.Map [] n lam [], ots')
pullReplicate _ _ = fail "Cannot pull replicate"
-- Tie it all together in exposeInputs (for making inputs to a
-- consumer available) and pullOutputTransforms (for moving
-- output-transforms of a producer to its inputs instead).

-- | Try to make the given arrays appear as transform-free inputs of
-- the kernel, returning the kernel plus any factored-out transforms.
exposeInputs :: [VName] -> FusedKer
             -> TryFusion (FusedKer, SOAC.ArrayTransforms)
exposeInputs inpIds ker = do
  let soac = fsoac ker
  (exposeInputs' =<< pushRearrange' soac) <|>
    (exposeInputs' =<< pullRearrange' soac) <|>
    exposeInputs' ker
  where ot = outputTransform ker

        pushRearrange' soac = do
          (soac', ot') <- pushRearrange inpIds soac ot
          return ker { fsoac = soac'
                     , outputTransform = ot'
                     }

        pullRearrange' soac = do
          (soac',ot') <- pullRearrange soac ot
          unless (SOAC.nullTransforms ot') $
            fail "pullRearrange was not enough"
          return ker { fsoac = soac'
                     , outputTransform = SOAC.noTransforms
                     }

        exposeInputs' ker' =
          case commonTransforms inpIds $ inputs ker' of
            (ot', inps') | all exposed inps' ->
              return (ker' { fsoac = inps' `SOAC.setInputs` fsoac ker'}, ot')
            _ -> fail "Cannot expose"

        exposed (SOAC.Input ts _ _)
          | SOAC.nullTransforms ts = True
        exposed inp = SOAC.inputArray inp `notElem` inpIds

-- | The transform-pulling rewrites, attempted in order.
outputTransformPullers :: [SOAC -> SOAC.ArrayTransforms -> TryFusion (SOAC, SOAC.ArrayTransforms)]
outputTransformPullers = [pullRearrange, pullReshape, pullReplicate]

-- | Repeatedly pull output transforms into the SOAC until none remain
-- or no puller applies.
pullOutputTransforms :: SOAC -> SOAC.ArrayTransforms
                     -> TryFusion (SOAC, SOAC.ArrayTransforms)
pullOutputTransforms = attempt outputTransformPullers
  where attempt [] _ _ = fail "Cannot pull anything"
        attempt (p:ps) soac ots =
          do (soac',ots') <- p soac ots
             if SOAC.nullTransforms ots' then return (soac', SOAC.noTransforms)
             else pullOutputTransforms soac' ots' <|> return (soac', ots')
          <|> attempt ps soac ots
|
mrakgr/futhark
|
src/Futhark/Optimise/Fusion/LoopKernel.hs
|
Haskell
|
bsd-3-clause
| 34,103
|
{-# OPTIONS_GHC
-XFlexibleInstances
-XOverlappingInstances
-XMultiParamTypeClasses
-XFlexibleContexts
-XUndecidableInstances
-XTemplateHaskell
-cpp #-}
module Text.RJson (TranslateField,
TranslateFieldD,
translateField,
ToJson,
ToJsonD,
toJson,
exclude,
arrayPrepend,
arrayAppend,
objectExtras,
genericToJson,
enumToJson,
JsonData(..),
FromJson,
FromJsonD,
objectDefaults,
parseJsonString,
parseJsonByteString,
fromJson,
fromJsonString,
fromJsonByteString,
genericFromJson,
enumFromJson,
stripInitialUnderscores,
toJsonString,
firstCharToUpper,
firstCharToLower,
Union(..), Union3, Union4, Union5, Union6,
Union7,Union8,Union9,Union10,
cond)
where
import Data.Generics.SYB.WithClass.Basics
import Data.Generics.SYB.WithClass.Instances
import Data.Generics.SYB.WithClass.Context
import Data.Generics.SYB.WithClass.Derive
import qualified Data.Map as M
import qualified Text.Printf as Printf
import Data.Char
import Data.Ratio
import Data.Array
import Data.Maybe
import Control.Monad.State.Strict
import Control.Monad.Trans
import Control.Monad.Error
import qualified Text.ParserCombinators.Parsec as P
import qualified Data.ByteString.Lazy as B
import System.IO.Unsafe
import qualified Control.Exception as E
import Codec.Text.IConv
import qualified Data.Word as W
-- | A Haskell representation of a JSON
-- data structure.  Numbers are stored as 'Double'; objects are maps
-- from field name to value.
data JsonData = JDString String |
                JDNumber Double |
                JDArray [JsonData] |
                JDBool Bool |
                JDNull |
                JDObject (M.Map String JsonData)
-- | Interleave a separator between consecutive elements of a list
-- (behaves like 'Data.List.intersperse').
listJoin :: a -> [a] -> [a]
listJoin sep list =
  case list of
    []     -> []
    [only] -> [only]
    (y:ys) -> y : sep : listJoin sep ys

-- | Join strings with a separator string
-- (behaves like 'Data.List.intercalate').
concatJoin :: String -> [String] -> String
concatJoin sep = concat . listJoin sep
-- | Render an association list as a JSON object literal.  Keys are
-- escaped with 'escapeString'; values are assumed to be already
-- rendered JSON text.
alistToJsonDict :: [(String, String)] -> String
alistToJsonDict pairs =
  "{" ++ concatJoin "," [entry kv | kv <- pairs] ++ "}"
  where entry (key, val) = escapeString key ++ ":" ++ val
-- Special characters which will be pretty printed.
-- Each entry maps a character to the BODY of its backslash escape
-- sequence; the leading backslash is added by escapeString'.
-- BUG FIX: '\b' previously mapped to "\b" (a literal backspace
-- character), so backspaces were emitted as backslash + raw backspace
-- instead of the JSON escape \b.  It now maps to "b", matching the
-- '\n' -> "n" pattern of the other entries.
-- NOTE(review): the '\'' entry produces \' which is not a legal JSON
-- escape (RFC 8259); left unchanged here to avoid altering output for
-- existing consumers -- confirm whether it should be dropped.
escapeMap :: M.Map Char String
escapeMap = M.fromList [
  ('\\', "\\"), ('"', "\""), ('\'', "'"), ('\n', "n"),
  ('\r', "r"), ('\f', "f"), ('\t', "t"), ('\b', "b")]

-- | Look up the escape-sequence body for a character, if it has one.
escape :: Char -> Maybe String
escape c = M.lookup c escapeMap
-- Characters which can safely be printed as literals: printable ASCII
-- other than the string delimiter and the escape character, plus any
-- non-ASCII character.
-- BUG FIX: previously only '"' was excluded, so a backslash passed the
-- `allowed` test in escapeString' and was emitted RAW, producing
-- invalid JSON (and making the '\\' entry of escapeMap dead code).
allowed' :: Char -> Int -> Bool
allowed' c o
  | o > 127 = True -- Any unicode char is OK.
  | o >= 32 && o < 127 {- exclude DEL == 127 -} && c /= '"' && c /= '\\' = True
  | True = False

-- | True when the character may appear unescaped in a JSON string.
allowed :: Char -> Bool
allowed c = allowed' c (ord c)

-- | Render a character as a JSON \uXXXX escape.
hexEscape :: Char -> String
hexEscape c = Printf.printf "\\u%04x" (ord c)
-- | Escape the tail of a JSON string and append the closing quote.
-- Allowed characters pass through; others use their short escape from
-- 'escapeMap' or fall back to a \uXXXX hex escape.
-- NOTE(review): as written, 'allowed' admits '\\', so backslashes are
-- emitted unescaped here -- confirm against the JSON spec.
escapeString' :: String -> String
escapeString' [] = "\""
escapeString' (c:cs)
  | allowed c =
      c : (escapeString' cs)
  | True =
      (maybe (hexEscape c) (\s -> "\\" ++ s) (escape c)) ++
      (escapeString' cs)

-- | Render a String as a double-quoted, escaped JSON string literal.
escapeString s = '"' : escapeString' s
-- | 'show' renders the JSON value as (compact) JSON text.
instance Show JsonData where
    show (JDString s) = escapeString s
    show (JDNumber n)
        -- Show as an integer if possible, otherwise as a Double.
        -- TODO: Not sure if this is the proper way of testing whether a
        -- double is an integer value +/- epsilon.
        | (fromIntegral (floor n)) == n = show (floor n)
        | True = show n
    show (JDBool True) = "true"
    show (JDBool False) = "false"
    show (JDArray l) = "[" ++ concatJoin "," (map show l) ++ "]"
    show JDNull = "null"
    show (JDObject o) = alistToJsonDict (map (\(k,v) -> (k, show v)) (M.toList o))
--
-- TranslateField class.
--
class TranslateField a where
    -- | This method defines the mapping from Haskell record field names
    -- to JSON object field names. The default is to strip any initial
    -- underscores. Specialize this method to define a different behavior.
    translateField :: a -> String -> String

-- | Method dictionary for 'TranslateField' (syb-with-class encoding).
data TranslateFieldD a = TranslateFieldD { translateFieldD :: a -> String -> String }

-- Proxy used only at the type level to select the dictionary.
translateFieldProxy :: Proxy TranslateFieldD
translateFieldProxy = error "'translateFieldProxy' value should never be evaluated!"

instance (TranslateField t) => Sat (TranslateFieldD t) where
    dict = TranslateFieldD { translateFieldD = translateField }
-- | Removes initial underscores from a string.
stripInitialUnderscores :: String -> String
stripInitialUnderscores = dropWhile ('_' ==)
-- Default (overlappable) instance: strip leading underscores from the
-- Haskell field name.
instance Typeable a => TranslateField a where
    translateField _ x = stripInitialUnderscores x
--
-- ToJson class plus SYB boilerplate.
--
-- | New instances can be added to this class to customize certain aspects
-- of the way in which Haskell types are serialized to JSON.
class TranslateField a => ToJson a where
    toJson :: a -> JsonData
    -- For lists (same trick used by the Prelude to allow special
    -- handling of list types for Show).
    lToJson :: [a] -> JsonData
    lToJson l = JDArray (map toJson l)
    -- | Applies to record types only. You can specialize this method to
    -- prevent certain fields from being serialized.
    -- Given a Haskell field name, it should return True if that field is
    -- to be serialized, and False otherwise.
    exclude :: a -> String -> Bool
    exclude _ _ = False
    -- | Types that will be converted to JSON arrays can override
    -- this method to specify additional elements to be prepended to the array.
    arrayPrepend :: a -> [JsonData]
    arrayPrepend _ = []
    -- | Types that will be converted to JSON arrays can override
    -- this method to specify additional elements to be appended to the array.
    arrayAppend :: a -> [JsonData]
    arrayAppend _ = []
    -- | Types that will be converted to JSON objects can override
    -- this method to specify additional fields of the object.
    objectExtras :: a -> [(String, JsonData)]
    objectExtras _ = []
-- Note the inclusion of translateField from TranslateField.
-- Method dictionary for 'ToJson' (syb-with-class encoding): one field
-- per class method, captured so generic code can call them via 'dict'.
data ToJsonD a = ToJsonD { toJsonD :: a -> JsonData,
                           excludeD :: a -> String -> Bool,
                           arrayPrependD :: a -> [JsonData],
                           arrayAppendD :: a -> [JsonData],
                           objectExtrasD :: a -> [(String, JsonData)],
                           translateFieldD' :: a -> String -> String }

-- Proxy used only at the type level to select the dictionary.
toJsonProxy :: Proxy ToJsonD
toJsonProxy = error "'toJsonProxy' value should never be evaluated!"

-- Again, note inclusion of translateField from TranslateField.
instance ToJson t => Sat (ToJsonD t) where
    dict = ToJsonD { toJsonD = toJson,
                     excludeD = exclude,
                     arrayPrependD = arrayPrepend,
                     arrayAppendD = arrayAppend,
                     objectExtrasD = objectExtras,
                     translateFieldD' = translateField }
--
-- Implementations of toJson for different data types.
--
instance ToJson Bool where
    toJson b = JDBool b

instance ToJson Int where
    toJson i = JDNumber (fromIntegral i)

-- NOTE(review): large Integers may lose precision via Double.
instance ToJson Integer where
    toJson i = JDNumber (fromIntegral i)

--instance Json Float where
--    toJson i = JDNumber (floatToDouble i)

instance ToJson Double where
    toJson i = JDNumber i

instance (Integral a, TranslateField a, Typeable a) => ToJson (Ratio a) where
    toJson i = JDNumber $ (fromIntegral (numerator i)) / (fromIntegral (denominator i))

-- Strings become JSON strings (via lToJson), single Chars become
-- one-character strings.
instance ToJson Char where
    lToJson s = JDString s
    toJson c = JDString [c]

-- 'Nothing' serializes as null; 'Just' is transparent.
instance (Typeable a, ToJson a) => ToJson (Maybe a) where
    toJson (Just c) = toJson c
    toJson Nothing = JDNull

instance (ToJson a, TranslateField a, Data TranslateFieldD (M.Map String a))
    => ToJson (M.Map String a) where
    toJson x = JDObject (M.map toJson x)

instance (ToJson a, TranslateField a, Typeable a) => ToJson [a] where
    toJson = lToJson

-- TODO: Add instances for the other array types supported by GHC.
instance (ToJson a, TranslateField a, Typeable a, Typeable i, Ix i) => ToJson (Array i a) where
    toJson a = toJson (elems a)
-- | This type can be used for merging two or more records together into a single
-- JSON object. By default, a structure such as (Union X Y) is serialized as follows.
-- First, X and Y are serialized, and a runtime error is signalled if the result of
-- serialization is not a JSON object in both cases. The key/value pairs of the
-- two JSON objects are then merged to form a single object.
data Union a b = Union a b deriving Show

$(derive[''Union]) -- In order to derive (Typeable2 Union).
                   -- It seems that we get away with overwriting the instance
                   -- of Data that this creates (if we didn't, we could always
                   -- instantiate Typeable manually for Union).

-- | Nested Unions are left-branching by convention (since this is what you get
-- by using the constructor as an infix operator).
type Union3 a b c = (Union (Union a b) c)
type Union4 a b c d = (Union (Union3 a b c) d)
type Union5 a b c d e = (Union (Union4 a b c d) e)
type Union6 a b c d e f = (Union (Union5 a b c d e) f)
type Union7 a b c d e f g = (Union (Union6 a b c d e f) g)
type Union8 a b c d e f g h = (Union (Union7 a b c d e f g) h)
type Union9 a b c d e f g h i = (Union (Union8 a b c d e f g h) i)
type Union10 a b c d e f g h i j = (Union (Union9 a b c d e f g h i) j)

-- Used by the (ToJson Union) instance below.
isJDObject (JDObject _) = True
isJDObject _ = False

-- Partial: only defined for JDObject (callers guard with isJDObject).
jdObjectMap (JDObject m) = m
instance (ToJson a, ToJson b, TranslateField a, TranslateField b, Typeable a, Typeable b, Typeable2 Union) => ToJson (Union a b) where
toJson (Union x y) =
let jx = toJson x
jy = toJson y
in
if isJDObject jx && isJDObject jy
then JDObject (M.union (jdObjectMap jx) (jdObjectMap jy))
else error "Bad toJson conversion: Attempt to unify JSON values which aren't both objects"
getFields :: Data ToJsonD a => a -> [String]
getFields = constrFields . (toConstr toJsonProxy)
typename x = dataTypeName (dataTypeOf toJsonProxy x)
-- | This function is used as the the implementation of 'toJson' for the
-- generic instance declaration.
-- It's useful to be able to use the same implementation for
-- other instance declarations which override the default implementations
-- of other methods of the ToJson class.
genericToJson :: (Data ToJsonD a, ToJson a, TranslateField a) => a -> JsonData
genericToJson x
| isAlgType (dataTypeOf toJsonProxy x) =
case getFields x of
[] ->
case gmapQ toJsonProxy (toJsonD dict) x of
[v] -> v -- Special default behavior for algebraic constructors with one field.
vs -> JDArray $ (arrayPrependD dict x) ++ vs ++ (arrayAppendD dict x)
fs ->
let
translatedFsToInclude =
map (translateFieldD' dict x) (filter (not . (excludeD dict x)) (getFields x))
in
JDObject $ M.fromList (objectExtrasD dict x ++ (zip translatedFsToInclude (gmapQ toJsonProxy (toJsonD dict) x)))
| True =
error $ "Unable to serialize the primitive type '" ++ typename x ++ "'"
-- | This function can be used as an implementation of 'toJson' for simple enums.
-- It converts an enum value to a string determined by the name of the constructor,
-- after being fed through the (String -> String) function given as the first argument.
enumToJson :: (Data ToJsonD a, ToJson a, TranslateField a) => (String -> String) -> a -> JsonData
enumToJson transform x
| isAlgType (dataTypeOf toJsonProxy x) = JDString (transform (showConstr (toConstr toJsonProxy x)))
| True = error "Passed non-algebraic type to enumToJson"
instance (Data ToJsonD t, TranslateField t) => ToJson t where
toJson = genericToJson
-- Instances for tuples up to n=7 (this limit is set by the non-existence of Typeable8).
-- Tuples are converted to (heterogenous) JSON lists.
#define I(x) ToJson x, Typeable x
instance (I(a), I(b)) => ToJson (a, b) where
toJson (a,b) = JDArray [toJson a, toJson b]
instance (I(a), I(b), I(c)) => ToJson (a,b,c) where
toJson (a,b,c) = JDArray [toJson a, toJson b, toJson c]
instance (I(a), I(b), I(c), I(d)) => ToJson (a,b,c,d) where
toJson (a,b,c,d) = JDArray [toJson a, toJson b, toJson c, toJson d]
instance (I(a), I(b), I(c), I(d), I(e)) => ToJson (a,b,c,d,e) where
toJson (a,b,c,d,e) = JDArray [toJson a, toJson b, toJson c, toJson d, toJson e]
instance (I(a), I(b), I(c), I(d), I(e), I(f)) =>
ToJson (a,b,c,d,e,f) where
toJson (a,b,c,d,e,f) = JDArray [toJson a, toJson b, toJson c, toJson d, toJson e,
toJson f]
instance (I(a), I(b), I(c), I(d), I(e), I(f), I(g)) =>
ToJson (a,b,c,d,e,f,g) where
toJson (a,b,c,d,e,f,g) = JDArray [toJson a, toJson b, toJson c, toJson d, toJson e,
toJson f, toJson g]
#undef I
--
-- FromJson
--
class TranslateField a => FromJson a where
fromJson :: a -> JsonData -> Either String a
-- For lists (same trick used by the Prelude to allow special
-- handling of list types for Show).
lFromJson :: a -> JsonData -> Either String [a]
lFromJson dummy (JDArray l) = mapM (fromJson dummy) l
-- | To specify default values for the required fields of a JSON object,
-- specialize this method in the instance definition for the relevant
-- datatype.
objectDefaults :: a -> M.Map String JsonData
objectDefaults _ = M.empty
data FromJsonD a = FromJsonD { fromJsonD :: a -> JsonData -> Either String a,
objectDefaultsD :: a -> M.Map String JsonData,
translateFieldD'' :: a -> String -> String }
fromJsonProxy :: Proxy FromJsonD
fromJsonProxy = error "'fromJsonProxy' should never be evaluated!"
-- Note inclusion of translateField from TranslateField.
instance FromJson t => Sat (FromJsonD t) where
dict = FromJsonD { fromJsonD = fromJson,
objectDefaultsD = objectDefaults,
translateFieldD'' = translateField }
instance FromJson Char where
fromJson _ (JDString [c]) = Right c
fromJson _ _ = Left "Bad fromJson conversion: JSON string not of length 1 to 'Char'"
lFromJson _ (JDString s) = Right s
lFromJson _ _ = Left "Bad fromJson conversion: Non-string to 'String'"
instance (FromJson a, TranslateField a, Typeable a) => FromJson (Maybe a) where
fromJson _ JDNull = Right Nothing
fromJson _ y =
case fromJson undefined y of
Left err -> Left err
Right v -> Right $ Just v
instance (FromJson a, TranslateField a, Typeable a) => FromJson [a] where
fromJson _ x = lFromJson undefined x
instance FromJson Int where
fromJson _ (JDNumber n)
| (fromIntegral (floor n)) == n = Right (floor n)
| True =
Left "Bad fromJson conversion: number does not approximate an integer ('Int')"
fromJson _ _ = Left "Bad fromJson conversion: Non-numeric to 'Int'"
instance FromJson Integer where
fromJson _ (JDNumber n)
| (fromIntegral (floor n)) == n = Right (floor n)
| True =
Left "Bad fromJson conversion: number does not approximate an integer ('Integer')"
fromJson _ _ = Left "Bad fromJson conversion: Non-numeric to 'Integer'"
instance FromJson Double where
fromJson _ (JDNumber d) = Right d
fromJson _ _ = Left "Bad fromJson conversion: Non-numeric to 'Double'"
instance (Typeable a, Integral a) => FromJson (Ratio a) where
fromJson _ (JDNumber i) = Right (fromRational (toRational i))
fromJson _ _ = Left "Bad fromJson conversion: Non-numeric to instance of 'Ratio'"
instance FromJson Bool where
fromJson _ (JDBool b) = Right b
fromJson _ _ = Left "Bad fromJson conversion: Non-boolean to 'Bool'"
-- TODO: Use monads instead of 'ifs' if possible (funky type errors
-- which I haven't figured out yet, something to do with monomorphism
-- in let bindings vs. lambda abstraction?).
instance (FromJson a, FromJson b, Typeable a, Typeable b, TranslateField a, TranslateField b) => FromJson (Union a b) where
fromJson _ o@(JDObject _) =
let r1 = fromJson undefined o
r2 = fromJson undefined o
in
if isRight r1 && isRight r2
then Right $ Union (fromRight r1) (fromRight r2)
else Left "Bad fromJson conversion: error constructing subpart of union (did not serialize to object)"
fromJson _ _ = Left "Bad fromJson conversion: attempt to convert non-object to Union"
-- | Shared failure value for the tuple 'FromJson' instances: the JSON
-- value was not an array of exactly @n@ elements.
tuperror :: Int -> Either String a
tuperror n = Left msg
  where
    msg = Printf.printf "Bad fromJson conversion: attempt to convert something that was not a list of length %i to a %i-tuple" n n
#define I(x) FromJson x, Typeable x, TranslateField x
instance (I(a), I(b)) => FromJson (a,b) where
fromJson _ (JDArray [x1,x2]) = do
r1 <- fromJson undefined x1
r2 <- fromJson undefined x2
return (r1,r2)
fromJson _ _ = tuperror 2
instance (I(a), I(b), I(c)) => FromJson (a,b,c) where
fromJson _ (JDArray [x1,x2,x3]) = do
r1 <- fromJson undefined x1
r2 <- fromJson undefined x2
r3 <- fromJson undefined x3
return (r1,r2,r3)
fromJson _ _ = tuperror 3
instance (I(a), I(b), I(c), I(d)) => FromJson(a,b,c,d) where
fromJson _ (JDArray [x1,x2,x3,x4]) = do
r1 <- fromJson undefined x1
r2 <- fromJson undefined x2
r3 <- fromJson undefined x3
r4 <- fromJson undefined x4
return (r1,r2,r3,r4)
fromJson _ _ = tuperror 4
instance (I(a), I(b), I(c), I(d), I(e)) => FromJson (a,b,c,d,e) where
fromJson _ (JDArray [x1,x2,x3,x4,x5]) = do
r1 <- fromJson undefined x1
r2 <- fromJson undefined x2
r3 <- fromJson undefined x3
r4 <- fromJson undefined x4
r5 <- fromJson undefined x5
return (r1,r2,r3,r4,r5)
fromJson _ _ = tuperror 5
instance (I(a), I(b), I(c), I(d), I(e), I(f)) =>
FromJson (a,b,c,d,e,f) where
fromJson _ (JDArray [x1,x2,x3,x4,x5,x6]) = do
r1 <- fromJson undefined x1
r2 <- fromJson undefined x2
r3 <- fromJson undefined x3
r4 <- fromJson undefined x4
r5 <- fromJson undefined x5
r6 <- fromJson undefined x6
return (r1,r2,r3,r4,r5,r6)
fromJson _ _ = tuperror 6
instance (I(a), I(b), I(c), I(d), I(e), I(f), I(g)) =>
FromJson (a,b,c,d,e,f,g) where
fromJson _ (JDArray [x1,x2,x3,x4,x5,x6,x7]) = do
r1 <- fromJson undefined x1
r2 <- fromJson undefined x2
r3 <- fromJson undefined x3
r4 <- fromJson undefined x4
r5 <- fromJson undefined x5
r6 <- fromJson undefined x6
r7 <- fromJson undefined x7
return (r1,r2,r3,r4,r5,r6,r7)
fromJson _ _ = tuperror 7
#undef I
-- | Look up every key in the map, succeeding only if all keys are present.
--
-- Expressed with 'mapM' over the 'Maybe' monad instead of hand-rolled
-- recursion: the first missing key short-circuits the whole lookup to
-- 'Nothing', exactly as the original explicit recursion did.
elemsOfMap :: Ord k => M.Map k v -> [k] -> Maybe [v]
elemsOfMap m = mapM (`M.lookup` m)
type ErrorWithState e s a = ErrorT e (State s) a
-- TODO: Not a very descriptive name. Oh well...
m1 :: (Data FromJsonD a) => ErrorWithState String [JsonData] a
m1 = do
jvl <- lift get
(case jvl of
[] -> throwError "Bad fromJson conversion: Not enough elements in JSON array to satisfy constructor"
(jv:jvs) -> do
lift $ put jvs
(case fromJsonD dict (undefined :: a) jv of
Left e -> throwError e
Right x -> return x))
-- TODO: Again, uninformative name.
-- TODO: Some code duplication here.
m2 :: (Data FromJsonD a, TranslateField a) => M.Map String JsonData -> (String -> String) -> a -> ErrorWithState String (M.Map String JsonData, [String]) a
m2 defaults transFunc dummy = do
(m, sl) <- lift get
(case sl of
[] -> throwError "Bad fromJson conversion: Not enough fields in JSON object to satisfy constructor"
(f:fs) -> do
lift $ put (m, fs)
let stripped = transFunc f
(case M.lookup stripped m of
Nothing ->
case M.lookup stripped defaults of
Nothing -> throwError $ "Bad fromJson conversion: Required field not present in JSON object: " ++ stripped
Just v ->
case fromJsonD dict dummy v of
Left e -> throwError e
Right x -> return x
Just v ->
case fromJsonD dict dummy v of
Left e -> throwError e
Right x -> return x))
-- TODO: Another uninformative name.
m3 :: (Data FromJsonD a, TranslateField a) => JsonData -> a -> ErrorWithState String Int a
m3 jsondata dummy = do
s <- get
if s > 0
then throwError "Bad fromJson conversion: Expecting JSON object or array; did not attempt automatic boxing because constructor takes more than one argument."
else do
put (s + 1)
case fromJsonD dict dummy jsondata of
Left e -> throwError e
Right x -> return x
genericFromJson :: (Data FromJsonD a, FromJson a, TranslateField a) => a -> JsonData -> Either String a
genericFromJson dummy (JDArray l) =
case datarep (dataTypeOf fromJsonProxy dummy) of
AlgRep ccs@(c:cs) -> evalArrayConstr ccs
where
evalArrayConstr = tryHead err . dropWhile isLeft . map es
es :: (Data FromJsonD a, FromJson a) => Constr -> Either String a
es c = evalState (runErrorT (fromConstrM fromJsonProxy m1 c)) (tryTail l)
tryTail = cond null (const []) tail
tryHead def = cond null (const def) head
err = Left "Bad fromJson conversion: Type with no constructors!"
AlgRep _ -> Left "Bad fromJson conversion: Type with no constructors!"
_ -> Left "Bad fromJson conversion: Non-algebraic datatype given to 'genericFromJson'"
genericFromJson dummy (JDObject m) =
case datarep (dataTypeOf fromJsonProxy dummy) of
AlgRep cs@(_:_) -> evalConstrs dummy m cs
_ -> Left "Bad fromJson conversion: Non-algebraic datatype given to 'genericFromJson'"
genericFromJson dummy jsondata =
case datarep (dataTypeOf fromJsonProxy dummy) of
AlgRep [c] -> evalState (runErrorT (gmapM fromJsonProxy (m3 jsondata) (fromConstr fromJsonProxy c))) 0
AlgRep _ -> Left "Bad fromJson conversion: Expecting JSON object or array; did not attempt automatic boxing because type has more than one constructor."
genericFromJson _ _ = Left "Bad fromJson conversion: Expecting JSON object or array"
evalConstrs :: (Data FromJsonD a, FromJson a) => a -> M.Map String JsonData -> [Constr] -> Either [Char] a
evalConstrs dummy m = tryHead err . dropWhile isLeft . map (evalConstr dummy m)
where
tryHead def = cond null (const def) head
err = Left "Bad fromJson conversion: Type with no constructors!"
-- | Attempt to populate a single record constructor from the key/value
-- pairs of a JSON object.  Non-record constructors (no field labels) are
-- rejected; otherwise each field is filled in order by the 'm2' state
-- machine, which consumes the field-name list and consults the defaults
-- map for keys missing from the object.
evalConstr :: (Data FromJsonD a, FromJson a) => a -> M.Map String JsonData -> Constr -> Either [Char] a
evalConstr dummy m c = case constrFields c of
  [] -> Left $ "Bad fromJson conversion: Attempt to convert JDObect to a non-record algebraic type"
  -- TODO:
  -- Can't use fromConstrM because we need to get dummy values of the
  -- appropriate type for each argument of the constructor. This is unfortunate,
  -- because it means that we get runtime errors for records with strict fields.
  fs -> evalState (runErrorT (gmapM fromJsonProxy (m2 (objectDefaultsD dict dummy) (translateFieldD'' dict dummy)) (fromConstr fromJsonProxy c))) (m, fs)
constrNames :: (Data FromJsonD a, Data TranslateFieldD a) => a -> [String]
constrNames x = map showConstr (dataTypeConstrs (dataTypeOf fromJsonProxy x))
-- | The counterpart of 'enumToJson'.
enumFromJson :: (Data FromJsonD a, Data TranslateFieldD a) => (String -> String) -> a -> JsonData -> Either String a
enumFromJson transform dummy (JDString s) =
let cname = (transform s) in
if elem cname (constrNames dummy)
then
case fromConstrM fromJsonProxy Nothing (mkConstr (dataTypeOf fromJsonProxy dummy) cname [] Prefix ) of
Nothing -> Left "Error in enumFromJson"
Just x -> Right x
else Left "Constructor name not recognized in enumFromJson"
enumFromJson _ _ _ = Left "Non-string given to enumFromJson"
instance (Data FromJsonD t, TranslateField t) => FromJson t where
fromJson = genericFromJson
--
-- JSON parser.
--
-- Determine the unicode encoding of a byte stream
-- on the assumption that it begins with two ASCII characters.
-- | Guess the unicode encoding of a byte stream by inspecting its first
-- four bytes, on the assumption that the stream begins with two ASCII
-- characters.  Falls back to UTF-8 whenever the byte pattern is ambiguous
-- or the input is shorter than four bytes.
getEncoding :: B.ByteString -> EncodingName
getEncoding s
    -- Fewer than 4 bytes: no way to determine the encoding.
    | B.length s < 4 = "UTF-8"
    -- Little endian UTF 32/16.
    | b1 /= 0 && b2 == 0 && b3 == 0 && b4 == 0 = "UTF-32LE"
    | b1 /= 0 && b2 == 0 && b3 /= 0 && b4 == 0 = "UTF-16LE"
    -- Big endian UTF 32/16.
    | b1 == 0 && b2 == 0 && b3 == 0 && b4 /= 0 = "UTF-32BE"
    | b1 == 0 && b2 /= 0 && b3 == 0 && b4 /= 0 = "UTF-16BE"
    -- UTF-8 (BOM allowed but not required).
    | b1 /= 0 && b2 /= 0 && b3 /= 0 && b4 /= 0 = "UTF-8"
    -- If we can't figure it out, guess at UTF-8.
    | otherwise = "UTF-8"
  where
    b1 = B.index s 0
    b2 = B.index s 1
    b3 = B.index s 2
    b4 = B.index s 3
-- Converts a ByteString to a String of unicode code points.
--
-- The input is transcoded from 'enc' to UTF-16LE via 'convertFuzzy'
-- (invalid sequences dropped, per 'Discard'), successive byte pairs are
-- combined little-endian into code points, and a leading byte order mark,
-- if any, is stripped from the result.
--
-- NOTE(review): 'pairBytes' has no clause for a single trailing byte, so
-- it assumes the converted stream always has even length -- presumably
-- guaranteed by the UTF-16LE conversion; confirm, otherwise odd-length
-- input would crash with a pattern-match failure.
toHaskellString :: EncodingName -> B.ByteString -> String
toHaskellString enc source =
    stripBOM $ map chr (pairBytes (B.unpack bs))
    where
      -- Combine little-endian byte pairs into integer code points.
      pairBytes :: [W.Word8] -> [Int]
      pairBytes [] = []
      pairBytes (c:c':cs) = ((fromIntegral c) + (fromIntegral c')*256) : (pairBytes cs)
      -- Normalise everything to UTF-16LE before pairing bytes.
      bs = convertFuzzy Discard enc "UTF-16LE" source
      -- Drop a leading BOM in any of its decoded forms.
      stripBOM :: String -> String
      stripBOM ('\0':'\0':'\xFE':'\xFF':cs) = cs
      stripBOM ('\xFF':'\xFE':'\0':'\0':cs) = cs
      stripBOM ('\xFE':'\xFF':cs) = cs
      stripBOM ('\xFF':'\xFE':cs) = cs
      stripBOM ('\xEF':'\xBB':'\xBF':cs) = cs
      stripBOM cs = cs
(<|>) = (P.<|>)
-- | Converts a ByteString to an instance of JsonData (unicode encoding
-- is detected automatically).
parseJsonByteString :: B.ByteString -> Either String JsonData
parseJsonByteString bs =
let
decoded = toHaskellString (getEncoding bs) bs
in
case P.runParser (ws >> jsonValue) () "" decoded of
Left e -> Left (show e)
Right x -> Right x
-- | Converts a String (interpreted as a true unicode String) to an instance
-- of JsonData.
parseJsonString :: String -> Either String JsonData
parseJsonString s =
case P.runParser (ws >> jsonValue) () "" s of
Left e -> Left (show e)
Right x -> Right x
-- | Apply a pure function to the result of a monadic computation
-- (i.e. 'fmap' restricted to monads).
apply f p = p >>= \r -> return (f r)
-- | Run two computations in sequence and append their list results.
pconcat p1 p2 = p1 >>= \l1 -> p2 >>= \l2 -> return (l1 ++ l2)
-- | Wrap a parser's result in a singleton list.
listify :: P.Parser x -> P.Parser [x]
listify p = apply (\x -> [x]) p
ws = P.many (P.oneOf [' ','\r','\n','\t','\f','\v'])
-- Could use the ParsecToken module, but trying a floating point number
-- then an integer is a bit inefficient (especially since integers will
-- be more common).
number :: P.Parser JsonData
number = do
neg <- (P.char '-' >> return True) <|> return False
i <- P.many1 P.digit
point <- P.option Nothing (apply Just (P.char '.' >> P.many1 P.digit))
exponent <- P.option Nothing (apply Just (P.char 'e' >> pconcat (P.option "" (listify (P.char '-'))) (P.many1 P.digit)))
let n = if point == Nothing && exponent == Nothing
then read i :: Double
else read (i ++ (if point == Nothing then "" else "." ++ fromJust point) ++
(if exponent == Nothing then "" else "e" ++ fromJust exponent)) :: Double
return . JDNumber $ if neg then negate n else n
stringChar :: Char -> P.Parser Char
stringChar opener = do
-- Fail immediately on either single or double quotes or
-- on control characters.
c <- P.satisfy (\c -> c /= opener && (ord c) > 31)
(case c of
'\\' ->
(P.char '"' >> return '"') <|>
(P.char '\'' >> return '\'') <|>
(P.char 'b' >> return '\b') <|>
(P.char 'f' >> return '\f') <|>
(P.char 'n' >> return '\n') <|>
(P.char 'r' >> return '\r') <|>
(P.char 't' >> return '\t') <|>
(do
P.char 'u'
ds <- P.count 4 P.hexDigit
return $ chr (read ("0x" ++ ds) :: Int)) <|>
(P.satisfy allowed >>= return) -- "\X" == "X" by default.
c -> return c)
string :: P.Parser String
string = do
opener <- P.char '"' <|> P.char '\'' -- JSON spec requires double quotes, but we'll be lenient.
cs <- P.many (stringChar opener)
P.char opener
return cs
jsonString = apply JDString string
kvp :: P.Parser (String, JsonData)
kvp = do
s <- string
ws
P.char ':'
ws
v <- jsonValue
return (s, v)
lexeme :: P.Parser a -> P.Parser a
lexeme p = do
r <- p
ws
return r
jsonArray :: P.Parser JsonData
jsonArray = do
P.char '['
ws
vs <- P.sepBy (lexeme jsonValue) (P.char ',' >> ws)
ws
P.char ']'
return $ JDArray vs
object :: P.Parser JsonData
object = do
P.char '{'
ws
kvps <- P.sepBy (lexeme kvp) (P.char ',' >> ws)
ws
P.char '}'
return $ JDObject $ M.fromList kvps
boolean :: P.Parser JsonData
boolean = (P.try (P.string "true") >> return (JDBool True)) <|>
(P.string "false" >> return (JDBool False))
jsonNull :: P.Parser JsonData
jsonNull = P.string "null" >> return JDNull
jsonValue = number <|> jsonString <|> jsonArray <|> object <|> boolean <|> jsonNull
--
-- Some other utilities.
--
-- | Converts a JSON String (interpreted as a true unicode string) to
-- a value of the type given by the first (dummy) argument.
fromJsonString :: FromJson a => a -> String -> Either String a
fromJsonString dummy s =
case parseJsonString s of
Left e -> Left (show e)
Right js ->
case fromJson dummy js of
Left e -> Left e
Right js -> Right js
-- | Converts a JSON ByteString (with unicode encoding automatically detected)
-- to a value of the type given by the first (dummy) argument.
fromJsonByteString :: FromJson a => a -> B.ByteString -> Either String a
fromJsonByteString dummy s =
case parseJsonByteString s of
Left e -> Left (show e)
Right js ->
case fromJson dummy js of
Left e -> Left e
Right js -> Right js
-- | Render a value as an ASCII-only JSON 'String' via its 'ToJson'
-- instance (JsonData's 'Show' produces the textual JSON form).
toJsonString :: ToJson a => a -> String
toJsonString x = show (toJson x)
--
-- A couple of utility functions.
--
-- | Upper-case the first character of a string; the empty string is
-- returned unchanged.
firstCharToUpper :: String -> String
firstCharToUpper s =
    case s of
      []     -> []
      c : cs -> toUpper c : cs
-- | Lower-case the first character of a string; the empty string is
-- returned unchanged.
firstCharToLower :: String -> String
firstCharToLower s =
    case s of
      []     -> []
      c : cs -> toLower c : cs
-- | True exactly when the 'Either' holds a 'Left'.
isLeft :: Either a b -> Bool
isLeft = either (const True) (const False)
-- | True exactly when the 'Either' holds a 'Right'.
isRight :: Either a b -> Bool
isRight = either (const False) (const True)
-- | Extract the 'Left' value.
--
-- Partial by contract: applying it to a 'Right' is a programming error.
-- Improvement over the original: a descriptive 'error' message instead of
-- an anonymous pattern-match failure.
fromLeft :: Either a b -> a
fromLeft (Left x)  = x
fromLeft (Right _) = error "fromLeft: applied to a Right value"
-- | Extract the 'Right' value.
--
-- Partial by contract: applying it to a 'Left' is a programming error.
-- Improvement over the original: a descriptive 'error' message instead of
-- an anonymous pattern-match failure.
fromRight :: Either a b -> b
fromRight (Right x) = x
fromRight (Left _)  = error "fromRight: applied to a Left value"
-- | Church-style conditional: route the argument to one of two
-- continuations depending on the predicate's verdict.
cond :: (a -> Bool) -> (a -> b) -> (a -> b) -> a -> b
cond p th el a
    | p a       = th a
    | otherwise = el a
|
addrummond/RJson
|
Text/RJson.hs
|
Haskell
|
bsd-3-clause
| 31,901
|
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
module Types
( Digit
, zero
, one
, two
, three
, four
, five
, six
, seven
, eight
, nine
, Account
, fromList
, Verified
, verify
, verified
) where
newtype Digit = Digit { unDigit :: Int } deriving (Enum, Eq, Ord)
instance Show Digit where
show = show . unDigit
zero, one, two, three, four, five, six, seven, eight, nine :: Digit
zero = Digit 0
one = Digit 1
two = Digit 2
three = Digit 3
four = Digit 4
five = Digit 5
six = Digit 6
seven = Digit 7
eight = Digit 8
nine = Digit 9
newtype Account = Account { account :: [Digit] } deriving (Eq)
instance Show Account where
show = concatMap show . account
-- | Build an 'Account' from exactly nine digits; any other length is
-- rejected with 'Nothing'.
fromList :: [Digit] -> Maybe Account
fromList ds =
    if length ds == 9
      then Just (Account ds)
      else Nothing
newtype Verified = Verified { verified :: Account } deriving (Eq)
instance Show Verified where
show = show . verified
verify :: Account -> Maybe Verified
verify a | isValid a = Just $ Verified a
| otherwise = Nothing
-- | An account number is valid when its weighted checksum (weights 9..1,
-- most significant digit first) is divisible by 11.
isValid :: Account -> Bool
isValid a = checksum `mod` 11 == 0
  where
    checksum = sum (zipWith (*) [9,8..1] (map unDigit (account a)))
|
mbeidler/kata-bank-ocr
|
src/Types.hs
|
Haskell
|
bsd-3-clause
| 1,215
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveTraversable #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE OverloadedStrings #-}
module Data.SemVer.Range (
-- * Types
RangeOp(..)
, RangeSpec(..)
, Version(..)
, version
, VersionRange
, range
, Identifier(..)
, LatticeSyntax(..)
, (/\)
, (\/)
-- * Parsers
, parseVersion
, parseVersionRange
) where
import Control.Applicative
import Control.DeepSeq
import Control.Monad
import Control.Monad.Trans.State
import Data.Data
import Data.Foldable
import Data.String
import Data.Char
import Data.Traversable
import GHC.Generics
import Prelude hiding (or, all)
import Text.Regex.Applicative as RE
import Text.Regex.Applicative.Common as RE
-- | Forget the error side of an 'Either', keeping only success.
eitherToMaybe :: Either e a -> Maybe a
eitherToMaybe = either (const Nothing) Just
-- Semver
data Version = Version
{ _versionMajor :: !Int
, _versionMinor :: !Int
, _versionPatch :: !Int
, _versionRelease :: [Identifier]
} deriving (Eq, Ord, Show, Typeable, Data, Generic)
-- todo hashable
version :: Int -> Int -> Int -> Version
version x y z = Version x y z []
versionR :: RE Char Version
versionR = uncurry4 Version <$> threeR
parseVersion :: String -> Maybe Version
parseVersion = RE.match versionR
data Identifier = INum !Int
| IStr !String
deriving (Eq, Ord, Show, Data, Typeable, Generic)
-- TODO: hashable
instance IsString Identifier where
fromString str
| all isDigit str = INum $ read str
| otherwise = IStr str
identifiers :: RE Char [Identifier]
identifiers = maybe [] id <$> optional (identifiers')
identifiers' :: RE Char [Identifier]
identifiers' = (:) <$ sym '-' <*> identifier <*> many (sym '.' *> identifier)
identifier :: RE Char Identifier
identifier = INum <$> decimal <|> IStr <$> many (psym (flip Prelude.elem $ '-' : ['a'..'z'] ++ ['A'..'Z'] ++ ['0'..'9']))
---
data RangeOp = ROLT -- ^ @<@
| ROLE -- ^ @<=@
| ROGT -- ^ @>@
| ROGE -- ^ @>=@
| ROEQ -- ^ @=@
deriving (Eq, Ord, Show, Read, Typeable, Data, Generic)
instance NFData RangeOp
data LatticeSyntax a = LVar a
| LBound Bool
| LJoin (LatticeSyntax a) (LatticeSyntax a)
| LMeet (LatticeSyntax a) (LatticeSyntax a)
deriving (Eq, Ord, Read, Show, Functor, Foldable, Traversable, Typeable, Data)
infixr 3 /\
infixr 2 \/
-- | Infix version of 'LMeet'
(/\) :: LatticeSyntax a -> LatticeSyntax a -> LatticeSyntax a
(/\) = LMeet
-- | Infix version of 'LJoin'
(\/) :: LatticeSyntax a -> LatticeSyntax a -> LatticeSyntax a
(\/) = LJoin
instance Applicative LatticeSyntax where
pure = return
(<*>) = ap
instance Monad LatticeSyntax where
return = LVar
LVar x >>= f = f x
LBound b >>= _ = LBound b
LJoin a b >>= f = LJoin (a >>= f) (b >>= f)
LMeet a b >>= f = LMeet (a >>= f) (b >>= f)
freeVars :: LatticeSyntax a -> [a]
freeVars = toList
dual :: LatticeSyntax a -> LatticeSyntax a
dual (LVar v) = LVar v
dual (LBound t) = LBound $ not t
dual (LJoin a b) = LMeet (dual a) (dual b)
dual (LMeet a b) = LJoin (dual a) (dual b)
-- | Test for equivalence.
--
-- >>> equivalent (LMeet (LVar 'a') (LVar 'b')) (LMeet (LVar 'b') (LVar 'a'))
-- True
--
-- >>> equivalent (LVar 'a') (LMeet (LVar 'a') (LVar 'a'))
-- True
--
-- >>> equivalent (LMeet (LVar 'a') (LVar 'b')) (LMeet (LVar 'b') (LVar 'b'))
-- False
equivalent :: Eq a => LatticeSyntax a -> LatticeSyntax a -> Bool
equivalent a b = all (uncurry (==)) . runEval $ p
where p = (,) <$> evalLattice a <*> evalLattice b
-- | Test for preorder.
--
-- @ a ≤ b ⇔ a ∨ b ≡ b ⇔ a ≡ a ∧ b @
--
-- >>> preorder (LVar 'a' `LMeet` LVar 'b') (LVar 'a')
-- True
--
-- >>> preorder (LVar 'a') (LVar 'a' `LMeet` LVar 'b')
-- False
preorder :: Eq a => LatticeSyntax a -> LatticeSyntax a -> Bool
preorder a b = (a `LJoin` b) `equivalent` b
-- | Return `True` if for some variable assigment expression evaluates to `True`.
satisfiable :: Eq a => LatticeSyntax a -> Bool
satisfiable = or . runEval . evalLattice
newtype Eval v a = Eval { unEval :: StateT [(v, Bool)] [] a }
deriving (Functor, Applicative, Alternative, Monad, MonadPlus)
runEval :: Eval v a -> [a]
runEval act = evalStateT (unEval act) []
evalLattice :: Eq v => LatticeSyntax v -> Eval v Bool
evalLattice (LVar v) = guess v
evalLattice (LBound b) = return b
evalLattice (LJoin a b) = evalLattice a ||^ evalLattice b
evalLattice (LMeet a b) = evalLattice a &&^ evalLattice b
-- | Nondeterministically assign a boolean to a variable, remembering the
-- choice in the state.  If 'v' already has a recorded value, reuse it so
-- assignments stay consistent within a branch; otherwise fork (via the
-- underlying list monad) on both 'True' and 'False', recording each
-- choice for subsequent lookups.
guess :: Eq v => v -> Eval v Bool
guess v = Eval $ do
  st <- get
  let remember b = put ((v, b) : st) >> return b
  case lookup v st of
    Just b -> return b
    Nothing -> remember True <|> remember False
-- From Control.Monad.Extra of extra
-- | Like @if@, but where the test can be monadic.
ifM :: Monad m => m Bool -> m a -> m a -> m a
ifM b t f = b >>= \b' -> if b' then t else f
-- | The lazy '||' operator lifted to a monad. If the first
-- argument evaluates to 'True' the second argument will not
-- be evaluated.
--
-- > Just True ||^ undefined == Just True
-- > Just False ||^ Just True == Just True
-- > Just False ||^ Just False == Just False
(||^) :: Monad m => m Bool -> m Bool -> m Bool
a ||^ b = a >>= \x -> if x then return True else b
-- | The lazy '&&' operator lifted to a monad. If the first
-- argument evaluates to 'False' the second argument will not
-- be evaluated.
--
-- > Just False &&^ undefined == Just False
-- > Just True &&^ Just True == Just True
-- > Just True &&^ Just False == Just False
(&&^) :: Monad m => m Bool -> m Bool -> m Bool
a &&^ b = a >>= \x -> if x then b else return False
--
data RangeSpec = RS !RangeOp !Version
deriving (Eq, Ord, Show, Typeable, Data, Generic)
type VersionRange = LatticeSyntax RangeSpec
range :: RangeOp -> Version -> VersionRange
range op v = pure (RS op v)
fullRange :: VersionRange
fullRange = range ROGE (version 0 0 0)
-- Range parser
scalarRangeR :: RE Char VersionRange
scalarRangeR = ge <|> gt <|> lt <|> le <|> eq
where ge = LVar . RS ROGE <$ RE.string ">=" <*> versionR
gt = LVar . RS ROGT <$ RE.string ">" <*> versionR
le = LVar . RS ROLE <$ RE.string "<=" <*> versionR
lt = LVar . RS ROLT <$ RE.string "<" <*> versionR
eq = LVar . RS ROEQ <$> versionR
separatedBy :: (a -> a -> a) -> RE c a -> RE c () -> RE c a
separatedBy f re sep = foldl' f <$> re <*> many (sep *> re)
ws :: RE Char ()
ws = void $ some $ psym isSpace
conR :: RE Char VersionRange
conR = separatedBy (/\) scalarRangeR ws
disR :: RE Char VersionRange
disR = separatedBy (\/) conR (ws *> string "||" *> ws)
starR :: RE Char VersionRange
starR = fullRange <$ string "*"
xRange1R :: RE Char VersionRange
xRange1R = f <$> RE.decimal <* sym '.' <*> RE.decimal <* string ".x"
where f x y = range ROGE (version x y 0) /\ range ROLT (version x (y + 1) 0)
xRange2R :: RE Char VersionRange
xRange2R = f <$> RE.decimal <* string ".x"
where f x = range ROGE (version x 0 0) /\ range ROLT (version (x + 1) 0 0)
xRange3R :: RE Char VersionRange
xRange3R = f <$> RE.decimal <* string ".x.x"
where f x = range ROGE (version x 0 0) /\ range ROLT (version (x + 1) 0 0)
threeR :: RE Char (Int, Int, Int, [Identifier])
threeR = (,,,) <$> RE.decimal <* sym '.' <*> RE.decimal <* sym '.' <*> RE.decimal <*> identifiers
-- | Uncurry a four-argument function over a 4-tuple.
uncurry4 :: (a -> b -> c -> d -> e) -> (a, b, c, d) -> e
uncurry4 f (a,b,c,d) = f a b c d
-- Fix: the original wrote @{- INLINE uncurry4 -}@, which is an ordinary
-- block comment that GHC ignores; pragma syntax requires @{-# ... #-}@.
{-# INLINE uncurry4 #-}
twoR :: RE Char (Int, Int)
twoR = (,) <$> RE.decimal <* sym '.' <*> RE.decimal
oneR :: RE Char Int
oneR = RE.decimal
partial1R :: RE Char VersionRange
partial1R = uncurry f <$> twoR
where f x y = range ROGE (version x y 0) /\ range ROLT (version x (y + 1) 0)
partial2R :: RE Char VersionRange
partial2R = f <$> oneR
where f x = range ROGE (version x 0 0) /\ range ROLT (version (x + 1) 0 0)
tilde1R :: RE Char VersionRange
tilde1R = uncurry4 f <$ sym '~' <*> threeR
where f 0 0 z i = range ROGE (Version 0 0 z i) /\ range ROLT (version 0 0 (z + 1))
f 0 y z i = range ROGE (Version 0 y z i) /\ range ROLT (version 0 (y + 1) 0)
f x y z i = range ROGE (Version x y z i) /\ range ROLT (version x (y + 1) 0)
tilde2R :: RE Char VersionRange
tilde2R = uncurry f <$ sym '~' <*> twoR
where f x y = range ROGE (version x y 0) /\ range ROLT (version x (y + 1) 0)
tilde3R :: RE Char VersionRange
tilde3R = f <$ sym '~' <*> oneR
where f x = range ROGE (version x 0 0) /\ range ROLT (version (x + 1) 0 0)
caret1R :: RE Char VersionRange
caret1R = uncurry4 f <$ sym '^' <*> threeR
where f 0 0 z i = range ROGE (Version 0 0 z i) /\ range ROLT (version 0 0 (z + 1))
f 0 y z i = range ROGE (Version 0 y z i) /\ range ROLT (version 0 (y + 1) 0)
f x y z i = range ROGE (Version x y z i) /\ range ROLT (version (x + 1) 0 0)
caret2R :: RE Char VersionRange
caret2R = uncurry f <$ sym '^' <*> twoR <* optional (string ".x")
where f 0 y = range ROGE (version 0 y 0) /\ range ROLT (version 0 (y + 1) 0)
f x y = range ROGE (version x y 0) /\ range ROLT (version (x + 1) 0 0)
caret3R :: RE Char VersionRange
caret3R = f <$ sym '^' <*> oneR <* string ".x"
where f x = range ROGE (version x 0 0) /\ range ROLT (version (x + 1) 0 0)
-- | Fold a list of alternatives left-to-right with '<|>'; the empty list
-- yields 'empty'.
choose :: Alternative f => [f a] -> f a
choose []       = empty
choose (x : xs) = x <|> choose xs
hyphenR :: RE Char VersionRange
hyphenR = (/\) <$> hyphenLoR <* ws <* sym '-' <* ws <*> hyphenHiR
hyphenLoR :: RE Char VersionRange
hyphenLoR = h1 <|> h2 <|> h3
where h1 = range ROGE <$> versionR
h2 = uncurry (\x y -> range ROGE (version x y 0)) <$> twoR
h3 = (\x -> range ROGE (version x 0 0)) <$> oneR
hyphenHiR :: RE Char VersionRange
hyphenHiR = h1 <|> h2 <|> h3
where h1 = range ROLE <$> versionR
h2 = uncurry (\x y -> range ROLT (version x (y + 1) 0)) <$> twoR
h3 = (\x -> range ROLT (version (x + 1) 0 0)) <$> oneR
advandedRangeR :: RE Char VersionRange
advandedRangeR = choose
[ xRange1R
, xRange2R
, xRange3R
, partial1R
, partial2R
, tilde1R
, tilde2R
, tilde3R
, caret1R
, caret2R
, caret3R
, hyphenR
]
rangeR :: RE Char VersionRange
rangeR = disR <|> starR <|> advandedRangeR <|> pure fullRange
parseVersionRange :: String -> Maybe VersionRange
parseVersionRange = RE.match rangeR
|
phadej/semver-range
|
src/Data/SemVer/Range.hs
|
Haskell
|
bsd-3-clause
| 10,363
|
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE TypeOperators #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE InstanceSigs #-}
{-# LANGUAGE DefaultSignatures #-}
{-# LANGUAGE TypeInType #-}
{-# LANGUAGE OverloadedStrings #-}
module MathFlow.Core where
import GHC.TypeLits
import Data.Singletons
import Data.Singletons.TH
import Data.Promotion.Prelude
-- |IsSubSamp // Subsampling constraint
--
-- * (f :: [Nat]) // strides for subsampling
-- * (m :: [Nat]) // dimensions of original tensor
-- * (n :: [Nat]) // dimensions of subsampled tensor
-- * :: Bool
type family IsSubSamp (f :: [Nat]) (m :: [Nat]) (n :: [Nat]) :: Bool where
  -- NOTE(review): the stride-1 equation skips the dimension without
  -- requiring n == m -- confirm this is intended.
  IsSubSamp (1:fs) (m:ms) (n:ns) = IsSubSamp fs ms ns
  IsSubSamp (f:fs) (m:ms) (n:ns) = ((n * f) :== m) :&& (IsSubSamp fs ms ns)
  IsSubSamp '[] '[] '[] = 'True
  IsSubSamp _ _ _ = 'False
-- |IsMatMul // A constraint for matrix multiplication
--
-- * (m :: [Nat]) // dimensions of a[..., i, k]
-- * (o :: [Nat]) // dimensions of b[..., k, j]
-- * (n :: [Nat]) // dimensions of output[..., i, j] = sum_k (a[..., i, k] * b[..., k, j]), for all indices i, j.
-- * :: Bool
type family IsMatMul (m :: [Nat]) (o :: [Nat]) (n :: [Nat]) :: Bool where
  IsMatMul m o n =
    -- output's last dim equals b's last, a's last equals b's
    -- second-to-last, and the leading (batch) dims all agree
    Last n :== Last o :&&
    Last m :== Head (Tail (Reverse o)) :&&
    (Tail (Reverse n)) :== (Tail (Reverse m)) :&&
    (Tail (Tail (Reverse n))) :== (Tail (Tail (Reverse o)))
-- |IsConcat // A constraint for concatination of tensor
--
-- * (m :: [Nat]) // dimensions of a[..., i, ...]
-- * (o :: [Nat]) // dimensions of b[..., k, ...]
-- * (n :: [Nat]) // dimensions of output[..., i+k, ...] = concat (a,b)
-- * :: Bool
type family IsConcat (m :: [Nat]) (o :: [Nat]) (n :: [Nat]) :: Bool where
  -- Per dimension: either both inputs equal the output (an axis not
  -- being concatenated) or they sum to it (the concatenation axis).
  IsConcat (m:mx) (o:ox) (n:nx) = (m :== o :&& m:== n :|| m + o :== n) :&& IsConcat mx ox nx
  IsConcat '[] '[] '[] = 'True
  IsConcat _ _ _ = 'False
-- |IsSameProduct // A constraint for reshaping tensor
--
-- * (m :: [Nat]) // dimensions of original tensor
-- * (n :: [Nat]) // dimensions of reshaped tensor
-- * :: Bool
type family IsSameProduct (m :: [Nat]) (n :: [Nat]) :: Bool where
  -- Non-empty case: heads must match and the tails must have equal
  -- products; the catch-all compares total products only.
  IsSameProduct (m:mx) (n:nx) = m :== n :&& (Product mx :== Product nx)
  IsSameProduct mx nx = Product mx :== Product nx
-- |Dependently typed tensor model
--
-- This model includes basic arithmetic operators and tensorflow functions.
data Tensor (n::[Nat]) t a =
    (Num t) => TScalar t -- ^ Scalar value
  | Tensor a -- ^ Transform a value to dependently typed value
  | TAdd (Tensor n t a) (Tensor n t a) -- ^ + of Num
  | TSub (Tensor n t a) (Tensor n t a) -- ^ - of Num
  | TMul (Tensor n t a) (Tensor n t a) -- ^ * of Num
  | TAbs (Tensor n t a) -- ^ abs of Num
  | TSign (Tensor n t a) -- ^ signum of Num
  | TRep (Tensor (Tail n) t a) -- ^ vector wise operator
  | TTr (Tensor (Reverse n) t a) -- ^ tensor transpose operator
  | forall o m. (SingI o,SingI m,SingI n,IsMatMul m o n ~ 'True) => TMatMul (Tensor m t a) (Tensor o t a) -- ^ matrix multiply
  | forall o m. (SingI o,SingI m,SingI n,IsConcat m o n ~ 'True) => TConcat (Tensor m t a) (Tensor o t a) -- ^ concat operator
  | forall m. (SingI m,IsSameProduct m n ~ 'True) => TReshape (Tensor m t a) -- ^ reshape function
  | forall o m.
    (SingI o,SingI m,
     Last n ~ Last o,
     Last m ~ Head (Tail (Reverse o)),
     (Tail (Reverse n)) ~ (Tail (Reverse m))
    ) =>
    TConv2d (Tensor m t a) (Tensor o t a) -- ^ conv2d function
  | forall f m. (SingI f, SingI m,IsSubSamp f m n ~ 'True) => TMaxPool (Sing f) (Tensor m t a) -- ^ max pool
  | TSoftMax (Tensor n t a) -- ^ softmax (shape-preserving)
  | TReLu (Tensor n t a) -- ^ relu (shape-preserving)
  | TNorm (Tensor n t a) -- ^ norm (shape-preserving)
  | forall f m. (SingI f,SingI m,IsSubSamp f m n ~ 'True) => TSubSamp (Sing f) (Tensor m t a) -- ^ subsampling function
  | forall m t2. TApp (Tensor n t a) (Tensor m t2 a) -- ^ attach a tensor of arbitrary shape/type (see '<+>')
  | TFunc String (Tensor n t a) -- ^ tensor wrapped in a function identified by name
  | TSym String -- ^ symbol referenced by name
  | TArgT String (Tensor n t a) -- ^ named tensor argument
  | TArgS String String -- ^ named string argument
  | TArgI String Integer -- ^ named integer argument
  | TArgF String Float -- ^ named float argument
  | TArgD String Double -- ^ named double argument
  | forall f. (SingI f) => TArgSing String (Sing (f::[Nat])) -- ^ named type-level-list argument
  | TLabel String (Tensor n t a) -- ^ When generating code, this label is used.
-- | Attach a second tensor (of arbitrary shape and element type) to
-- the first via 'TApp'; the result keeps the first tensor's type.
(<+>) :: forall n t a m t2. (Tensor n t a) -> (Tensor m t2 a) -> (Tensor n t a)
(<+>) = TApp
infixr 4 <+>
-- | Arithmetic on tensors builds syntax-tree nodes rather than
-- computing values; 'fromInteger' produces a 'TScalar' literal.
instance (Num t) => Num (Tensor n t a) where
  (+) = TAdd
  (-) = TSub
  (*) = TMul
  abs = TAbs
  signum = TSign
  fromInteger = TScalar . fromInteger
-- | get dimension from tensor
--
-- >>> dim (Tensor 1 :: Tensor '[192,10] Float Int)
-- [192,10]
class Dimension a where
  dim :: a -> [Integer]  -- ^ the dimensions as plain integers
instance (SingI n) => Dimension (Tensor n t a) where
  -- The runtime value is ignored; the shape is recovered from the
  -- type-level list @n@ via its singleton.
  dim t = dim $ ty t
    where
      ty :: (SingI n) => Tensor n t a -> Sing n
      ty _ = sing
instance Dimension (Sing (n::[Nat])) where
  -- Demote the singleton directly to its value-level list.
  dim t = fromSing t
-- | Lift a plain value into a tensor whose shape is fixed by the
-- singleton argument (the singleton is used only to pin the type).
toValue :: forall n t a. Sing (n::[Nat]) -> a -> Tensor n t a
toValue _ a = Tensor a
-- | Infix matrix multiplication; shapes are checked at the type level
-- by the 'IsMatMul' constraint.
(%*) :: forall o m n t a. (SingI o,SingI m,SingI n,IsMatMul m o n ~ 'True)
     => Tensor m t a -> Tensor o t a -> Tensor n t a
(%*) a b = TMatMul a b
-- | Attach a label to a tensor ('TLabel'); the label is emitted
-- during code generation.
(<--) :: SingI n => String -> Tensor n t a -> Tensor n t a
(<--) = TLabel
-- | Back-end interface: interpret a tensor term as an @a@, render it
-- as source text, or execute it. The @(Int,String,String)@ result of
-- 'run' is presumably (exit code, stdout, stderr) -- confirm against
-- the instances.
class FromTensor a where
  fromTensor :: Tensor n t a -> a
  toString :: Tensor n t a -> String
  run :: Tensor n t a -> IO (Int,String,String)
|
junjihashimoto/mathflow
|
src/MathFlow/Core.hs
|
Haskell
|
bsd-3-clause
| 5,390
|
{-# LANGUAGE TupleSections #-}
module FilePaths
( historyFilePath
, historyFileName
, lastRunStateFilePath
, lastRunStateFileName
, configFileName
, xdgName
, locateConfig
, xdgSyntaxDir
, syntaxDirName
, Script(..)
, locateScriptPath
, getAllScripts
)
where
import Prelude ()
import Prelude.MH
import Data.Text ( unpack )
import System.Directory ( doesFileExist
, doesDirectoryExist
, getDirectoryContents
, getPermissions
, executable
)
import System.Environment.XDG.BaseDir ( getUserConfigFile
, getAllConfigFiles
, getUserConfigDir
)
import System.FilePath ( (</>), takeBaseName )
-- | XDG application name used for all configuration lookups.
xdgName :: String
xdgName = "matterhorn"
-- | File name (within the XDG config dir) of the input history.
historyFileName :: FilePath
historyFileName = "history.txt"
-- | Per-team last-run-state file name; the team id is embedded.
lastRunStateFileName :: Text -> FilePath
lastRunStateFileName teamId = "last_run_state_" ++ unpack teamId ++ ".json"
-- | Name of the main configuration file.
configFileName :: FilePath
configFileName = "config.ini"
-- | Absolute path of the history file in the user's XDG config dir.
historyFilePath :: IO FilePath
historyFilePath = getUserConfigFile xdgName historyFileName
-- | Absolute path of the last-run-state file for the given team id.
lastRunStateFilePath :: Text -> IO FilePath
lastRunStateFilePath teamId =
    getUserConfigFile xdgName (lastRunStateFileName teamId)
-- | Get the XDG path to the user-specific syntax definition directory.
-- The path does not necessarily exist.
xdgSyntaxDir :: IO FilePath
xdgSyntaxDir = (</> syntaxDirName) <$> getUserConfigDir xdgName
-- | Directory name (under the XDG config dir) for syntax files.
syntaxDirName :: FilePath
syntaxDirName = "syntax"
-- | Find a specified configuration file by looking in all of the
-- supported locations.
-- | Find a configuration file by checking, in order: the current
-- directory, every XDG configuration location, and finally
-- @/etc/matterhorn@. Every candidate is probed; the first existing
-- path is returned.
locateConfig :: FilePath -> IO (Maybe FilePath)
locateConfig filename = do
    xdgLocations <- getAllConfigFiles xdgName filename
    let candidates = ("./" <> filename)
                   : xdgLocations
                  ++ ["/etc/matterhorn/" <> filename]
    listToMaybe <$> filterM doesFileExist candidates
-- | Directory name (under the XDG config dir) for user scripts.
scriptDirName :: FilePath
scriptDirName = "scripts"
-- | Outcome of looking up a script by name.
data Script
  = ScriptPath FilePath        -- ^ found and executable
  | NonexecScriptPath FilePath -- ^ found but not executable
  | ScriptNotFound             -- ^ no matching file
  deriving (Eq, Read, Show)
-- | Classify an existing file as an executable or non-executable
-- script from its filesystem permissions ('getPermissions' raises if
-- the file does not exist).
toScript :: FilePath -> IO (Script)
toScript fp = do
  perm <- getPermissions fp
  return $ if executable perm
           then ScriptPath fp
           else NonexecScriptPath fp
-- | Does the filesystem mark this file as executable?
isExecutable :: FilePath -> IO Bool
isExecutable fp = executable <$> getPermissions fp
-- | Resolve a script name to an executable path. Names beginning with
-- a dot (and the empty name) are rejected outright; otherwise the XDG
-- script directories and @/etc/matterhorn/scripts@ are searched in
-- order and the first existing file wins (executable or not).
locateScriptPath :: FilePath -> IO Script
locateScriptPath name
  -- 'take 1' is total: the original 'head name' crashed on "".
  -- An empty name is now rejected rather than looked up.
  | take 1 name `elem` ["", "."] = return ScriptNotFound
  | otherwise = do
      xdgLocations <- getAllConfigFiles xdgName scriptDirName
      let cmdLocations = [ xdgLoc ++ "/" ++ name
                         | xdgLoc <- xdgLocations
                         ] ++ [ "/etc/matterhorn/scripts/" <> name ]
      existingFiles <- filterM doesFileExist cmdLocations
      executables <- mapM toScript existingFiles
      return $ case executables of
        (path:_) -> path
        _        -> ScriptNotFound
-- | This returns a list of valid scripts, and a list of non-executable
-- scripts.
getAllScripts :: IO ([FilePath], [FilePath])
getAllScripts = do
  xdgLocations <- getAllConfigFiles xdgName scriptDirName
  let cmdLocations = xdgLocations ++ ["/etc/matterhorn/scripts"]
  -- List a directory's entries as full paths; missing dirs yield [].
  let getCommands dir = do
        exists <- doesDirectoryExist dir
        if exists
          then map ((dir ++ "/") ++) `fmap` getDirectoryContents dir
          else return []
  -- Drop names starting with '.' (applied to base names below, this
  -- also removes the "." and ".." directory entries).
  let isNotHidden f = case f of
        ('.':_) -> False
        [] -> False
        _ -> True
  allScripts <- concat `fmap` mapM getCommands cmdLocations
  execs <- filterM isExecutable allScripts
  nonexecs <- filterM (fmap not . isExecutable) allScripts
  -- NOTE(review): 'isExecutable' raises if an entry disappears
  -- between the directory listing and the permission check.
  return ( filter isNotHidden $ map takeBaseName execs
         , filter isNotHidden $ map takeBaseName nonexecs
         )
|
aisamanra/matterhorn
|
src/FilePaths.hs
|
Haskell
|
bsd-3-clause
| 3,926
|
{-# OPTIONS -fglasgow-exts #-}
module GisServer.Data.S57 () where
import Data.Binary
import Data.Binary.Get
import Data.Bits
import Data.Char
import Data.Maybe
import Data.Tree
import qualified Data.Map as M
import Int
import Data.ByteString.Lazy
import GisServer.Data.Common
import GisServer.Data.ISO8211
-- | A decoded S-57 field value in one of the standard data formats.
data S57Data = UnsignedInt Int
             | SignedInt Int
             | ExplicitPoint Double
             | ImplicitPoint Int
             | CharData String
             | BitField ByteString
-- | Read an @n@-byte textual field and parse it as a floating-point
-- number.
-- NOTE(review): uses partial 'read'; malformed input crashes the
-- decoder instead of failing inside 'Get'.
getExplicitPoint :: Int -> Get S57Data
getExplicitPoint n =
  do v <- getStringN (lexLevel 0) n
     return $ ExplicitPoint $ read v
-- | Read an @n@-byte integer field and wrap it as an 'ImplicitPoint'
-- (the raw integer is kept; no scaling is applied here).
getImplicitPoint :: Int -> Get S57Data
getImplicitPoint n =
  do v <- getIntN n
     return $ ImplicitPoint v
-- | Read an @n@-byte signed integer.
-- (The Bool passed to 'getInt' presumably selects unsigned
-- interpretation -- confirm in GisServer.Data.Common.)
getSignedInt n =
  do v <- getInt False n
     return $ SignedInt v
-- | Read an @n@-byte unsigned integer.
-- Fixed: the result was wrapped in 'SignedInt' (copy-paste from
-- 'getSignedInt'), mislabelling every unsigned field.
getUnsignedInt n =
  do v <- getInt True n
     return $ UnsignedInt v
-- | Read character data at the given lexical level: a fixed number of
-- bytes when a length is supplied, otherwise up to the record
-- terminator.
getCharData :: LexicalLevel -> Maybe Int -> Get S57Data
getCharData l (Just i) =
  do s <- getStringN l i
     return $ CharData s
getCharData l Nothing =
  do s <- getStringTill l recordTermChar
     return $ CharData s
-- | Read a bit field of the given width in bits; the byte count is
-- rounded up, so any padding bits of a final partial byte are kept in
-- the returned 'BitField'.
getBitField bits =
  let needPad = (bits `mod` 8) /= 0
      bytes = bits `div` 8
      bytes' = if (needPad) then bytes + 1 else bytes
  in do bs <- getLazyByteString $ fromIntegral bytes'
        return $ BitField bs
-- | Map field tags of a logical record to their value parsers.
-- NOTE(review): unfinished stub -- 'keys' and 'field' are computed
-- but unused, and the result is always the empty map.
fieldParser :: LogicRecord -> M.Map String (Get S57Data)
fieldParser r =
  let keys = M.keys $ dir
      dir = lr_directory r
      field k = snd $ fromJust $ M.lookup k dir
  in M.fromList []
|
alios/gisserver
|
GisServer/Data/S57.hs
|
Haskell
|
bsd-3-clause
| 1,560
|
{-
(c) The GRASP/AQUA Project, Glasgow University, 1992-1998
\section[RnSource]{Main pass of renamer}
-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE ScopedTypeVariables #-}
module RnTypes (
-- Type related stuff
rnHsType, rnLHsType, rnLHsTypes, rnContext,
rnHsKind, rnLHsKind, rnLHsMaybeKind,
rnHsSigType, rnLHsInstType, rnConDeclFields,
newTyVarNameRn, rnLHsTypeWithWildCards,
rnHsSigTypeWithWildCards,
-- Precence related stuff
mkOpAppRn, mkNegAppRn, mkOpFormRn, mkConOpPatRn,
checkPrecMatch, checkSectionPrec,
-- Binding related stuff
warnContextQuantification, warnUnusedForAlls,
bindSigTyVarsFV, bindHsTyVars, rnHsBndrSig,
extractHsTyRdrTyVars, extractHsTysRdrTyVars,
extractRdrKindSigVars, extractDataDefnKindVars,
filterInScope
) where
import {-# SOURCE #-} RnSplice( rnSpliceType )
import DynFlags
import HsSyn
import RnHsDoc ( rnLHsDoc, rnMbLHsDoc )
import RnEnv
import TcRnMonad
import RdrName
import PrelNames
import TysPrim ( funTyConName )
import Name
import SrcLoc
import NameSet
import Util
import BasicTypes ( compareFixity, funTyFixity, negateFixity,
Fixity(..), FixityDirection(..) )
import Outputable
import FastString
import Maybes
import Data.List ( nub, nubBy, deleteFirstsBy )
import Control.Monad ( unless, when )
#if __GLASGOW_HASKELL__ < 709
import Data.Monoid ( mappend, mempty, mconcat )
#endif
#include "HsVersions.h"
{-
These type renamers are in a separate module, rather than in (say) RnSource,
to break several loop.
*********************************************************
* *
\subsection{Renaming types}
* *
*********************************************************
-}
-- | Rename a source-language type signature; @doc_str@ describes the
-- signature site for error messages.
rnHsSigType :: SDoc -> LHsType RdrName -> RnM (LHsType Name, FreeVars)
-- rnHsSigType is used for source-language type signatures,
-- which use *implicit* universal quantification.
rnHsSigType doc_str ty = rnLHsType (TypeSigCtx doc_str) ty
rnLHsInstType :: SDoc -> LHsType RdrName -> RnM (LHsType Name, FreeVars)
-- Rename the type in an instance or standalone deriving decl
-- A malformed head is reported via addErrAt but the renamed type is
-- still returned, so renaming can continue.
rnLHsInstType doc_str ty
  = do { (ty', fvs) <- rnLHsType (GenericCtx doc_str) ty
       ; unless good_inst_ty (addErrAt (getLoc ty) (badInstTy ty))
       ; return (ty', fvs) }
  where
    good_inst_ty
      -- Good iff the head (after flattening nested foralls) splits as
      -- an instance decl type whose class position is a tycon-like occ
      | Just (_, _, L _ cls, _) <-
                        splitLHsInstDeclTy_maybe (flattenTopLevelLHsForAllTy ty)
      , isTcOcc (rdrNameOcc cls) = True
      | otherwise = False
-- | Error message for 'rnLHsInstType' when the instance head is not a
-- class application.
badInstTy :: LHsType RdrName -> SDoc
badInstTy ty = ptext (sLit "Malformed instance:") <+> ppr ty
{-
rnHsType is here because we call it from loadInstDecl, and I didn't
want a gratuitous knot.
Note [Context quantification]
-----------------------------
Variables in type signatures are implicitly quantified
when (1) they are in a type signature not beginning
with "forall" or (2) in any qualified type T => R.
We are phasing out (2) since it leads to inconsistencies
(Trac #4426):
data A = A (a -> a) is an error
data A = A (Eq a => a -> a) binds "a"
data A = A (Eq a => a -> b) binds "a" and "b"
data A = A (() => a -> b) binds "a" and "b"
f :: forall a. a -> b is an error
f :: forall a. () => a -> b is an error
f :: forall a. a -> (() => b) binds "a" and "b"
The -fwarn-context-quantification flag warns about
this situation. See rnHsTyKi for case HsForAllTy Qualified.
-}
-- | Shared worker for located types and kinds: set the source span,
-- rename the payload, and re-attach the location.
rnLHsTyKi :: Bool -- True <=> renaming a type, False <=> a kind
          -> HsDocContext -> LHsType RdrName -> RnM (LHsType Name, FreeVars)
rnLHsTyKi isType doc (L loc ty)
  = setSrcSpan loc $
    do { (ty', fvs) <- rnHsTyKi isType doc ty
       ; return (L loc ty', fvs) }
-- | Rename a located type (type-mode wrapper over 'rnLHsTyKi').
rnLHsType :: HsDocContext -> LHsType RdrName -> RnM (LHsType Name, FreeVars)
rnLHsType = rnLHsTyKi True
-- | Rename a located kind (kind-mode wrapper over 'rnLHsTyKi').
rnLHsKind :: HsDocContext -> LHsKind RdrName -> RnM (LHsKind Name, FreeVars)
rnLHsKind = rnLHsTyKi False
-- | Rename an optional kind signature; 'Nothing' renames to 'Nothing'
-- with no free variables.
rnLHsMaybeKind :: HsDocContext -> Maybe (LHsKind RdrName)
               -> RnM (Maybe (LHsKind Name), FreeVars)
rnLHsMaybeKind _ Nothing
  = return (Nothing, emptyFVs)
rnLHsMaybeKind doc (Just kind)
  = do { (kind', fvs) <- rnLHsKind doc kind
       ; return (Just kind', fvs) }
-- | Rename an unlocated type (type-mode wrapper over 'rnHsTyKi').
rnHsType :: HsDocContext -> HsType RdrName -> RnM (HsType Name, FreeVars)
rnHsType = rnHsTyKi True
-- | Rename an unlocated kind (kind-mode wrapper over 'rnHsTyKi').
rnHsKind :: HsDocContext -> HsKind RdrName -> RnM (HsKind Name, FreeVars)
rnHsKind = rnHsTyKi False
rnHsTyKi :: Bool -> HsDocContext -> HsType RdrName -> RnM (HsType Name, FreeVars)
rnHsTyKi isType doc ty@HsForAllTy{}
= rnHsTyKiForAll isType doc (flattenTopLevelHsForAllTy ty)
rnHsTyKi isType _ (HsTyVar rdr_name)
= do { name <- rnTyVar isType rdr_name
; return (HsTyVar name, unitFV name) }
-- If we see (forall a . ty), without foralls on, the forall will give
-- a sensible error message, but we don't want to complain about the dot too
-- Hence the jiggery pokery with ty1
rnHsTyKi isType doc ty@(HsOpTy ty1 (wrapper, L loc op) ty2)
= ASSERT( isType ) setSrcSpan loc $
do { ops_ok <- xoptM Opt_TypeOperators
; op' <- if ops_ok
then rnTyVar isType op
else do { addErr (opTyErr op ty)
; return (mkUnboundName op) } -- Avoid double complaint
; let l_op' = L loc op'
; fix <- lookupTyFixityRn l_op'
; (ty1', fvs1) <- rnLHsType doc ty1
; (ty2', fvs2) <- rnLHsType doc ty2
; res_ty <- mkHsOpTyRn (\t1 t2 -> HsOpTy t1 (wrapper, l_op') t2)
op' fix ty1' ty2'
; return (res_ty, (fvs1 `plusFV` fvs2) `addOneFV` op') }
rnHsTyKi isType doc (HsParTy ty)
= do { (ty', fvs) <- rnLHsTyKi isType doc ty
; return (HsParTy ty', fvs) }
rnHsTyKi isType doc (HsBangTy b ty)
= ASSERT( isType )
do { (ty', fvs) <- rnLHsType doc ty
; return (HsBangTy b ty', fvs) }
rnHsTyKi _ doc ty@(HsRecTy flds)
= do { addErr (hang (ptext (sLit "Record syntax is illegal here:"))
2 (ppr ty))
; (flds', fvs) <- rnConDeclFields doc flds
; return (HsRecTy flds', fvs) }
rnHsTyKi isType doc (HsFunTy ty1 ty2)
= do { (ty1', fvs1) <- rnLHsTyKi isType doc ty1
-- Might find a for-all as the arg of a function type
; (ty2', fvs2) <- rnLHsTyKi isType doc ty2
-- Or as the result. This happens when reading Prelude.hi
-- when we find return :: forall m. Monad m -> forall a. a -> m a
-- Check for fixity rearrangements
; res_ty <- if isType
then mkHsOpTyRn HsFunTy funTyConName funTyFixity ty1' ty2'
else return (HsFunTy ty1' ty2')
; return (res_ty, fvs1 `plusFV` fvs2) }
rnHsTyKi isType doc listTy@(HsListTy ty)
= do { data_kinds <- xoptM Opt_DataKinds
; unless (data_kinds || isType) (addErr (dataKindsErr isType listTy))
; (ty', fvs) <- rnLHsTyKi isType doc ty
; return (HsListTy ty', fvs) }
rnHsTyKi isType doc (HsKindSig ty k)
= ASSERT( isType )
do { kind_sigs_ok <- xoptM Opt_KindSignatures
; unless kind_sigs_ok (badSigErr False doc ty)
; (ty', fvs1) <- rnLHsType doc ty
; (k', fvs2) <- rnLHsKind doc k
; return (HsKindSig ty' k', fvs1 `plusFV` fvs2) }
rnHsTyKi isType doc (HsPArrTy ty)
= ASSERT( isType )
do { (ty', fvs) <- rnLHsType doc ty
; return (HsPArrTy ty', fvs) }
-- Unboxed tuples are allowed to have poly-typed arguments. These
-- sometimes crop up as a result of CPR worker-wrappering dictionaries.
rnHsTyKi isType doc tupleTy@(HsTupleTy tup_con tys)
= do { data_kinds <- xoptM Opt_DataKinds
; unless (data_kinds || isType) (addErr (dataKindsErr isType tupleTy))
; (tys', fvs) <- mapFvRn (rnLHsTyKi isType doc) tys
; return (HsTupleTy tup_con tys', fvs) }
-- Ensure that a type-level integer is nonnegative (#8306, #8412)
rnHsTyKi isType _ tyLit@(HsTyLit t)
= do { data_kinds <- xoptM Opt_DataKinds
; unless data_kinds (addErr (dataKindsErr isType tyLit))
; when (negLit t) (addErr negLitErr)
; return (HsTyLit t, emptyFVs) }
where
negLit (HsStrTy _ _) = False
negLit (HsNumTy _ i) = i < 0
negLitErr = ptext (sLit "Illegal literal in type (type literals must not be negative):") <+> ppr tyLit
rnHsTyKi isType doc (HsAppTy ty1 ty2)
= do { (ty1', fvs1) <- rnLHsTyKi isType doc ty1
; (ty2', fvs2) <- rnLHsTyKi isType doc ty2
; return (HsAppTy ty1' ty2', fvs1 `plusFV` fvs2) }
rnHsTyKi isType doc (HsIParamTy n ty)
= ASSERT( isType )
do { (ty', fvs) <- rnLHsType doc ty
; return (HsIParamTy n ty', fvs) }
rnHsTyKi isType doc (HsEqTy ty1 ty2)
= ASSERT( isType )
do { (ty1', fvs1) <- rnLHsType doc ty1
; (ty2', fvs2) <- rnLHsType doc ty2
; return (HsEqTy ty1' ty2', fvs1 `plusFV` fvs2) }
rnHsTyKi isType _ (HsSpliceTy sp k)
= ASSERT( isType )
rnSpliceType sp k
rnHsTyKi isType doc (HsDocTy ty haddock_doc)
= ASSERT( isType )
do { (ty', fvs) <- rnLHsType doc ty
; haddock_doc' <- rnLHsDoc haddock_doc
; return (HsDocTy ty' haddock_doc', fvs) }
rnHsTyKi isType _ (HsCoreTy ty)
= ASSERT( isType )
return (HsCoreTy ty, emptyFVs)
-- The emptyFVs probably isn't quite right
-- but I don't think it matters
rnHsTyKi _ _ (HsWrapTy {})
= panic "rnHsTyKi"
rnHsTyKi isType doc ty@(HsExplicitListTy k tys)
= ASSERT( isType )
do { data_kinds <- xoptM Opt_DataKinds
; unless data_kinds (addErr (dataKindsErr isType ty))
; (tys', fvs) <- rnLHsTypes doc tys
; return (HsExplicitListTy k tys', fvs) }
rnHsTyKi isType doc ty@(HsExplicitTupleTy kis tys)
= ASSERT( isType )
do { data_kinds <- xoptM Opt_DataKinds
; unless data_kinds (addErr (dataKindsErr isType ty))
; (tys', fvs) <- rnLHsTypes doc tys
; return (HsExplicitTupleTy kis tys', fvs) }
rnHsTyKi isType _doc (HsWildCardTy (AnonWildCard PlaceHolder))
= ASSERT( isType )
do { loc <- getSrcSpanM
; uniq <- newUnique
; let name = mkInternalName uniq (mkTyVarOcc "_") loc
; return (HsWildCardTy (AnonWildCard name), emptyFVs) }
-- emptyFVs: this occurrence does not refer to a
-- binding, so don't treat it as a free variable
rnHsTyKi isType doc (HsWildCardTy (NamedWildCard rdr_name))
= ASSERT( isType )
do { not_in_scope <- isNothing `fmap` lookupOccRn_maybe rdr_name
; when not_in_scope $
-- When the named wild card is not in scope, it means it shouldn't be
-- there in the first place, i.e. rnHsSigTypeWithWildCards wasn't
-- used, so fail.
failWith $ text "Unexpected wild card:" <+> quotes (ppr rdr_name) $$
docOfHsDocContext doc
; name <- rnTyVar isType rdr_name
; return (HsWildCardTy (NamedWildCard name), emptyFVs) }
-- emptyFVs: this occurrence does not refer to a
-- binding, so don't treat it as a free variable
--------------
rnHsTyKiForAll :: Bool -> HsDocContext -> HsType RdrName
-> RnM (HsType Name, FreeVars)
rnHsTyKiForAll isType doc (HsForAllTy Implicit extra _ lctxt@(L _ ctxt) ty)
= ASSERT( isType ) do
-- Implicit quantifiction in source code (no kinds on tyvars)
-- Given the signature C => T we universally quantify
-- over FV(T) \ {in-scope-tyvars}
rdr_env <- getLocalRdrEnv
loc <- getSrcSpanM
let
(forall_kvs, forall_tvs) = filterInScope rdr_env $
extractHsTysRdrTyVars (ty:ctxt)
-- In for-all types we don't bring in scope
-- kind variables mentioned in kind signatures
-- (Well, not yet anyway....)
-- f :: Int -> T (a::k) -- Not allowed
-- The filterInScope is to ensure that we don't quantify over
-- type variables that are in scope; when GlasgowExts is off,
-- there usually won't be any, except for class signatures:
-- class C a where { op :: a -> a }
tyvar_bndrs = userHsTyVarBndrs loc forall_tvs
rnForAll doc Implicit extra forall_kvs (mkHsQTvs tyvar_bndrs) lctxt ty
rnHsTyKiForAll isType doc
fulltype@(HsForAllTy Qualified extra _ lctxt@(L _ ctxt) ty)
= ASSERT( isType ) do
rdr_env <- getLocalRdrEnv
loc <- getSrcSpanM
let
(forall_kvs, forall_tvs) = filterInScope rdr_env $
extractHsTysRdrTyVars (ty:ctxt)
tyvar_bndrs = userHsTyVarBndrs loc forall_tvs
in_type_doc = ptext (sLit "In the type") <+> quotes (ppr fulltype)
-- See Note [Context quantification]
warnContextQuantification (in_type_doc $$ docOfHsDocContext doc) tyvar_bndrs
rnForAll doc Implicit extra forall_kvs (mkHsQTvs tyvar_bndrs) lctxt ty
rnHsTyKiForAll isType doc
ty@(HsForAllTy Explicit extra forall_tyvars lctxt@(L _ ctxt) tau)
= ASSERT( isType ) do { -- Explicit quantification.
-- Check that the forall'd tyvars are actually
-- mentioned in the type, and produce a warning if not
let (kvs, mentioned) = extractHsTysRdrTyVars (tau:ctxt)
in_type_doc = ptext (sLit "In the type") <+> quotes (ppr ty)
; warnUnusedForAlls (in_type_doc $$ docOfHsDocContext doc)
forall_tyvars mentioned
; traceRn (text "rnHsTyKiForAll:Exlicit" <+> vcat
[ppr forall_tyvars, ppr lctxt,ppr tau ])
; rnForAll doc Explicit extra kvs forall_tyvars lctxt tau }
-- The following should never happen but keeps the completeness checker happy
rnHsTyKiForAll isType doc ty = rnHsTyKi isType doc ty
--------------
-- | Look up a variable occurrence in the type namespace, or in the
-- kind namespace when the flag is False.
rnTyVar :: Bool -> RdrName -> RnM Name
rnTyVar is_type rdr_name
  | is_type = lookupTypeOccRn rdr_name
  | otherwise = lookupKindOccRn rdr_name
--------------
-- | Rename a list of located types, unioning their free variables.
rnLHsTypes :: HsDocContext -> [LHsType RdrName]
           -> RnM ([LHsType Name], FreeVars)
rnLHsTypes doc tys = mapFvRn (rnLHsType doc) tys
rnForAll :: HsDocContext -> HsExplicitFlag
-> Maybe SrcSpan -- Location of an extra-constraints wildcard
-> [RdrName] -- Kind variables
-> LHsTyVarBndrs RdrName -- Type variables
-> LHsContext RdrName -> LHsType RdrName
-> RnM (HsType Name, FreeVars)
rnForAll doc exp extra kvs forall_tyvars ctxt ty
| null kvs, null (hsQTvBndrs forall_tyvars), null (unLoc ctxt), isNothing extra
= rnHsType doc (unLoc ty)
-- One reason for this case is that a type like Int#
-- starts off as (HsForAllTy Implicit Nothing [] Int), in case
-- there is some quantification. Now that we have quantified
-- and discovered there are no type variables, it's nicer to turn
-- it into plain Int. If it were Int# instead of Int, we'd actually
-- get an error, because the body of a genuine for-all is
-- of kind *.
| otherwise
= bindHsTyVars doc Nothing kvs forall_tyvars $ \ new_tyvars ->
do { (new_ctxt, fvs1) <- rnContext doc ctxt
; (new_ty, fvs2) <- rnLHsType doc ty
; return (HsForAllTy exp extra new_tyvars new_ctxt new_ty, fvs1 `plusFV` fvs2) }
-- Retain the same implicit/explicit flag as before
-- so that we can later print it correctly
---------------
-- | Bring a signature's type variables into scope around an action,
-- but only when ScopedTypeVariables is enabled.
bindSigTyVarsFV :: [Name]
                -> RnM (a, FreeVars)
                -> RnM (a, FreeVars)
-- Used just before renaming the defn of a function
-- with a separate type signature, to bring its tyvars into scope
-- With no -XScopedTypeVariables, this is a no-op
bindSigTyVarsFV tvs thing_inside
  = do { scoped_tyvars <- xoptM Opt_ScopedTypeVariables
       ; if not scoped_tyvars then
                thing_inside
         else
                bindLocalNamesFV tvs thing_inside }
---------------
bindHsTyVars :: HsDocContext
-> Maybe a -- Just _ => an associated type decl
-> [RdrName] -- Kind variables from scope
-> LHsTyVarBndrs RdrName -- Type variables
-> (LHsTyVarBndrs Name -> RnM (b, FreeVars))
-> RnM (b, FreeVars)
-- (a) Bring kind variables into scope
-- both (i) passed in (kv_bndrs)
-- and (ii) mentioned in the kinds of tv_bndrs
-- (b) Bring type variables into scope
bindHsTyVars doc mb_assoc kv_bndrs tv_bndrs thing_inside
= do { rdr_env <- getLocalRdrEnv
; let tvs = hsQTvBndrs tv_bndrs
kvs_from_tv_bndrs = [ kv | L _ (KindedTyVar _ kind) <- tvs
, let (_, kvs) = extractHsTyRdrTyVars kind
, kv <- kvs ]
all_kvs' = nub (kv_bndrs ++ kvs_from_tv_bndrs)
all_kvs = filterOut (`elemLocalRdrEnv` rdr_env) all_kvs'
overlap_kvs = [ kv | kv <- all_kvs, any ((==) kv . hsLTyVarName) tvs ]
-- These variables appear both as kind and type variables
-- in the same declaration; eg type family T (x :: *) (y :: x)
-- We disallow this: too confusing!
; poly_kind <- xoptM Opt_PolyKinds
; unless (poly_kind || null all_kvs)
(addErr (badKindBndrs doc all_kvs))
; unless (null overlap_kvs)
(addErr (overlappingKindVars doc overlap_kvs))
; loc <- getSrcSpanM
; kv_names <- mapM (newLocalBndrRn . L loc) all_kvs
; bindLocalNamesFV kv_names $
do { let tv_names_w_loc = hsLTyVarLocNames tv_bndrs
rn_tv_bndr :: LHsTyVarBndr RdrName -> RnM (LHsTyVarBndr Name, FreeVars)
rn_tv_bndr (L loc (UserTyVar rdr))
= do { nm <- newTyVarNameRn mb_assoc rdr_env loc rdr
; return (L loc (UserTyVar nm), emptyFVs) }
rn_tv_bndr (L loc (KindedTyVar (L lv rdr) kind))
= do { sig_ok <- xoptM Opt_KindSignatures
; unless sig_ok (badSigErr False doc kind)
; nm <- newTyVarNameRn mb_assoc rdr_env loc rdr
; (kind', fvs) <- rnLHsKind doc kind
; return (L loc (KindedTyVar (L lv nm) kind'), fvs) }
-- Check for duplicate or shadowed tyvar bindrs
; checkDupRdrNames tv_names_w_loc
; when (isNothing mb_assoc) (checkShadowedRdrNames tv_names_w_loc)
; (tv_bndrs', fvs1) <- mapFvRn rn_tv_bndr tvs
; (res, fvs2) <- bindLocalNamesFV (map hsLTyVarName tv_bndrs') $
do { inner_rdr_env <- getLocalRdrEnv
; traceRn (text "bhtv" <+> vcat
[ ppr tvs, ppr kv_bndrs, ppr kvs_from_tv_bndrs
, ppr $ map (`elemLocalRdrEnv` rdr_env) all_kvs'
, ppr $ map (getUnique . rdrNameOcc) all_kvs'
, ppr all_kvs, ppr rdr_env, ppr inner_rdr_env ])
; thing_inside (HsQTvs { hsq_tvs = tv_bndrs', hsq_kvs = kv_names }) }
; return (res, fvs1 `plusFV` fvs2) } }
-- | Name a type-variable binder: inside an associated type decl
-- (@Just _@) reuse the parent class's Name when it is already in the
-- local environment; otherwise make a fresh local binder.
newTyVarNameRn :: Maybe a -> LocalRdrEnv -> SrcSpan -> RdrName -> RnM Name
newTyVarNameRn mb_assoc rdr_env loc rdr
  | Just _ <- mb_assoc -- Use the same Name as the parent class decl
  , Just n <- lookupLocalRdrEnv rdr_env rdr
  = return n
  | otherwise
  = newLocalBndrRn (L loc rdr)
--------------------------------
rnHsBndrSig :: HsDocContext
-> HsWithBndrs RdrName (LHsType RdrName)
-> (HsWithBndrs Name (LHsType Name) -> RnM (a, FreeVars))
-> RnM (a, FreeVars)
rnHsBndrSig doc (HsWB { hswb_cts = ty@(L loc _) }) thing_inside
= do { sig_ok <- xoptM Opt_ScopedTypeVariables
; unless sig_ok (badSigErr True doc ty)
; let (kv_bndrs, tv_bndrs) = extractHsTyRdrTyVars ty
; name_env <- getLocalRdrEnv
; tv_names <- newLocalBndrsRn [L loc tv | tv <- tv_bndrs
, not (tv `elemLocalRdrEnv` name_env) ]
; kv_names <- newLocalBndrsRn [L loc kv | kv <- kv_bndrs
, not (kv `elemLocalRdrEnv` name_env) ]
; bindLocalNamesFV kv_names $
bindLocalNamesFV tv_names $
do { (ty', fvs1, wcs) <- rnLHsTypeWithWildCards doc ty
; (res, fvs2) <- thing_inside (HsWB { hswb_cts = ty', hswb_kvs = kv_names,
hswb_tvs = tv_names, hswb_wcs = wcs })
; return (res, fvs1 `plusFV` fvs2) } }
-- | Error message: the same variables are used as both kind and type
-- variables in one declaration (disallowed by 'bindHsTyVars').
overlappingKindVars :: HsDocContext -> [RdrName] -> SDoc
overlappingKindVars doc kvs
  = vcat [ ptext (sLit "Kind variable") <> plural kvs <+>
           ptext (sLit "also used as type variable") <> plural kvs
           <> colon <+> pprQuotedList kvs
         , docOfHsDocContext doc ]
-- | Error message: kind variables found while PolyKinds is off.
badKindBndrs :: HsDocContext -> [RdrName] -> SDoc
badKindBndrs doc kvs
  = vcat [ hang (ptext (sLit "Unexpected kind variable") <> plural kvs
                 <+> pprQuotedList kvs)
              2 (ptext (sLit "Perhaps you intended to use PolyKinds"))
         , docOfHsDocContext doc ]
-- | Report an illegal type (True) or kind (False) signature and
-- suggest the extension that would enable it.
badSigErr :: Bool -> HsDocContext -> LHsType RdrName -> TcM ()
badSigErr is_type doc (L loc ty)
  = setSrcSpan loc $ addErr $
    vcat [ hang (ptext (sLit "Illegal") <+> what
                 <+> ptext (sLit "signature:") <+> quotes (ppr ty))
              2 (ptext (sLit "Perhaps you intended to use") <+> flag)
         , docOfHsDocContext doc ]
  where
    what | is_type = ptext (sLit "type")
         | otherwise = ptext (sLit "kind")
    flag | is_type = ptext (sLit "ScopedTypeVariables")
         | otherwise = ptext (sLit "KindSignatures")
-- | Report a construct that requires DataKinds, phrased for a type
-- (True) or kind (False) context.
dataKindsErr :: Bool -> HsType RdrName -> SDoc
dataKindsErr is_type thing
  = hang (ptext (sLit "Illegal") <+> what <> colon <+> quotes (ppr thing))
       2 (ptext (sLit "Perhaps you intended to use DataKinds"))
  where
    what | is_type = ptext (sLit "type")
         | otherwise = ptext (sLit "kind")
--------------------------------
-- | Variant of @rnHsSigType@ that supports wild cards. Also returns the wild
-- cards to bind.
rnHsSigTypeWithWildCards :: SDoc -> LHsType RdrName
                         -> RnM (LHsType Name, FreeVars, [Name])
-- The third component is the wild-card names to bind.
rnHsSigTypeWithWildCards doc_str = rnLHsTypeWithWildCards (TypeSigCtx doc_str)
-- | Variant of @rnLHsType@ that supports wild cards. The third element of the
-- tuple consists of the freshly generated names of the anonymous wild cards
-- occurring in the type, as well as the names of the named wild cards in the
-- type that are not yet in scope.
rnLHsTypeWithWildCards :: HsDocContext -> LHsType RdrName
-> RnM (LHsType Name, FreeVars, [Name])
rnLHsTypeWithWildCards doc ty
= do { -- When there is a wild card at the end of the context, remove it and
-- add its location as the extra-constraints wild card in the
-- HsForAllTy.
let ty' = extractExtraCtsWc `fmap` flattenTopLevelLHsForAllTy ty
; checkValidPartialType doc ty'
; rdr_env <- getLocalRdrEnv
-- Filter out named wildcards that are already in scope
; let (_, wcs) = collectWildCards ty'
nwcs = [L loc n | L loc (NamedWildCard n) <- wcs
, not (elemLocalRdrEnv n rdr_env) ]
; bindLocatedLocalsRn nwcs $ \nwcs' -> do {
(ty'', fvs) <- rnLHsType doc ty'
-- Add the anonymous wildcards that have been given names during
-- renaming
; let (_, wcs') = collectWildCards ty''
awcs = filter (isAnonWildCard . unLoc) wcs'
; return (ty'', fvs, nwcs' ++ map (HsSyn.wildCardName . unLoc) awcs) } }
where
extractExtraCtsWc (HsForAllTy flag _ bndrs (L l ctxt) ty)
| Just (ctxt', ct) <- snocView ctxt
, L lx (HsWildCardTy (AnonWildCard _)) <- ignoreParens ct
= HsForAllTy flag (Just lx) bndrs (L l ctxt') ty
extractExtraCtsWc ty = ty
-- | Extract all wild cards from a type. The named and anonymous
-- extra-constraints wild cards are returned separately to be able to give
-- more accurate error messages.
collectWildCards
:: Eq name => LHsType name
-> ([Located (HsWildCardInfo name)], -- extra-constraints wild cards
[Located (HsWildCardInfo name)]) -- wild cards
collectWildCards lty = (nubBy sameWildCard extra, nubBy sameWildCard wcs)
where
(extra, wcs) = go lty
go (L loc ty) = case ty of
HsAppTy ty1 ty2 -> go ty1 `mappend` go ty2
HsFunTy ty1 ty2 -> go ty1 `mappend` go ty2
HsListTy ty -> go ty
HsPArrTy ty -> go ty
HsTupleTy _ tys -> gos tys
HsOpTy ty1 _ ty2 -> go ty1 `mappend` go ty2
HsParTy ty -> go ty
HsIParamTy _ ty -> go ty
HsEqTy ty1 ty2 -> go ty1 `mappend` go ty2
HsKindSig ty kind -> go ty `mappend` go kind
HsDocTy ty _ -> go ty
HsBangTy _ ty -> go ty
HsRecTy flds -> gos $ map (cd_fld_type . unLoc) flds
HsExplicitListTy _ tys -> gos tys
HsExplicitTupleTy _ tys -> gos tys
HsWrapTy _ ty -> go (L loc ty)
-- Interesting cases
HsWildCardTy wc -> ([], [L loc wc])
HsForAllTy _ _ _ (L _ ctxt) ty -> ctxtWcs `mappend` go ty
where
ctxt' = map ignoreParens ctxt
extraWcs = [L l wc | L l (HsWildCardTy wc) <- ctxt']
(_, wcs) = gos ctxt'
-- Remove extra-constraints wild cards from wcs
ctxtWcs = (extraWcs, deleteFirstsBy sameWildCard
(nubBy sameWildCard wcs) extraWcs)
-- HsQuasiQuoteTy, HsSpliceTy, HsCoreTy, HsTyLit
_ -> mempty
gos = mconcat . map go
-- | Check the validity of a partial type signature. The following things are
-- checked:
--
-- * Named extra-constraints wild cards aren't allowed,
-- e.g. invalid: @(Show a, _x) => a -> String@.
--
-- * There is only one extra-constraints wild card in the context and it must
-- come last, e.g. invalid: @(_, Show a) => a -> String@
-- or @(_, Show a, _) => a -> String@.
--
-- * There should be no unnamed wild cards in the context.
--
-- * An extra-constraints wild card can only occur in the top-level context.
-- This would be invalid: @(Eq a, _) => a -> (Num a, _) => a -> Bool@.
--
-- * Named wild cards occurring in the context must also occur in the monotype.
--
-- When an invalid wild card is found, we fail with an error.
checkValidPartialType :: HsDocContext -> LHsType RdrName -> RnM ()
checkValidPartialType doc lty
= do { whenNonEmpty isNamedWildCard inExtra $ \(L loc _) ->
failAt loc $ typeDoc $$
text "An extra-constraints wild card cannot be named" $$
docOfHsDocContext doc
; whenNonEmpty isAnonWildCard extraTopLevel $ \(L loc _) ->
failAt loc $ typeDoc $$
-- If there was a valid extra-constraints wild card, it should have
-- already been removed and its location should be stored in the
-- HsForAllTy
(if isJust extra
then text "Only a single extra-constraints wild card is allowed"
else fcat [ text "An extra-constraints wild card must occur"
, text "at the end of the constraints" ]) $$
docOfHsDocContext doc
; whenNonEmpty isAnonWildCard inCtxt $ \(L loc _) ->
failAt loc $ typeDoc $$
text "Anonymous wild cards are not allowed in constraints" $$
docOfHsDocContext doc
; whenNonEmpty isAnonWildCard nestedExtra $ \(L loc _) ->
failAt loc $ typeDoc $$
fcat [ text "An extra-constraints wild card is only allowed"
, text "in the top-level context" ] $$
docOfHsDocContext doc
; whenNonEmpty isNamedWildCard inCtxtNotInTau $ \(L loc name) ->
failAt loc $ typeDoc $$
fcat [ text "The named wild card" <+> quotes (ppr name) <> space
, text "is only allowed in the constraints"
, text "when it also occurs in the rest of the type" ] $$
docOfHsDocContext doc }
where
typeDoc = hang (text "Invalid partial type:") 2 (ppr lty)
(extra, ctxt, tau) = splitPartialType lty
(inExtra, _) = collectWildCards lty
(nestedExtra, inTau) = collectWildCards tau
(_, inCtxt) = mconcat $ map collectWildCards ctxt
inCtxtNotInTau = deleteFirstsBy sameWildCard inCtxt inTau
extraTopLevel = deleteFirstsBy sameWildCard inExtra nestedExtra
splitPartialType (L _ (HsForAllTy _ extra _ (L _ ctxt) ty))
= (extra, map ignoreParens ctxt, ty)
splitPartialType ty = (Nothing, [], ty)
whenNonEmpty test wcs f
= whenIsJust (listToMaybe $ filter (test . unLoc) wcs) f
{-
*********************************************************
* *
\subsection{Contexts and predicates}
* *
*********************************************************
-}
-- | Rename the record fields of a data constructor declaration.
rnConDeclFields :: HsDocContext -> [LConDeclField RdrName]
                -> RnM ([LConDeclField Name], FreeVars)
rnConDeclFields doc fields = mapFvRn (rnField doc) fields
-- | Rename one record field: its selector names (as top-level
-- binders), its type, and its Haddock comment.
rnField :: HsDocContext -> LConDeclField RdrName
        -> RnM (LConDeclField Name, FreeVars)
rnField doc (L l (ConDeclField names ty haddock_doc))
  = do { new_names <- mapM lookupLocatedTopBndrRn names
       ; (new_ty, fvs) <- rnLHsType doc ty
       ; new_haddock_doc <- rnMbLHsDoc haddock_doc
       ; return (L l (ConDeclField new_names new_ty new_haddock_doc), fvs) }
-- | Rename a class/instance context (a located list of predicates).
rnContext :: HsDocContext -> LHsContext RdrName -> RnM (LHsContext Name, FreeVars)
rnContext doc (L loc cxt)
  = do { (cxt', fvs) <- rnLHsTypes doc cxt
       ; return (L loc cxt', fvs) }
{-
************************************************************************
* *
Fixities and precedence parsing
* *
************************************************************************
@mkOpAppRn@ deals with operator fixities. The argument expressions
are assumed to be already correctly arranged. It needs the fixities
recorded in the OpApp nodes, because fixity info applies to the things
the programmer actually wrote, so you can't find it out from the Name.
Furthermore, the second argument is guaranteed not to be another
operator application. Why? Because the parser parses all
operator applications left-associatively, EXCEPT negation, which
we need to handle specially.
Infix types are read in a *right-associative* way, so that
a `op` b `op` c
is always read in as
a `op` (b `op` c)
mkHsOpTyRn rearranges where necessary. The two arguments
have already been renamed and rearranged. It's made rather tiresome
by the presence of ->, which is a separate syntactic construct.
-}
---------------
-- Building (ty1 `op1` (ty21 `op2` ty22))
-- | Fix up the associativity of an infix type.  The right operand may
-- itself be an 'HsOpTy' or an 'HsFunTy'; compare fixities and rearrange
-- to the left where required (see the module comment on infix types).
mkHsOpTyRn :: (LHsType Name -> LHsType Name -> HsType Name)
-> Name -> Fixity -> LHsType Name -> LHsType Name
-> RnM (HsType Name)
-- Right operand is itself an infix type: look up its fixity and decide.
mkHsOpTyRn mk1 pp_op1 fix1 ty1 (L loc2 (HsOpTy ty21 (w2, op2) ty22))
= do { fix2 <- lookupTyFixityRn op2
; mk_hs_op_ty mk1 pp_op1 fix1 ty1
(\t1 t2 -> HsOpTy t1 (w2, op2) t2)
(unLoc op2) fix2 ty21 ty22 loc2 }
-- Right operand is a function arrow; (->) has its own fixed fixity.
mkHsOpTyRn mk1 pp_op1 fix1 ty1 (L loc2 (HsFunTy ty21 ty22))
= mk_hs_op_ty mk1 pp_op1 fix1 ty1
HsFunTy funTyConName funTyFixity ty21 ty22 loc2
mkHsOpTyRn mk1 _ _ ty1 ty2 -- Default case, no rearrangement
= return (mk1 ty1 ty2)
---------------
-- | Shared worker for 'mkHsOpTyRn'.  Given the outer operator
-- (mk1\/op1\/fix1) and the inner one (mk2\/op2\/fix2), compare fixities and
-- either keep the right-associated shape or rebuild as
-- ((ty1 `op1` ty21) `op2` ty22).
mk_hs_op_ty :: (LHsType Name -> LHsType Name -> HsType Name)
-> Name -> Fixity -> LHsType Name
-> (LHsType Name -> LHsType Name -> HsType Name)
-> Name -> Fixity -> LHsType Name -> LHsType Name -> SrcSpan
-> RnM (HsType Name)
mk_hs_op_ty mk1 op1 fix1 ty1
mk2 op2 fix2 ty21 ty22 loc2
-- On a fixity clash, report the error but still return a plausible tree
-- so renaming can continue.
| nofix_error = do { precParseErr (op1,fix1) (op2,fix2)
; return (mk1 ty1 (L loc2 (mk2 ty21 ty22))) }
| associate_right = return (mk1 ty1 (L loc2 (mk2 ty21 ty22)))
| otherwise = do { -- Rearrange to ((ty1 `op1` ty21) `op2` ty22)
new_ty <- mkHsOpTyRn mk1 op1 fix1 ty1 ty21
; return (mk2 (noLoc new_ty) ty22) }
where
(nofix_error, associate_right) = compareFixity fix1 fix2
---------------------------
-- | Rearrange an operator application according to the recorded fixities.
-- The left operand is already rearranged; the right operand is not an
-- OpApp (see the module comment above).  If the guards of an equation all
-- fail, matching falls through to the next equation, ending at the
-- default case.
mkOpAppRn :: LHsExpr Name -- Left operand; already rearranged
-> LHsExpr Name -> Fixity -- Operator and fixity
-> LHsExpr Name -- Right operand (not an OpApp, but might
-- be a NegApp)
-> RnM (HsExpr Name)
-- (e11 `op1` e12) `op2` e2
mkOpAppRn e1@(L _ (OpApp e11 op1 fix1 e12)) op2 fix2 e2
| nofix_error
= do precParseErr (get_op op1,fix1) (get_op op2,fix2)
return (OpApp e1 op2 fix2 e2)
| associate_right = do
new_e <- mkOpAppRn e12 op2 fix2 e2
return (OpApp e11 op1 fix1 (L loc' new_e))
where
loc'= combineLocs e12 e2
(nofix_error, associate_right) = compareFixity fix1 fix2
---------------------------
-- (- neg_arg) `op` e2
mkOpAppRn e1@(L _ (NegApp neg_arg neg_name)) op2 fix2 e2
| nofix_error
= do precParseErr (negateName,negateFixity) (get_op op2,fix2)
return (OpApp e1 op2 fix2 e2)
| associate_right
= do new_e <- mkOpAppRn neg_arg op2 fix2 e2
return (NegApp (L loc' new_e) neg_name)
where
loc' = combineLocs neg_arg e2
(nofix_error, associate_right) = compareFixity negateFixity fix2
---------------------------
-- e1 `op` - neg_arg
mkOpAppRn e1 op1 fix1 e2@(L _ (NegApp _ _)) -- NegApp can occur on the right
| not associate_right -- We *want* right association
= do precParseErr (get_op op1, fix1) (negateName, negateFixity)
return (OpApp e1 op1 fix1 e2)
where
(_, associate_right) = compareFixity fix1 negateFixity
---------------------------
-- Default case
mkOpAppRn e1 op fix e2 -- Default case, no rearrangement
= ASSERT2( right_op_ok fix (unLoc e2),
ppr e1 $$ text "---" $$ ppr op $$ text "---" $$ ppr fix $$ text "---" $$ ppr e2
)
return (OpApp e1 op fix e2)
----------------------------
-- | Extract the 'Name' of an operator expression.
-- An unbound name could be either HsVar or HsUnboundVar
-- See RnExpr.rnUnboundVar
get_op :: LHsExpr Name -> Name
get_op (L _ (HsVar n)) = n
get_op (L _ (HsUnboundVar occ)) = mkUnboundName (mkRdrUnqual occ)
get_op other = pprPanic "get_op" (ppr other)
-- Parser left-associates everything, but
-- derived instances may have correctly-associated things
-- in the right operand. So we just check that the right operand is OK
-- | Sanity check used by the default case of 'mkOpAppRn': a right operand
-- that is itself an OpApp is acceptable only when its fixity genuinely
-- associates to the right.
right_op_ok :: Fixity -> HsExpr Name -> Bool
right_op_ok fix1 (OpApp _ _ fix2 _)
= not error_please && associate_right
where
(error_please, associate_right) = compareFixity fix1 fix2
right_op_ok _ _
= True
-- Parser initially makes negation bind more tightly than any other operator
-- And "deriving" code should respect this (use HsPar if not)
-- | Build a negation application, asserting that the argument is not an
-- operator application (which would indicate a mis-associated tree).
mkNegAppRn :: LHsExpr id -> SyntaxExpr id -> RnM (HsExpr id)
mkNegAppRn neg_arg neg_name
= ASSERT( not_op_app (unLoc neg_arg) )
return (NegApp neg_arg neg_name)
-- | True unless the expression is an operator application.
not_op_app :: HsExpr id -> Bool
not_op_app expr = case expr of
  OpApp _ _ _ _ -> False
  _             -> True
---------------------------
-- | Rearrange an arrow-command operator form ('HsCmdArrForm') according to
-- fixities, mirroring 'mkOpAppRn' for expressions.
mkOpFormRn :: LHsCmdTop Name -- Left operand; already rearranged
-> LHsExpr Name -> Fixity -- Operator and fixity
-> LHsCmdTop Name -- Right operand (not an infix)
-> RnM (HsCmd Name)
-- (e11 `op1` e12) `op2` e2
mkOpFormRn a1@(L loc (HsCmdTop (L _ (HsCmdArrForm op1 (Just fix1) [a11,a12])) _ _ _))
op2 fix2 a2
| nofix_error
= do precParseErr (get_op op1,fix1) (get_op op2,fix2)
return (HsCmdArrForm op2 (Just fix2) [a1, a2])
| associate_right
= do new_c <- mkOpFormRn a12 op2 fix2 a2
return (HsCmdArrForm op1 (Just fix1)
[a11, L loc (HsCmdTop (L loc new_c)
placeHolderType placeHolderType [])])
-- TODO: locs are wrong
where
(nofix_error, associate_right) = compareFixity fix1 fix2
-- Default case
mkOpFormRn arg1 op fix arg2 -- Default case, no rearrangement
= return (HsCmdArrForm op (Just fix) [arg1, arg2])
--------------------------------------
-- | Rearrange an infix constructor pattern according to fixities,
-- mirroring 'mkOpAppRn' for patterns.
mkConOpPatRn :: Located Name -> Fixity -> LPat Name -> LPat Name
-> RnM (Pat Name)
mkConOpPatRn op2 fix2 p1@(L loc (ConPatIn op1 (InfixCon p11 p12))) p2
= do { fix1 <- lookupFixityRn (unLoc op1)
; let (nofix_error, associate_right) = compareFixity fix1 fix2
-- On a clash, report and keep the parsed (left-leaning) shape.
; if nofix_error then do
{ precParseErr (unLoc op1,fix1) (unLoc op2,fix2)
; return (ConPatIn op2 (InfixCon p1 p2)) }
else if associate_right then do
{ new_p <- mkConOpPatRn op2 fix2 p12 p2
; return (ConPatIn op1 (InfixCon p11 (L loc new_p))) } -- XXX loc right?
else return (ConPatIn op2 (InfixCon p1 p2)) }
mkConOpPatRn op _ p1 p2 -- Default case, no rearrangement
= ASSERT( not_op_pat (unLoc p2) )
return (ConPatIn op (InfixCon p1 p2))
-- | True unless the pattern is an infix constructor application.
not_op_pat :: Pat Name -> Bool
not_op_pat pat = case pat of
  ConPatIn _ (InfixCon _ _) -> False
  _                         -> True
--------------------------------------
checkPrecMatch :: Name -> MatchGroup Name body -> RnM ()
-- Check precedence of a function binding written infix
-- eg a `op` b `C` c = ...
-- See comments with rnExpr (OpApp ...) about "deriving"
checkPrecMatch op (MG { mg_alts = ms })
= mapM_ check ms
where
-- Check the two outermost patterns of each equation against op's fixity.
check (L _ (Match _ (L l1 p1 : L l2 p2 :_) _ _))
= setSrcSpan (combineSrcSpans l1 l2) $
do checkPrec op p1 False
checkPrec op p2 True
check _ = return ()
-- This can happen. Consider
-- a `op` True = ...
-- op = ...
-- The infix flag comes from the first binding of the group
-- but the second eqn has no args (an error, but not discovered
-- until the type checker). So we don't want to crash on the
-- second eqn.
-- | Check that an infix-constructor pattern argument respects the outer
-- operator's precedence; @right@ says whether the pattern is the right
-- operand.  NOTE(review): the signature spells out
-- @IOEnv (Env TcGblEnv TcLclEnv)@, which appears to be the expansion of
-- 'RnM' used elsewhere in this file -- confirm before changing it.
checkPrec :: Name -> Pat Name -> Bool -> IOEnv (Env TcGblEnv TcLclEnv) ()
checkPrec op (ConPatIn op1 (InfixCon _ _)) right = do
op_fix@(Fixity op_prec op_dir) <- lookupFixityRn op
op1_fix@(Fixity op1_prec op1_dir) <- lookupFixityRn (unLoc op1)
let
-- Inner operator must bind more tightly, or equally tightly with the
-- matching associativity for the side it appears on.
inf_ok = op1_prec > op_prec ||
(op1_prec == op_prec &&
(op1_dir == InfixR && op_dir == InfixR && right ||
op1_dir == InfixL && op_dir == InfixL && not right))
info = (op, op_fix)
info1 = (unLoc op1, op1_fix)
(infol, infor) = if right then (info, info1) else (info1, info)
unless inf_ok (precParseErr infol infor)
checkPrec _ _ _
= return ()
-- Check precedence of (arg op) or (op arg) respectively
-- If arg is itself an operator application, then either
-- (a) its precedence must be higher than that of op
-- (b) its precedence & associativity must be the same as that of op
checkSectionPrec :: FixityDirection -> HsExpr RdrName
-> LHsExpr Name -> LHsExpr Name -> RnM ()
checkSectionPrec direction section op arg
= case unLoc arg of
-- NB: the 'op' bound in this alternative shadows the 'op' parameter;
-- 'op_name' below still refers to the parameter.
OpApp _ op fix _ -> go_for_it (get_op op) fix
NegApp _ _ -> go_for_it negateName negateFixity
_ -> return ()
where
op_name = get_op op
go_for_it arg_op arg_fix@(Fixity arg_prec assoc) = do
op_fix@(Fixity op_prec _) <- lookupFixityRn op_name
unless (op_prec < arg_prec
|| (op_prec == arg_prec && direction == assoc))
(sectionPrecErr (op_name, op_fix)
(arg_op, arg_fix) section)
-- Precedence-related error messages
-- | Report a fixity clash between two operators in the same infix
-- expression.  Suppressed when either name is unbound, since the user
-- already got an out-of-scope error.
precParseErr :: (Name, Fixity) -> (Name, Fixity) -> RnM ()
precParseErr op1@(n1,_) op2@(n2,_)
| isUnboundName n1 || isUnboundName n2
= return () -- Avoid error cascade
| otherwise
= addErr $ hang (ptext (sLit "Precedence parsing error"))
4 (hsep [ptext (sLit "cannot mix"), ppr_opfix op1, ptext (sLit "and"),
ppr_opfix op2,
ptext (sLit "in the same infix expression")])
-- | Report a section whose operator does not have lower precedence than
-- its operand's operator.  Suppressed for unbound names (error cascade).
sectionPrecErr :: (Name, Fixity) -> (Name, Fixity) -> HsExpr RdrName -> RnM ()
sectionPrecErr op@(n1,_) arg_op@(n2,_) section
| isUnboundName n1 || isUnboundName n2
= return () -- Avoid error cascade
| otherwise
= addErr $ vcat [ptext (sLit "The operator") <+> ppr_opfix op <+> ptext (sLit "of a section"),
nest 4 (sep [ptext (sLit "must have lower precedence than that of the operand,"),
nest 2 (ptext (sLit "namely") <+> ppr_opfix arg_op)]),
nest 4 (ptext (sLit "in the section:") <+> quotes (ppr section))]
-- | Render an operator together with its fixity, e.g. @`op` [infixl 6]@;
-- the negate operator is shown specially as prefix minus.
ppr_opfix :: (Name, Fixity) -> SDoc
ppr_opfix (op, fixity) = pp_op <+> brackets (ppr fixity)
  where
    pp_op = if op == negateName
              then ptext (sLit "prefix `-'")
              else quotes (ppr op)
{-
*********************************************************
* *
\subsection{Errors}
* *
*********************************************************
-}
-- | Warn (under -fwarn-unused-matches) about explicitly quantified type
-- variables that are never mentioned in the body of the type.
warnUnusedForAlls :: SDoc -> LHsTyVarBndrs RdrName -> [RdrName] -> TcM ()
warnUnusedForAlls in_doc bound mentioned_rdrs
= whenWOptM Opt_WarnUnusedMatches $
mapM_ add_warn bound_but_not_used
where
bound_names = hsLTyVarLocNames bound
bound_but_not_used = filterOut ((`elem` mentioned_rdrs) . unLoc) bound_names
add_warn (L loc tv)
= addWarnAt loc $
vcat [ ptext (sLit "Unused quantified type variable") <+> quotes (ppr tv)
, in_doc ]
-- | Warn (under -fwarn-context-quantification) about type variables that
-- are quantified implicitly only because they appear in a context.
warnContextQuantification :: SDoc -> [LHsTyVarBndr RdrName] -> TcM ()
warnContextQuantification in_doc tvs
= whenWOptM Opt_WarnContextQuantification $
mapM_ add_warn tvs
where
add_warn (L loc tv)
= addWarnAt loc $
vcat [ ptext (sLit "Variable") <+> quotes (ppr tv) <+>
ptext (sLit "is implicitly quantified due to a context") $$
ptext (sLit "Use explicit forall syntax instead.") $$
ptext (sLit "This will become an error in GHC 7.12.")
, in_doc ]
-- | Error for an operator used in a type without TypeOperators.  If the
-- operator is @.@ following what looks like a forall, suggest the forall
-- syntax instead.
opTyErr :: RdrName -> HsType RdrName -> SDoc
opTyErr op ty@(HsOpTy ty1 _ _)
= hang (ptext (sLit "Illegal operator") <+> quotes (ppr op) <+> ptext (sLit "in type") <+> quotes (ppr ty))
2 extra
where
extra | op == dot_tv_RDR && forall_head ty1
= perhapsForallMsg
| otherwise
= ptext (sLit "Use TypeOperators to allow operators in types")
-- Does the left operand look like a (possibly applied) forall?
forall_head (L _ (HsTyVar tv)) = tv == forall_tv_RDR
forall_head (L _ (HsAppTy ty _)) = forall_head ty
forall_head _other = False
opTyErr _ ty = pprPanic "opTyErr: Not an op" (ppr ty)
{-
************************************************************************
* *
Finding the free type variables of a (HsType RdrName)
* *
************************************************************************
Note [Kind and type-variable binders]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In a type signature we may implicitly bind type variable and, more
recently, kind variables. For example:
* f :: a -> a
f = ...
Here we need to find the free type variables of (a -> a),
so that we know what to quantify
* class C (a :: k) where ...
This binds 'k' in ..., as well as 'a'
* f (x :: a -> [a]) = ....
Here we bind 'a' in ....
* f (x :: T a -> T (b :: k)) = ...
Here we bind both 'a' and the kind variable 'k'
* type instance F (T (a :: Maybe k)) = ...a...k...
Here we want to constrain the kind of 'a', and bind 'k'.
In general we want to walk over a type, and find
* Its free type variables
* The free kind variables of any kind signatures in the type
Hence we return a pair (kind-vars, type vars)
See also Note [HsBSig binder lists] in HsTypes
-}
-- | Free variables split into (kind variables, type variables).
-- See Note [Kind and type-variable binders] above.
type FreeKiTyVars = ([RdrName], [RdrName])
-- | Drop any variables that are already bound in the local rdr environment.
filterInScope :: LocalRdrEnv -> FreeKiTyVars -> FreeKiTyVars
filterInScope rdr_env (kvs, tvs)
= (filterOut in_scope kvs, filterOut in_scope tvs)
where
in_scope tv = tv `elemLocalRdrEnv` rdr_env
-- | Find the free (kind, type) variables of a HsType (or the free
-- (sort, kind) variables of a HsKind), each list de-duplicated.
-- It's used when making the for-alls explicit.
-- See Note [Kind and type-variable binders]
extractHsTyRdrTyVars :: LHsType RdrName -> FreeKiTyVars
extractHsTyRdrTyVars ty = (nub kvs, nub tvs)
  where
    (kvs, tvs) = extract_lty ty ([], [])
-- | As 'extractHsTyRdrTyVars', but for a list of types.
-- See Note [Kind and type-variable binders]
extractHsTysRdrTyVars :: [LHsType RdrName] -> FreeKiTyVars
extractHsTysRdrTyVars tys = (nub kvs, nub tvs)
  where
    (kvs, tvs) = extract_ltys tys ([], [])
-- | Variables free in an optional kind signature; none for 'Nothing'.
extractRdrKindSigVars :: Maybe (LHsKind RdrName) -> [RdrName]
extractRdrKindSigVars = maybe [] (\k -> nub (fst (extract_lkind k ([], []))))
extractDataDefnKindVars :: HsDataDefn RdrName -> [RdrName]
-- Get the scoped kind variables mentioned free in the constructor decls
-- Eg data T a = T1 (S (a :: k)) | forall (b::k). T2 (S b)
-- Here k should scope over the whole definition
extractDataDefnKindVars (HsDataDefn { dd_ctxt = ctxt, dd_kindSig = ksig
, dd_cons = cons, dd_derivs = derivs })
-- Thread one accumulator through context, kind signature, deriving
-- clause and constructors; only the kind-variable half is returned.
= fst $ extract_lctxt ctxt $
extract_mb extract_lkind ksig $
extract_mb (extract_ltys . unLoc) derivs $
foldr (extract_con . unLoc) ([],[]) cons
where
-- GADT-style constructors carry their own explicit quantification.
extract_con (ConDecl { con_res = ResTyGADT {} }) acc = acc
extract_con (ConDecl { con_res = ResTyH98, con_qvars = qvs
, con_cxt = ctxt, con_details = details }) acc
= extract_hs_tv_bndrs qvs acc $
extract_lctxt ctxt $
extract_ltys (hsConDeclArgTys details) ([],[])
-- | Accumulate the free variables of every type in a context.
extract_lctxt :: LHsContext RdrName -> FreeKiTyVars -> FreeKiTyVars
extract_lctxt (L _ ctxt) acc = extract_ltys ctxt acc
-- | Fold 'extract_lty' over a list of types, right to left.
extract_ltys :: [LHsType RdrName] -> FreeKiTyVars -> FreeKiTyVars
extract_ltys = flip (foldr extract_lty)
-- | Apply an extractor to a 'Just' payload; pass the accumulator through
-- unchanged for 'Nothing'.
extract_mb :: (a -> FreeKiTyVars -> FreeKiTyVars) -> Maybe a -> FreeKiTyVars -> FreeKiTyVars
extract_mb f mb acc = maybe acc (\x -> f x acc) mb
-- | Extract variables of a kind: variables free in a kind go into the
-- kind-variable half of the accumulator, so the kind is walked with the
-- accumulator halves swapped and the type-variable result re-labelled.
extract_lkind :: LHsType RdrName -> FreeKiTyVars -> FreeKiTyVars
extract_lkind kind (acc_kvs, acc_tvs) = case extract_lty kind ([], acc_kvs) of
(_, res_kvs) -> (res_kvs, acc_tvs)
-- Kinds shouldn't have sort signatures!
-- | Workhorse: walk a type, accumulating free kind and type variables.
-- Kind signatures are walked with 'extract_lkind' so their variables land
-- in the kind half of the accumulator.
extract_lty :: LHsType RdrName -> FreeKiTyVars -> FreeKiTyVars
extract_lty (L _ ty) acc
= case ty of
HsTyVar tv -> extract_tv tv acc
HsBangTy _ ty -> extract_lty ty acc
HsRecTy flds -> foldr (extract_lty . cd_fld_type . unLoc) acc
flds
HsAppTy ty1 ty2 -> extract_lty ty1 (extract_lty ty2 acc)
HsListTy ty -> extract_lty ty acc
HsPArrTy ty -> extract_lty ty acc
HsTupleTy _ tys -> extract_ltys tys acc
HsFunTy ty1 ty2 -> extract_lty ty1 (extract_lty ty2 acc)
HsIParamTy _ ty -> extract_lty ty acc
HsEqTy ty1 ty2 -> extract_lty ty1 (extract_lty ty2 acc)
HsOpTy ty1 (_, (L _ tv)) ty2 -> extract_tv tv (extract_lty ty1 (extract_lty ty2 acc))
HsParTy ty -> extract_lty ty acc
HsCoreTy {} -> acc -- The type is closed
HsSpliceTy {} -> acc -- Type splices mention no type variables
HsDocTy ty _ -> extract_lty ty acc
HsExplicitListTy _ tys -> extract_ltys tys acc
HsExplicitTupleTy _ tys -> extract_ltys tys acc
HsTyLit _ -> acc
HsWrapTy _ _ -> panic "extract_lty" -- renamer output only; impossible here
HsKindSig ty ki -> extract_lty ty (extract_lkind ki acc)
-- A forall brackets the variables it binds; see extract_hs_tv_bndrs.
HsForAllTy _ _ tvs cx ty -> extract_hs_tv_bndrs tvs acc $
extract_lctxt cx $
extract_lty ty ([],[])
-- We deal with these separately in rnLHsTypeWithWildCards
HsWildCardTy _ -> acc
-- | Combine the accumulated variables with those free in the body of a
-- forall, removing body occurrences of variables the forall itself binds.
extract_hs_tv_bndrs :: LHsTyVarBndrs RdrName -> FreeKiTyVars
-> FreeKiTyVars -> FreeKiTyVars
extract_hs_tv_bndrs (HsQTvs { hsq_tvs = tvs })
(acc_kvs, acc_tvs) -- Note accumulator comes first
(body_kvs, body_tvs)
-- No explicit binders: nothing to filter out.
| null tvs
= (body_kvs ++ acc_kvs, body_tvs ++ acc_tvs)
| otherwise
= (acc_kvs ++ filterOut (`elem` local_kvs) body_kvs,
acc_tvs ++ filterOut (`elem` local_tvs) body_tvs)
where
local_tvs = map hsLTyVarName tvs
(_, local_kvs) = foldr extract_lty ([], []) [k | L _ (KindedTyVar _ k) <- tvs]
-- These kind variables are bound here if not bound further out
-- | Add a name to the type-variable half of the accumulator, but only if
-- it really is a type variable; other names pass the accumulator through.
extract_tv :: RdrName -> FreeKiTyVars -> FreeKiTyVars
extract_tv tv acc
  | isRdrTyVar tv = let (kvs, tvs) = acc in (kvs, tv : tvs)
  | otherwise     = acc
|
urbanslug/ghc
|
compiler/rename/RnTypes.hs
|
Haskell
|
bsd-3-clause
| 49,512
|
{-# LANGUAGE AllowAmbiguousTypes #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE DataKinds #-}
{-# OPTIONS_GHC -Wall #-}
module Main where
import Tower.Prelude
import Tower.N
import Test.Tasty (TestName, TestTree, testGroup, defaultMain, localOption)
import Test.Tasty.QuickCheck
import Test.DocTest
-- import Test.QuickCheck
-- | Run the doctests for the examples module, then the Tasty test tree.
main :: IO ()
main = doctest ["src/Tower/Examples.hs"] >> defaultMain tests
-- | The arity of a law over a single type @a@: a constant, or a predicate
-- over one to four values, or ('Failiary') a property expected to fail.
data LawArity a =
Nonary Bool |
Unary (a -> Bool) |
Binary (a -> a -> Bool) |
Ternary (a -> a -> a -> Bool) |
Ornary (a -> a -> a -> a -> Bool) |
Failiary (a -> Property)
-- | The arity of a law over two types @a@ and @b@ (e.g. module laws over a
-- container and its scalar); 'Failiary2' marks an expected failure.
data LawArity2 a b =
Unary2 (a -> Bool) |
Binary2 (a -> b -> Bool) |
Ternary2 (a -> a -> b -> Bool) |
Ternary2' (a -> b -> b -> Bool) |
Failiary2 (a -> Property)
-- | A named law over one type.
type Law a = (TestName, LawArity a)
-- | A named law over two types.
type Law2 a b = (TestName, LawArity2 a b)
-- | Turn a named law into a QuickCheck test; the list argument only pins
-- down the type @a@ and is never inspected.
testLawOf :: (Arbitrary a, Show a) => [a] -> Law a -> TestTree
testLawOf _ (name, arity) = case arity of
  Nonary p   -> testProperty name p
  Unary p    -> testProperty name p
  Binary p   -> testProperty name p
  Ternary p  -> testProperty name p
  Ornary p   -> testProperty name p
  Failiary p -> testProperty name p
-- | Turn a named two-type law into a QuickCheck test; the list argument
-- only pins down the types @a@ and @b@ and is never inspected.
testLawOf2 :: (Arbitrary a, Show a, Arbitrary b, Show b) =>
  [(a,b)] -> Law2 a b -> TestTree
testLawOf2 _ (name, arity) = case arity of
  Unary2 p    -> testProperty name p
  Binary2 p   -> testProperty name p
  Ternary2 p  -> testProperty name p
  Ternary2' p -> testProperty name p
  Failiary2 p -> testProperty name p
-- | Top-level test tree: one group per tested type.
tests :: TestTree
tests =
testGroup "Tower"
[ testsInt
, testsFloat
, testsBool
, testsVInt
, testsVFloat
, testsMInt
, testsMFloat
, testsNInt
]
-- | Law groups for 'Int'; the @([]::[Int])@ lists only fix the type.
testsInt :: TestTree
testsInt = testGroup "Int"
[ testGroup "Additive" $ testLawOf ([]::[Int]) <$>
additiveLaws
, testGroup "Additive Group" $ testLawOf ([]::[Int]) <$>
additiveGroupLaws
, testGroup "Multiplicative" $ testLawOf ([]::[Int]) <$>
multiplicativeLaws
, testGroup "Distribution" $ testLawOf ([]::[Int])
<$> distributionLaws
, testGroup "Integral" $ testLawOf ([]::[Int]) <$>
integralLaws
, testGroup "Signed" $ testLawOf ([]::[Int]) <$>
signedLaws
]
-- | Law groups for 'Float'; associativity/distribution groups use the
-- expected-failure variants since floating point is not associative.
testsFloat :: TestTree
testsFloat = testGroup "Float"
[ testGroup "Additive - Associative Fail" $ testLawOf ([]::[Float]) <$>
additiveLawsFail
, testGroup "Additive Group" $ testLawOf ([]::[Float]) <$>
additiveGroupLaws
, testGroup "Multiplicative - Associative Fail" $
testLawOf ([]::[Float]) <$>
multiplicativeLawsFail
, testGroup "MultiplicativeGroup" $ testLawOf ([]::[Float]) <$>
multiplicativeGroupLaws
, testGroup "Distribution - Fail" $ testLawOf ([]::[Float]) <$>
distributionLawsFail
, testGroup "Signed" $ testLawOf ([]::[Float]) <$>
signedLaws
, testGroup "Bounded Field" $ testLawOf ([]::[Float]) <$>
boundedFieldLaws
, testGroup "Metric" $ testLawOf ([]::[Float]) <$> metricFloatLaws
, testGroup "Quotient Field" $ testLawOf ([]::[Float]) <$>
quotientFieldLaws
, testGroup "Exponential Ring" $ testLawOf ([]::[Float]) <$> expRingLaws
, testGroup "Exponential Field" $ testLawOf ([]::[Float]) <$> expFieldLaws
]
-- | Law groups for 'Bool' (a Boolean ring: additionally idempotent).
testsBool :: TestTree
testsBool = testGroup "Bool"
[ testGroup "Idempotent" $ testLawOf ([]::[Bool]) <$>
idempotentLaws
, testGroup "Additive" $ testLawOf ([]::[Bool]) <$>
additiveLaws
, testGroup "Multiplicative" $ testLawOf ([]::[Bool]) <$>
multiplicativeLaws
, testGroup "Distribution" $ testLawOf ([]::[Bool])
<$> distributionLaws
]
-- | Law groups for the fixed-size vector @V 6 Int@, including module and
-- basis laws over the scalar 'Int'.
testsVInt :: TestTree
testsVInt = testGroup "V 6 Int"
[ testGroup "Additive" $ testLawOf ([]::[V 6 Int]) <$>
additiveLaws
, testGroup "Additive Group" $ testLawOf ([]::[V 6 Int]) <$>
additiveGroupLaws
, testGroup "Multiplicative" $ testLawOf ([]::[V 6 Int]) <$>
multiplicativeLaws
, testGroup "Distribution" $ testLawOf ([]::[V 6 Int])
<$> distributionLaws
, testGroup "Additive Module" $ testLawOf2 ([]::[(V 6 Int, Int)]) <$>
additiveModuleLaws
, testGroup "Additive Group Module" $ testLawOf2 ([]::[(V 6 Int, Int)]) <$>
additiveGroupModuleLaws
, testGroup "Multiplicative Module" $ testLawOf2 ([]::[(V 6 Int, Int)]) <$>
multiplicativeModuleLaws
, testGroup "Additive Basis" $ testLawOf ([]::[V 6 Int]) <$>
additiveBasisLaws
, testGroup "Additive Group Basis" $ testLawOf ([]::[V 6 Int]) <$>
additiveGroupBasisLaws
, testGroup "Multiplicative Basis" $ testLawOf ([]::[V 6 Int]) <$>
multiplicativeBasisLaws
]
-- | Law groups for the fixed-size matrix @M 4 3 Int@, mirroring
-- 'testsVInt'.
testsMInt :: TestTree
testsMInt = testGroup "M 4 3 Int"
[ testGroup "Additive" $ testLawOf ([]::[M 4 3 Int]) <$>
additiveLaws
, testGroup "Additive Group" $ testLawOf ([]::[M 4 3 Int]) <$>
additiveGroupLaws
, testGroup "Multiplicative" $ testLawOf ([]::[M 4 3 Int]) <$>
multiplicativeLaws
, testGroup "Distribution" $ testLawOf ([]::[M 4 3 Int])
<$> distributionLaws
, testGroup "Additive Module" $ testLawOf2 ([]::[(M 4 3 Int, Int)]) <$>
additiveModuleLaws
, testGroup "Additive Group Module" $ testLawOf2 ([]::[(M 4 3 Int, Int)]) <$>
additiveGroupModuleLaws
, testGroup "Multiplicative Module" $ testLawOf2 ([]::[(M 4 3 Int, Int)]) <$>
multiplicativeModuleLaws
, testGroup "Additive Basis" $ testLawOf ([]::[M 4 3 Int]) <$>
additiveBasisLaws
, testGroup "Additive Group Basis" $ testLawOf ([]::[M 4 3 Int]) <$>
additiveGroupBasisLaws
, testGroup "Multiplicative Basis" $ testLawOf ([]::[M 4 3 Int]) <$>
multiplicativeBasisLaws
]
-- | Law groups for the shaped array @N [2,3,2] Int@, mirroring
-- 'testsVInt'.
testsNInt :: TestTree
testsNInt = testGroup "N [2,3,2] Int"
[ testGroup "Additive" $ testLawOf ([]::[N [2,3,2] Int]) <$>
additiveLaws
, testGroup "Additive Group" $ testLawOf ([]::[N [2,3,2] Int]) <$>
additiveGroupLaws
, testGroup "Multiplicative" $ testLawOf ([]::[N [2,3,2] Int]) <$>
multiplicativeLaws
, testGroup "Distribution" $ testLawOf ([]::[N [2,3,2] Int])
<$> distributionLaws
, testGroup "Additive Module" $ testLawOf2 ([]::[(N [2,3,2] Int, Int)]) <$>
additiveModuleLaws
, testGroup "Additive Group Module" $ testLawOf2 ([]::[(N [2,3,2] Int, Int)]) <$>
additiveGroupModuleLaws
, testGroup "Multiplicative Module" $ testLawOf2 ([]::[(N [2,3,2] Int, Int)]) <$>
multiplicativeModuleLaws
, testGroup "Additive Basis" $ testLawOf ([]::[N [2,3,2] Int]) <$>
additiveBasisLaws
, testGroup "Additive Group Basis" $ testLawOf ([]::[N [2,3,2] Int]) <$>
additiveGroupBasisLaws
, testGroup "Multiplicative Basis" $ testLawOf ([]::[N [2,3,2] Int]) <$>
multiplicativeBasisLaws
]
-- | Law groups for @V 6 Float@.  Associativity-sensitive groups use the
-- expected-failure law variants and bump the QuickCheck test count so the
-- failure is actually found.
testsVFloat :: TestTree
testsVFloat = testGroup "V 6 Float"
[ testGroup "Additive - Associative" $
localOption (QuickCheckTests 1000) . testLawOf ([]::[V 6 Float]) <$>
additiveLawsFail
, testGroup "Additive Group" $
testLawOf ([]::[V 6 Float]) <$>
additiveGroupLaws
, testGroup "Multiplicative - Associative" $
localOption (QuickCheckTests 1000) . testLawOf ([]::[V 6 Float]) <$>
multiplicativeLawsFail
, testGroup "MultiplicativeGroup" $ testLawOf ([]::[V 6 Float]) <$>
multiplicativeGroupLaws
, testGroup "Distribution" $
localOption (QuickCheckTests 1000) . testLawOf ([]::[V 6 Float]) <$>
distributionLawsFail
, testGroup "Signed" $ testLawOf ([]::[V 6 Float]) <$>
signedLaws
, testGroup "Metric" $ testLawOf ([]::[V 6 Float]) <$> metricRepFloatLaws
, testGroup "Exponential Ring" $ testLawOf ([]::[V 6 Float]) <$> expRingRepLaws
, testGroup "Exponential Field" $ testLawOf ([]::[V 6 Float]) <$> expFieldRepLaws
, testGroup "Additive Module" $ localOption (QuickCheckTests 1000) .
testLawOf2 ([]::[(V 6 Float, Float)]) <$>
additiveModuleLawsFail
, testGroup "Additive Group Module" $ localOption (QuickCheckTests 1000) .
testLawOf2 ([]::[(V 6 Float, Float)]) <$>
additiveGroupModuleLawsFail
, testGroup "Multiplicative Module" $ localOption (QuickCheckTests 1000) .
testLawOf2 ([]::[(V 6 Float, Float)]) <$>
multiplicativeModuleLawsFail
, testGroup "Multiplicative Group Module" $
testLawOf2 ([]::[(V 6 Float, Float)]) <$>
multiplicativeGroupModuleLaws
, testGroup "Additive Basis" $ testLawOf ([]::[V 6 Float]) <$>
additiveBasisLaws
, testGroup "Additive Group Basis" $ testLawOf ([]::[V 6 Float]) <$>
additiveGroupBasisLaws
, testGroup "Multiplicative Basis" $ localOption (QuickCheckTests 1000) .
testLawOf ([]::[V 6 Float]) <$>
multiplicativeBasisLawsFail
, testGroup "Multiplicative Group Basis" $ testLawOf ([]::[V 6 Float]) <$>
multiplicativeGroupBasisLaws
, testGroup "Banach" $ testLawOf2 ([]::[(V 6 Float, Float)]) <$>
banachLaws
]
-- | Law groups for @M 4 3 Float@, mirroring 'testsVFloat' (expected
-- failures with a raised QuickCheck test count for float associativity).
testsMFloat :: TestTree
testsMFloat = testGroup "M 4 3 Float"
[ testGroup "Additive - Associative - Failure" $
localOption (QuickCheckTests 1000) . testLawOf ([]::[M 4 3 Float]) <$>
additiveLawsFail
, testGroup "Additive Group" $ testLawOf ([]::[M 4 3 Float]) <$>
additiveGroupLaws
, testGroup "Multiplicative - Associative Failure" $
localOption (QuickCheckTests 1000) . testLawOf ([]::[M 4 3 Float]) <$>
multiplicativeLawsFail
, testGroup "MultiplicativeGroup" $ testLawOf ([]::[M 4 3 Float]) <$>
multiplicativeGroupLaws
, testGroup "Distribution - Fail" $
localOption (QuickCheckTests 1000) . testLawOf ([]::[M 4 3 Float]) <$>
distributionLawsFail
, testGroup "Signed" $ testLawOf ([]::[M 4 3 Float]) <$>
signedLaws
, testGroup "Metric" $ testLawOf ([]::[M 4 3 Float]) <$> metricRepFloatLaws
, testGroup "Exponential Ring" $ testLawOf ([]::[M 4 3 Float]) <$> expRingRepLaws
, testGroup "Exponential Field" $ testLawOf ([]::[M 4 3 Float]) <$> expFieldRepLaws
, testGroup "Additive Module" $ testLawOf2 ([]::[(M 4 3 Float, Float)]) <$>
additiveModuleLaws
, testGroup "Additive Group Module" $ testLawOf2 ([]::[(M 4 3 Float, Float)]) <$>
additiveGroupModuleLaws
, testGroup "Multiplicative Module" $
localOption (QuickCheckTests 1000) .
testLawOf2 ([]::[(M 4 3 Float, Float)]) <$>
multiplicativeModuleLawsFail
, testGroup "Multiplicative Group Module" $ testLawOf2 ([]::[(M 4 3 Float, Float)]) <$>
multiplicativeGroupModuleLaws
, testGroup "Additive Basis" $ testLawOf ([]::[M 4 3 Float]) <$>
additiveBasisLaws
, testGroup "Additive Group Basis" $ testLawOf ([]::[M 4 3 Float]) <$>
additiveGroupBasisLaws
, testGroup "Multiplicative Basis" $ localOption (QuickCheckTests 1000) .
testLawOf ([]::[M 4 3 Float]) <$>
multiplicativeBasisLawsFail
, testGroup "Multiplicative Group Basis" $ testLawOf ([]::[M 4 3 Float]) <$>
multiplicativeGroupBasisLaws
]
-- | Idempotency of (+) and (*), e.g. for 'Bool'.
idempotentLaws ::
( Eq a
, Additive a
, Multiplicative a
) => [Law a]
idempotentLaws =
[ ( "idempotent: a + a == a"
, Unary (\a -> a + a == a))
, ( "idempotent: a * a == a"
, Unary (\a -> a * a == a))
]
-- | Commutative-monoid laws for (+) with exact equality.
additiveLaws ::
( Eq a
, Additive a
) => [Law a]
additiveLaws =
[ ( "associative: (a + b) + c = a + (b + c)"
, Ternary (\a b c -> (a + b) + c == a + (b + c)))
, ("left id: zero + a = a", Unary (\a -> zero + a == a))
, ("right id: a + zero = a", Unary (\a -> a + zero == a))
, ("commutative: a + b == b + a", Binary (\a b -> a + b == b + a))
]
-- | As 'additiveLaws' but associativity only up to epsilon (for floats).
-- NOTE(review): the identity/commutativity laws here still use exact (==);
-- confirm that is intentional.
additiveLawsApprox ::
( Eq a
, Additive a
, Epsilon a
) => [Law a]
additiveLawsApprox =
[ ( "associative: (a + b) + c ≈ a + (b + c)"
, Ternary (\a b c -> (a + b) + c ≈ a + (b + c)))
, ("left id: zero + a = a", Unary (\a -> zero + a == a))
, ("right id: a + zero = a", Unary (\a -> a + zero == a))
, ("commutative: a + b == b + a", Binary (\a b -> a + b == b + a))
]
-- | As 'additiveLaws' but associativity is asserted to FAIL
-- (via 'expectFailure'), e.g. for floating-point types.
additiveLawsFail ::
( Eq a
, Additive a
, Show a
, Arbitrary a
) => [Law a]
additiveLawsFail =
[ ( "associative: (a + b) + c = a + (b + c)"
, Failiary $ expectFailure . (\a b c -> (a + b) + c == a + (b + c)))
, ("left id: zero + a = a", Unary (\a -> zero + a == a))
, ("right id: a + zero = a", Unary (\a -> a + zero == a))
, ("commutative: a + b == b + a", Binary (\a b -> a + b == b + a))
]
-- | Subtraction/negation laws for an additive group.
additiveGroupLaws ::
( Eq a
, AdditiveGroup a
) => [Law a]
additiveGroupLaws =
[ ("minus: a - a = zero", Unary (\a -> (a - a) == zero))
, ("negate minus: negate a == zero - a", Unary (\a -> negate a == zero - a))
, ("negate cancel: negate a + a == zero", Unary (\a -> negate a + a == zero))
]
-- | Commutative-monoid laws for (*) with exact equality.
multiplicativeLaws ::
( Eq a
, Multiplicative a
) => [Law a]
multiplicativeLaws =
[ ( "associative: (a * b) * c = a * (b * c)"
, Ternary (\a b c -> (a * b) * c == a * (b * c)))
, ("left id: one * a = a", Unary (\a -> one * a == a))
, ("right id: a * one = a", Unary (\a -> a * one == a))
, ("commutative: a * b == b * a", Binary (\a b -> a * b == b * a))
]
-- | As 'multiplicativeLaws' but associativity only up to epsilon.
multiplicativeLawsApprox ::
( Eq a
, Epsilon a
, Multiplicative a
) => [Law a]
multiplicativeLawsApprox =
[ ("associative: (a * b) * c ≈ a * (b * c)"
, Ternary (\a b c -> (a * b) * c ≈ a * (b * c)))
, ("left id: one * a = a", Unary (\a -> one * a == a))
, ("right id: a * one = a", Unary (\a -> a * one == a))
, ("commutative: a * b == b * a", Binary (\a b -> a * b == b * a))
]
-- | As 'multiplicativeLaws' but associativity is asserted to FAIL
-- (via 'expectFailure').
multiplicativeLawsFail ::
( Eq a
, Show a
, Arbitrary a
, Multiplicative a
) => [Law a]
multiplicativeLawsFail =
[ ("associative: (a * b) * c = a * (b * c)"
, Failiary $ expectFailure . (\a b c -> (a * b) * c == a * (b * c)))
, ("left id: one * a = a", Unary (\a -> one * a == a))
, ("right id: a * one = a", Unary (\a -> a * one == a))
, ("commutative: a * b == b * a", Binary (\a b -> a * b == b * a))
]
-- | Division/reciprocal laws; zero is excluded from the division laws and
-- comparisons are up to epsilon.
multiplicativeGroupLaws ::
( Epsilon a
, Eq a
, MultiplicativeGroup a
) => [Law a]
multiplicativeGroupLaws =
[ ( "divide: a == zero || a / a ≈ one", Unary (\a -> a == zero || (a / a) ≈ one))
, ( "recip divide: recip a == one / a", Unary (\a -> recip a == one / a))
, ( "recip left: a == zero || recip a * a ≈ one"
, Unary (\a -> a == zero || recip a * a ≈ one))
, ( "recip right: a == zero || a * recip a ≈ one"
, Unary (\a -> a == zero || a * recip a ≈ one))
]
-- | Annihilation and distributivity of `times` over (+), exact equality.
distributionLaws ::
( Eq a
, Distribution a
) => [Law a]
distributionLaws =
[ ("annihilation: a * zero == zero", Unary (\a -> a `times` zero == zero))
, ("left distributivity: a * (b + c) == a * b + a * c"
, Ternary (\a b c -> a `times` (b + c) == a `times` b + a `times` c))
, ("right distributivity: (a + b) * c == a * c + b * c"
, Ternary (\a b c -> (a + b) `times` c == a `times` c + b `times` c))
]
-- | As 'distributionLaws' but distributivity only up to epsilon.
distributionLawsApprox ::
( Epsilon a
, Eq a
, Distribution a
) => [Law a]
distributionLawsApprox =
[ ("annihilation: a * zero == zero", Unary (\a -> a `times` zero == zero))
, ("left distributivity: a * (b + c) ≈ a * b + a * c"
, Ternary (\a b c -> a `times` (b + c) ≈ a `times` b + a `times` c))
, ("right distributivity: (a + b) * c ≈ a * c + b * c"
, Ternary (\a b c -> (a + b) `times` c ≈ a `times` c + b `times` c))
]
-- | As 'distributionLaws' but distributivity is asserted to FAIL
-- (via 'expectFailure').
distributionLawsFail ::
( Show a
, Arbitrary a
, Epsilon a
, Eq a
, Distribution a
) => [Law a]
distributionLawsFail =
[ ("annihilation: a * zero == zero", Unary (\a -> a `times` zero == zero))
, ("left distributivity: a * (b + c) = a * b + a * c"
, Failiary $ expectFailure .
(\a b c -> a `times` (b + c) == a `times` b + a `times` c))
, ("right distributivity: (a + b) * c = a * c + b * c"
, Failiary $ expectFailure . (\a b c -> (a + b) `times` c == a `times` c + b `times` c))
]
-- | sign/abs decomposition law.
signedLaws ::
( Eq a
, Signed a
) => [Law a]
signedLaws =
[ ("sign a * abs a == a", Unary (\a -> sign a `times` abs a == a))
]
-- | div/mod round-trip and fromIntegral identity for integral types.
integralLaws ::
( Eq a
, Integral a
, FromInteger a
, ToInteger a
) => [Law a]
integralLaws =
[ ( "integral divmod: b == zero || b * (a `div` b) + (a `mod` b) == a"
, Binary (\a b -> b == zero || b `times` (a `div` b) + (a `mod` b) == a))
, ( "fromIntegral a = a"
, Unary (\a -> fromIntegral a == a))
]
-- | Laws involving infinity and NaN for bounded fields.
-- NOTE(review): several conjuncts are annotated @:: Float@ even though the
-- law is polymorphic in @a@ -- those conjuncts exercise Float rather than
-- @a@; confirm this is intentional before reusing at other types.
boundedFieldLaws ::
( Ord a
, BoundedField a
) => [Law a]
boundedFieldLaws =
[ ("infinity laws"
, Unary (\a ->
((one :: Float)/zero + infinity == infinity) &&
(infinity + a == infinity) &&
isNaN ((infinity :: Float) - infinity) &&
isNaN ((infinity :: Float) / infinity) &&
isNaN (nan + a) &&
(zero :: Float)/zero /= nan))
]
-- | Strictly positive and not within epsilon of zero.
prettyPositive :: (Epsilon a, Ord a) => a -> Bool
prettyPositive a = a > zero && not (nearZero a)
-- | Positive, or at least within epsilon of zero.
kindaPositive :: (Epsilon a, Ord a) => a -> Bool
kindaPositive a = a > zero || nearZero a
-- | Metric-space laws for representable containers of 'Float':
-- non-negativity, identity of indiscernibles (one direction), symmetry
-- and the triangle inequality (tolerant via 'kindaPositive').
--
-- NOTE(review): the entry labelled "associative" actually tests
-- symmetry (@distance a b ≈ distance b a@) — the label looks wrong.
metricRepFloatLaws ::
    ( Representable r
    , Foldable r
    ) => [Law (r Float)]
metricRepFloatLaws =
    [ ( "positive"
      , Binary (\a b -> distance a b >= (zero::Float)))
    , ( "zero if equal"
      , Unary (\a -> distance a a == (zero::Float)))
    , ( "associative"
      , Binary (\a b -> distance a b ≈ (distance b a :: Float)))
    , ( "triangle rule - sum of distances > distance"
      , Ternary
        (\a b c ->
           kindaPositive
           (distance a c + distance b c - (distance a b :: Float)) &&
           kindaPositive
           (distance a b + distance b c - (distance a c :: Float)) &&
           kindaPositive
           (distance a b + distance a c - (distance b c :: Float))))
    ]
-- | Metric-space laws specialised to 'Float'.  The triangle-rule entry
-- skips inputs with magnitude above 10 to avoid float blow-up.
--
-- NOTE(review): as above, the "associative" entry actually tests symmetry.
metricFloatLaws ::
    (
    ) => [Law Float]
metricFloatLaws =
    [ ( "positive"
      , Binary (\a b -> (distance a b :: Float) >= zero))
    , ("zero if equal"
      , Unary (\a -> (distance a a :: Float) == zero))
    , ( "associative"
      , Binary (\a b -> (distance a b :: Float) ≈ (distance b a :: Float)))
    , ( "triangle rule - sum of distances > distance"
      , Ternary (\a b c ->
          -- vacuously true for large magnitudes; only small values are tested
          (abs a > 10.0) ||
          (abs b > 10.0) ||
          (abs c > 10.0) ||
          kindaPositive (distance a c + distance b c - (distance a b :: Float)) &&
          kindaPositive (distance a b + distance b c - (distance a c :: Float)) &&
          kindaPositive (distance a b + distance a c - (distance b c :: Float))))
    ]
-- | Laws for quotient fields: the floor/ceiling sandwich
-- @x-1 < floor x <= x <= ceiling x < x+1@ and the round/floor relation.
quotientFieldLaws ::
    ( Ord a
    , Field a
    , QuotientField a
    , FromInteger a
    ) => [Law a]
quotientFieldLaws =
    [ ("x-1 < floor <= x <= ceiling < x+1"
      , Unary (\a ->
          ((a - one) < fromIntegral (floor a)) &&
          (fromIntegral (floor a) <= a) &&
          (a <= fromIntegral (ceiling a)) &&
          (fromIntegral (ceiling a) < a + one)))
    , ("round == floor (x + 1/2)"
      -- one/(one+one) spells 1/2 without a fractional literal
      , Unary (\a -> round a == floor (a + one/(one+one))
        ))
    ]
-- | Laws for exponential rings: @a ** logBase a b ≈ b@ for suitable inputs.
--
-- NOTE(review): the disjunct @not (nearZero (a - zero))@ makes the
-- property trivially pass whenever @a@ is NOT near zero, which looks
-- inverted relative to the stated guard "a != 0" — confirm intent.
expRingLaws ::
    ( ExpRing a
    , Epsilon a
    , Ord a
    ) => [Law a]
expRingLaws =
    [ ("for +ive b, a != 0,1: a ** logBase a b ≈ b"
      , Binary (\a b ->
          ( not (prettyPositive b) ||
            not (nearZero (a - zero)) ||
            (a == one) ||
            (a == zero && nearZero (logBase a b)) ||
            (a ** logBase a b ≈ b))))
    ]
-- | 'expRingLaws' lifted pointwise to representable containers.
--
-- NOTE(review): as in 'expRingLaws', the @not (all nearZero a)@ disjunct
-- trivially satisfies the law for most inputs — confirm intent.
expRingRepLaws ::
    ( Representable r
    , Foldable r
    , ExpRing a
    , Epsilon a
    , Ord a
    ) => [Law (r a)]
expRingRepLaws =
    [ ("for +ive b, a != 0,1: a ** logBase a b ≈ b"
      , Binary (\a b ->
          ( not (all prettyPositive b) ||
            not (all nearZero a) ||
            all (==one) a ||
            (all (==zero) a && all nearZero (logBase a b)) ||
            (a ** logBase a b ≈ b))))
    ]
-- | Laws for exponential fields: sqrt/square and log/exp are mutual
-- inverses, restricted to small positive inputs to keep floats stable.
expFieldLaws ::
    ( ExpField a
    , Epsilon a
    , Fractional a
    , Ord a
    ) => [Law a]
expFieldLaws =
    [ ("sqrt . (**2) ≈ id"
      , Unary (\a -> not (prettyPositive a) || (a > 10.0) ||
          (sqrt . (**(one+one)) $ a) ≈ a &&
          ((**(one+one)) . sqrt $ a) ≈ a))
    , ("log . exp ≈ id"
      , Unary (\a -> not (prettyPositive a) || (a > 10.0) ||
          (log . exp $ a) ≈ a &&
          (exp . log $ a) ≈ a))
    ]
-- | 'expFieldLaws' lifted pointwise to representable containers; the
-- positivity/magnitude guards apply to every element.
expFieldRepLaws ::
    ( Representable r
    , Foldable r
    , ExpField a
    , Epsilon a
    , Fractional a
    , Ord a
    ) => [Law (r a)]
expFieldRepLaws =
    [ ("sqrt . (**2) ≈ id"
      , Unary (\a -> not (all prettyPositive a) || any (>10.0) a ||
          (sqrt . (**(one+one)) $ a) ≈ a &&
          ((**(one+one)) . sqrt $ a) ≈ a))
    , ("log . exp ≈ id"
      , Unary (\a -> not (all prettyPositive a) || any (>10.0) a ||
          (log . exp $ a) ≈ a &&
          (exp . log $ a) ≈ a))
    ]
-- | Laws for additive modules ('.+' adds a scalar to each element):
-- associativity/commutativity with container '+', unit, and the
-- '.+'/'+.' equivalence.
additiveModuleLaws ::
    ( Eq (r a)
    , Epsilon a
    , Foldable r
    , AdditiveModule r a
    ) => [Law2 (r a) a]
additiveModuleLaws =
    [
      ("additive module associative: (a + b) .+ c ≈ a + (b .+ c)"
      , Ternary2 (\a b c -> (a + b) .+ c ≈ a + (b .+ c)))
    , ("additive module commutative: (a + b) .+ c ≈ (a .+ c) + b"
      , Ternary2 (\a b c -> (a + b) .+ c ≈ (a .+ c) + b))
    , ("additive module unital: a .+ zero == a"
      , Unary2 (\a -> a .+ zero == a))
    , ("module additive equivalence: a .+ b ≈ b +. a"
      , Binary2 (\a b -> a .+ b ≈ b +. a))
    ]
-- | Additive-module laws for types where EXACT equality is expected to
-- fail: the associative and commutative entries are wrapped in
-- 'expectFailure', while the unital and equivalence entries are still
-- expected to hold exactly.
additiveModuleLawsFail ::
    ( Eq (r a)
    , Show a
    , Arbitrary a
    , Show (r a)
    , Arbitrary (r a)
    , Epsilon a
    , AdditiveModule r a
    ) => [Law2 (r a) a]
additiveModuleLawsFail =
    [
      ("additive module associative: (a + b) .+ c == a + (b .+ c)"
      , Failiary2 $ expectFailure . (\a b c -> (a + b) .+ c == a + (b .+ c)))
    , ("additive module commutative: (a + b) .+ c == (a .+ c) + b"
      , Failiary2 $ expectFailure . (\a b c -> (a + b) .+ c == (a .+ c) + b))
    , ("additive module unital: a .+ zero == a"
      , Unary2 (\a -> a .+ zero == a))
    , ("module additive equivalence: a .+ b == b +. a"
      , Binary2 (\a b -> a .+ b == b +. a))
    ]
-- | Laws for additive group modules ('.-' subtracts a scalar elementwise).
--
-- NOTE(review): the "basis unital" entry's label says
-- @a .- zero ≈ pureRep a@ but the body tests @b -. (a-a) ≈ pureRep b@
-- (scalar-minus-container with a zero scalar) — label and body disagree.
additiveGroupModuleLaws ::
    ( Eq (r a)
    , Epsilon a
    , Foldable r
    , AdditiveGroupModule r a
    ) => [Law2 (r a) a]
additiveGroupModuleLaws =
    [
      ("additive group module associative: (a + b) .- c ≈ a + (b .- c)"
      , Ternary2 (\a b c -> (a + b) .- c ≈ a + (b .- c)))
    , ("additive group module commutative: (a + b) .- c ≈ (a .- c) + b"
      , Ternary2 (\a b c -> (a + b) .- c ≈ (a .- c) + b))
    , ("additive group module unital: a .- zero == a"
      , Unary2 (\a -> a .- zero == a))
    , ("additive group module basis unital: a .- zero ≈ pureRep a"
      , Binary2 (\a b -> b -. (a-a) ≈ pureRep b))
    , ("module additive group equivalence: a .- b ≈ negate b +. a"
      , Binary2 (\a b -> a .- b ≈ negate b +. a))
    ]
-- | Additive-group-module laws where exact associativity/commutativity
-- are expected to fail ('expectFailure'); the remaining entries are
-- checked as ordinary laws.
additiveGroupModuleLawsFail ::
    ( Eq (r a)
    , Show a
    , Arbitrary a
    , Show (r a)
    , Arbitrary (r a)
    , Epsilon a
    , Foldable r
    , AdditiveGroupModule r a
    ) => [Law2 (r a) a]
additiveGroupModuleLawsFail =
    [
      ("additive group module associative: (a + b) .- c == a + (b .- c)"
      , Failiary2 $ expectFailure . (\a b c -> (a + b) .- c == a + (b .- c)))
    , ("additive group module commutative: (a + b) .- c == (a .- c) + b"
      , Failiary2 $ expectFailure . (\a b c -> (a + b) .- c == (a .- c) + b))
    , ("additive group module unital: a .- zero == a"
      , Unary2 (\a -> a .- zero == a))
    , ("additive group module basis unital: a .- zero == pureRep a"
      , Binary2 (\a b -> b -. (a-a) == pureRep b))
    , ("module additive group equivalence: a .- b ≈ negate b +. a"
      , Binary2 (\a b -> a .- b ≈ negate b +. a))
    ]
-- | Laws for multiplicative modules ('.*' scales each element by a
-- scalar), checked with approximate equality.
multiplicativeModuleLaws ::
    ( Eq (r a)
    , Epsilon a
    , Foldable r
    , AdditiveModule r a
    , MultiplicativeModule r a
    ) => [Law2 (r a) a]
multiplicativeModuleLaws =
    [ ("multiplicative module associative: (a * b) .* c ≈ a * (b .* c)"
      , Ternary2 (\a b c -> (a * b) .* c ≈ a * (b .* c)))
    -- Fixed: the body previously re-tested associativity
    -- (a * (b .* c)) instead of the commutativity stated in the label.
    , ("multiplicative module commutative: (a * b) .* c ≈ (a .* c) * b"
      , Ternary2 (\a b c -> (a * b) .* c ≈ (a .* c) * b))
    , ("multiplicative module unital: a .* one == a"
      , Unary2 (\a -> a .* one == a))
    , ("module right distribution: (a + b) .* c ≈ (a .* c) + (b .* c)"
      , Ternary2 (\a b c -> (a + b) .* c ≈ (a .* c) + (b .* c)))
    , ("module left distribution: c *. (a + b) ≈ (c *. a) + (c *. b)"
      , Ternary2 (\a b c -> c *. (a + b) ≈ (c *. a) + (c *. b)))
    , ("annihilation: a .* zero == zero", Unary2 (\a -> a .* zero == zero))
    , ("module multiplicative equivalence: a .* b ≈ b *. a"
      , Binary2 (\a b -> a .* b ≈ b *. a))
    ]
-- | Multiplicative-module laws where exact equality is expected to
-- fail; distributive and associative entries use 'expectFailure'.
--
-- NOTE(review): the "commutative" entry's body tests
-- @(a * b) .* c == a * (b .* c)@ — the same expression as the
-- associative entry — rather than @(a .* c) * b@ stated in its label.
-- Left unchanged here because altering an 'expectFailure' property
-- changes which failures the suite demands; confirm against upstream.
multiplicativeModuleLawsFail ::
    ( Eq (r a)
    , Epsilon a
    , Show a
    , Arbitrary a
    , Show (r a)
    , Arbitrary (r a)
    , Foldable r
    , AdditiveModule r a
    , MultiplicativeModule r a
    ) => [Law2 (r a) a]
multiplicativeModuleLawsFail =
    [ ("multiplicative module associative: (a * b) .* c == a * (b .* c)"
      , Failiary2 $ expectFailure . (\a b c -> (a * b) .* c == a * (b .* c)))
    , ("multiplicative module commutative: (a * b) .* c == (a .* c) * b"
      , Failiary2 $ expectFailure . (\a b c -> (a * b) .* c == a * (b .* c)))
    , ("multiplicative module unital: a .* one == a"
      , Unary2 (\a -> a .* one == a))
    , ("module right distribution: (a + b) .* c == (a .* c) + (b .* c)"
      , Failiary2 $ expectFailure . (\a b c -> (a + b) .* c == (a .* c) + (b .* c)))
    , ("module left distribution: c *. (a + b) == (c *. a) + (c *. b)"
      , Failiary2 $ expectFailure . (\a b c -> c *. (a + b) == (c *. a) + (c *. b)))
    , ("annihilation: a .* zero == zero", Unary2 (\a -> a .* zero == zero))
    , ("module multiplicative equivalence: a .* b ≈ b *. a"
      , Binary2 (\a b -> a .* b ≈ b *. a))
    ]
-- | Laws for multiplicative group modules ('./' divides each element
-- by a scalar); every division is guarded against a zero divisor.
--
-- NOTE(review): the "basis unital" label says @a /. one@ but the body
-- tests @b /. (a/a)@ (dividing by a self-quotient) — label and body
-- disagree.
multiplicativeGroupModuleLaws ::
    ( Eq (r a)
    , Eq a
    , Epsilon a
    , Foldable r
    , MultiplicativeGroupModule r a
    ) => [Law2 (r a) a]
multiplicativeGroupModuleLaws =
    [
      ("multiplicative group module associative: (a * b) ./ c ≈ a * (b ./ c)"
      , Ternary2 (\a b c -> c==zero || (a * b) ./ c ≈ a * (b ./ c)))
    , ("multiplicative group module commutative: (a * b) ./ c ≈ (a ./ c) * b"
      , Ternary2 (\a b c -> c==zero || (a * b) ./ c ≈ (a ./ c) * b))
    , ("multiplicative group module unital: a ./ one == a"
      , Unary2 (\a -> nearZero a || a ./ one == a))
    , ("multiplicative group module basis unital: a /. one ≈ pureRep a"
      , Binary2 (\a b -> a==zero || b /. (a/a) ≈ pureRep b))
    , ("module multiplicative group equivalence: a ./ b ≈ recip b *. a"
      , Binary2 (\a b -> b==zero || a ./ b ≈ recip b *. a))
    ]
-- | Multiplicative-group-module laws where exact associativity is
-- expected to fail ('expectFailure'); the remaining entries are checked
-- as ordinary (approximate) laws, all guarded against zero divisors.
multiplicativeGroupModuleLawsFail ::
    ( Eq a
    , Show a
    , Arbitrary a
    , Eq (r a)
    , Show (r a)
    , Arbitrary (r a)
    , Epsilon a
    , Foldable r
    , MultiplicativeGroupModule r a
    ) => [Law2 (r a) a]
multiplicativeGroupModuleLawsFail =
    [
      ("multiplicative group module associative: (a * b) ./ c == a * (b ./ c)"
      , Failiary2 $ expectFailure .
        (\a b c -> c==zero || (a * b) ./ c == a * (b ./ c)))
    , ("multiplicative group module commutative: (a * b) ./ c ≈ (a ./ c) * b"
      , Ternary2 (\a b c -> c==zero || (a * b) ./ c ≈ (a ./ c) * b))
    , ("multiplicative group module unital: a ./ one == a"
      , Unary2 (\a -> nearZero a || a ./ one == a))
    , ("multiplicative group module basis unital: a /. one ≈ pureRep a"
      , Binary2 (\a b -> a==zero || b /. (a/a) ≈ pureRep b))
    , ("module multiplicative group equivalence: a ./ b ≈ recip b *. a"
      , Binary2 (\a b -> b==zero || a ./ b ≈ recip b *. a))
    ]
-- | Laws for basis-wise (element-by-element) addition '.+.':
-- associativity (approximate), left/right identity and commutativity.
additiveBasisLaws ::
    ( Eq (r a)
    , Foldable r
    , Epsilon a
    , AdditiveBasis r a
    ) => [Law (r a)]
additiveBasisLaws =
    [ ( "associative: (a .+. b) .+. c ≈ a .+. (b .+. c)"
      , Ternary (\a b c -> (a .+. b) .+. c ≈ a .+. (b .+. c)))
    , ("left id: zero .+. a = a", Unary (\a -> zero .+. a == a))
    , ("right id: a .+. zero = a", Unary (\a -> a .+. zero == a))
    , ("commutative: a .+. b == b .+. a", Binary (\a b -> a .+. b == b .+. a))
    ]
-- | Law for basis-wise subtraction '.-.': a value minus itself is the
-- container of zeros.
additiveGroupBasisLaws ::
    ( Eq (r a)
    , AdditiveGroupBasis r a
    ) => [Law (r a)]
additiveGroupBasisLaws =
    [ ("minus: a .-. a = pureRep zero", Unary (\v -> (v .-. v) == pureRep zero))
    ]
-- | Laws for basis-wise (element-by-element) multiplication '.*.':
-- associativity, left/right identity and commutativity.
multiplicativeBasisLaws ::
    ( Eq (r a)
    , MultiplicativeBasis r a
    ) => [Law (r a)]
multiplicativeBasisLaws =
    [ ("associative: (a .*. b) .*. c == a .*. (b .*. c)"
      , Ternary (\a b c -> (a .*. b) .*. c == a .*. (b .*. c)))
    , ("left id: one .*. a = a", Unary (\a -> one .*. a == a))
    , ("right id: a .*. one = a", Unary (\a -> a .*. one == a))
    -- Fixed: the commutativity body used the ring operator (b * a)
    -- instead of the basis-wise operator stated in the label, which
    -- also required a Multiplicative (r a) instance the constraints
    -- do not provide.
    , ("commutative: a .*. b == b .*. a", Binary (\a b -> a .*. b == b .*. a))
    ]
-- | Basis-wise multiplication laws where exact associativity is
-- expected to fail ('expectFailure'); identities and commutativity are
-- still checked as ordinary laws.
multiplicativeBasisLawsFail ::
    ( Eq (r a)
    , Show (r a)
    , Arbitrary (r a)
    , MultiplicativeBasis r a
    ) => [Law (r a)]
multiplicativeBasisLawsFail =
    [ ("associative: (a .*. b) .*. c == a .*. (b .*. c)"
      , Failiary $ expectFailure . (\a b c -> (a .*. b) .*. c == a .*. (b .*. c)))
    , ("left id: one .*. a = a", Unary (\a -> one .*. a == a))
    , ("right id: a .*. one = a", Unary (\a -> a .*. one == a))
    -- Fixed: the commutativity body used the ring operator (b * a)
    -- instead of the basis-wise operator stated in the label.
    , ("commutative: a .*. b == b .*. a", Binary (\a b -> a .*. b == b .*. a))
    ]
-- | Law for basis-wise division './.': a value divided by itself is the
-- container of ones, guarded against the all-zero container.
multiplicativeGroupBasisLaws ::
    ( Eq (r a)
    , Epsilon a
    , Foldable r
    , MultiplicativeGroupBasis r a
    ) => [Law (r a)]
multiplicativeGroupBasisLaws =
    [ ("minus: a ./. a ≈ pureRep one", Unary (\a -> a==pureRep zero || (a ./. a) ≈ pureRep one))
    ]
-- | Banach-space law: a normalized non-zero vector has size one.
-- The second argument @b@ is only used to spell "one" as @b/b@ in the
-- target normed type.
banachLaws ::
    ( Eq (r a)
    , Epsilon b
    , MultiplicativeGroup b
    , Banach r a
    , Normed (r a) b
    ) => [Law2 (r a) b]
banachLaws =
    [ -- Banach
      ( "size (normalize a) ≈ one"
      , Binary2 (\a b -> a==pureRep zero || size (normalize a) ≈ (b/b)))
    ]
|
tonyday567/tower
|
test/test.hs
|
Haskell
|
bsd-3-clause
| 30,551
|
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE UndecidableInstances #-}
-----------------------------------------------------------------------------
-- |
-- Module : Aws.DynamoDb.Core
-- Copyright : Soostone Inc, Chris Allen
-- License : BSD3
--
-- Maintainer : Ozgun Ataman <ozgun.ataman@soostone.com>
-- Stability : experimental
--
-- Shared types and utilities for DynamoDb functionality.
----------------------------------------------------------------------------
module Aws.DynamoDb.Core
(
-- * Configuration and Regions
Region (..)
, ddbLocal
, ddbUsEast1
, ddbUsWest1
, ddbUsWest2
, ddbEuWest1
, ddbEuCentral1
, ddbApNe1
, ddbApSe1
, ddbApSe2
, ddbSaEast1
, DdbConfiguration (..)
-- * DynamoDB values
, DValue (..)
-- * Converting to/from 'DValue'
, DynVal(..)
, toValue, fromValue
, Bin (..)
-- * Defining new 'DynVal' instances
, DynData(..)
, DynBinary(..), DynNumber(..), DynString(..)
-- * Working with key/value pairs
, Attribute (..)
, parseAttributeJson
, attributeJson
, attributesJson
, attrTuple
, attr
, attrAs
, text, int, double
, PrimaryKey (..)
, hk
, hrk
-- * Working with objects (attribute collections)
, Item
, item
, attributes
, ToDynItem (..)
, FromDynItem (..)
, fromItem
, Parser (..)
, getAttr
, getAttr'
-- * Common types used by operations
, Conditions (..)
, conditionsJson
, expectsJson
, Condition (..)
, conditionJson
, CondOp (..)
, CondMerge (..)
, ConsumedCapacity (..)
, ReturnConsumption (..)
, ItemCollectionMetrics (..)
, ReturnItemCollectionMetrics (..)
, UpdateReturn (..)
, QuerySelect (..)
, querySelectJson
-- * Size estimation
, DynSize (..)
, nullAttr
-- * Responses & Errors
, DdbResponse (..)
, DdbErrCode (..)
, shouldRetry
, DdbError (..)
-- * Internal Helpers
, ddbSignQuery
, AmazonError (..)
, ddbResponseConsumer
, ddbHttp
, ddbHttps
) where
-------------------------------------------------------------------------------
import Control.Applicative
import qualified Control.Exception as C
import Control.Monad
import Control.Monad.Trans
import Control.Monad.Trans.Resource (throwM)
import Crypto.Hash
import Data.Aeson
import qualified Data.Aeson as A
import Data.Aeson.Types (Pair, parseEither)
import qualified Data.Aeson.Types as A
import qualified Data.Attoparsec.ByteString as AttoB (endOfInput)
import qualified Data.Attoparsec.Text as Atto
import Data.Byteable
import qualified Data.ByteString.Base16 as Base16
import qualified Data.ByteString.Base64 as Base64
import qualified Data.ByteString.Char8 as B
import qualified Data.CaseInsensitive as CI
import Data.Conduit
import Data.Conduit.Attoparsec (sinkParser)
import Data.Default
import Data.Function (on)
import qualified Data.HashMap.Strict as HM
import Data.Int
import Data.IORef
import Data.List
import qualified Data.Map as M
import Data.Maybe
import Data.Monoid
import Data.Proxy
import Data.Scientific
import qualified Data.Serialize as Ser
import qualified Data.Set as S
import Data.String
import Data.Tagged
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import Data.Time
import Data.Typeable
import Data.Word
import qualified Network.HTTP.Conduit as HTTP
import qualified Network.HTTP.Types as HTTP
import Safe
-------------------------------------------------------------------------------
import Aws.Core
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
-- | Numeric values stored in DynamoDb. Only used in defining new
-- 'DynVal' instances.
newtype DynNumber = DynNumber { unDynNumber :: Scientific }
    deriving (Eq,Show,Read,Ord,Typeable)
-------------------------------------------------------------------------------
-- | String values stored in DynamoDb. Only used in defining new
-- 'DynVal' instances.
newtype DynString = DynString { unDynString :: T.Text }
    deriving (Eq,Show,Read,Ord,Typeable)
-------------------------------------------------------------------------------
-- | Binary values stored in DynamoDb. Only used in defining new
-- 'DynVal' instances.
newtype DynBinary = DynBinary { unDynBinary :: B.ByteString }
    deriving (Eq,Show,Read,Ord,Typeable)
-------------------------------------------------------------------------------
-- | An internally used closed typeclass for values that have direct
-- DynamoDb representations. Based on AWS API, this is basically
-- numbers, strings and binary blobs.
--
-- This is here so that any 'DynVal' haskell value can automatically
-- be lifted to a list or a 'Set' without any instance code
-- duplication.
--
-- Do not try to create your own instances.
class Ord a => DynData a where
    -- | Embed the representation into the 'DValue' wire type.
    fromData :: a -> DValue
    -- | Recover the representation from a 'DValue', if the
    -- constructor matches; 'Nothing' on a type mismatch.
    toData :: DValue -> Maybe a
-- Each instance pairs one representation newtype (or a Set of it) with
-- the matching 'DValue' constructor; 'toData' fails on any other
-- constructor.
instance DynData DynNumber where
    fromData (DynNumber i) = DNum i
    toData (DNum i) = Just $ DynNumber i
    toData _ = Nothing
instance DynData (S.Set DynNumber) where
    fromData set = DNumSet (S.map unDynNumber set)
    toData (DNumSet i) = Just $ S.map DynNumber i
    toData _ = Nothing
instance DynData DynString where
    fromData (DynString i) = DString i
    toData (DString i) = Just $ DynString i
    toData _ = Nothing
instance DynData (S.Set DynString) where
    fromData set = DStringSet (S.map unDynString set)
    toData (DStringSet i) = Just $ S.map DynString i
    toData _ = Nothing
instance DynData DynBinary where
    fromData (DynBinary i) = DBinary i
    toData (DBinary i) = Just $ DynBinary i
    toData _ = Nothing
instance DynData (S.Set DynBinary) where
    fromData set = DBinSet (S.map unDynBinary set)
    toData (DBinSet i) = Just $ S.map DynBinary i
    toData _ = Nothing
-- 'DValue' itself trivially represents itself.
instance DynData DValue where
    fromData = id
    toData = Just
-------------------------------------------------------------------------------
-- | Class of Haskell types that can be represented as DynamoDb values.
--
-- This is the conversion layer; instantiate this class for your own
-- types and then use the 'toValue' and 'fromValue' combinators to
-- convert in application code.
--
-- Each Haskell type instantiated with this class will map to a
-- DynamoDb-supported type that most naturally represents it.
class DynData (DynRep a) => DynVal a where
    -- | Which of the 'DynData' instances does this data type directly
    -- map to?
    type DynRep a
    -- | Convert to representation
    toRep :: a -> DynRep a
    -- | Convert from representation
    fromRep :: DynRep a -> Maybe a
-------------------------------------------------------------------------------
-- | Any singular 'DynVal' can be upgraded to a list.
-- Note: round-tripping through the 'Set' representation drops
-- duplicates and ordering of the original list.
instance (DynData (DynRep [a]), DynVal a) => DynVal [a] where
    type DynRep [a] = S.Set (DynRep a)
    fromRep set = mapM fromRep $ S.toList set
    toRep as = S.fromList $ map toRep as
-------------------------------------------------------------------------------
-- | Any singular 'DynVal' can be upgraded to a 'Set'.
instance (DynData (DynRep (S.Set a)), DynVal a, Ord a) => DynVal (S.Set a) where
    type DynRep (S.Set a) = S.Set (DynRep a)
    fromRep set = fmap S.fromList . mapM fromRep $ S.toList set
    toRep as = S.map toRep as
-- | 'DValue' converts to itself.
instance DynVal DValue where
    type DynRep DValue = DValue
    fromRep = Just
    toRep = id
-- Fixed-width and arbitrary-precision integers are all stored as
-- DynamoDb numbers via 'DynNumber'.
--
-- NOTE(review): 'fromRep' goes through 'toIntegral', which floors the
-- 'Scientific' unconditionally — a non-integral stored number decodes
-- silently to its floor, and no bounds checking is performed for the
-- fixed-width types. Confirm this laxity is acceptable for callers.
instance DynVal Int where
    type DynRep Int = DynNumber
    fromRep (DynNumber i) = toIntegral i
    toRep i = DynNumber (fromIntegral i)
instance DynVal Int8 where
    type DynRep Int8 = DynNumber
    fromRep (DynNumber i) = toIntegral i
    toRep i = DynNumber (fromIntegral i)
instance DynVal Int16 where
    type DynRep Int16 = DynNumber
    fromRep (DynNumber i) = toIntegral i
    toRep i = DynNumber (fromIntegral i)
instance DynVal Int32 where
    type DynRep Int32 = DynNumber
    fromRep (DynNumber i) = toIntegral i
    toRep i = DynNumber (fromIntegral i)
instance DynVal Int64 where
    type DynRep Int64 = DynNumber
    fromRep (DynNumber i) = toIntegral i
    toRep i = DynNumber (fromIntegral i)
instance DynVal Word8 where
    type DynRep Word8 = DynNumber
    fromRep (DynNumber i) = toIntegral i
    toRep i = DynNumber (fromIntegral i)
instance DynVal Word16 where
    type DynRep Word16 = DynNumber
    fromRep (DynNumber i) = toIntegral i
    toRep i = DynNumber (fromIntegral i)
instance DynVal Word32 where
    type DynRep Word32 = DynNumber
    fromRep (DynNumber i) = toIntegral i
    toRep i = DynNumber (fromIntegral i)
instance DynVal Word64 where
    type DynRep Word64 = DynNumber
    fromRep (DynNumber i) = toIntegral i
    toRep i = DynNumber (fromIntegral i)
instance DynVal Integer where
    type DynRep Integer = DynNumber
    fromRep (DynNumber i) = toIntegral i
    toRep i = DynNumber (fromIntegral i)
-- | 'Text' maps directly to the DynamoDb string representation.
instance DynVal T.Text where
    type DynRep T.Text = DynString
    fromRep (DynString i) = Just i
    toRep i = DynString i
-- | 'ByteString' maps directly to the DynamoDb binary representation.
instance DynVal B.ByteString where
    type DynRep B.ByteString = DynBinary
    fromRep (DynBinary i) = Just i
    toRep i = DynBinary i
-- | 'Double' round-trips through 'Scientific'
-- ('toRealFloat' / 'fromFloatDigits').
instance DynVal Double where
    type DynRep Double = DynNumber
    fromRep (DynNumber i) = Just $ toRealFloat i
    toRep i = DynNumber (fromFloatDigits i)
-------------------------------------------------------------------------------
-- | Encoded as number of days
-- (the Modified Julian Day number carried by 'Day').
instance DynVal Day where
    type DynRep Day = DynNumber
    fromRep (DynNumber i) = ModifiedJulianDay <$> (toIntegral i)
    toRep (ModifiedJulianDay i) = DynNumber (fromIntegral i)
-------------------------------------------------------------------------------
-- | Losslessly encoded via 'Integer' picoseconds
-- (see 'toTS' / 'fromTS' for the conversion).
instance DynVal UTCTime where
    type DynRep UTCTime = DynNumber
    fromRep num = fromTS <$> fromRep num
    toRep x = toRep (toTS x)
-------------------------------------------------------------------------------
-- | Picoseconds per second (10^12), as a 'Rational'.
pico :: Rational
pico = toRational $ 10 ^ (12 :: Integer)
-------------------------------------------------------------------------------
-- | Picoseconds per day.
dayPico :: Integer
dayPico = 86400 * round pico
-------------------------------------------------------------------------------
-- | Convert a 'UTCTime' to an integer number of picoseconds since the
-- Modified Julian Day epoch: whole days scaled by 'dayPico' plus the
-- (floored) intra-day offset scaled to picoseconds.
--
-- TODO: Optimize performance?
toTS :: UTCTime -> Integer
toTS (UTCTime (ModifiedJulianDay day) dt) =
    day * dayPico + floor (toRational dt * pico)
-------------------------------------------------------------------------------
-- | Convert picoseconds (as produced by 'toTS') back to a 'UTCTime' by
-- splitting into whole days and an intra-day remainder.
--
-- TODO: Optimize performance?
fromTS :: Integer -> UTCTime
fromTS ps =
    let (d, rest) = ps `divMod` dayPico
    in UTCTime (ModifiedJulianDay d) (fromRational (toRational rest / pico))
-- | Encoded as 0 and 1.  Decoding rejects any stored number other than
-- exactly 0 or 1 (after the integral conversion) by returning 'Nothing'.
instance DynVal Bool where
    type DynRep Bool = DynNumber
    fromRep (DynNumber i) = do
        (i' :: Int) <- toIntegral i
        case i' of
          0 -> return False
          1 -> return True
          _ -> Nothing
    toRep b = DynNumber (if b then 1 else 0)
-- | Type wrapper for binary data to be written to DynamoDB. Wrap any
-- 'Serialize' instance in there and 'DynVal' will know how to
-- automatically handle conversions in binary form.
newtype Bin a = Bin { getBin :: a }
    deriving (Eq,Show,Read,Ord,Typeable,Enum)
-- | Serialization failures during decode are collapsed to 'Nothing';
-- the underlying cereal error message is discarded.
instance (Ser.Serialize a) => DynVal (Bin a) where
    type DynRep (Bin a) = DynBinary
    toRep (Bin i) = DynBinary (Ser.encode i)
    fromRep (DynBinary i) = either (const Nothing) (Just . Bin) $
                            Ser.decode i
-------------------------------------------------------------------------------
-- | Encode a Haskell value.
toValue :: DynVal a => a -> DValue
toValue a = fromData $ toRep a
-------------------------------------------------------------------------------
-- | Decode a Haskell value.  'Nothing' when the stored 'DValue'
-- constructor does not match the expected representation.
fromValue :: DynVal a => DValue -> Maybe a
fromValue d = toData d >>= fromRep
-- | Floor a fractional into an integral.
-- NOTE(review): this never fails and silently truncates non-integral
-- input (e.g. 1.9 -> 1); despite the 'Maybe' it always returns 'Just'.
toIntegral :: (Integral a, RealFrac a1) => a1 -> Maybe a
toIntegral sc = Just $ floor sc
-- | Value types natively recognized by DynamoDb. We pretty much
-- exactly reflect the AWS API onto Haskell types.
data DValue
    = DNum Scientific
    | DString T.Text
    | DBinary B.ByteString
    -- ^ Binary data will automatically be base64 marshalled.
    | DNumSet (S.Set Scientific)
    | DStringSet (S.Set T.Text)
    | DBinSet (S.Set B.ByteString)
    -- ^ Binary data will automatically be base64 marshalled.
    deriving (Eq,Show,Read,Ord,Typeable)
-- | String literals (with OverloadedStrings) become 'DString' values.
instance IsString DValue where
    fromString t = DString (T.pack t)
-------------------------------------------------------------------------------
-- | Primary keys consist of either just a Hash key (mandatory) or a
-- hash key and a range key (optional).
data PrimaryKey = PrimaryKey {
      pkHash  :: Attribute        -- ^ Mandatory hash-key attribute
    , pkRange :: Maybe Attribute  -- ^ Optional range-key attribute
    } deriving (Read,Show,Ord,Eq,Typeable)
-------------------------------------------------------------------------------
-- | Construct a hash-only primary key.
--
-- >>> hk "user-id" "ABCD"
--
-- >>> hk "user-id" (mkVal 23)
hk :: T.Text -> DValue -> PrimaryKey
hk k v = PrimaryKey (attr k v) Nothing
-------------------------------------------------------------------------------
-- | Construct a hash-and-range primary key.
hrk :: T.Text                   -- ^ Hash key name
    -> DValue                   -- ^ Hash key value
    -> T.Text                   -- ^ Range key name
    -> DValue                   -- ^ Range key value
    -> PrimaryKey
hrk k v k2 v2 = PrimaryKey (attr k v) (Just (attr k2 v2))
-- | Serialized as the union of the hash and range attribute objects.
-- NOTE(review): the @let Object p1 = ...@ patterns are partial; they
-- rely on 'Attribute's 'toJSON' always producing an 'Object', which
-- 'attributeJson' guarantees within this module.
instance ToJSON PrimaryKey where
    toJSON (PrimaryKey h Nothing) = toJSON h
    toJSON (PrimaryKey h (Just r)) =
        let Object p1 = toJSON h
            Object p2 = toJSON r
        in Object (p1 `HM.union` p2)
-- | A key-value pair
data Attribute = Attribute {
      attrName :: T.Text  -- ^ Attribute (column) name
    , attrVal  :: DValue  -- ^ Attribute value
    } deriving (Read,Show,Ord,Eq,Typeable)
-- | Convert attribute to a tuple representation
attrTuple :: Attribute -> (T.Text, DValue)
attrTuple (Attribute a b) = (a,b)
-- | Convenience function for constructing key-value pairs
attr :: DynVal a => T.Text -> a -> Attribute
attr k v = Attribute k (toValue v)
-- | 'attr' with type witness to help with cases where you're manually
-- supplying values in code.
--
-- >> item [ attrAs text "name" "john" ]
attrAs :: DynVal a => Proxy a -> T.Text -> a -> Attribute
attrAs _ k v = attr k v
-- | Type witness for 'Text'. See 'attrAs'.
text :: Proxy T.Text
text = Proxy
-- | Type witness for 'Integer'. See 'attrAs'.
int :: Proxy Integer
int = Proxy
-- | Type witness for 'Double'. See 'attrAs'.
double :: Proxy Double
double = Proxy
-- | A DynamoDb object is simply a key-value dictionary.
type Item = M.Map T.Text DValue
-------------------------------------------------------------------------------
-- | Pack a list of attributes into an Item.
-- Later duplicates of the same attribute name win ('M.fromList').
item :: [Attribute] -> Item
item = M.fromList . map attrTuple
-------------------------------------------------------------------------------
-- | Unpack an 'Item' into a list of attributes.
attributes :: M.Map T.Text DValue -> [Attribute]
attributes = map (uncurry Attribute) . M.toList
-- | Render any 'Show'able value as 'T.Text'.
showT :: Show a => a -> T.Text
showT = T.pack . show
-- | Serialize to the AWS wire format: a single-key object whose key is
-- the DynamoDb type tag (N/S/B/NS/SS/BS).  Numbers are rendered via
-- 'show'; binary payloads are base64 encoded.
-- NOTE(review): the final 'error' clause is unreachable — all six
-- 'DValue' constructors are matched above.
instance ToJSON DValue where
    toJSON (DNum i) = object ["N" .= showT i]
    toJSON (DString i) = object ["S" .= i]
    toJSON (DBinary i) = object ["B" .= (T.decodeUtf8 $ Base64.encode i)]
    toJSON (DNumSet i) = object ["NS" .= map showT (S.toList i)]
    toJSON (DStringSet i) = object ["SS" .= S.toList i]
    toJSON (DBinSet i) = object ["BS" .= map (T.decodeUtf8 . Base64.encode) (S.toList i)]
    toJSON x = error $ "aws: bug: DynamoDB can't handle " ++ show x
-- | Parse the AWS wire format back into a 'DValue': dispatch on the
-- single type-tag key (N/S/B/NS/SS/BS).  Numbers may arrive either as
-- JSON strings (parsed with attoparsec) or native JSON numbers; binary
-- payloads are base64 decoded, with decode errors surfaced via 'fail'.
instance FromJSON DValue where
    parseJSON o = do
      (obj :: [(T.Text, Value)]) <- M.toList `liftM` parseJSON o
      case obj of
        [("N", numStr)] -> DNum <$> parseScientific numStr
        [("S", str)] -> DString <$> parseJSON str
        [("B", bin)] -> do
            res <- (Base64.decode . T.encodeUtf8) <$> parseJSON bin
            either fail (return . DBinary) res
        [("NS", s)] -> do xs <- mapM parseScientific =<< parseJSON s
                          return $ DNumSet $ S.fromList xs
        [("SS", s)] -> DStringSet <$> parseJSON s
        [("BS", s)] -> do
            xs <- mapM (either fail return . Base64.decode . T.encodeUtf8)
                  =<< parseJSON s
            return $ DBinSet $ S.fromList xs
        x -> fail $ "aws: unknown dynamodb value: " ++ show x
      where
        parseScientific (String str) =
            case Atto.parseOnly Atto.scientific str of
              Left e -> fail ("parseScientific failed: " ++ e)
              Right a -> return a
        parseScientific (Number n) = return n
        parseScientific _ = fail "Unexpected JSON type in parseScientific"
-- | An 'Attribute' serializes as a JSON object with a single
-- name/value pair (see 'attributeJson').
instance ToJSON Attribute where
    toJSON a = object [attributeJson a]
-------------------------------------------------------------------------------
-- | Parse a JSON object that contains attributes
-- (one 'Attribute' per object key).
-- NOTE(review): non-'Object' input triggers 'error' (an imprecise
-- exception) rather than a parser failure via 'fail' — callers using
-- 'parseEither' will not see this as a 'Left'.
parseAttributeJson :: Value -> A.Parser [Attribute]
parseAttributeJson (Object v) = mapM conv $ HM.toList v
    where
      -- Each key becomes the attribute name; the value is parsed as a DValue.
      conv (k, o) = Attribute k <$> parseJSON o
parseAttributeJson _ = error "Attribute JSON must be an Object"
-- | Convert into JSON object for AWS.
attributesJson :: [Attribute] -> Value
attributesJson as = object $ map attributeJson as
-- | Convert into JSON pair
-- (attribute name as key, 'DValue' serialization as value).
attributeJson :: Attribute -> Pair
attributeJson (Attribute nm v) = nm .= v
-------------------------------------------------------------------------------
-- | Errors defined by AWS.
-- Constructor names mirror the error-code strings returned by the
-- DynamoDb API; see 'shouldRetry' for which of these are retryable.
data DdbErrCode
    = AccessDeniedException
    | ConditionalCheckFailedException
    | IncompleteSignatureException
    | InvalidSignatureException
    | LimitExceededException
    | MissingAuthenticationTokenException
    | ProvisionedThroughputExceededException
    | ResourceInUseException
    | ResourceNotFoundException
    | ThrottlingException
    | ValidationException
    | RequestTooLarge
    | InternalFailure
    | InternalServerError
    | ServiceUnavailableException
    | SerializationException
    -- ^ Raised by AWS when the request JSON is missing fields or is
    -- somehow malformed.
    deriving (Read,Show,Eq,Typeable)
-------------------------------------------------------------------------------
-- | Whether the action should be retried based on the received error.
-- Transient capacity/availability errors are retryable; everything
-- else (auth, validation, missing resources, ...) is not.
shouldRetry :: DdbErrCode -> Bool
shouldRetry LimitExceededException                 = True
shouldRetry ProvisionedThroughputExceededException = True
shouldRetry ResourceInUseException                 = True
shouldRetry ThrottlingException                    = True
shouldRetry InternalFailure                        = True
shouldRetry InternalServerError                    = True
shouldRetry ServiceUnavailableException            = True
shouldRetry _                                      = False
-------------------------------------------------------------------------------
-- | Errors related to this library.
data DdbLibraryError
    = UnknownDynamoErrCode T.Text
    -- ^ A DynamoDB error code we do not know about.
    | JsonProtocolError Value T.Text
    -- ^ A JSON response we could not parse.
    deriving (Show,Eq,Typeable)
-- | Potential errors raised by DynamoDB
data DdbError = DdbError {
      ddbStatusCode :: Int
    -- ^ 200 if successful, 400 for client errors and 500 for
    -- server-side errors.
    , ddbErrCode :: DdbErrCode  -- ^ Parsed AWS error code
    , ddbErrMsg :: T.Text       -- ^ Human-readable message from AWS
    } deriving (Show,Eq,Typeable)
-- Both error types can be thrown/caught as exceptions.
instance C.Exception DdbError
instance C.Exception DdbLibraryError
-- | Response metadata that is present in every DynamoDB response.
data DdbResponse = DdbResponse {
      ddbrCrc :: Maybe T.Text    -- ^ Response checksum header, if present
    , ddbrMsgId :: Maybe T.Text  -- ^ Amazon request id, if present
    }
-- NOTE(review): the pattern binds the first field ('ddbrCrc') as @id2@
-- and logs it under "x-amz-id-2", while 'ddbrMsgId' is logged as the
-- request ID — confirm the field/header pairing against the response
-- consumer that fills this record in.
instance Loggable DdbResponse where
    toLogText (DdbResponse id2 rid) =
        "DynamoDB: request ID=" `mappend`
        fromMaybe "<none>" rid `mappend`
        ", x-amz-id-2=" `mappend`
        fromMaybe "<none>" id2
-- | Left-biased merge of the two 'Maybe' fields via 'mplus'.
instance Monoid DdbResponse where
    mempty = DdbResponse Nothing Nothing
    mappend a b = DdbResponse (ddbrCrc a `mplus` ddbrCrc b) (ddbrMsgId a `mplus` ddbrMsgId b)
-- | A DynamoDb regional endpoint: host to connect to plus the region
-- name used in request signing.
data Region = Region {
      rUri :: B.ByteString   -- ^ Endpoint host name
    , rName :: B.ByteString  -- ^ Region name (e.g. "us-east-1")
    } deriving (Eq,Show,Read,Typeable)
-- | Connection configuration; the phantom @qt@ tags the query type.
data DdbConfiguration qt = DdbConfiguration {
      ddbcRegion :: Region
    -- ^ The regional endpoint. Ex: 'ddbUsEast'
    , ddbcProtocol :: Protocol
    -- ^ 'HTTP' or 'HTTPS'
    , ddbcPort :: Maybe Int
    -- ^ Port override (mostly for local dev connection)
    } deriving (Show,Typeable)
-- | Defaults to us-east-1 over HTTPS on the protocol's default port.
instance Default (DdbConfiguration NormalQuery) where
    def = DdbConfiguration ddbUsEast1 HTTPS Nothing
-- | HTTPS for normal use, plain HTTP for debugging.
instance DefaultServiceConfiguration (DdbConfiguration NormalQuery) where
    defServiceConfig = ddbHttps ddbUsEast1
    debugServiceConfig = ddbHttp ddbUsEast1
-------------------------------------------------------------------------------
-- | DynamoDb local connection (for development)
ddbLocal :: Region
ddbLocal = Region "127.0.0.1" "local"
ddbUsEast1 :: Region
ddbUsEast1 = Region "dynamodb.us-east-1.amazonaws.com" "us-east-1"
ddbUsWest1 :: Region
ddbUsWest1 = Region "dynamodb.us-west-1.amazonaws.com" "us-west-1"
ddbUsWest2 :: Region
ddbUsWest2 = Region "dynamodb.us-west-2.amazonaws.com" "us-west-2"
ddbEuWest1 :: Region
ddbEuWest1 = Region "dynamodb.eu-west-1.amazonaws.com" "eu-west-1"
ddbEuCentral1 :: Region
ddbEuCentral1 = Region "dynamodb.eu-central-1.amazonaws.com" "eu-central-1"
ddbApNe1 :: Region
ddbApNe1 = Region "dynamodb.ap-northeast-1.amazonaws.com" "ap-northeast-1"
ddbApSe1 :: Region
ddbApSe1 = Region "dynamodb.ap-southeast-1.amazonaws.com" "ap-southeast-1"
ddbApSe2 :: Region
ddbApSe2 = Region "dynamodb.ap-southeast-2.amazonaws.com" "ap-southeast-2"
ddbSaEast1 :: Region
ddbSaEast1 = Region "dynamodb.sa-east-1.amazonaws.com" "sa-east-1"
ddbHttp :: Region -> DdbConfiguration NormalQuery
ddbHttp endpoint = DdbConfiguration endpoint HTTP Nothing
ddbHttps :: Region -> DdbConfiguration NormalQuery
ddbHttps endpoint = DdbConfiguration endpoint HTTPS Nothing
-- | Sign a DynamoDB request using AWS Signature Version 4.
--
-- Every DynamoDB call is a POST of a JSON body to @/@; the operation is
-- selected via the @x-amz-target@ header rather than the path.
ddbSignQuery
    :: A.ToJSON a
    => B.ByteString
    -- ^ Operation name, appended to 'dyApiVersion' to form @x-amz-target@.
    -> a
    -- ^ Request payload, JSON-encoded into the body.
    -> DdbConfiguration qt
    -> SignatureData
    -> SignedQuery
ddbSignQuery target body di sd
    = SignedQuery {
        sqMethod = Post
      , sqProtocol = ddbcProtocol di
      , sqHost = host
      , sqPort = fromMaybe (defaultPort (ddbcProtocol di)) (ddbcPort di)
      , sqPath = "/"
      , sqQuery = []
      , sqDate = Just $ signatureTime sd
      , sqAuthorization = Just auth
      , sqContentType = Just "application/x-amz-json-1.0"
      , sqContentMd5 = Nothing
      , sqAmzHeaders = amzHeaders ++ maybe [] (\tok -> [("x-amz-security-token",tok)]) (iamToken credentials)
      , sqOtherHeaders = []
      , sqBody = Just $ HTTP.RequestBodyLBS bodyLBS
      , sqStringToSign = canonicalRequest
      }
    where
      credentials = signatureCredentials sd
      Region{..} = ddbcRegion di
      host = rUri
      -- Compact ISO-8601 timestamp format required by V4 signing.
      sigTime = fmtTime "%Y%m%dT%H%M%SZ" $ signatureTime sd
      bodyLBS = A.encode body
      -- Hex-encoded SHA256 of the payload, the last line of the
      -- canonical request.
      bodyHash = Base16.encode $ toBytes (hashlazy bodyLBS :: Digest SHA256)
      -- for some reason AWS doesn't want the x-amz-security-token in the canonical request
      amzHeaders = [ ("x-amz-date", sigTime)
                   , ("x-amz-target", dyApiVersion <> target)
                   ]
      -- Canonical headers must be sorted by (folded) header name.
      canonicalHeaders = sortBy (compare `on` fst) $ amzHeaders ++
                         [("host", host),
                          ("content-type", "application/x-amz-json-1.0")]
      canonicalRequest = B.concat $ intercalate ["\n"] (
          [ ["POST"]
          , ["/"]
          , [] -- query string
          ] ++
          map (\(a,b) -> [CI.foldedCase a,":",b]) canonicalHeaders ++
          [ [] -- end headers
          , intersperse ";" (map (CI.foldedCase . fst) canonicalHeaders)
          , [bodyHash]
          ])
      -- NOTE(review): the signed-headers string below is hard-coded and
      -- must stay in sync with 'canonicalHeaders' above -- confirm when
      -- adding headers.
      auth = authorizationV4 sd HmacSHA256 rName "dynamodb"
                             "content-type;host;x-amz-date;x-amz-target"
                             canonicalRequest
-- | An error document returned by AWS. The @__type@ field carries the
-- error code; the message key's capitalisation varies, so both
-- @message@ and @Message@ are tried.
data AmazonError = AmazonError {
      aeType    :: T.Text
    , aeMessage :: Maybe T.Text
    }

instance FromJSON AmazonError where
    parseJSON (Object v) = AmazonError
      <$> v .: "__type"
      <*> (Just <$> (v .: "message" <|> v .: "Message") <|> pure Nothing)
    -- Fail the parse instead of crashing the whole program: the original
    -- used 'error' here, turning a malformed error document into an
    -- unrecoverable exception rather than a reportable parse failure.
    parseJSON _ = fail "aws: unexpected AmazonError message"
-------------------------------------------------------------------------------
-- | Consume a DynamoDB HTTP response: parse the body as a single JSON
-- document, record response metadata into @ref@, and convert non-200
-- replies into 'DdbError' / 'DdbLibraryError' exceptions.
ddbResponseConsumer :: A.FromJSON a => IORef DdbResponse -> HTTPResponseConsumer a
ddbResponseConsumer ref resp = do
    -- The whole body must be exactly one JSON value ('endOfInput').
    val <- HTTP.responseBody resp $$+- sinkParser (A.json' <* AttoB.endOfInput)
    case statusCode of
      200 -> rSuccess val
      _   -> rError val
  where
    header = fmap T.decodeUtf8 . flip lookup (HTTP.responseHeaders resp)
    amzId = header "x-amzn-RequestId"
    amzCrc = header "x-amz-crc32"
    meta = DdbResponse amzCrc amzId
    tellMeta = liftIO $ tellMetadataRef ref meta
    -- Successful status: decode into the caller's type, raising a
    -- 'JsonProtocolError' if the document does not match.
    rSuccess val =
      case A.fromJSON val of
        A.Success a -> return a
        A.Error err -> do
            tellMeta
            throwM $ JsonProtocolError val (T.pack err)
    -- Error status: decode the AWS error document and rethrow as
    -- 'DdbError' (or 'JsonProtocolError' if even that fails to parse).
    rError val = do
      tellMeta
      case parseEither parseJSON val of
        Left e ->
            throwM $ JsonProtocolError val (T.pack e)
        Right err'' -> do
            -- Error types look like @namespace#ErrCode@; keep the part
            -- after the '#'.
            let e = T.drop 1 . snd . T.breakOn "#" $ aeType err''
            errCode <- readErrCode e
            throwM $ DdbError statusCode errCode (fromMaybe "" $ aeMessage err'')
    -- Parse the error-code text via 'Read'; unknown codes become
    -- 'UnknownDynamoErrCode'.
    readErrCode txt =
        let txt' = T.unpack txt
        in case readMay txt' of
             Just e -> return $ e
             Nothing -> throwM (UnknownDynamoErrCode txt)
    HTTP.Status{..} = HTTP.responseStatus resp
-- | Conditions used by mutation operations ('PutItem', 'UpdateItem',
-- etc.). The default 'def' instance is empty (no condition).
data Conditions = Conditions CondMerge [Condition]
    deriving (Eq,Show,Read,Ord,Typeable)

instance Default Conditions where
    def = Conditions CondAnd []

-- | Encode conditions under the @Expected@ key, as used by mutation
-- requests.
expectsJson :: Conditions -> [A.Pair]
expectsJson = conditionsJson "Expected"
-- | JSON encoding of conditions parameter in various contexts.
--
-- Emits the operator field first (only when two or more conditions are
-- present), followed by the condition list under @key@.
conditionsJson :: T.Text -> Conditions -> [A.Pair]
conditionsJson key (Conditions op es) = operatorField ++ conditionField
  where
    conditionField
      | null es   = []
      | otherwise = [key .= object (map conditionJson es)]
    -- Pattern match on "two or more" without forcing the full length.
    operatorField = case es of
      _ : _ : _ -> ["ConditionalOperator" .= String (rendCondOp op)]
      _         -> []
-------------------------------------------------------------------------------
-- | DynamoDB wire name for a condition-merge operator.
rendCondOp :: CondMerge -> T.Text
rendCondOp CondAnd = "AND"
rendCondOp CondOr = "OR"

-------------------------------------------------------------------------------
-- | How to merge multiple conditions.
data CondMerge = CondAnd | CondOr
    deriving (Eq,Show,Read,Ord,Typeable)

-- | A condition used by mutation operations ('PutItem', 'UpdateItem', etc.).
data Condition = Condition {
      condAttr :: T.Text
    -- ^ Attribute to use as the basis for this conditional
    , condOp :: CondOp
    -- ^ Operation on the selected attribute
    } deriving (Eq,Show,Read,Ord,Typeable)

-------------------------------------------------------------------------------
-- | Conditional operation to perform on a field.
-- Wire names are produced by 'renderCondOp'; operand values are
-- collected by 'getCondValues'.
data CondOp
    = DEq DValue
    | NotEq DValue
    | DLE DValue
    | DLT DValue
    | DGE DValue
    | DGT DValue
    | NotNull
    | IsNull
    | Contains DValue
    | NotContains DValue
    | Begins DValue
    | In [DValue]
    | Between DValue DValue
    deriving (Eq,Show,Read,Ord,Typeable)
-------------------------------------------------------------------------------
-- | Collect the operand values carried by a conditional operator
-- (empty for the nullary tests, both endpoints for 'Between').
getCondValues :: CondOp -> [DValue]
getCondValues (DEq v)         = [v]
getCondValues (NotEq v)       = [v]
getCondValues (DLE v)         = [v]
getCondValues (DLT v)         = [v]
getCondValues (DGE v)         = [v]
getCondValues (DGT v)         = [v]
getCondValues NotNull         = []
getCondValues IsNull          = []
getCondValues (Contains v)    = [v]
getCondValues (NotContains v) = [v]
getCondValues (Begins v)      = [v]
getCondValues (In vs)         = vs
getCondValues (Between lo hi) = [lo, hi]
-------------------------------------------------------------------------------
-- | DynamoDB wire name (ComparisonOperator) for each conditional
-- operator.
renderCondOp :: CondOp -> T.Text
renderCondOp DEq{}         = "EQ"
renderCondOp NotEq{}       = "NE"
renderCondOp DLE{}         = "LE"
renderCondOp DLT{}         = "LT"
renderCondOp DGE{}         = "GE"
renderCondOp DGT{}         = "GT"
renderCondOp NotNull       = "NOT_NULL"
renderCondOp IsNull        = "NULL"
renderCondOp Contains{}    = "CONTAINS"
renderCondOp NotContains{} = "NOT_CONTAINS"
renderCondOp Begins{}      = "BEGINS_WITH"
renderCondOp In{}          = "IN"
renderCondOp Between{}     = "BETWEEN"
-- | Encode a single 'Condition' as an attribute-name/operator pair.
conditionJson :: Condition -> Pair
conditionJson Condition{..} = condAttr .= condOp

instance ToJSON CondOp where
    toJSON c = object $ ("ComparisonOperator" .= String (renderCondOp c)) : valueList
      where
        -- Operators with no operands omit @AttributeValueList@ entirely.
        valueList =
          let vs = getCondValues c in
          if null vs
            then []
            else ["AttributeValueList" .= vs]

-------------------------------------------------------------------------------
-- | API-version prefix for the @x-amz-target@ header.
dyApiVersion :: B.ByteString
dyApiVersion = "DynamoDB_20120810."
-------------------------------------------------------------------------------
-- | The standard response metrics on capacity consumption.
data ConsumedCapacity = ConsumedCapacity {
      capacityUnits :: Int64
    -- ^ Units consumed by the request (@CapacityUnits@).
    , capacityGlobalIndex :: [(T.Text, Int64)]
    -- ^ Per-global-secondary-index breakdown.
    , capacityLocalIndex :: [(T.Text, Int64)]
    -- ^ Per-local-secondary-index breakdown.
    , capacityTableUnits :: Maybe Int64
    -- ^ Units attributed to the table itself, when reported.
    , capacityTable :: T.Text
    -- ^ Table name.
    } deriving (Eq,Show,Read,Ord,Typeable)

instance FromJSON ConsumedCapacity where
    parseJSON (Object v) = ConsumedCapacity
      <$> v .: "CapacityUnits"
      <*> (HM.toList <$> v .:? "GlobalSecondaryIndexes" .!= mempty)
      <*> (HM.toList <$> v .:? "LocalSecondaryIndexes" .!= mempty)
      <*> (v .:? "Table" >>= maybe (return Nothing) (.: "CapacityUnits"))
      <*> v .: "TableName"
    parseJSON _ = fail "ConsumedCapacity must be an Object."

-- | Level of capacity-consumption detail to request.
data ReturnConsumption = RCIndexes | RCTotal | RCNone
    deriving (Eq,Show,Read,Ord,Typeable)

instance ToJSON ReturnConsumption where
    toJSON RCIndexes = String "INDEXES"
    toJSON RCTotal = String "TOTAL"
    toJSON RCNone = String "NONE"

instance Default ReturnConsumption where
    def = RCNone

-- | Whether to request item-collection metrics in responses.
data ReturnItemCollectionMetrics = RICMSize | RICMNone
    deriving (Eq,Show,Read,Ord,Typeable)

instance ToJSON ReturnItemCollectionMetrics where
    toJSON RICMSize = String "SIZE"
    toJSON RICMNone = String "NONE"

instance Default ReturnItemCollectionMetrics where
    def = RICMNone
-- | Item-collection metrics reported by mutation responses.
data ItemCollectionMetrics = ItemCollectionMetrics {
      icmKey :: (T.Text, DValue)
    -- ^ The (single) partition-key entry of @ItemCollectionKey@.
    , icmEstimate :: [Double]
    -- ^ @SizeEstimateRangeGB@ bounds.
    } deriving (Eq,Show,Read,Ord,Typeable)

instance FromJSON ItemCollectionMetrics where
    parseJSON (Object v) = ItemCollectionMetrics
      <$> (do m <- v .: "ItemCollectionKey"
              -- Fail the parse instead of crashing via the partial
              -- 'head' when the key object is unexpectedly empty.
              case HM.toList m of
                (kv:_) -> return kv
                []     -> fail "ItemCollectionMetrics: empty ItemCollectionKey")
      <*> v .: "SizeEstimateRangeGB"
    parseJSON _ = fail "ItemCollectionMetrics must be an Object."
-------------------------------------------------------------------------------
-- | What to return from the current update operation
data UpdateReturn
    = URNone        -- ^ Return nothing
    | URAllOld      -- ^ Return old values
    | URUpdatedOld  -- ^ Return old values with a newer replacement
    | URAllNew      -- ^ Return new values
    | URUpdatedNew  -- ^ Return new values that were replacements
    deriving (Eq,Show,Read,Ord,Typeable)

instance ToJSON UpdateReturn where
    -- 'String' already builds a 'Value'; the previous extra
    -- 'toJSON' wrapping was a no-op and has been dropped.
    toJSON URNone       = String "NONE"
    toJSON URAllOld     = String "ALL_OLD"
    toJSON URUpdatedOld = String "UPDATED_OLD"
    toJSON URAllNew     = String "ALL_NEW"
    toJSON URUpdatedNew = String "UPDATED_NEW"

instance Default UpdateReturn where
    def = URNone
-------------------------------------------------------------------------------
-- | What to return from a 'Query' or 'Scan' query.
data QuerySelect
    = SelectSpecific [T.Text]
    -- ^ Only return selected attributes
    | SelectCount
    -- ^ Return counts instead of attributes
    | SelectProjected
    -- ^ Return index-projected attributes
    | SelectAll
    -- ^ Default. Return everything.
    deriving (Eq,Show,Read,Ord,Typeable)

-- | Defaults to returning all attributes.
instance Default QuerySelect where def = SelectAll
-------------------------------------------------------------------------------
-- | JSON encoding of the @Select@ parameter (plus @AttributesToGet@ for
-- 'SelectSpecific') of a 'Query' or 'Scan' request.
--
-- A top-level type signature was added; the binding previously had none.
querySelectJson :: QuerySelect -> [A.Pair]
querySelectJson (SelectSpecific as) =
    [ "Select" .= String "SPECIFIC_ATTRIBUTES"
    , "AttributesToGet" .= as]
querySelectJson SelectCount = ["Select" .= String "COUNT"]
querySelectJson SelectProjected = ["Select" .= String "ALL_PROJECTED_ATTRIBUTES"]
querySelectJson SelectAll = ["Select" .= String "ALL_ATTRIBUTES"]
-------------------------------------------------------------------------------
-- | A class to help predict DynamoDb size of values, attributes and
-- entire items. The result is given in number of bytes.
class DynSize a where
    dynSize :: a -> Int

instance DynSize DValue where
    -- Numbers are approximated at a flat 8 bytes; strings are sized by
    -- character count, binaries by their Base64-encoded length, and
    -- sets by summing over their members.
    dynSize (DNum _) = 8
    dynSize (DString a) = T.length a
    dynSize (DBinary bs) = T.length . T.decodeUtf8 $ Base64.encode bs
    dynSize (DNumSet s) = 8 * S.size s
    dynSize (DStringSet s) = sum $ map (dynSize . DString) $ S.toList s
    dynSize (DBinSet s) = sum $ map (dynSize . DBinary) $ S.toList s

instance DynSize Attribute where
    dynSize (Attribute k v) = T.length k + dynSize v

instance DynSize Item where
    dynSize m = sum $ map dynSize $ attributes m

instance DynSize a => DynSize [a] where
    dynSize as = sum $ map dynSize as

instance DynSize a => DynSize (Maybe a) where
    dynSize = maybe 0 dynSize

instance (DynSize a, DynSize b) => DynSize (Either a b) where
    dynSize = either dynSize dynSize
-------------------------------------------------------------------------------
-- | Will an attribute be considered empty by DynamoDb?
--
-- A 'PutItem' (or similar) with empty attributes will be rejected
-- with a 'ValidationException'.
nullAttr :: Attribute -> Bool
nullAttr (Attribute _ v) = isEmptyValue v
  where
    -- Empty strings/binaries and empty sets count as empty; every
    -- other value (including any number) does not.
    isEmptyValue (DString "")   = True
    isEmptyValue (DBinary "")   = True
    isEmptyValue (DNumSet s)    = S.null s
    isEmptyValue (DStringSet s) = S.null s
    isEmptyValue (DBinSet s)    = S.null s
    isEmptyValue _              = False
-------------------------------------------------------------------------------
--
-- | Item Parsing
--
-------------------------------------------------------------------------------
-- | Failure continuation.
type Failure f r = String -> f r

-- | Success continuation.
type Success a f r = a -> f r

-- | A continuation-based parser type: a parser is run with a failure
-- and a success continuation and calls exactly one of them.
newtype Parser a = Parser {
      runParser :: forall f r.
                   Failure f r
                -> Success a f r
                -> f r
    }
instance Monad Parser where
    -- Thread the failure continuation through; success feeds the
    -- produced value into the next parser.
    m >>= g = Parser $ \kf ks -> let ks' a = runParser (g a) kf ks
                                 in runParser m kf ks'
    {-# INLINE (>>=) #-}
    return a = Parser $ \_kf ks -> ks a
    {-# INLINE return #-}
    -- NOTE(review): on modern base 'fail' belongs to 'MonadFail';
    -- presumably this file targets an older GHC -- confirm before
    -- upgrading the compiler.
    fail msg = Parser $ \kf _ks -> kf msg
    {-# INLINE fail #-}

instance Functor Parser where
    fmap f m = Parser $ \kf ks -> let ks' a = ks (f a)
                                  in runParser m kf ks'
    {-# INLINE fmap #-}

instance Applicative Parser where
    pure = return
    {-# INLINE pure #-}
    (<*>) = apP
    {-# INLINE (<*>) #-}

-- | '<|>' recovers from failure by running the right-hand parser.
instance Alternative Parser where
    empty = fail "empty"
    {-# INLINE empty #-}
    (<|>) = mplus
    {-# INLINE (<|>) #-}

instance MonadPlus Parser where
    mzero = fail "mzero"
    {-# INLINE mzero #-}
    -- On failure of @a@, discard its message and run @b@ instead.
    mplus a b = Parser $ \kf ks -> let kf' _ = runParser b kf ks
                                   in runParser a kf' ks
    {-# INLINE mplus #-}

-- NOTE(review): modern base also requires a 'Semigroup' instance here;
-- see the MonadFail note above.
instance Monoid (Parser a) where
    mempty = fail "mempty"
    {-# INLINE mempty #-}
    mappend = mplus
    {-# INLINE mappend #-}
-- | Applicative apply for 'Parser', expressed via the 'Monad' and
-- 'Functor' instances.
apP :: Parser (a -> b) -> Parser a -> Parser b
apP mf mx = mf >>= \f -> fmap f mx
{-# INLINE apP #-}
-------------------------------------------------------------------------------
-- | Types convertible to DynamoDb 'Item' collections.
--
-- Use 'attr' and 'attrAs' combinators to conveniently define instances.
class ToDynItem a where
    toItem :: a -> Item

-------------------------------------------------------------------------------
-- | Types parseable from DynamoDb 'Item' collections.
--
-- Use the 'getAttr' family of functions to applicatively or monadically
-- parse into your custom types.
class FromDynItem a where
    parseItem :: Item -> Parser a

-- Trivial instances: an 'Item' converts to/from itself.
instance ToDynItem Item where toItem = id
instance FromDynItem Item where parseItem = return

instance DynVal a => ToDynItem [(T.Text, a)] where
    toItem as = item $ map (uncurry attr) as

instance (Typeable a, DynVal a) => FromDynItem [(T.Text, a)] where
    parseItem i = mapM f $ M.toList i
      where
        -- Convert each value, failing the whole parse on the first
        -- value that does not convert.
        f (k,v) = do
            v' <- maybe (fail (valErr (Tagged v :: Tagged a DValue))) return $
                  fromValue v
            return (k, v')

instance DynVal a => ToDynItem (M.Map T.Text a) where
    toItem m = toItem $ M.toList m

instance (Typeable a, DynVal a) => FromDynItem (M.Map T.Text a) where
    parseItem i = M.fromList <$> parseItem i

-- | Render a readable conversion-failure message naming the offending
-- 'DValue' and the target type.
valErr :: forall a. Typeable a => Tagged a DValue -> String
valErr (Tagged dv) = "Can't convert DynamoDb value " <> show dv <>
                     " into type " <> (show (typeOf (undefined :: a)))
-- | Convenience combinator for parsing fields from an 'Item' returned
-- by DynamoDb. Fails when the key is absent or when the value does not
-- convert to the requested type.
--
-- The superfluous 'do' block wrapping a single expression was removed.
getAttr
    :: forall a. (Typeable a, DynVal a)
    => T.Text
    -- ^ Attribute name
    -> Item
    -- ^ Item from DynamoDb
    -> Parser a
getAttr k m =
    case M.lookup k m of
      Nothing -> fail ("Key " <> T.unpack k <> " not found")
      Just dv -> maybe (fail (valErr (Tagged dv :: Tagged a DValue))) return $ fromValue dv
-- | Parse attribute if it's present in the 'Item'. Fail if attribute
-- is present but conversion fails.
--
-- The previous implementation returned 'Nothing' on conversion failure,
-- contradicting the documented contract above; it now fails the parse
-- with a descriptive message, mirroring 'getAttr'.
getAttr'
    :: forall a. (Typeable a, DynVal a)
    => T.Text
    -- ^ Attribute name
    -> Item
    -- ^ Item from DynamoDb
    -> Parser (Maybe a)
getAttr' k m =
    case M.lookup k m of
      Nothing -> return Nothing
      Just dv ->
        maybe (fail (valErr (Tagged dv :: Tagged a DValue))) (return . Just) $
          fromValue dv
-------------------------------------------------------------------------------
-- | Parse an 'Item' into target type using the 'FromDynItem'
-- instance. Returns 'Left' with the failure message on error.
fromItem :: FromDynItem a => Item -> Either String a
fromItem i = runParser (parseItem i) Left Right
|
frms-/aws
|
Aws/DynamoDb/Core.hs
|
Haskell
|
bsd-3-clause
| 39,556
|
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
module Snaplet.Authentication
( initAuthentication
, Authentication
, requireUser
, withUser
, makeSessionJSON
, module Snaplet.Authentication.Queries
, module Snaplet.Authentication.Schema
, AuthConfig(..)
) where
import Control.Lens
import Control.Monad.CatchIO hiding (Handler)
import Control.Monad.IO.Class
import Control.Monad.Reader
import Control.Monad.Trans.Either
import Crypto.BCrypt
import Data.ByteString
import Data.Monoid
import Data.Text as T
import Data.Text.Encoding
import Data.Time
import Data.UUID
import Database.Esqueleto hiding (migrate)
import qualified Database.Persist
import qualified Kashmir.Github as Github
import Kashmir.Snap.Snaplet.Random
import Kashmir.Snap.Utils
import Kashmir.UUID
import Kashmir.Web
import Snap hiding (with)
import Snaplet.Authentication.Common
import Snaplet.Authentication.Exception
import Snaplet.Authentication.PasswordReset
import Snaplet.Authentication.Queries
import Snaplet.Authentication.Schema
import Snaplet.Authentication.Session
import Snaplet.Authentication.Types
------------------------------------------------------------
-- | Build the GitHub OAuth authorization URL for the given
-- configuration (fixed scope, configured client id).
githubLoginUrl :: Github.Config -> Text
githubLoginUrl config =
  T.pack $
  view Github.authUrl config <>
  "?scope=user:email,read:org,admin:repo_hook,&client_id=" <>
  view Github.clientId config
-- | Redirect the browser to GitHub's OAuth authorization page.
githubLoginHandler :: Handler b (Authentication b) ()
githubLoginHandler = do
  githubConfig <- view (authConfig . github)
  redirect . encodeUtf8 $ githubLoginUrl githubConfig

-- | Exchange an OAuth @code@ for an access token, fetch the GitHub
-- user's details, and create or update the matching account row.
-- Returns the current time together with the account key.
upsertAccountFromGithub
  :: Github.Config
  -> ByteString
  -- ^ OAuth code received on the callback.
  -> UUID
  -- ^ UUID to use if a new account must be created.
  -> ConnectionPool
  -> IO (UTCTime, Key Account)
upsertAccountFromGithub githubConfig code uuid connection = do
  accessToken <-
    view Github.accessToken <$> Github.requestAccess githubConfig code
  user <- runReaderT Github.getUserDetails accessToken
  now <- getCurrentTime
  accountKey <-
    runSqlPersistMPool
      (createOrUpdateGithubUser uuid now accessToken user)
      connection
  return (now, accountKey)

-- | Upsert the GitHub-backed account, write an auth-token cookie valid
-- for two weeks, and redirect to the requested target.
processGithubAccessToken :: Text
                         -> ByteString
                         -> Handler b (Authentication b) ()
processGithubAccessToken redirectTarget code = do
  githubConfig <- view (authConfig . github)
  connection <- getConnection
  randomNumberGenerator <- view randomNumberGeneratorLens
  uuid <- Snap.withTop randomNumberGenerator getRandom
  (now, accountKey) <-
    liftIO $ upsertAccountFromGithub githubConfig code uuid connection
  logError $
    "Upserted account key: " <> (toStrictByteString . unAccountKey) accountKey
  writeAuthToken (addUTCTime twoWeeks now) (unAccountKey accountKey)
  redirect $ encodeUtf8 redirectTarget

-- | GET endpoint GitHub redirects back to; requires a @code@ parameter.
githubCallbackHandler :: Text -> Handler b (Authentication b) ()
githubCallbackHandler redirectTarget =
  method GET $ requireParam "code" >>= processGithubAccessToken redirectTarget
------------------------------------------------------------
-- | POST endpoint creating a new email/password account from a JSON
-- 'Registration' payload (bounded at 1024 bytes). Responds 409 when
-- the account cannot be created.
registrationHandler :: Handler b (Authentication b) ()
registrationHandler =
  method POST $ do
    payload <- requireBoundedJSON 1024
    connection <- getConnection
    randomNumberGenerator <- view randomNumberGeneratorLens
    uuid <- Snap.withTop randomNumberGenerator getRandom
    maybeAccount <- liftIO $ createPasswordAccount payload uuid connection
    case maybeAccount of
      Nothing -> handleErrorWithMessage 409 "Conflict"
      Just account -> do
        logError $ "Created account: " <> encodeUtf8 (T.pack (show account))
        authorizedAccountResponse account

-- | Create a password-based account. 'Nothing' means creation did not
-- happen (treated as a conflict by 'registrationHandler').
createPasswordAccount :: Registration
                      -> UUID
                      -> ConnectionPool
                      -> IO (Maybe Account)
createPasswordAccount payload uuid connection = do
  now <- getCurrentTime
  runSqlPersistMPool (createPasswordUser uuid now payload) connection
------------------------------------------------------------
-- | Look up the account by email and verify the supplied password
-- against the stored bcrypt hash, replying with an authorized session
-- on success and an unauthorized response otherwise.
processEmailPassword :: Login -> Handler b (Authentication b) ()
processEmailPassword payload = do
  matchingAccount <- handleSql (lookupByEmail (loginEmail payload))
  case matchingAccount of
    Nothing -> unauthorized
    -- Validate password.
    Just (account, accountUidpwd) ->
      if validatePassword
           (encodeUtf8 (accountUidpwdPassword accountUidpwd))
           (encodeUtf8 (loginPassword payload))
        then authorizedAccountResponse account
        else unauthorized
-- | POST endpoint for email/password login. Decodes the JSON 'Login'
-- payload (bounded at 1024 bytes) and delegates to
-- 'processEmailPassword'.
--
-- The previous body duplicated 'processEmailPassword' verbatim; it now
-- delegates, so the lookup/validation logic lives in one place.
emailPasswordLoginHandler :: Handler b (Authentication b) ()
emailPasswordLoginHandler =
  method POST $ requireBoundedJSON 1024 >>= processEmailPassword
-- | Require that an authenticated AuthUser is present in the current session.
-- This function has no DB cost - only checks to see if the client has passed a valid auth token.
requireUser
  :: SnapletLens b (Authentication b)
  -> Handler b v a
  -> (Key Account -> Handler b v a)
  -> Handler b v a
requireUser aLens onAnonymous onAuthenticated = do
  token <- Snap.withTop aLens readAuthToken
  maybe onAnonymous (onAuthenticated . AccountKey) token
-- | Run a handler with the current account key, if any. Unlike
-- 'requireUser' this never rejects; the handler receives 'Nothing'
-- when no auth token is present.
withUser
  :: SnapletLens b (Authentication b)
  -> (Maybe (Key Account) -> Handler b v a)
  -> Handler b v a
withUser aLens handler =
  Snap.withTop aLens readAuthToken >>= handler . fmap AccountKey
------------------------------------------------------------
-- | Clear the auth token and send the user back to the root page.
logoutHandler
  :: MonadSnap m
  => m ()
logoutHandler = do
  removeAuthToken
  redirect "/"

-- | GET endpoint reporting the authenticated account as JSON.
-- With no token: clears any stale token and 'pass'es. With a token
-- pointing at a missing account: throws 'AccountNotFound'.
authStatusHandler :: Handler b (Authentication b) ()
authStatusHandler =
  method GET $ do
    logError "Looking up user details."
    authToken <- readAuthToken
    logError $ "Got auth token: " <> maybe "<none>" toStrictByteString authToken
    case authToken of
      Nothing -> removeAuthToken >> pass
      Just accountId -> do
        account <- handleSql (Database.Persist.get $ AccountKey accountId)
        case account of
          Nothing -> throw AccountNotFound
          Just a -> writeJSON a
-- | Run the snaplet's schema migrations against the pool, returning the
-- pool unchanged so this can be chained as a post-init hook.
migrate :: ConnectionPool -> EitherT Text IO ConnectionPool
migrate pool = do
  lift $ runSqlPersistMPool (runMigration migrateAccounts) pool
  return pool

-- | Initialise the authentication snaplet: registers all auth routes
-- and hooks schema migration into snaplet post-init.
initAuthentication
  :: Text
  -- ^ Redirect target after a successful GitHub OAuth callback.
  -> AuthConfig
  -> SnapletLens b ConnectionPool
  -> SnapletLens b RandomNumberGenerator
  -> SnapletInit b (Authentication b)
initAuthentication redirectTarget _authConfig _poolLens _randomNumberGeneratorLens =
  makeSnaplet "authentication" "Authentication Snaplet" Nothing $ do
    addRoutes
      [ ("/login/uidpwd", emailPasswordLoginHandler)
      , ("/registration/uidpwd", registrationHandler)
      , ("/reset/uidpwd", emailPasswordResetHandler)
      , ("/reset/uidpwd/complete", emailPasswordResetCompletionHandler)
      , ("/login/github", githubLoginHandler)
      , ("/callback/github", githubCallbackHandler redirectTarget)
      , ("/logout", logoutHandler)
      , ("/status", authStatusHandler)
      ]
    _ <- Snap.withTop _poolLens $ addPostInitHook migrate
    -- Underscore-prefixed arguments are captured into the record by
    -- RecordWildCards here.
    return Authentication {..}
|
krisajenkins/snaplet-auth
|
src/Snaplet/Authentication.hs
|
Haskell
|
bsd-3-clause
| 7,436
|
----------------------------------------------------------------------------
-- |
-- Module : BWildcardExportListWithChildren
-- Copyright : (c) Sergey Vinokurov 2018
-- License : BSD3-style (see LICENSE)
-- Maintainer : serg.foo@gmail.com
----------------------------------------------------------------------------
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TemplateHaskell #-}
module BWildcardExportListWithChildren
( FooB(..)
, BarB(..)
, pattern BazBP
, quuxB
, pattern FrobBP
, QuuxB(..)
, pattern QuuxBP
, commonFunc
, derivedB
) where
-- | Record with a lazy 'Int' field and a strict 'Double' field.
-- (This module is tags-server test-fixture data.)
data FooB = FooB1
  { fooB1 :: Int
  , fooB2 :: !Double
  }

-- | Newtype wrapper around a list of doubles.
newtype BarB =
  BarB1
    { unBarB :: [Double] }

-- | Bidirectional pattern viewing a 'BarB' as exactly two elements.
pattern BazBP :: Double -> Double -> BarB
pattern BazBP x y = BarB1 [x, y]
-- | Identity on 'Int'.
quuxB :: Int -> Int
quuxB n = n
-- | Pattern building a 'FooB1' with 'fooB2' fixed at 0.
pattern FrobBP :: Int -> FooB
pattern FrobBP x = FooB1 { fooB1 = x, fooB2 = 0 }

-- | Two-constructor sum type.
data QuuxB =
  QuuxB1 Int
  | QuuxB2

-- | Pattern synonym for 'QuuxB1'.
pattern QuuxBP :: Int -> QuuxB
pattern QuuxBP n = QuuxB1 n
-- | Compute @v + v * v@.
commonFunc :: Double -> Double
commonFunc v = v + v * v
-- Template Haskell splice declaring 'derivedB' (identity on 'Int');
-- present so the surrounding test suite can exercise TH-generated tags.
$([d|
  derivedB :: Int -> Int
  derivedB x = x
  |])
|
sergv/tags-server
|
test-data/0012resolve_reexport_import_cycles/BWildcardExportListWithChildren.hs
|
Haskell
|
bsd-3-clause
| 1,131
|
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
{-# LANGUAGE OverloadedStrings #-}
module Duckling.Time.HR.Corpus
( corpus ) where
import Prelude
import Data.String
import Duckling.Lang
import Duckling.Resolve
import Duckling.Time.Corpus
import Duckling.Time.Types hiding (Month)
import Duckling.TimeGrain.Types hiding (add)
import Duckling.Testing.Types hiding (examples)
-- | Croatian time-expression corpus: the shared test context with the
-- language switched to HR, paired with all examples below.
corpus :: Corpus
corpus = (testContext {lang = HR}, allExamples)
allExamples :: [Example]
allExamples = concat
[ examples (datetime (2013, 2, 12, 4, 30, 0) Second)
[ "sad"
, "sada"
, "upravo sad"
, "ovaj tren"
]
, examples (datetime (2013, 2, 12, 0, 0, 0) Day)
[ "danas"
]
, examples (datetime (2013, 2, 11, 0, 0, 0) Day)
[ "jucer"
, "jučer"
]
, examples (datetime (2013, 2, 13, 0, 0, 0) Day)
[ "sutra"
]
, examples (datetime (2013, 2, 18, 0, 0, 0) Day)
[ "ponedjeljak"
, "pon."
, "ovaj ponedjeljak"
]
, examples (datetime (2013, 2, 18, 0, 0, 0) Day)
[ "ponedjeljak, 18. veljace"
, "ponedjeljak, 18. veljače"
]
, examples (datetime (2013, 2, 19, 0, 0, 0) Day)
[ "utorak"
, "utorak 19."
]
, examples (datetime (2013, 2, 14, 0, 0, 0) Day)
[ "cetvrtak"
, "četvrtak"
, "čet"
, "cet."
]
, examples (datetime (2013, 2, 15, 0, 0, 0) Day)
[ "petak"
, "pet"
, "pet."
]
, examples (datetime (2013, 2, 16, 0, 0, 0) Day)
[ "subota"
, "sub"
, "sub."
]
, examples (datetime (2013, 2, 17, 0, 0, 0) Day)
[ "nedjelja"
, "ned"
, "ned."
]
, examples (datetime (2013, 3, 1, 0, 0, 0) Day)
[ "1. ozujak"
, "1. ožujak"
, "prvi ozujka"
]
, examples (datetime (2013, 3, 3, 0, 0, 0) Day)
[ "treci ozujka"
, "treci ožujka"
]
, examples (datetime (2015, 3, 3, 0, 0, 0) Day)
[ "3. ozujka 2015"
, "treci ozujka 2015"
, "3/3/2015"
, "3/3/15"
, "2015-3-3"
, "2015-03-03"
]
, examples (datetime (2013, 2, 15, 0, 0, 0) Day)
[ "15ti drugi"
]
, examples (datetime (2013, 2, 15, 0, 0, 0) Day)
[ "15. veljace"
, "15. veljače"
, "15/02"
]
, examples (datetime (2013, 8, 8, 0, 0, 0) Day)
[ "8. kolovoza"
, "8. kolovoz"
]
, examples (datetime (2014, 10, 0, 0, 0, 0) Month)
[ "listopad 2014"
]
, examples (datetime (1974, 10, 31, 0, 0, 0) Day)
[ "31/10/1974"
, "31/10/74"
, "74-10-31"
]
, examples (datetime (2015, 4, 14, 0, 0, 0) Day)
[ "14travanj 2015"
, "14. travnja, 2015"
, "14. travanj 15"
]
, examples (datetime (2013, 2, 19, 0, 0, 0) Day)
[ "sljedeci utorak"
, "sljedeceg utorka"
]
, examples (datetime (2013, 2, 22, 0, 0, 0) Day)
[ "petak nakon sljedeceg"
]
, examples (datetime (2013, 3, 0, 0, 0, 0) Month)
[ "sljedeci ozujak"
]
, examples (datetime (2014, 3, 0, 0, 0, 0) Month)
[ "ozujak nakon sljedeceg"
]
, examples (datetime (2013, 2, 10, 0, 0, 0) Day)
[ "nedjelja, 10. veljace"
, "nedjelja, 10. veljače"
]
, examples (datetime (2013, 2, 13, 0, 0, 0) Day)
[ "Sri, 13. velj"
]
, examples (datetime (2013, 2, 18, 0, 0, 0) Day)
[ "ponedjeljak, veljaca 18."
, "Pon, 18. veljace"
]
, examples (datetime (2013, 2, 11, 0, 0, 0) Week)
[ "ovaj tjedan"
]
, examples (datetime (2013, 2, 4, 0, 0, 0) Week)
[ "prosli tjedan"
, "prošli tjedan"
, "prethodni tjedan"
]
, examples (datetime (2013, 2, 18, 0, 0, 0) Week)
[ "sljedeci tjedan"
]
, examples (datetime (2013, 1, 0, 0, 0, 0) Month)
[ "prethodni mjesec"
]
, examples (datetime (2013, 3, 0, 0, 0, 0) Month)
[ "sljedeci mjesec"
]
, examples (datetime (2013, 1, 1, 0, 0, 0) Quarter)
[ "ovaj kvartal"
, "ovo tromjesecje"
]
, examples (datetime (2013, 4, 1, 0, 0, 0) Quarter)
[ "sljedeci kvartal"
]
, examples (datetime (2013, 7, 1, 0, 0, 0) Quarter)
[ "treci kvartal"
, "3. kvartal"
, "trece tromjesecje"
, "3. tromjesečje"
]
, examples (datetime (2018, 10, 1, 0, 0, 0) Quarter)
[ "4. kvartal 2018"
, "četvrto tromjesečje 2018"
]
, examples (datetime (2012, 0, 0, 0, 0, 0) Year)
[ "prošla godina"
, "prethodna godina"
]
, examples (datetime (2013, 0, 0, 0, 0, 0) Year)
[ "ova godina"
]
, examples (datetime (2014, 0, 0, 0, 0, 0) Year)
[ "sljedece godina"
]
, examples (datetime (2013, 2, 10, 0, 0, 0) Day)
[ "prosle nedjelje"
, "prosli tjedan u nedjelju"
]
, examples (datetime (2013, 2, 5, 0, 0, 0) Day)
[ "prosli utorak"
]
, examples (datetime (2013, 2, 19, 0, 0, 0) Day)
[ "sljedeci utorak"
]
, examples (datetime (2013, 2, 13, 0, 0, 0) Day)
[ "sljedecu srijedu"
]
, examples (datetime (2013, 2, 20, 0, 0, 0) Day)
[ "sljedeci tjedan u srijedu"
, "srijeda sljedeci tjedan"
]
, examples (datetime (2013, 2, 15, 0, 0, 0) Day)
[ "sljedeci petak"
]
, examples (datetime (2013, 2, 11, 0, 0, 0) Day)
[ "ovaj tjedan u ponedjeljak"
]
, examples (datetime (2013, 2, 19, 0, 0, 0) Day)
[ "ovaj utorak"
]
, examples (datetime (2013, 2, 13, 0, 0, 0) Day)
[ "ova srijeda"
, "ovaj tjedan u srijedu"
]
, examples (datetime (2013, 2, 14, 0, 0, 0) Day)
[ "prekosutra"
]
, examples (datetime (2013, 2, 14, 17, 0, 0) Hour)
[ "prekosutra u 5 popodne"
, "prekosutra u 17"
]
, examples (datetime (2013, 2, 10, 0, 0, 0) Day)
[ "prekjucer"
, "prekjučer"
]
, examples (datetime (2013, 2, 10, 8, 0, 0) Hour)
[ "prekjučer u 8"
, "prekjučer u 8 sati"
]
, examples (datetime (2013, 3, 25, 0, 0, 0) Day)
[ "zadnji ponedjeljak u ozujku"
]
, examples (datetime (2014, 3, 30, 0, 0, 0) Day)
[ "zadnja nedjelja u ozujku 2014"
]
, examples (datetime (2013, 10, 3, 0, 0, 0) Day)
[ "treci dan u listopadu"
]
, examples (datetime (2014, 10, 6, 0, 0, 0) Week)
[ "prvi tjedan u listopadu 2014"
]
, examples (datetime (2015, 10, 31, 0, 0, 0) Day)
[ "zadnji dan u listopadu 2015"
]
, examples (datetime (2014, 9, 22, 0, 0, 0) Week)
[ "zadnji tjedan u rujnu 2014"
]
, examples (datetime (2013, 10, 1, 0, 0, 0) Day)
[ "prvi utorak u listopadu"
]
, examples (datetime (2014, 9, 16, 0, 0, 0) Day)
[ "treci utorak u rujnu 2014"
]
, examples (datetime (2014, 10, 1, 0, 0, 0) Day)
[ "prva srijeda u listopadu 2014"
]
, examples (datetime (2014, 10, 8, 0, 0, 0) Day)
[ "druga srijeda u listopadu 2014"
]
, examples (datetime (2015, 1, 13, 0, 0, 0) Day)
[ "treci utorak poslije Bozica 2014"
]
, examples (datetime (2013, 2, 13, 3, 0, 0) Hour)
[ "3 u noci"
, "u 3 ujutro"
, "u tri sata u noci"
]
, examples (datetime (2013, 2, 12, 3, 18, 0) Minute)
[ "3:18 rano"
]
, examples (datetime (2013, 2, 12, 15, 0, 0) Hour)
[ "u 3 poslijepodne"
, "@ 15"
, "15 sati poslijepodne"
]
, examples (datetime (2013, 2, 12, 15, 0, 0) Hour)
[ "oko 3 poslijepodne"
, "otprilike u 3 poslijepodne"
, "cca 3 poslijepodne"
, "cca 15"
]
, examples (datetime (2013, 2, 12, 15, 15, 0) Minute)
[ "15 i 15"
, "3:15 poslijepodne"
, "15:15"
]
, examples (datetime (2013, 2, 12, 15, 15, 0) Minute)
[ "cetvrt nakon 3 poslijepodne"
]
, examples (datetime (2013, 2, 12, 15, 20, 0) Minute)
[ "3 i 20 popodne"
, "3:20 poslijepodne"
, "3:20 popodne"
, "dvadeset nakon 3 popodne"
, "15:20"
]
, examples (datetime (2013, 2, 12, 15, 30, 0) Minute)
[ "tri i po popodne"
, "pola 4 popodne"
, "15:30"
, "pola cetiri popodne"
]
, examples (datetime (2013, 2, 12, 15, 23, 24) Second)
[ "15:23:24"
]
, examples (datetime (2013, 2, 12, 11, 45, 0) Minute)
[ "petnaest do podne"
, "11:45"
, "četvrt do podneva"
]
, examples (datetime (2013, 2, 12, 20, 0, 0) Hour)
[ "8 navecer"
, "osam sati navecer"
, "danas 8 navecer"
]
, examples (datetime (2013, 9, 20, 19, 30, 0) Minute)
[ "u 7:30 popodne u pet, 20. rujna"
]
, examples (datetime (2013, 2, 16, 9, 0, 0) Hour)
[ "9 ujutro u subotu"
, "u subotu u 9 sati ujutro"
]
, examples (datetime (2014, 7, 18, 19, 0, 0) Minute)
[ "pet, srp 18., 2014, 19:00"
, "pet, srp 18., 2014 u 19:00"
]
, examples (datetime (2013, 2, 12, 4, 30, 1) Second)
[ "za jednu sekundu"
]
, examples (datetime (2013, 2, 12, 4, 31, 0) Second)
[ "za jednu minutu"
]
, examples (datetime (2013, 2, 12, 4, 32, 0) Second)
[ "za 2 minute"
, "za jos 2 minute"
, "2 minute od sad"
]
, examples (datetime (2013, 2, 12, 5, 30, 0) Second)
[ "za 60 minuta"
]
, examples (datetime (2013, 2, 12, 4, 45, 0) Second)
[ "oko cetvrt sata"
, "oko 1/4h"
, "oko 1/4 h"
, "oko 1/4 sata"
]
, examples (datetime (2013, 2, 12, 5, 0, 0) Second)
[ "za pola sata"
, "za pol sata"
, "za 1/2h"
, "za 1/2 h"
, "za 1/2 sata"
]
, examples (datetime (2013, 2, 12, 5, 15, 0) Second)
[ "za tri-cetvrt sata"
, "za 3/4h"
, "za 3/4 h"
, "za 3/4 sata"
]
, examples (datetime (2013, 2, 12, 7, 0, 0) Second)
[ "za 2.5 sata"
]
, examples (datetime (2013, 2, 12, 5, 30, 0) Minute)
[ "za jedan sat"
, "za 1h"
]
, examples (datetime (2013, 2, 12, 6, 30, 0) Minute)
[ "za par sati"
]
, examples (datetime (2013, 2, 12, 7, 30, 0) Minute)
[ "za nekoliko sati"
]
, examples (datetime (2013, 2, 13, 4, 30, 0) Minute)
[ "za 24 sata"
, "za 24h"
]
, examples (datetime (2013, 2, 13, 4, 0, 0) Hour)
[ "za 1 dan"
, "za jedan dan"
]
, examples (datetime (2016, 2, 0, 0, 0, 0) Month)
[ "3 godine od danasnjeg dana"
]
, examples (datetime (2013, 2, 19, 4, 0, 0) Hour)
[ "za 7 dana"
]
, examples (datetime (2013, 2, 19, 0, 0, 0) Day)
[ "za 1 tjedan"
]
, examples (datetime (2013, 2, 12, 5, 0, 0) Second)
[ "za oko pola sata"
]
, examples (datetime (2013, 2, 5, 4, 0, 0) Hour)
[ "prije 7 dana"
]
, examples (datetime (2013, 1, 29, 4, 0, 0) Hour)
[ "prije 14 dana"
]
, examples (datetime (2013, 2, 5, 0, 0, 0) Day)
[ "prije jedan tjedan"
, "prije jednog tjedna"
]
, examples (datetime (2013, 1, 22, 0, 0, 0) Day)
[ "prije tri tjedna"
]
, examples (datetime (2012, 11, 12, 0, 0, 0) Day)
[ "prije tri mjeseca"
]
, examples (datetime (2011, 2, 0, 0, 0, 0) Month)
[ "prije dvije godine"
]
, examples (datetime (1954, 0, 0, 0, 0, 0) Year)
[ "1954"
]
, examples (datetime (2013, 2, 19, 4, 0, 0) Hour)
[ "za 7 dana"
]
, examples (datetime (2013, 2, 26, 4, 0, 0) Hour)
[ "za 14 dana"
]
, examples (datetime (2013, 2, 19, 0, 0, 0) Day)
[ "za jedan tjedan"
]
, examples (datetime (2013, 3, 5, 0, 0, 0) Day)
[ "za tri tjedna"
]
, examples (datetime (2013, 5, 12, 0, 0, 0) Day)
[ "za tri mjeseca"
]
, examples (datetime (2015, 2, 0, 0, 0, 0) Month)
[ "za dvije godine"
]
, examples (datetime (2013, 12, 0, 0, 0, 0) Month)
[ "jednu godinu poslije Bozica"
]
, examples (datetimeInterval ((2013, 6, 21, 0, 0, 0), (2013, 9, 24, 0, 0, 0)) Day)
[ "ovog ljeta"
, "ovo ljeto"
, "ljetos"
]
, examples (datetimeInterval ((2012, 12, 21, 0, 0, 0), (2013, 3, 21, 0, 0, 0)) Day)
[ "ove zime"
, "zimus"
]
, examples (datetime (2013, 12, 25, 0, 0, 0) Day)
[ "Bozic"
, "zicbo"
]
, examples (datetime (2013, 12, 31, 0, 0, 0) Day)
[ "stara godina"
]
, examples (datetime (2014, 1, 1, 0, 0, 0) Day)
[ "nova godina"
]
, examples (datetime (2013, 2, 14, 0, 0, 0) Day)
[ "valentinovo"
]
, examples (datetime (2013, 5, 12, 0, 0, 0) Day)
[ "majcin dan"
]
, examples (datetime (2013, 6, 16, 0, 0, 0) Day)
[ "dan oceva"
]
, examples (datetime (2013, 10, 31, 0, 0, 0) Day)
[ "noc vjestica"
]
, examples (datetimeInterval ((2013, 2, 12, 18, 0, 0), (2013, 2, 13, 0, 0, 0)) Hour)
[ "veceras"
, "ove veceri"
, "danas navecer"
]
, examples (datetimeInterval ((2013, 2, 8, 18, 0, 0), (2013, 2, 11, 0, 0, 0)) Hour)
[ "prosli vikend"
]
, examples (datetimeInterval ((2013, 2, 13, 18, 0, 0), (2013, 2, 14, 0, 0, 0)) Hour)
[ "sutra navecer"
]
, examples (datetimeInterval ((2013, 2, 13, 12, 0, 0), (2013, 2, 13, 14, 0, 0)) Hour)
[ "sutra rucak"
]
, examples (datetimeInterval ((2013, 2, 11, 18, 0, 0), (2013, 2, 12, 0, 0, 0)) Hour)
[ "jucer navecer"
, "prethodne veceri"
]
, examples (datetimeInterval ((2013, 2, 15, 18, 0, 0), (2013, 2, 18, 0, 0, 0)) Hour)
[ "ovaj vikend"
, "ovog vikenda"
]
, examples (datetimeInterval ((2013, 2, 18, 4, 0, 0), (2013, 2, 18, 12, 0, 0)) Hour)
[ "ponedjeljak ujutro"
]
, examples (datetimeInterval ((2013, 2, 18, 3, 0, 0), (2013, 2, 18, 9, 0, 0)) Hour)
[ "ponedjeljak rano ujutro"
, "ponedjeljak rano"
, "ponedjeljak u rane jutarnje sate"
]
, examples (datetimeInterval ((2013, 2, 15, 4, 0, 0), (2013, 2, 15, 12, 0, 0)) Hour)
[ "15. veljace ujutro"
]
, examples (datetimeInterval ((2013, 2, 12, 4, 29, 58), (2013, 2, 12, 4, 30, 0)) Second)
[ "prosle 2 sekunde"
, "prethodne dvije sekunde"
]
, examples (datetimeInterval ((2013, 2, 12, 4, 30, 1), (2013, 2, 12, 4, 30, 4)) Second)
[ "sljedece 3 sekunde"
, "sljedece tri sekunde"
]
, examples (datetimeInterval ((2013, 2, 12, 4, 28, 0), (2013, 2, 12, 4, 30, 0)) Minute)
[ "prosle 2 minute"
, "prethodne dvije minute"
]
, examples (datetimeInterval ((2013, 2, 12, 4, 31, 0), (2013, 2, 12, 4, 34, 0)) Minute)
[ "sljedece 3 minute"
, "sljedece tri minute"
]
, examples (datetimeInterval ((2013, 2, 12, 3, 0, 0), (2013, 2, 12, 4, 0, 0)) Hour)
[ "prethodni jedan sat"
]
, examples (datetimeInterval ((2013, 2, 11, 4, 0, 0), (2013, 2, 12, 4, 0, 0)) Hour)
[ "prethodna 24 sata"
, "prethodna dvadeset i cetiri sata"
, "prethodna dvadeset i cetiri sata"
, "prethodna 24h"
]
, examples (datetimeInterval ((2013, 2, 12, 5, 0, 0), (2013, 2, 12, 8, 0, 0)) Hour)
[ "sljedeca 3 sata"
, "sljedeca tri sata"
]
, examples (datetimeInterval ((2013, 2, 10, 0, 0, 0), (2013, 2, 12, 0, 0, 0)) Day)
[ "prethodna dva dana"
, "prethodna 2 dana"
, "prosla 2 dana"
]
, examples (datetimeInterval ((2013, 2, 13, 0, 0, 0), (2013, 2, 16, 0, 0, 0)) Day)
[ "sljedeca 3 dana"
, "sljedeca tri dana"
]
, examples (datetimeInterval ((2013, 2, 13, 0, 0, 0), (2013, 2, 16, 0, 0, 0)) Day)
[ "sljedecih nekoliko dana"
]
, examples (datetimeInterval ((2013, 1, 28, 0, 0, 0), (2013, 2, 11, 0, 0, 0)) Week)
[ "prethodna 2 tjedna"
, "prethodna dva tjedna"
, "prosla 2 tjedna"
]
, examples (datetimeInterval ((2013, 2, 18, 0, 0, 0), (2013, 3, 11, 0, 0, 0)) Week)
[ "sljedeca 3 tjedna"
, "sljedeca tri tjedna"
]
, examples (datetimeInterval ((2012, 12, 0, 0, 0, 0), (2013, 2, 0, 0, 0, 0)) Month)
[ "prethodna 2 mjeseca"
, "prethodna dva mjeseca"
]
, examples (datetimeInterval ((2013, 3, 0, 0, 0, 0), (2013, 6, 0, 0, 0, 0)) Month)
[ "sljedeca 3 mjeseca"
, "sljedeca tri mjeseca"
]
, examples (datetimeInterval ((2011, 0, 0, 0, 0, 0), (2013, 0, 0, 0, 0, 0)) Year)
[ "prethodne 2 godine"
, "prethodne dvije godine"
]
, examples (datetimeInterval ((2014, 0, 0, 0, 0, 0), (2017, 0, 0, 0, 0, 0)) Year)
[ "sljedece 3 godine"
, "sljedece tri godine"
]
, examples (datetimeInterval ((2013, 7, 13, 0, 0, 0), (2013, 7, 16, 0, 0, 0)) Day)
[ "srpanj 13-15"
, "srpanj 13 do 15"
, "srpanj 13 - srpanj 15"
]
, examples (datetimeInterval ((2013, 8, 8, 0, 0, 0), (2013, 8, 13, 0, 0, 0)) Day)
[ "kol 8 - kol 12"
]
, examples (datetimeInterval ((2013, 2, 12, 9, 30, 0), (2013, 2, 12, 11, 1, 0)) Minute)
[ "9:30 - 11:00"
]
, examples (datetimeInterval ((2013, 2, 14, 9, 30, 0), (2013, 2, 14, 11, 1, 0)) Minute)
[ "od 9:30 - 11:00 u cetvrtak"
, "između 9:30 i 11:00 u cetvrtak"
, "9:30 - 11:00 u cetvrtak"
, "izmedju 9:30 i 11:00 u cetvrtak"
, "cetvrtak od 9:30 do 11:00"
, "od 9:30 do 11:00 u cetvrtak"
, "cetvrtak od 9:30 do 11:00"
]
, examples (datetimeInterval ((2013, 2, 14, 9, 0, 0), (2013, 2, 14, 12, 0, 0)) Hour)
[ "cetvrtak od 9 do 11 ujutro"
]
, examples (datetimeInterval ((2013, 2, 12, 11, 30, 0), (2013, 2, 12, 13, 31, 0)) Minute)
[ "11:30-1:30"
]
, examples (datetime (2013, 9, 21, 13, 30, 0) Minute)
[ "1:30 poslijepodne u sub, ruj 21."
]
, examples (datetimeInterval ((2013, 2, 18, 0, 0, 0), (2013, 3, 4, 0, 0, 0)) Week)
[ "sljedeca 2 tjedna"
]
, examples (datetimeOpenInterval Before (2013, 2, 12, 14, 0, 0) Hour)
[ "nekad do 2 poslijepodne"
]
, examples (datetimeInterval ((2013, 2, 12, 4, 30, 0), (2013, 2, 13, 0, 0, 0)) Second)
[ "do kraja ovog dana"
, "do kraja dana"
]
, examples (datetimeInterval ((2013, 2, 12, 4, 30, 0), (2013, 3, 1, 0, 0, 0)) Second)
[ "do kraja ovog mjeseca"
]
, examples (datetimeInterval ((2013, 2, 12, 4, 30, 0), (2013, 4, 1, 0, 0, 0)) Second)
[ "do kraja sljedeceg mjeseca"
]
, examples (datetime (2013, 2, 12, 13, 0, 0) Minute)
[ "4 poslijepodne CET"
]
, examples (datetime (2013, 2, 14, 6, 0, 0) Minute)
[ "cetvrtak 8:00 GMT"
]
, examples (datetime (2013, 2, 12, 14, 0, 0) Hour)
[ "danas u 14"
, "u 2 poslijepodne"
]
, examples (datetime (2013, 4, 25, 16, 0, 0) Hour)
[ "25/4 U 16 sati"
]
, examples (datetime (2013, 2, 13, 15, 0, 0) Hour)
[ "15 sati sutra"
]
, examples (datetimeOpenInterval After (2013, 2, 17, 4, 0, 0) Hour)
[ "nakon 5 dana"
]
, examples (datetimeOpenInterval Before (2013, 2, 12, 11, 0, 0) Hour)
[ "prije 11 sat"
]
, examples (datetimeInterval ((2013, 2, 12, 12, 0, 0), (2013, 2, 12, 20, 0, 0)) Hour)
[ "ova poslijepodne"
, "ovi popodne"
]
, examples (datetime (2013, 2, 12, 13, 30, 0) Minute)
[ "u 13:30"
, "13:30"
]
, examples (datetime (2013, 2, 12, 4, 45, 0) Second)
[ "za 15 minuta"
]
, examples (datetimeInterval ((2013, 2, 12, 13, 0, 0), (2013, 2, 12, 17, 0, 0)) Hour)
[ "poslije rucka"
]
, examples (datetime (2013, 2, 12, 10, 30, 0) Minute)
[ "10:30"
]
, examples (datetimeInterval ((2013, 2, 12, 4, 0, 0), (2013, 2, 12, 12, 0, 0)) Hour)
[ "ove jutro"
]
, examples (datetime (2013, 2, 18, 0, 0, 0) Day)
[ "sljedeci ponedjeljak"
]
, examples (datetime (2013, 2, 12, 12, 0, 0) Hour)
[ "u 12"
, "u podne"
]
, examples (datetime (2013, 2, 13, 0, 0, 0) Hour)
[ "u 12 u noci"
, "u ponoc"
]
, examples (datetime (2013, 3, 0, 0, 0, 0) Month)
[ "ozujak"
, "u ozujku"
]
]
|
rfranek/duckling
|
Duckling/Time/HR/Corpus.hs
|
Haskell
|
bsd-3-clause
| 23,505
|
{-# OPTIONS -Wall #-}
-----------------------------------------------------------------------------
-- |
-- Module : CTest.hs (executable)
-- Copyright : (c) 2008 Duncan Coutts, Benedikt Huber
-- License : BSD-style
-- Maintainer : benedikt.huber@gmail.com
-- Portability : non-portable (Data.Generics)
--
-- This is a very simple module, usable for quick tests.
--
-- It provides a wrapper for parsing C-files which haven't been preprocessed yet.
-- It is used as if gcc was called, and internally calls cpp (gcc -E) to preprocess the file.
-- It then outputs the pretty printed AST, replacing declarations from included header
-- files with a corresponding #include directive (This isn't always correct, as e.g. #defines
-- get lost. But it makes it a lot easier to focus on the relevant part of the output).
--
-- If used with a `-e str' command-line argument, the given string is parsed as an expression and pretty
-- printed. Similar for `-d str' and top-level declarations.
-------------------------------------------------------------------------------------------------------
module Main (
main
) where
import Language.C
import Language.C.Parser
import Language.C.System.GCC
import Language.C.Analysis
import Language.C.Test.Environment
import Language.C.Test.GenericAST
import Control.Monad
import System.Environment (getEnv, getArgs)
import System.Exit
import System.IO
import Data.Generics
import Text.PrettyPrint.HughesPJ
-- | Configuration for a test run, populated from environment variables
-- in 'main'.
data CTestConfig =
  CTestConfig {
    debugFlag :: Bool,        -- ^ DEBUG: also print a generic AST as a trailing comment
    parseOnlyFlag :: Bool,    -- ^ PARSE_ONLY: stop after parsing, do not pretty print
    useIncludes :: Bool,      -- ^ use heuristic #include directives when pretty printing
    dumpAst :: Bool,          -- ^ DUMP_AST: write the AST to the file dump.ast
    semanticAnalysis :: Bool  -- ^ run semantic analysis on the parsed file
  }
-- | Print an error message followed by usage information to stderr,
-- then terminate the process with exit code 2.
usage :: String -> IO a
usage msg = do
  hPutStr stderr (unlines usageLines)
  exitWith (ExitFailure 2)
  where
    usageLines =
      [ "! "++msg,"",
        "Usage: ./CTest -e expression",
        "Usage: ./CTest -s statement",
        "Usage: ./CTest -d declaration",
        "Usage: ./CTest [cpp-opts] file.(c|hc|i)",
        "   parses the given C source file and pretty print the AST",
        "Environment Variables (some do not apply with -e,-s or -d): ",
        "   TMPDIR: temporary directory for preprocessing",
        "   NO_HEADERS_VIA_INCLUDE: do not use heuristic #include directives for pretty printing",
        "   DEBUG: debug flag",
        "   DUMP_AST: dump the ast to file dump.ast",
        "   NO_SEMANTIC_ANALYSIS: do not perform semantic analysis",
        "   PARSE_ONLY: do not pretty print"
      ]
-- | Report any showable error on stderr and abort with exit code 1.
bailOut :: (Show err) => err -> IO a
bailOut err = do
  hPrint stderr err
  hPutStrLn stderr "*** Exit on Error ***"
  exitWith (ExitFailure 1)
-- | Entry point: read configuration flags from the environment, then
-- either parse a command-line snippet (-e expression, -s statement,
-- -d declaration) or preprocess and parse the given C source file,
-- and finally hand the AST to 'output'.
main :: IO ()
main = do
  tmpdir <- getEnv "TMPDIR"
  dbg <- getEnvFlag "DEBUG"
  parseonly <- getEnvFlag "PARSE_ONLY"
  dumpast <- getEnvFlag "DUMP_AST"
  no_includes <- getEnvFlag "NO_HEADERS_VIA_INCLUDE"
  -- the flag disables the analysis, so the configuration stores its negation
  semantic <- fmap not (getEnvFlag "NO_SEMANTIC_ANALYSIS")
  let config = CTestConfig dbg parseonly (not no_includes) dumpast semantic
  args <- getArgs
  (file,ast) <-
    case args of
      ["-e",str] -> runP config expressionP str >> exitWith ExitSuccess
      ["-s",str] -> runP config statementP str >> exitWith ExitSuccess
      ["-d",str] -> runP config extDeclP str >> exitWith ExitSuccess
      _ ->
        case mungeCcArgs args of
          Groked [cFile] gccOpts -> do
            -- preprocess via gcc -E, then parse the resulting file
            presult <- parseCFile (newGCC "gcc") (Just tmpdir) gccOpts cFile
            either bailOut (return . (,) cFile) presult
          Groked cFiles _ -> usage $ "More than one source file given: " ++ unwords cFiles
          Ignore -> usage $ "No input files given"
          Unknown reason -> usage $ "Could not process arguments: " ++ reason
  output config file ast
-- | Parse a command-line string with the given parser and, depending on
-- the configuration, dump the AST fragment and/or pretty print it.
runP :: (CNode a, Pretty a, Data a) => CTestConfig -> P a -> String -> IO ()
runP config parser str = do
    ast <- either bailOut return parseResult
    when (dumpAst config) (writeFile "dump.ast" (gshow ast))
    unless (parseOnlyFlag config) (print (pretty ast))
  where
    parseResult = execParser_ parser input startPos
    input = inputStreamFromString str
    startPos = initPos "<cmd-line-arg>"
-- | Post-parse processing: optionally dump the AST to dump.ast, run
-- semantic analysis (reporting warnings and declaration statistics on
-- stderr), pretty print the translation unit, and in debug mode append
-- a generic AST rendering as a C comment.
output :: CTestConfig -> FilePath -> CTranslUnit -> IO ()
output config file ast = do
  when (dumpAst config) $ writeFile "dump.ast" (gshow ast)
  when (semanticAnalysis config && not (null file)) $ do
    let result = runTrav_ (analyseAST ast)
    case result of
      Left errs -> hPutStrLn stderr (show errs)
      Right (ok,warnings) -> do
        -- mapM_ instead of mapM: the results of printing are not needed
        mapM_ (hPutStrLn stderr . show) warnings
        printStats file ok
  when (not $ parseOnlyFlag config) $
    print $ (if useIncludes config then prettyUsingInclude else pretty) ast
  when (debugFlag config) $ putStrLn . comment . show . pretty . mkGenericCAST $ ast
-- | Wrap a string in a C block comment.
comment str = "/*\n" ++ str ++ "\n*/"

-- | Print global declaration statistics for the given file, rendered as
-- a C comment so it can be appended to the pretty-printed output.
printStats file = putStrLn . comment . show
                  . prettyAssocsWith "global decl stats" text (text.show)
                  . globalDeclStats (== file)
|
jthornber/language-c-ejt
|
test/src/CTest.hs
|
Haskell
|
bsd-3-clause
| 4,993
|
;
; HSP help managerp HELP\[Xt@C
; (檪u;vÌsÍRgƵijêÜ·)
;
%type
g£½ß
%ver
3.3
%note
llmod3.hsp,scrsvr.hspðCN[h·é
(Windows9xÌÝpÂ\Å·)
%date
2009/08/01
%author
tom
%dll
llmod3
%url
http://www5b.biglobe.ne.jp/~diamond/hsp/hsp2file.htm
%index
ss_running
XN[Z[o[ªì®µÄ¢é©VXeÉmç¹é
%group
OSVXe§ä½ß
%prm
n1
n1 : 쮵Ģé©A¢È¢©ÌtO
%inst
XN[Z[o[ªì®µÄ¢é©VXe(Windows)Émç¹Ü·B
n1É1ðãü·éÆAVXe(Windows)ÉXN[Z[o[ªì®Å 鱯ðmç¹Ü·B
n1É0ðãü·éÆAXN[Z[o[Í쮵ÄÈ¢AÆVXeÉmç¹Ü·B
^
¦ n1ð1ɵı̽ßðÀs·éÆAALT+CTRL+DEL,ALT+TAB,win{^ÈÇÌL[ªø©ÈÈèÜ·B
n1ð1ɵı̽ßðÀsµ½çAK¸n1ð0ɵÄà¤êx±Ì½ßðÀsµÄ¾³¢B
^
±Ì½ßðÄÑoµ½ãÌstatÌl
0 G[
0ÈO G[³µ
%index
ss_chkpwd
WindowsWÌpX[h`FbN_CAO
%group
OSVXe§ä½ß
%inst
WindowsWÌpX[h`FbN_CAOðÄÑoµÜ·B
½¾µARg[plÌ'æÊÌvpeB'Å'Ê߽ܰÄÞÉæéÛì'ª`FbN³êÄ¢éêÌÝÅ·B
^
±Ì½ßðÄÑoµ½ãÌstatÌl
0 LZ³ê½
0ÈO ³mÈpX[hªüͳê½
('Ê߽ܰÄÞÉæéÛì'ª`FbN³êĢȢêàÜÞ)
%index
ss_chgpwd
WindowsWÌpX[hÏX_CAO
%group
OSVXe§ä½ß
%inst
WindowsWÌpX[hÏX_CAOðÄÑoµÜ·B
^
±Ì½ßðÄÑoµ½ãÌstatÌl
0 pX[hªÏX³ê½
0ÈO LZ³ê½
%href
ss_chkpwd
|
zakki/openhsp
|
package/hsphelp/llmod3_scrsvr.hs
|
Haskell
|
bsd-3-clause
| 1,685
|
{-# LANGUAGE NoImplicitPrelude #-}
module Control.Fay
(
ap
,foldM
,zipWithM
,zipWithM_
,replicateM
)
where
import FFI
import Prelude hiding (mapM)
-- | Apply a function produced by one Fay action to the result of
-- another, running the function action first.
ap :: Fay (a -> b) -> Fay a -> Fay b
ap mf mx = do
  fn <- mf
  arg <- mx
  return (fn arg)
-- | Monadic left fold over a list in the Fay monad, threading the
-- accumulator through each action in order.
foldM :: (a -> b -> Fay a) -> a -> [b] -> Fay a
foldM _ acc [] = return acc
foldM f acc (x:xs) = do
  acc' <- f acc x
  foldM f acc' xs
-- | Zip two lists with a Fay action and collect the results; the result
-- is as long as the shorter input, as with 'zipWith'.
zipWithM :: (a -> b -> Fay c) -> [a] -> [b] -> Fay [c]
zipWithM f xs ys = sequence (zipWith f xs ys)
-- | Like 'zipWithM' but discards the results of the actions.
zipWithM_ :: (a -> b -> Fay c) -> [a] -> [b] -> Fay ()
zipWithM_ f xs ys = sequence_ (zipWith f xs ys)
-- | Run a Fay action the given number of times and collect the results.
replicateM :: Int -> Fay a -> Fay [a]
replicateM n action = sequence (replicate n action)
|
crooney/cinder
|
src/Control/Fay.hs
|
Haskell
|
bsd-3-clause
| 677
|
-- Copyright (c) 2016-present, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree.
module Duckling.Ordinal.IT.Tests
( tests ) where
import Prelude
import Data.String
import Test.Tasty
import Duckling.Dimensions.Types
import Duckling.Ordinal.IT.Corpus
import Duckling.Testing.Asserts
-- | Corpus-driven test suite for Italian ordinal parsing.
tests :: TestTree
tests = testGroup "IT Tests"
  [ makeCorpusTest [Seal Ordinal] corpus
  ]
|
facebookincubator/duckling
|
tests/Duckling/Ordinal/IT/Tests.hs
|
Haskell
|
bsd-3-clause
| 504
|
-- | Convenience umbrella module re-exporting the public HasTorrent API.
module HasTorrent
       (
        module HasTorrent.Types,
        module HasTorrent.Types.TypesHelp,
        module HasTorrent.Bencode,
        module HasTorrent.Network.PeerProtocol,
        module HasTorrent.Network.Communicate,
        module HasTorrent.Tracker,
       ) where

import HasTorrent.Types
import HasTorrent.Types.TypesHelp
import HasTorrent.Bencode
import HasTorrent.Network.PeerProtocol
import HasTorrent.Network.Communicate
import HasTorrent.Tracker
|
vaishious/has-torrent
|
src/HasTorrent.hs
|
Haskell
|
bsd-3-clause
| 464
|
{-# LANGUAGE GADTs, TypeFamilies, TypeOperators, EmptyDataDecls, FlexibleInstances, MultiParamTypeClasses, RankNTypes, QuasiQuotes, TemplateHaskell, ViewPatterns #-}
-------------------------------------------------------------------------
-- lambda lifting for the lambda calculus with top-level declarations
-------------------------------------------------------------------------
module LambdaLifting where
import Ctx
import HobbitLibTH
import Data.List
import Control.Monad.Reader
import Control.Monad.Cont
import Control.Monad.Identity
-- dummy datatypes for distinguishing Decl names from Lam names

-- | Phantom tag for lambda-bound (local) names.
data L a
-- | Phantom tag for top-level declaration names.
data D a

-- terms with top-level names
-- | Lambda terms whose variables are either lambda-bound ('Var') or
-- references to top-level declarations ('DVar').
data DTerm a where
  Var :: Name (L a) -> DTerm a
  DVar :: Name (D a) -> DTerm a
  Lam :: Binding (L a) (DTerm b) -> DTerm (a -> b)
  App :: DTerm (a -> b) -> DTerm a -> DTerm b

instance Show (DTerm a) where
  show = pretty

-- top-level declarations with a "return value"
-- | A sequence of top-level declarations ending in a result term; each
-- declaration's name is bound in the remainder of the sequence.
data Decls a where
  DeclsBase :: DTerm a -> Decls a
  DeclsCons :: DTerm b -> Binding (D b) (Decls a) -> Decls a

instance Show (Decls a) where
  show = decls_pretty

-- helper functions to build terms without explicitly using nu or Var
-- | Build a 'Lam' from a Haskell function, using 'nu' to generate the
-- fresh bound name.
lam :: (DTerm a -> DTerm b) -> DTerm (a -> b)
lam f = Lam $ nu (f . Var)
------------------------------------------------------------
-- pretty printing
------------------------------------------------------------

-- to make a function for MapCtx (for pretty)
-- | Constant string functor used as the element type of the name
-- environment during pretty printing.
newtype StringF x = StringF String
unStringF (StringF str) = str

-- pretty print terms
pretty :: DTerm a -> String
pretty t = mpretty (emptyMb t) emptyMC 0

-- | Pretty print a term under a multi-binding, given printable names for
-- the bound variables and a counter for generating fresh names.
mpretty :: Mb ctx (DTerm a) -> MapCtx StringF ctx -> Int -> String
mpretty [nuQQ| Var b |] varnames n =
    mprettyName (mbNameBoundP b) varnames
mpretty [nuQQ| DVar b |] varnames n =
    mprettyName (mbNameBoundP b) varnames
mpretty [nuQQ| Lam b |] varnames n =
    -- invent a display name for the new binder from the counter
    let x = "x" ++ show n in
    "(\\" ++ x ++ "." ++ mpretty (combineMb b) (varnames :> (StringF x)) (n+1) ++ ")"
mpretty [nuQQ| App b1 b2 |] varnames n =
    "(" ++ mpretty b1 varnames n
    ++ " " ++ mpretty b2 varnames n ++ ")"

-- | Render a bound name via the environment; free names are flagged.
mprettyName (Left pf) varnames = unStringF (ctxLookup pf varnames)
mprettyName (Right n) varnames = "##free var: " ++ (show n) ++ "##"

-- pretty print decls
decls_pretty :: Decls a -> String
decls_pretty decls =
    "[ decls:\n" ++ (mdecls_pretty (emptyMb decls) emptyMC 0) ++ "]"

-- | Pretty print a declaration sequence, naming the declarations F0, F1, ...
mdecls_pretty :: Mb ctx (Decls a) -> MapCtx StringF ctx -> Int -> String
mdecls_pretty [nuQQ| DeclsBase t |] varnames n =
    (mpretty t varnames 0) ++ "\n"
mdecls_pretty [nuQQ| DeclsCons term rest |] varnames n =
    let fname = "F" ++ show n in
    fname ++ " = " ++ (mpretty term varnames 0) ++ "\n\n"
    ++ mdecls_pretty (combineMb rest) (varnames :> (StringF fname)) (n+1)
------------------------------------------------------------
-- "peeling" lambdas off of a term
------------------------------------------------------------

-- | Fold a context of lambda-bound types into a function type:
-- @AddArrows (CtxCons ctx (L a)) b = AddArrows ctx (a -> b)@.
type family AddArrows ctx b
type instance AddArrows CtxNil b = b
type instance AddArrows (CtxCons ctx (L a)) b = AddArrows ctx (a -> b)

-- | Result of stripping the outermost lambdas off a term: the context of
-- peeled binders together with the body, bound in the extended context.
data PeelRet ctx a where
  PeelRet :: LCtx lam_ctx -> Mb (ctx :++: lam_ctx) (DTerm a) ->
             PeelRet ctx (AddArrows lam_ctx a)

-- | Strip all leading 'Lam' constructors from a term, accumulating the
-- peeled binders onto the given context.
peelLambdas :: LCtx lam_ctx -> Mb (ctx :++: lam_ctx) (DTerm a) ->
               PeelRet ctx (AddArrows lam_ctx a)
peelLambdas lctx [nuQQ| Lam b |] =
    peelLambdas (lctx :> IsLType) (combineMb b)
peelLambdas lctx [nuQQ| b |] = PeelRet lctx b

-- | Re-wrap a function body in one 'Lam' per binder in the given
-- context, passing the generated names to the body builder.
addLams :: LCtx lam_ctx -> (MapCtx Name lam_ctx -> DTerm a) ->
           DTerm (AddArrows lam_ctx a)
addLams EmptyMC k = k emptyMC
addLams (lam_ctx :> IsLType) k =
    addLams lam_ctx (\names -> Lam $ nu $ \x -> k (names :> x))
------------------------------------------------------------
-- sub-contexts
------------------------------------------------------------

-- FIXME: use this type in place of functions
-- | A witness that ctx' is a sub-context of ctx, represented as a
-- projection on name environments.
type SubCtx ctx' ctx = MapCtx Name ctx -> MapCtx Name ctx'

-- | Extend a sub-context witness on both sides with the same binding.
subCtxConsBoth :: SubCtx ctx' ctx -> SubCtx (CtxCons ctx' a) (CtxCons ctx a)
subCtxConsBoth subCtx = \(ctx :> x) -> subCtx ctx :> x

-- | Weaken a sub-context witness by a binding present only on the right.
subCtxConsR :: SubCtx ctx' ctx -> SubCtx ctx' (CtxCons ctx a)
subCtxConsR subCtx = \(ctx :> _) -> subCtx ctx
------------------------------------------------------------
-- operations on contexts of free variables
------------------------------------------------------------

{-
-- exists a sub-context of fvs
data ExSubFVs ctx fvs where
    ExSubFVs :: MapCtx (MbLName ctx) fvs' -> SubCtx fvs' fvs ->
                ExSubFVs ctx fvs

-- add an FV to an ExSubFVs
exSubFVsCons :: ExSubFVs ctx fvs -> MbLName ctx a -> ExSubFVs ctx (CtxCons fvs a)
exSubFVsCons (ExSubFVs fvs subCtx) n =
    ExSubFVs (fvs :> n) (subCtxConsBoth subCtx)

-- don't add the FV, just extend the type
exSubFVsWeaken :: ExSubFVs ctx fvs -> ExSubFVs ctx (CtxCons fvs a)
exSubFVsWeaken (ExSubFVs fvs subCtx) =
    ExSubFVs fvs (subCtxConsR subCtx)

-- removing a name from a context of fvs
remMbLName :: MapCtx (MbLName ctx) fvs -> MbLName ctx a -> ExSubFVs ctx fvs
remMbLName EmptyMC _ = ExSubFVs EmptyMC id
remMbLName (fvs :> MbLName fv) (MbLName n) =
    case mbCmpName fv n of
      Just _ -> exSubFVsWeaken $ remMbLName fvs (MbLName n)
      Nothing -> exSubFVsCons (remMbLName fvs (MbLName n)) (MbLName fv)
-}

-- | A list of free lambda-variables of a term, as names bound in ctx.
type FVList ctx fvs = MapCtx (MbLName ctx) fvs

-- unioning free variable contexts: the data structure
-- | The union of two free-variable contexts, with projections mapping
-- environments for the union back to each operand.
data FVUnionRet ctx fvs1 fvs2 where
  FVUnionRet :: FVList ctx fvs -> SubCtx fvs1 fvs -> SubCtx fvs2 fvs ->
                FVUnionRet ctx fvs1 fvs2

-- | Union two free-variable lists, de-duplicating names that occur in
-- both (detected via 'elemMC').
fvUnion :: FVList ctx fvs1 -> FVList ctx fvs2 ->
           FVUnionRet ctx fvs1 fvs2
fvUnion EmptyMC EmptyMC =
    FVUnionRet EmptyMC (\_ -> EmptyMC) (\_ -> EmptyMC)
fvUnion EmptyMC (fvs2 :> fv2) =
    case fvUnion EmptyMC fvs2 of
      FVUnionRet fvs f1 f2 ->
          case elemMC fv2 fvs of
            Nothing -> FVUnionRet (fvs :> fv2) (\(xs :> x) -> f1 xs) (\(xs :> x) -> f2 xs :> x)
            Just idx -> FVUnionRet fvs f1 (\xs -> f2 xs :> ctxLookup idx xs)
fvUnion (fvs1 :> fv1) fvs2 =
    case fvUnion fvs1 fvs2 of
      FVUnionRet fvs f1 f2 ->
          case elemMC fv1 fvs of
            Nothing -> FVUnionRet (fvs :> fv1) (\(xs :> x) -> f1 xs :> x) (\(xs :> x) -> f2 xs)
            Just idx -> FVUnionRet fvs (\xs -> f1 xs :> ctxLookup idx xs) f2

-- | Look up a name in a free-variable list, returning its position proof
-- when present.
elemMC :: MbLName ctx a -> FVList ctx fvs -> Maybe (InCtx fvs a)
elemMC _ EmptyMC = Nothing
elemMC mbLN@(MbLName n) (mc :> MbLName n') =
    case mbCmpName n n' of
      Just Refl -> Just InCtxBase
      Nothing -> fmap InCtxStep (elemMC mbLN mc)
------------------------------------------------------------
-- deBruijn terms, i.e., closed terms
------------------------------------------------------------

-- | Proof that a context entry is a lambda-bound (L-tagged) type.
data IsLType a where IsLType :: IsLType (L a)

type LCtx ctx = MapCtx IsLType ctx

-- | A name of type @L a@ bound inside the multi-binding context ctx.
data MbLName ctx a where
  MbLName :: Mb ctx (Name (L a)) -> MbLName ctx (L a)

-- | Forget the names in a free-variable list, keeping only the L-type
-- proofs.
fvsToLCtx :: FVList ctx lctx -> LCtx lctx
fvsToLCtx = ctxMap mbLNameToProof where
    mbLNameToProof :: MbLName ctx a -> IsLType a
    mbLNameToProof (MbLName _) = IsLType

-- | Terms in deBruijn form: lambda variables are context indices, while
-- declaration variables remain as names.
data DBTerm ctx a where
  DBWeaken :: SubCtx ctx1 ctx -> DBTerm ctx1 a -> DBTerm ctx a
  DBVar :: InCtx ctx (L a) -> DBTerm ctx a
  DBDVar :: Name (D a) -> DBTerm ctx a
  DBApp :: DBTerm ctx (a -> b) -> DBTerm ctx a -> DBTerm ctx b

-- | Substitute concrete names for the context indices of a 'DBTerm',
-- producing an ordinary 'DTerm'.
dbSubst :: DBTerm ctx a -> MapCtx Name ctx -> DTerm a
dbSubst (DBWeaken f db) names = dbSubst db $ f names
dbSubst (DBVar inCtx) names = Var $ ctxLookup inCtx names
dbSubst (DBDVar dVar) _ = DVar dVar
dbSubst (DBApp db1 db2) names =
    App (dbSubst db1 names) (dbSubst db2 names)

-- applying a DBTerm to a context of names
dbAppMultiNames :: DBTerm fvs (AddArrows fvs a) -> FVList ctx fvs ->
                   DBTerm fvs a
dbAppMultiNames db args = dbAppMultiNamesH db args (ctxToInCtxs args)

-- | Worker for 'dbAppMultiNames': apply the function term to one 'DBVar'
-- per position in the argument context.
dbAppMultiNamesH :: DBTerm fvs (AddArrows args a) ->
                    FVList ctx args -> MapCtx (InCtx fvs) args ->
                    DBTerm fvs a
dbAppMultiNamesH fun EmptyMC _ = fun
dbAppMultiNamesH fun (args :> MbLName _) (inCtxs :> inCtx) =
    DBApp (dbAppMultiNamesH fun args inCtxs) (DBVar inCtx)

-- | Enumerate the position proofs of every entry in a context.
ctxToInCtxs :: MapCtx f ctx -> MapCtx (InCtx ctx) ctx
ctxToInCtxs EmptyMC = EmptyMC
ctxToInCtxs (ctx :> _) = ctxMap InCtxStep (ctxToInCtxs ctx) :> InCtxBase
------------------------------------------------------------
-- DBTerms combined with their free variables
------------------------------------------------------------

-- | A deBruijn term packaged with the list of free variables (fvs) that
-- forms the head of its context, optionally followed by lambda binders
-- (lctx).
data FVDBTerm ctx lctx a where
  FVDBTerm :: FVList ctx fvs -> DBTerm (fvs :++: lctx) a ->
              FVDBTerm ctx lctx a

-- | Split the free variables of a term under lctx into those bound by
-- lctx itself and those free in the outer ctx.
fvDBSepLVars :: MapCtx f lctx -> FVDBTerm (ctx :++: lctx) CtxNil a ->
                FVDBTerm ctx lctx a
fvDBSepLVars lctx (FVDBTerm fvs db) =
    case fvDBSepLVarsH lctx Tag fvs of
      SepRet fvs' f -> FVDBTerm fvs' (DBWeaken f db)

-- | Intermediate result of the separation: the outer free variables plus
-- a witness embedding the original fvs into @fvs' :++: lctx@.
data SepRet lctx ctx fvs where
  SepRet :: FVList ctx fvs' -> SubCtx fvs (fvs' :++: lctx) ->
            SepRet lctx ctx fvs

fvDBSepLVarsH :: MapCtx f lctx -> Tag ctx -> FVList (ctx :++: lctx) fvs ->
                 SepRet lctx ctx fvs
fvDBSepLVarsH _ _ EmptyMC = SepRet EmptyMC (\_ -> EmptyMC)
fvDBSepLVarsH lctx ctx (fvs :> fv@(MbLName n)) =
    case fvDBSepLVarsH lctx ctx fvs of
      SepRet m f ->
          case raiseAppName (ctxAppendL ctx lctx) n of
            Left idx -> SepRet m (\xs -> f xs :> ctxLookup (weakenInCtxL (ctxTag m) idx) xs)
            Right n ->
                SepRet (m :> MbLName n)
                       (\xs -> case mapCtxSplit (ctxAppendL (ctxConsTag (ctxTag m) fv) lctx) xs of
                                 (fvs' :> fv', lctxs) -> f (ctxAppend fvs' lctxs) :> fv')

-- | Decide whether a name bound in an appended context @ctx1 :++: ctx2@
-- lands in ctx2 (returning its index) or stays bound only in ctx1.
raiseAppName :: IsAppend ctx1 ctx2 ctx -> Mb ctx (Name a) ->
                Either (InCtx ctx2 a) (Mb ctx1 (Name a))
raiseAppName isApp n =
    case mbToplevel $(superComb [| mbNameBoundP |]) (separateMb isApp n) of
      [nuQQ| Left inCtx |] -> Left $ mbInCtx inCtx
      [nuQQ| Right n |] -> Right n
{-
lowerFVs :: FVList ctx fvs -> MapCtx (MbLName (CtxCons ctx a)) fvs
lowerFVs EmptyMC = EmptyMC
lowerFVs (fvs :> MbLName n) =
lowerFVs fvs :>
MbLName (combineMb $ mbToplevel $(superComb [| nu . const |]) n)
lowerMultiL :: MapCtx f ctx -> a -> Mb ctx a
lowerMultiL EmptyMC x = emptyMb x
lowerMultiL (ctx :> _) x = combineMb $ lowerMultiL ctx $ nu $ \_ -> x
mkFV :: MapCtx f ctx -> MbLName (CtxCons ctx (L a)) (L a)
mkFV ctx = MbLName $ combineMb $ lowerMultiL ctx (nu $ \n -> n)
mkFVs :: LCtx ctx -> LCtx ctx2 -> MapCtx (MbLName (ctx :++: ctx2)) ctx2
mkFVs ctx EmptyMC = EmptyMC
mkFVs ctx (ctx2 :> IsLType) =
lowerFVs (mkFVs ctx ctx2) :> (mkFV $ ctxAppend ctx ctx2)
raiseFVs :: Tag fvs -> LCtx lctx ->
MapCtx (MbLName (ctx :++: lctx)) (fvs :++: lctx) ->
FVList ctx fvs
raiseFVs = undefined
fvDBSepLVars :: LCtx ctx -> LCtx lctx -> FVDBTerm (ctx :++: lctx) CtxNil a ->
FVDBTerm ctx lctx a
fvDBSepLVars ctx lctx (FVDBTerm fvs db) =
undefined
-}
{-
helper1 lctx db $ fvUnion fvs $ mkFVs ctx lctx where
helper1 :: LCtx lctx -> DBTerm fvs a ->
FVUnionRet (ctx :++: lctx) fvs lctx ->
FVDBTerm ctx lctx a
helper1 lctx db (FVUnionRet fvs' sub1 sub2) =
FVDBTerm (raiseFVs tag lctx fvs') (DBWeaken sub1 db)
-}
------------------------------------------------------------
-- lambda-lifting, woo hoo!
------------------------------------------------------------

-- this cannot ever happen (there is no ctor for InCtx CtxNil a)
inCtxNil :: InCtx CtxNil a -> b
inCtxNil _ = undefined

-- | An L-typed context can never contain a declaration (D) name, so any
-- such index proof is refutable.
dInLCtx :: LCtx ctx -> InCtx ctx (D a) -> b
dInLCtx EmptyMC inCtx = inCtxNil inCtx
dInLCtx (lctx :> IsLType) (InCtxStep inCtx) = dInLCtx lctx inCtx

-- | Lambda-lifting a body runs in a continuation monad whose final
-- answer is the sequence of emitted top-level declarations.
type LLBodyRet b ctx a = Cont (Decls b) (FVDBTerm ctx CtxNil a)

-- | Felleisen-style C operator: capture the current continuation of the
-- 'Cont' computation.
felleisenC :: ((a -> Decls b) -> Decls b) -> Cont (Decls b) a
felleisenC f = ContT (\k -> Identity (f (runIdentity . k)))

-- | Lambda-lift a term body: each nested lambda is hoisted to a fresh
-- top-level declaration ('DeclsCons'), applied to its free variables.
llBody :: LCtx ctx -> Mb ctx (DTerm a) -> LLBodyRet b ctx a
llBody ctx [nuQQ| Var v |] =
    return $ FVDBTerm (EmptyMC :> MbLName v) $ DBVar InCtxBase
llBody ctx [nuQQ| DVar d |] =
    case mbNameBoundP d of
      Right d -> return $ FVDBTerm EmptyMC $ DBDVar d
      Left inCtx -> dInLCtx ctx inCtx
llBody ctx [nuQQ| App t1 t2 |] = do
    FVDBTerm fvs1 db1 <- llBody ctx t1
    FVDBTerm fvs2 db2 <- llBody ctx t2
    -- merge the free variables of both sides into a single context
    FVUnionRet names sub1 sub2 <- return $ fvUnion fvs1 fvs2
    return $ FVDBTerm names $ DBApp (DBWeaken sub1 db1) (DBWeaken sub2 db2)
llBody ctx lam @ [nuQQ| Lam _ |] = do
    -- peel off all the leading lambdas, lift the body, then emit a new
    -- declaration abstracted over the body's free variables
    PeelRet lctx body <- return $ peelLambdas EmptyMC lam
    llret <- llBody (ctxAppend ctx lctx) body
    FVDBTerm fvs db <- return $ fvDBSepLVars lctx llret
    felleisenC $ \k ->
        DeclsCons (addLams (fvsToLCtx fvs) $ \names1 ->
                   addLams lctx $ \names2 ->
                   dbSubst db (ctxAppend names1 names2))
        $ nu $ \d -> k $ FVDBTerm fvs (dbAppMultiNames (DBDVar d) fvs)

-- the top-level lambda-lifting function
lambdaLift :: DTerm a -> Decls a
lambdaLift t =
    runCont (llBody EmptyMC (emptyMb t))
            (\(FVDBTerm fvs db) ->
                 let none = ctxMap (\(MbLName mbn) -> elimEmptyMb mbn) fvs
                 in DeclsBase (dbSubst db none))
------------------------------------------------------------
-- lambda-lifting inside bindings
------------------------------------------------------------

-- | Lambda-lift a term under a multi-binding.
mbLambdaLift :: Mb ctx (DTerm a) -> Mb ctx (Decls a)
mbLambdaLift = mbToplevel $(superComb [| lambdaLift |])

-- | Lambda-lift the final term of a declaration sequence, leaving the
-- existing declarations in place.
lambdaLiftDecls :: Decls a -> Decls a
lambdaLiftDecls (DeclsBase t) = lambdaLift t
lambdaLiftDecls (DeclsCons t rest) =
    DeclsCons t $ mbToplevel $(superComb [| lambdaLiftDecls |]) rest

-- modules
-- | Modules are declaration sequences, possibly parameterized by a
-- lambda-bound name ('Functor').
data Module a where
  Functor :: Binding (L a) (Module b) -> (Module b)
  Module :: Decls a -> Module a

-- | Lambda-lift the declarations of a module, descending under functor
-- bindings.
lambdaLiftModule :: Module a -> Module a
lambdaLiftModule (Module d) = Module $ lambdaLiftDecls d
lambdaLiftModule (Functor b) =
    Functor $ mbToplevel $(superComb [| lambdaLiftModule |]) b
------------------------------------------------------------
-- examples
------------------------------------------------------------

-- \f. \x. f x
ex1 = lam (\f -> (lam $ \x -> App f x))
res1 = lambdaLift ex1

ex2 = lam (\f1 -> App f1 (lam (\f2 -> lam (\x -> App f2 x))))
res2 = lambdaLift ex2

ex3 = lam (\x -> lam (\f1 -> App f1 (lam (\f2 -> lam (\y -> f2 `App` x `App` y)))))
res3 = lambdaLift ex3

ex4 = lam (\x -> lam (\f1 -> App f1 (lam (\f2 -> lam (\y -> f2 `App` (f1 `App` x `App` y))))))
res4 = lambdaLift ex4

ex5 = lam (\f1 -> lam $ \f2 -> App f1 (lam $ \x -> App f2 x))
res5 = lambdaLift ex5

-- lambda-lift with a free variable
ex6 = nu (\f -> App (Var f) (lam $ \x -> x))
res6 = mbToplevel $(superComb [| lambdaLift |]) ex6
|
eddywestbrook/hobbits
|
archival/LambdaLiftingDB.hs
|
Haskell
|
bsd-3-clause
| 14,594
|
module Print3Flipped where
-- | Greeting built with prefix application of (++).
myGreeting :: String
myGreeting = (++) "hello" " world"

hello :: String
hello = "hello"

world :: String
world = "world!"

-- | Print the precomputed greeting, then one assembled in a local
-- binding (name fixed from the misspelled @secondGreetiong@).
main :: IO ()
main = do
  putStrLn myGreeting
  putStrLn secondGreeting
  where secondGreeting = (++) hello ((++) " " world)
|
dhaneshkk/haskell-programming
|
print3Flippedl.hs
|
Haskell
|
bsd-3-clause
| 298
|
{-
(c) The AQUA Project, Glasgow University, 1993-1998
\section[SimplUtils]{The simplifier utilities}
-}
{-# LANGUAGE CPP #-}
module SimplUtils (
-- Rebuilding
mkLam, mkCase, prepareAlts, tryEtaExpandRhs,
-- Inlining,
preInlineUnconditionally, postInlineUnconditionally,
activeUnfolding, activeRule,
getUnfoldingInRuleMatch,
simplEnvForGHCi, updModeForStableUnfoldings, updModeForRules,
-- The continuation type
SimplCont(..), DupFlag(..),
isSimplified,
contIsDupable, contResultType, contHoleType,
contIsTrivial, contArgs,
countArgs,
mkBoringStop, mkRhsStop, mkLazyArgStop, contIsRhsOrArg,
interestingCallContext,
-- ArgInfo
ArgInfo(..), ArgSpec(..), mkArgInfo,
addValArgTo, addCastTo, addTyArgTo,
argInfoExpr, argInfoAppArgs, pushSimplifiedArgs,
abstractFloats
) where
#include "HsVersions.h"
import SimplEnv
import CoreMonad ( SimplifierMode(..), Tick(..) )
import DynFlags
import CoreSyn
import qualified CoreSubst
import PprCore
import CoreFVs
import CoreUtils
import CoreArity
import CoreUnfold
import Name
import Id
import Var
import Demand
import SimplMonad
import Type hiding( substTy )
import Coercion hiding( substCo )
import DataCon ( dataConWorkId )
import VarEnv
import VarSet
import BasicTypes
import Util
import MonadUtils
import Outputable
import Pair
import Control.Monad ( when )
{-
************************************************************************
* *
The SimplCont and DupFlag types
* *
************************************************************************
A SimplCont allows the simplifier to traverse the expression in a
zipper-like fashion. The SimplCont represents the rest of the expression,
"above" the point of interest.
You can also think of a SimplCont as an "evaluation context", using
that term in the way it is used for operational semantics. This is the
way I usually think of it. For example you'll often see a syntax for
evaluation context looking like
C ::= [] | C e | case C of alts | C `cast` co
That's the kind of thing we are doing here, and I use that syntax in
the comments.
Key points:
* A SimplCont describes a *strict* context (just like
evaluation contexts do). E.g. Just [] is not a SimplCont
* A SimplCont describes a context that *does not* bind
any variables. E.g. \x. [] is not a SimplCont
-}
-- | The simplifier's continuation: the rest of the expression "above"
-- the point of interest, traversed zipper-style (see the overview
-- comment above this declaration).
data SimplCont
  = Stop                 -- An empty context, or <hole>
      OutType            -- Type of the <hole>
      CallCtxt           -- Tells if there is something interesting about
                         --    the context, and hence the inliner
                         --    should be a bit keener (see interestingCallContext)
                         -- Specifically:
                         --     This is an argument of a function that has RULES
                         --     Inlining the call might allow the rule to fire
                         -- Never ValAppCxt (use ApplyToVal instead)
                         -- or CaseCtxt (use Select instead)

  | CastIt               -- <hole> `cast` co
      OutCoercion        -- The coercion simplified
                         -- Invariant: never an identity coercion
      SimplCont

  | ApplyToVal {         -- <hole> arg
        sc_dup  :: DupFlag,    -- See Note [DupFlag invariants]
        sc_arg  :: InExpr,     -- The argument,
        sc_env  :: StaticEnv,  --     and its static env
        sc_cont :: SimplCont }

  | ApplyToTy {          -- <hole> ty
        sc_arg_ty  :: OutType,  -- Argument type
        sc_hole_ty :: OutType,  -- Type of the function, presumably (forall a. blah)
                                -- See Note [The hole type in ApplyToTy]
        sc_cont    :: SimplCont }

  | Select {             -- case <hole> of alts
        sc_dup  :: DupFlag,    -- See Note [DupFlag invariants]
        sc_bndr :: InId,       -- case binder
        sc_alts :: [InAlt],    -- Alternatives
        sc_env  :: StaticEnv,  --     and their static environment
        sc_cont :: SimplCont }

  -- The two strict forms have no DupFlag, because we never duplicate them
  | StrictBind           -- (\x* \xs. e) <hole>
      InId [InBndr]      -- let x* = <hole> in e
      InExpr StaticEnv   --      is a special case
      SimplCont

  | StrictArg            -- f e1 ..en <hole>
      ArgInfo            -- Specifies f, e1..en, Whether f has rules, etc
                         --     plus strictness flags for *further* args
      CallCtxt           -- Whether *this* argument position is interesting
      SimplCont

  | TickIt
      (Tickish Id)       -- Tick tickish <hole>
      SimplCont
-- | How far a continuation has been processed, and hence whether it is
-- safe to duplicate.  See Note [DupFlag invariants].
data DupFlag = NoDup       -- Unsimplified, might be big
             | Simplified  -- Simplified
             | OkToDup     -- Simplified and small
-- | True for flags that mark an already-simplified continuation
-- ('Simplified' or 'OkToDup'); for those the subst-env is empty.
isSimplified :: DupFlag -> Bool
isSimplified dup = case dup of
  NoDup -> False
  _     -> True   -- Invariant: the subst-env is empty

-- | Substitute in the type only if the continuation has not already
-- been simplified; a simplified one carries an empty subst-env.
perhapsSubstTy :: DupFlag -> StaticEnv -> Type -> Type
perhapsSubstTy dup env ty =
  if isSimplified dup
    then ty
    else substTy env ty
{-
Note [DupFlag invariants]
~~~~~~~~~~~~~~~~~~~~~~~~~
In both (ApplyToVal dup _ env k)
and (Select dup _ _ env k)
the following invariants hold
(a) if dup = OkToDup, then continuation k is also ok-to-dup
(b) if dup = OkToDup or Simplified, the subst-env is empty
(and hence no need to re-simplify)
-}
-- Debug pretty-printer for 'DupFlag'.
instance Outputable DupFlag where
  ppr OkToDup    = text "ok"
  ppr NoDup      = text "nodup"
  ppr Simplified = text "simpl"

-- Debug pretty-printer for 'SimplCont'; frames are stacked vertically
-- with ($$), innermost frame first.
instance Outputable SimplCont where
  ppr (Stop ty interesting) = text "Stop" <> brackets (ppr interesting) <+> ppr ty
  ppr (CastIt co cont )     = (text "CastIt" <+> ppr co) $$ ppr cont
  ppr (TickIt t cont)       = (text "TickIt" <+> ppr t) $$ ppr cont
  ppr (ApplyToTy { sc_arg_ty = ty, sc_cont = cont })
    = (text "ApplyToTy" <+> pprParendType ty) $$ ppr cont
  ppr (ApplyToVal { sc_arg = arg, sc_dup = dup, sc_cont = cont })
    = (text "ApplyToVal" <+> ppr dup <+> pprParendExpr arg)
      $$ ppr cont
  ppr (StrictBind b _ _ _ cont) = (text "StrictBind" <+> ppr b) $$ ppr cont
  ppr (StrictArg ai _ cont)     = (text "StrictArg" <+> ppr (ai_fun ai)) $$ ppr cont
  ppr (Select { sc_dup = dup, sc_bndr = bndr, sc_alts = alts, sc_env = se, sc_cont = cont })
    = (text "Select" <+> ppr dup <+> ppr bndr) $$
      -- Alternatives and the tyvar substitution only show with -dppr-debug
      ifPprDebug (nest 2 $ vcat [ppr (seTvSubst se), ppr alts]) $$ ppr cont
{- Note [The hole type in ApplyToTy]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The sc_hole_ty field of ApplyToTy records the type of the "hole" in the
continuation. It is absolutely necessary to compute contHoleType, but it is
not used for anything else (and hence may not be evaluated).
Why is it necessary for contHoleType? Consider the continuation
ApplyToType Int (Stop Int)
corresponding to
(<hole> @Int) :: Int
What is the type of <hole>? It could be (forall a. Int) or (forall a. a),
and there is no way to know which, so we must record it.
In a chain of applications (f @t1 @t2 @t3) we'll lazily compute exprType
for (f @t1) and (f @t1 @t2), which is potentially non-linear; but it probably
doesn't matter because we'll never compute them all.
************************************************************************
* *
ArgInfo and ArgSpec
* *
************************************************************************
-}
-- | Accumulated information about a function application being
-- simplified: the function, the arguments seen so far, and inlining
-- guidance (rules, strictness, discounts) for the remaining ones.
data ArgInfo
  = ArgInfo {
        ai_fun   :: OutId,      -- The function
        ai_args  :: [ArgSpec],  -- ...applied to these args (which are in *reverse* order)
        ai_type  :: OutType,    -- Type of (f a1 ... an)
        ai_rules :: [CoreRule], -- Rules for this function
        ai_encl  :: Bool,       -- Flag saying whether this function
                                -- or an enclosing one has rules (recursively)
                                --      True => be keener to inline in all args
        ai_strs  :: [Bool],     -- Strictness of remaining arguments
                                --   Usually infinite, but if it is finite it guarantees
                                --   that the function diverges after being given
                                --   that number of args
        ai_discs :: [Int]       -- Discounts for remaining arguments; non-zero => be keener to inline
                                --   Always infinite
    }

-- | One already-processed argument (or cast) of the application;
-- mirrors the corresponding 'SimplCont' frames.
data ArgSpec
  = ValArg OutExpr                 -- Apply to this (coercion or value); c.f. ApplyToVal
  | TyArg { as_arg_ty  :: OutType  -- Apply to this type; c.f. ApplyToTy
          , as_hole_ty :: OutType }-- Type of the function (presumably forall a. blah)
  | CastBy OutCoercion             -- Cast by this; c.f. CastIt
instance Outputable ArgSpec where
  ppr (ValArg e)                 = text "ValArg" <+> ppr e
  ppr (TyArg { as_arg_ty = ty }) = text "TyArg" <+> ppr ty
  ppr (CastBy c)                 = text "CastBy" <+> ppr c

-- | Record one more value argument, updating the result type.
addValArgTo :: ArgInfo -> OutExpr -> ArgInfo
addValArgTo ai arg = ai { ai_args = ValArg arg : ai_args ai
                        , ai_type = applyTypeToArg (ai_type ai) arg }

-- | Record one more type argument, instantiating the (forall) type.
addTyArgTo :: ArgInfo -> OutType -> ArgInfo
addTyArgTo ai arg_ty = ai { ai_args = arg_spec : ai_args ai
                          , ai_type = piResultTy poly_fun_ty arg_ty }
  where
    poly_fun_ty = ai_type ai
    arg_spec    = TyArg { as_arg_ty = arg_ty, as_hole_ty = poly_fun_ty }

-- | Record a cast of the application; the new type is the coercion's RHS.
addCastTo :: ArgInfo -> OutCoercion -> ArgInfo
addCastTo ai co = ai { ai_args = CastBy co : ai_args ai
                     , ai_type = pSnd (coercionKind co) }
-- | Extract the argument expressions from a prefix of specs,
-- stopping at the first cast.
argInfoAppArgs :: [ArgSpec] -> [OutExpr]
argInfoAppArgs []                              = []
argInfoAppArgs (CastBy {}                : _)  = []  -- Stop at a cast
argInfoAppArgs (ValArg e                 : as) = e : argInfoAppArgs as
argInfoAppArgs (TyArg { as_arg_ty = ty } : as) = Type ty : argInfoAppArgs as

-- | Re-wrap already-simplified argument specs as continuation frames
-- around @k@; each ValArg frame is marked 'Simplified'.
pushSimplifiedArgs :: SimplEnv -> [ArgSpec] -> SimplCont -> SimplCont
pushSimplifiedArgs _env []           k = k
pushSimplifiedArgs env  (arg : args) k
  = case arg of
      TyArg { as_arg_ty = arg_ty, as_hole_ty = hole_ty }
               -> ApplyToTy { sc_arg_ty = arg_ty, sc_hole_ty = hole_ty, sc_cont = rest }
      ValArg e -> ApplyToVal { sc_arg = e, sc_env = env, sc_dup = Simplified, sc_cont = rest }
      CastBy c -> CastIt c rest
  where
    rest = pushSimplifiedArgs env args k
           -- The env has an empty SubstEnv
-- | Rebuild the application @f a1 ... an@ from the function and its
-- argument specs.  NB: the [ArgSpec] is in *reverse* order, so the
-- first element of the list is the outermost (last) application.
argInfoExpr :: OutId -> [ArgSpec] -> OutExpr
argInfoExpr fun rev_args = foldr add (Var fun) rev_args
  where
    -- Wrap one spec around the already-rebuilt inner application
    add (ValArg a)                 fn = fn `App` a
    add (TyArg { as_arg_ty = ty }) fn = fn `App` Type ty
    add (CastBy co)                fn = mkCast fn co
{-
************************************************************************
* *
Functions on SimplCont
* *
************************************************************************
-}
-- | An empty continuation with nothing interesting about the context.
mkBoringStop :: OutType -> SimplCont
mkBoringStop = (`Stop` BoringCtxt)

-- | An empty continuation for the RHS of a let.
-- See Note [RHS of lets] in CoreUnfold
mkRhsStop :: OutType -> SimplCont
mkRhsStop = (`Stop` RhsCtxt)

-- | An empty continuation for a lazy argument, carrying the supplied
-- call context.
mkLazyArgStop :: OutType -> CallCtxt -> SimplCont
mkLazyArgStop = Stop
-------------------
-------------------
-- | Is the hole the RHS of a let, or in a strict position?
contIsRhsOrArg :: SimplCont -> Bool
contIsRhsOrArg cont = case cont of
  Stop {}       -> True
  StrictBind {} -> True
  StrictArg {}  -> True
  _             -> False

-- | Is the hole precisely the RHS of a let?
contIsRhs :: SimplCont -> Bool
contIsRhs cont = case cont of
  Stop _ RhsCtxt -> True
  _              -> False
-------------------
-- | May this continuation safely be duplicated (e.g. pushed into each
-- case alternative)?  Relies on Note [DupFlag invariants]: an OkToDup
-- frame guarantees the rest of the continuation is ok-to-dup too.
contIsDupable :: SimplCont -> Bool
contIsDupable cont = case cont of
  Stop {}                         -> True
  ApplyToTy { sc_cont = k }       -> contIsDupable k
  ApplyToVal { sc_dup = OkToDup } -> True   -- See Note [DupFlag invariants]
  Select     { sc_dup = OkToDup } -> True   -- ...ditto...
  CastIt _ k                      -> contIsDupable k
  _                               -> False
-------------------
-- | Is the continuation made only of trivial frames (type applications,
-- coercion arguments, casts) down to an empty Stop?
contIsTrivial :: SimplCont -> Bool
contIsTrivial cont = case cont of
  Stop {}                                         -> True
  ApplyToTy  { sc_cont = k }                      -> contIsTrivial k
  ApplyToVal { sc_arg = Coercion _, sc_cont = k } -> contIsTrivial k
  CastIt _ k                                      -> contIsTrivial k
  _                                               -> False
-------------------
-- | The type of the whole expression once the continuation has been
-- filled in: walk to the final Stop and return its type.
contResultType :: SimplCont -> OutType
contResultType cont = case cont of
  Stop ty _                  -> ty
  CastIt _ k                 -> contResultType k
  StrictBind _ _ _ _ k       -> contResultType k
  StrictArg _ _ k            -> contResultType k
  Select     { sc_cont = k } -> contResultType k
  ApplyToTy  { sc_cont = k } -> contResultType k
  ApplyToVal { sc_cont = k } -> contResultType k
  TickIt _ k                 -> contResultType k
-- | The type of the <hole> in the continuation, i.e. the type the
-- expression being simplified must have.
contHoleType :: SimplCont -> OutType
contHoleType (Stop ty _)                      = ty
contHoleType (TickIt _ k)                     = contHoleType k
contHoleType (CastIt co _)                    = pFst (coercionKind co)
contHoleType (StrictBind b _ _ se _)          = substTy se (idType b)
contHoleType (StrictArg ai _ _)               = funArgTy (ai_type ai)
contHoleType (ApplyToTy { sc_hole_ty = ty })  = ty  -- See Note [The hole type in ApplyToTy]
contHoleType (ApplyToVal { sc_arg = e, sc_env = se, sc_dup = dup, sc_cont = k })
  -- hole type is (argument type -> rest-of-continuation hole type)
  = mkFunTy (perhapsSubstTy dup se (exprType e))
            (contHoleType k)
contHoleType (Select { sc_dup = d, sc_bndr = b, sc_env = se })
  = perhapsSubstTy d se (idType b)
-------------------
-------------------
-- | Count all arguments, including types, coercions, and other values
countArgs :: SimplCont -> Int
countArgs cont = case cont of
  ApplyToTy  { sc_cont = k } -> 1 + countArgs k
  ApplyToVal { sc_cont = k } -> 1 + countArgs k
  _                          -> 0
contArgs :: SimplCont -> (Bool, [ArgSummary], SimplCont)
-- Summarises value args, discards type args and coercions
-- The returned continuation of the call is only used to
-- answer questions like "are you interesting?"
-- The Bool is True when the function is a "lone" variable:
-- applied to nothing at all (modulo ticks etc.).
contArgs cont
  | lone cont = (True, [], cont)
  | otherwise = go [] cont
  where
    lone (ApplyToTy {})  = False  -- See Note [Lone variables] in CoreUnfold
    lone (ApplyToVal {}) = False
    lone (CastIt {})     = False
    lone _               = True

    -- Collect a summary per value argument; stop at the first
    -- non-application frame
    go args (ApplyToVal { sc_arg = arg, sc_env = se, sc_cont = k })
                                        = go (is_interesting arg se : args) k
    go args (ApplyToTy { sc_cont = k }) = go args k
    go args (CastIt _ k)                = go args k
    go args k                           = (False, reverse args, k)

    is_interesting arg se = interestingArg se arg
       -- Do *not* use short-cutting substitution here
       -- because we want to get as much IdInfo as possible
-------------------
-- | Build the initial 'ArgInfo' for a call, seeding the strictness and
-- discount streams for the (remaining) arguments.
mkArgInfo :: Id
          -> [CoreRule] -- Rules for function
          -> Int        -- Number of value args
          -> SimplCont  -- Context of the call
          -> ArgInfo
mkArgInfo fun rules n_val_args call_cont
  | n_val_args < idArity fun            -- Note [Unsaturated functions]
  = ArgInfo { ai_fun = fun, ai_args = [], ai_type = fun_ty
            , ai_rules = rules, ai_encl = False
            , ai_strs = vanilla_stricts
            , ai_discs = vanilla_discounts }
  | otherwise
  = ArgInfo { ai_fun = fun, ai_args = [], ai_type = fun_ty
            , ai_rules = rules
            , ai_encl = interestingArgContext rules call_cont
            , ai_strs = add_type_str fun_ty arg_stricts
            , ai_discs = arg_discounts }
  where
    fun_ty = idType fun

    vanilla_discounts, arg_discounts :: [Int]
    vanilla_discounts = repeat 0
    -- Per-argument inlining discounts from the unfolding guidance,
    -- padded out to an infinite stream
    arg_discounts = case idUnfolding fun of
                      CoreUnfolding {uf_guidance = UnfIfGoodArgs {ug_args = discounts}}
                            -> discounts ++ vanilla_discounts
                      _     -> vanilla_discounts

    vanilla_stricts, arg_stricts :: [Bool]
    vanilla_stricts = repeat False

    arg_stricts
      = case splitStrictSig (idStrictness fun) of
          (demands, result_info)
            | not (demands `lengthExceeds` n_val_args)
            ->      -- Enough args, use the strictness given.
                    -- For bottoming functions we used to pretend that the arg
                    -- is lazy, so that we don't treat the arg as an
                    -- interesting context.  This avoids substituting
                    -- top-level bindings for (say) strings into
                    -- calls to error.  But now we are more careful about
                    -- inlining lone variables, so its ok (see SimplUtils.analyseCont)
               if isBotRes result_info then
                  map isStrictDmd demands         -- Finite => result is bottom
               else
                  map isStrictDmd demands ++ vanilla_stricts
            | otherwise
            -> WARN( True, text "More demands than arity" <+> ppr fun <+> ppr (idArity fun)
                             <+> ppr n_val_args <+> ppr demands )
               vanilla_stricts      -- Not enough args, or no strictness

    add_type_str :: Type -> [Bool] -> [Bool]
    -- If the function arg types are strict, record that in the 'strictness bits'
    -- No need to instantiate because unboxed types (which dominate the strict
    -- types) can't instantiate type variables.
    -- add_type_str is done repeatedly (for each call); might be better
    -- once-for-all in the function
    -- But beware primops/datacons with no strictness
    add_type_str _ [] = []
    add_type_str fun_ty strs            -- Look through foralls
      | Just (_, fun_ty') <- splitForAllTy_maybe fun_ty       -- Includes coercions
      = add_type_str fun_ty' strs
    add_type_str fun_ty (str:strs)      -- Add strict-type info
      | Just (arg_ty, fun_ty') <- splitFunTy_maybe fun_ty
      = (str || isStrictType arg_ty) : add_type_str fun_ty' strs
    add_type_str _ strs
      = strs
{- Note [Unsaturated functions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider (test eyeball/inline4)
x = a:as
y = f x
where f has arity 2. Then we do not want to inline 'x', because
it'll just be floated out again. Even if f has lots of discounts
on its first argument -- it must be saturated for these to kick in
-}
{-
************************************************************************
* *
Interesting arguments
* *
************************************************************************
Note [Interesting call context]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We want to avoid inlining an expression where there can't possibly be
any gain, such as in an argument position. Hence, if the continuation
is interesting (eg. a case scrutinee, application etc.) then we
inline, otherwise we don't.
Previously some_benefit used to return True only if the variable was
applied to some value arguments. This didn't work:
let x = _coerce_ (T Int) Int (I# 3) in
case _coerce_ Int (T Int) x of
I# y -> ....
we want to inline x, but can't see that it's a constructor in a case
scrutinee position, and some_benefit is False.
Another example:
dMonadST = _/\_ t -> :Monad (g1 _@_ t, g2 _@_ t, g3 _@_ t)
.... case dMonadST _@_ x0 of (a,b,c) -> ....
we'd really like to inline dMonadST here, but we *don't* want to
inline if the case expression is just
case x of y { DEFAULT -> ... }
since we can just eliminate this case instead (x is in WHNF). Similar
applies when x is bound to a lambda expression. Hence
contIsInteresting looks for case expressions with just a single
default case.
-}
-- | Classify how interesting the context of a call is, for the inliner.
interestingCallContext :: SimplCont -> CallCtxt
-- See Note [Interesting call context]
interestingCallContext cont
  = interesting cont
  where
    interesting (Select {})     = CaseCtxt
    interesting (ApplyToVal {}) = ValAppCtxt
        -- Can happen if we have (f Int |> co) y
        -- If f has an INLINE prag we need to give it some
        -- motivation to inline. See Note [Cast then apply]
        -- in CoreUnfold
    interesting (StrictArg _ cci _) = cci
    interesting (StrictBind {})     = BoringCtxt
    interesting (Stop _ cci)        = cci
    -- Ticks, type applications and casts are transparent: look further
    interesting (TickIt _ k)        = interesting k
    interesting (ApplyToTy { sc_cont = k }) = interesting k
    interesting (CastIt _ k)        = interesting k
-- If this call is the arg of a strict function, the context
-- is a bit interesting. If we inline here, we may get useful
-- evaluation information to avoid repeated evals: e.g.
-- x + (y * z)
-- Here the contIsInteresting makes the '*' keener to inline,
-- which in turn exposes a constructor which makes the '+' inline.
-- Assuming that +,* aren't small enough to inline regardless.
--
-- It's also very important to inline in a strict context for things
-- like
-- foldr k z (f x)
-- Here, the context of (f x) is strict, and if f's unfolding is
-- a build it's *great* to inline it here. So we must ensure that
-- the context for (f x) is not totally uninteresting.
interestingArgContext :: [CoreRule] -> SimplCont -> Bool
-- If the argument has form (f x y), where x,y are boring,
-- and f is marked INLINE, then we don't want to inline f.
-- But if the context of the argument is
--      g (f x y)
-- where g has rules, then we *do* want to inline f, in case it
-- exposes a rule that might fire.  Similarly, if the context is
--      h (g (f x x))
-- where h has rules, then we do want to inline f; hence the
-- call_cont argument to interestingArgContext
--
-- The ai-rules flag makes this happen; if it's
-- set, the inliner gets just enough keener to inline f
-- regardless of how boring f's arguments are, if it's marked INLINE
--
-- The alternative would be to *always* inline an INLINE function,
-- regardless of how boring its context is; but that seems overkill
-- For example, it'd mean that wrapper functions were always inlined
--
-- The call_cont passed to interestingArgContext is the context of
-- the call itself, e.g. g <hole> in the example above
interestingArgContext rules call_cont
  = notNull rules || enclosing_fn_has_rules
  where
    enclosing_fn_has_rules = go call_cont

    -- Walk outward looking for an enclosing call whose context was
    -- already flagged as rule-interesting
    go (Select {})         = False
    go (ApplyToVal {})     = False  -- Shouldn't really happen
    go (ApplyToTy  {})     = False  -- Ditto
    go (StrictArg _ cci _) = interesting cci
    go (StrictBind {})     = False  -- ??
    go (CastIt _ c)        = go c
    go (Stop _ cci)        = interesting cci
    go (TickIt _ c)        = go c

    interesting RuleArgCtxt = True
    interesting _           = False
{- Note [Interesting arguments]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
An argument is interesting if it deserves a discount for unfoldings
with a discount in that argument position. The idea is to avoid
unfolding a function that is applied only to variables that have no
unfolding (i.e. they are probably lambda bound): f x y z There is
little point in inlining f here.
Generally, *values* (like (C a b) and (\x.e)) deserve discounts. But
we must look through lets, eg (let x = e in C a b), because the let will
float, exposing the value, if we inline. That makes it different to
exprIsHNF.
Before 2009 we said it was interesting if the argument had *any* structure
at all; i.e. (hasSomeUnfolding v).  But that does too much inlining; see Trac #3016.
But we don't regard (f x y) as interesting, unless f is unsaturated.
If it's saturated and f hasn't inlined, then it's probably not going
to now!
Note [Conlike is interesting]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider
f d = ...((*) d x y)...
... f (df d')...
where df is con-like. Then we'd really like to inline 'f' so that the
rule for (*) (df d) can fire. To do this
a) we give a discount for being an argument of a class-op (eg (*) d)
b) we say that a con-like argument (eg (df d)) is interesting
-}
-- | Summarise how interesting an argument expression is, looking
-- through the substitution in @env@.
interestingArg :: SimplEnv -> CoreExpr -> ArgSummary
-- See Note [Interesting arguments]
interestingArg env e = go env 0 e
  where
    -- n is # value args to which the expression is applied
    go env n (Var v)
       | SimplEnv { seIdSubst = ids, seInScope = in_scope } <- env
       = case lookupVarEnv ids v of
           Nothing                     -> go_var n (refineFromInScope in_scope v)
           Just (DoneId v')            -> go_var n (refineFromInScope in_scope v')
           Just (DoneEx e)             -> go (zapSubstEnv env) n e
           Just (ContEx tvs cvs ids e) -> go (setSubstEnv env tvs cvs ids) n e

    go _ _ (Lit {})            = ValueArg
    go _ _ (Type _)            = TrivArg
    go _ _ (Coercion _)        = TrivArg
    go env n (App fn (Type _)) = go env n fn
    go env n (App fn _)        = go env (n+1) fn
    go env n (Tick _ a)        = go env n a
    go env n (Cast e _)        = go env n e
    go env n (Lam v e)
       | isTyVar v = go env n e
       | n>0       = NonTrivArg -- (\x.b) e is NonTriv
       | otherwise = ValueArg
    go _ _ (Case {})   = NonTrivArg
    -- Look through lets: the let will float, exposing the value
    go env n (Let b e) = case go env' n e of
                           ValueArg -> ValueArg
                           _        -> NonTrivArg
      where
        env' = env `addNewInScopeIds` bindersOf b

    go_var n v
       | isConLikeId v     = ValueArg   -- Experimenting with 'conlike' rather that
                                        --    data constructors here
       | idArity v > n     = ValueArg   -- Catches (eg) primops with arity but no unfolding
       | n > 0             = NonTrivArg -- Saturated or unknown call
       | conlike_unfolding = ValueArg   -- n==0; look for an interesting unfolding
                                        -- See Note [Conlike is interesting]
       | otherwise         = TrivArg    -- n==0, no useful unfolding
       where
         conlike_unfolding = isConLikeUnfolding (idUnfolding v)
{-
************************************************************************
* *
SimplifierMode
* *
************************************************************************
The SimplifierMode controls several switches; see its definition in
CoreMonad
sm_rules :: Bool -- Whether RULES are enabled
sm_inline :: Bool -- Whether inlining is enabled
sm_case_case :: Bool -- Whether case-of-case is enabled
sm_eta_expand :: Bool -- Whether eta-expansion is enabled
-}
-- | The simplifier environment used when preparing code for GHCi:
-- inlining off, rules/eta-expansion controlled by the relevant flags.
simplEnvForGHCi :: DynFlags -> SimplEnv
simplEnvForGHCi dflags
  = mkSimplEnv $ SimplMode { sm_names = ["GHCi"]
                           , sm_phase = InitialPhase
                           , sm_rules = rules_on
                           , sm_inline = False
                           , sm_eta_expand = eta_expand_on
                           , sm_case_case = True }
  where
    rules_on      = gopt Opt_EnableRewriteRules   dflags
    eta_expand_on = gopt Opt_DoLambdaEtaExpansion dflags
   -- Do not do any inlining, in case we expose some unboxed
   -- tuple stuff that confuses the bytecode interpreter
-- | Adjust the simplifier mode for simplifying inside a stable
-- unfolding: set the phase to the one in which the unfolding first
-- becomes active, and disable eta-expansion.
updModeForStableUnfoldings :: Activation -> SimplifierMode -> SimplifierMode
-- See Note [Simplifying inside stable unfoldings]
updModeForStableUnfoldings inline_rule_act current_mode
  = current_mode { sm_phase = phaseFromActivation inline_rule_act
                 , sm_inline = True
                 , sm_eta_expand = False }
       -- For sm_rules, just inherit; sm_rules might be "off"
       -- because of -fno-enable-rewrite-rules
  where
    phaseFromActivation (ActiveAfter _ n) = Phase n
    phaseFromActivation _                 = InitialPhase
-- | Adjust the simplifier mode for simplifying the LHS/RHS of a RULE:
-- no inlining, no rule application, no eta-expansion.
updModeForRules :: SimplifierMode -> SimplifierMode
-- See Note [Simplifying rules]
updModeForRules current_mode
  = current_mode { sm_phase = InitialPhase
                 , sm_inline = False
                 , sm_rules = False
                 , sm_eta_expand = False }
{- Note [Simplifying rules]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
When simplifying a rule, refrain from any inlining or applying of other RULES.
Doing anything to the LHS is plain confusing, because it means that what the
rule matches is not what the user wrote. c.f. Trac #10595, and #10528.
Moreover, inlining (or applying rules) on rule LHSs risks introducing
Ticks into the LHS, which makes matching trickier. Trac #10665, #10745.
Doing this to either side confounds tools like HERMIT, which seek to reason
about and apply the RULES as originally written. See Trac #10829.
Note [Inlining in gentle mode]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Something is inlined if
(i) the sm_inline flag is on, AND
(ii) the thing has an INLINE pragma, AND
(iii) the thing is inlinable in the earliest phase.
Example of why (iii) is important:
{-# INLINE [~1] g #-}
g = ...
{-# INLINE f #-}
f x = g (g x)
If we were to inline g into f's inlining, then an importing module would
never be able to do
f e --> g (g e) ---> RULE fires
because the stable unfolding for f has had g inlined into it.
On the other hand, it is bad not to do ANY inlining into an
stable unfolding, because then recursive knots in instance declarations
don't get unravelled.
However, *sometimes* SimplGently must do no call-site inlining at all
(hence sm_inline = False). Before full laziness we must be careful
not to inline wrappers, because doing so inhibits floating
e.g. ...(case f x of ...)...
==> ...(case (case x of I# x# -> fw x#) of ...)...
==> ...(case x of I# x# -> case fw x# of ...)...
and now the redex (f x) isn't floatable any more.
The no-inlining thing is also important for Template Haskell. You might be
compiling in one-shot mode with -O2; but when TH compiles a splice before
running it, we don't want to use -O2. Indeed, we don't want to inline
anything, because the byte-code interpreter might get confused about
unboxed tuples and suchlike.
Note [Simplifying inside stable unfoldings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We must take care with simplification inside stable unfoldings (which come from
INLINE pragmas).
First, consider the following example
let f = \pq -> BIG
in
let g = \y -> f y y
{-# INLINE g #-}
in ...g...g...g...g...g...
Now, if that's the ONLY occurrence of f, it might be inlined inside g,
and thence copied multiple times when g is inlined. HENCE we treat
any occurrence in a stable unfolding as a multiple occurrence, not a single
one; see OccurAnal.addRuleUsage.
Second, we *do* want to do some modest rules/inlining stuff in stable
unfoldings, partly to eliminate senseless crap, and partly to break
the recursive knots generated by instance declarations.
However, suppose we have
{-# INLINE <act> f #-}
f = <rhs>
meaning "inline f in phases p where activation <act>(p) holds".
Then what inlinings/rules can we apply to the copy of <rhs> captured in
f's stable unfolding? Our model is that literally <rhs> is substituted for
f when it is inlined. So our conservative plan (implemented by
updModeForStableUnfoldings) is this:
-------------------------------------------------------------
When simplifying the RHS of an stable unfolding, set the phase
to the phase in which the stable unfolding first becomes active
-------------------------------------------------------------
That ensures that
a) Rules/inlinings that *cease* being active before p will
not apply to the stable unfolding, consistent with it being
inlined in its *original* form in phase p.
b) Rules/inlinings that only become active *after* p will
not apply to the stable unfolding, again to be consistent with
inlining the *original* rhs in phase p.
For example,
{-# INLINE f #-}
f x = ...g...
{-# NOINLINE [1] g #-}
g y = ...
{-# RULE h g = ... #-}
Here we must not inline g into f's RHS, even when we get to phase 0,
because when f is later inlined into some other module we want the
rule for h to fire.
Similarly, consider
{-# INLINE f #-}
f x = ...g...
g y = ...
and suppose that there are auto-generated specialisations and a strictness
wrapper for g. The specialisations get activation AlwaysActive, and the
strictness wrapper get activation (ActiveAfter 0). So the strictness
wrapper fails the test and won't be inlined into f's stable unfolding. That
means f can inline, expose the specialised call to g, so the specialisation
rules can fire.
A note about wrappers
~~~~~~~~~~~~~~~~~~~~~
It's also important not to inline a worker back into a wrapper.
A wrapper looks like
wrapper = inline_me (\x -> ...worker... )
Normally, the inline_me prevents the worker getting inlined into
the wrapper (initially, the worker's only call site!). But,
if the wrapper is sure to be called, the strictness analyser will
mark it 'demanded', so when the RHS is simplified, it'll get an ArgOf
continuation.
-}
-- | Is an Id's unfolding usable in the current simplifier mode?
-- With inlining off, only compulsory unfoldings fire.
activeUnfolding :: SimplEnv -> Id -> Bool
activeUnfolding env
  | not (sm_inline mode) = active_unfolding_minimal
  | otherwise            = case sm_phase mode of
                             InitialPhase -> active_unfolding_gentle
                             Phase n      -> active_unfolding n
  where
    mode = getMode env
getUnfoldingInRuleMatch :: SimplEnv -> InScopeEnv
-- When matching in RULE, we want to "look through" an unfolding
-- (to see a constructor) if *rules* are on, even if *inlinings*
-- are not.  A notable example is DFuns, which really we want to
-- match in rules like (op dfun) in gentle mode. Another example
-- is 'otherwise' which we want exprIsConApp_maybe to be able to
-- see very early on
getUnfoldingInRuleMatch env
  = (in_scope, id_unf)
  where
    in_scope = seInScope env
    mode = getMode env
    -- Expose an Id's unfolding only when it is active for this purpose
    id_unf id | unf_is_active id = idUnfolding id
              | otherwise        = NoUnfolding
    unf_is_active id
      | not (sm_rules mode) = active_unfolding_minimal id
      | otherwise           = isActive (sm_phase mode) (idInlineActivation id)
active_unfolding_minimal :: Id -> Bool
-- Compulsory unfoldings only
-- Ignore SimplGently, because we want to inline regardless;
-- the Id has no top-level binding at all
--
-- NB: we used to have a second exception, for data con wrappers.
-- On the grounds that we use gentle mode for rule LHSs, and
-- they match better when data con wrappers are inlined.
-- But that only really applies to the trivial wrappers (like (:)),
-- and they are now constructed as Compulsory unfoldings (in MkId)
-- so they'll happen anyway.
active_unfolding_minimal id = isCompulsoryUnfolding (realIdUnfolding id)
-- | Is an ordinary unfolding active in simplifier phase @n@?
active_unfolding :: PhaseNum -> Id -> Bool
active_unfolding n = isActiveIn n . idInlineActivation
active_unfolding_gentle :: Id -> Bool
-- Anything that is early-active
-- See Note [Gentle mode]
active_unfolding_gentle id
  = isInlinePragma prag
    && isEarlyActive (inlinePragmaActivation prag)
       -- NB: wrappers are not early-active
  where
    prag = idInlinePragma id
----------------------
-- | Predicate for whether a RULE with the given activation may fire in
-- the current simplifier mode.
activeRule :: SimplEnv -> Activation -> Bool
-- Nothing => No rules at all
activeRule env
  | not (sm_rules mode) = \_ -> False     -- Rewriting is off
  | otherwise           = isActive (sm_phase mode)
  where
    mode = getMode env
{-
************************************************************************
* *
preInlineUnconditionally
* *
************************************************************************
preInlineUnconditionally
~~~~~~~~~~~~~~~~~~~~~~~~
@preInlineUnconditionally@ examines a bndr to see if it is used just
once in a completely safe way, so that it is safe to discard the
binding and inline its RHS at the (unique) usage site, REGARDLESS of how
big the RHS might be. If this is the case we don't simplify the RHS
first, but just inline it un-simplified.
This is much better than first simplifying a perhaps-huge RHS and then
inlining and re-simplifying it. Indeed, it can be at least quadratically
better. Consider
x1 = e1
x2 = e2[x1]
x3 = e3[x2]
...etc...
xN = eN[xN-1]
We may end up simplifying e1 N times, e2 N-1 times, e3 N-2 times etc.
This can happen with cascades of functions too:
f1 = \x1.e1
f2 = \xs.e2[f1]
f3 = \xs.e3[f2]
...etc...
THE MAIN INVARIANT is this:
---- preInlineUnconditionally invariant -----
IF preInlineUnconditionally chooses to inline x = <rhs>
THEN doing the inlining should not change the occurrence
info for the free vars of <rhs>
----------------------------------------------
For example, it's tempting to look at trivial binding like
x = y
and inline it unconditionally. But suppose x is used many times,
but this is the unique occurrence of y. Then inlining x would change
y's occurrence info, which breaks the invariant. It matters: y
might have a BIG rhs, which will now be dup'd at every occurrence of x.
Even RHSs labelled InlineMe aren't caught here, because there might be
no benefit from inlining at the call site.
[Sept 01] Don't unconditionally inline a top-level thing, because that
can simply make a static thing into something built dynamically. E.g.
x = (a,b)
main = \s -> h x
[Remember that we treat \s as a one-shot lambda.] No point in
inlining x unless there is something interesting about the call site.
But watch out: if you aren't careful, some useful foldr/build fusion
can be lost (most notably in spectral/hartel/parstof) because the
foldr didn't see the build. Doing the dynamic allocation isn't a big
deal, in fact, but losing the fusion can be. But the right thing here
seems to be to do a callSiteInline based on the fact that there is
something interesting about the call site (it's strict). Hmm. That
seems a bit fragile.
Conclusion: inline top level things gaily until Phase 0 (the last
phase), at which point don't.
Note [pre/postInlineUnconditionally in gentle mode]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Even in gentle mode we want to do preInlineUnconditionally. The
reason is that too little clean-up happens if you don't inline
use-once things. Also a bit of inlining is *good* for full laziness;
it can expose constant sub-expressions. Example in
spectral/mandel/Mandel.hs, where the mandelset function gets a useful
let-float if you inline windowToViewport
However, as usual for Gentle mode, do not inline things that are
inactive in the initial stages. See Note [Gentle mode].
Note [Stable unfoldings and preInlineUnconditionally]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Surprisingly, do not pre-inline-unconditionally Ids with INLINE pragmas!
Example
{-# INLINE f #-}
f :: Eq a => a -> a
f x = ...
fInt :: Int -> Int
fInt = f Int dEqInt
...fInt...fInt...fInt...
Here f occurs just once, in the RHS of fInt. But if we inline it there
we'll lose the opportunity to inline at each of fInt's call sites.
The INLINE pragma will only inline when the application is saturated
for exactly this reason; and we don't want PreInlineUnconditionally
to second-guess it. A live example is Trac #3736.
c.f. Note [Stable unfoldings and postInlineUnconditionally]
Note [Top-level bottoming Ids]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Don't inline top-level Ids that are bottoming, even if they are used just
once, because FloatOut has gone to some trouble to extract them out.
Inlining them won't make the program run faster!
Note [Do not inline CoVars unconditionally]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Coercion variables appear inside coercions, and the RHS of a let-binding
is a term (not a coercion) so we can't necessarily inline the latter in
the former.
-}
preInlineUnconditionally :: DynFlags -> SimplEnv -> TopLevelFlag -> InId -> InExpr -> Bool
-- True <=> it is safe to discard the binding and inline the (as yet
-- unsimplified) RHS at the unique occurrence of 'bndr', regardless of
-- the RHS's size.  See the preInlineUnconditionally invariant above.
--
-- Precondition: rhs satisfies the let/app invariant
-- See Note [CoreSyn let/app invariant] in CoreSyn
-- Reason: we don't want to inline single uses, or discard dead bindings,
--         for unlifted, side-effect-ful bindings
preInlineUnconditionally dflags env top_lvl bndr rhs
  | not active                               = False
  | isStableUnfolding (idUnfolding bndr)     = False -- Note [Stable unfoldings and preInlineUnconditionally]
  | isTopLevel top_lvl && isBottomingId bndr = False -- Note [Top-level bottoming Ids]
  | not (gopt Opt_SimplPreInlining dflags)   = False
  | isCoVar bndr                             = False -- Note [Do not inline CoVars unconditionally]
  | otherwise = case idOccInfo bndr of
                  IAmDead                    -> True  -- Happens in ((\x.1) v)
                  OneOcc in_lam True int_cxt -> try_once in_lam int_cxt
                  _                          -> False
  where
    mode   = getMode env
    active = isActive (sm_phase mode) act
      -- See Note [pre/postInlineUnconditionally in gentle mode]
    act    = idInlineActivation bndr

    try_once in_lam int_cxt   -- There's one textual occurrence
      | not in_lam = isNotTopLevel top_lvl || early_phase
      | otherwise  = int_cxt && canInlineInLam rhs

-- Be very careful before inlining inside a lambda, because (a) we must not
-- invalidate occurrence information, and (b) we want to avoid pushing a
-- single allocation (here) into multiple allocations (inside lambda).
-- Inlining a *function* with a single *saturated* call would be ok, mind you.
--      || (if is_cheap && not (canInlineInLam rhs) then pprTrace "preinline" (ppr bndr <+> ppr rhs) ok else ok)
--      where
--        is_cheap = exprIsCheap rhs
--        ok = is_cheap && int_cxt

    --  int_cxt: the context isn't totally boring
    --  E.g. let f = \ab.BIG in \y. map f xs
    --       Don't want to substitute for f, because then we allocate
    --       its closure every time the \y is called
    --  But: let f = \ab.BIG in \y. map (f y) xs
    --       Now we do want to substitute for f, even though it's not
    --       saturated, because we're going to allocate a closure for
    --       (f y) every time round the loop anyhow.

    -- canInlineInLam => free vars of rhs are (Once in_lam) or Many,
    -- so substituting rhs inside a lambda doesn't change the occ info.
    -- Sadly, not quite the same as exprIsHNF.
    canInlineInLam (Lit _)    = True
    canInlineInLam (Lam b e)  = isRuntimeVar b || canInlineInLam e
    canInlineInLam (Tick t e) = not (tickishIsCode t) && canInlineInLam e
    canInlineInLam _          = False
      -- not ticks.  Counting ticks cannot be duplicated, and non-counting
      -- ticks around a Lam will disappear anyway.

    early_phase = case sm_phase mode of
                    Phase 0 -> False
                    _       -> True
-- If we don't have this early_phase test, consider
--   x = length [1,2,3]
-- The full laziness pass carefully floats all the cons cells to
-- top level, and preInlineUnconditionally floats them all back in.
-- Result is (a) static allocation replaced by dynamic allocation
--           (b) many simplifier iterations because this tickles
--               a related problem; only one inlining per pass
--
-- On the other hand, I have seen cases where top-level fusion is
-- lost if we don't inline top level thing (e.g. string constants)
-- Hence the test for phase zero (which is the phase for all the final
-- simplifications).  Until phase zero we take no special notice of
-- top level things, but then we become more leery about inlining
-- them.
{-
************************************************************************
* *
postInlineUnconditionally
* *
************************************************************************
postInlineUnconditionally
~~~~~~~~~~~~~~~~~~~~~~~~~
@postInlineUnconditionally@ decides whether to unconditionally inline
a thing based on the form of its RHS; in particular if it has a
trivial RHS. If so, we can inline and discard the binding altogether.
NB: a loop breaker has must_keep_binding = True and non-loop-breakers
only have *forward* references. Hence, it's safe to discard the binding
NOTE: This isn't our last opportunity to inline. We're at the binding
site right now, and we'll get another opportunity when we get to the
occurrence(s)
Note that we do this unconditional inlining only for trivial RHSs.
Don't inline even WHNFs inside lambdas; doing so may simply increase
allocation when the function is called. This isn't the last chance; see
NOTE above.
NB: Even inline pragmas (e.g. IMustBeINLINEd) are ignored here Why?
Because we don't even want to inline them into the RHS of constructor
arguments. See NOTE above
NB: At one time even NOINLINE was ignored here: if the rhs is trivial
it's best to inline it anyway. We often get a=E; b=a from desugaring,
with both a and b marked NOINLINE. But that seems incompatible with
our new view that inlining is like a RULE, so I'm sticking to the 'active'
story for now.
-}
postInlineUnconditionally
    :: DynFlags -> SimplEnv -> TopLevelFlag
    -> OutId            -- The binder (an InId would be fine too)
                        --            (*not* a CoVar)
    -> OccInfo          -- From the InId
    -> OutExpr
    -> Unfolding
    -> Bool
-- True <=> the (already simplified) binding can be dropped, inlining the
-- RHS at its remaining occurrence(s); typically because the RHS is trivial
-- or small enough to duplicate.
--
-- Precondition: rhs satisfies the let/app invariant
-- See Note [CoreSyn let/app invariant] in CoreSyn
-- Reason: we don't want to inline single uses, or discard dead bindings,
--         for unlifted, side-effect-ful bindings
postInlineUnconditionally dflags env top_lvl bndr occ_info rhs unfolding
  | not active                  = False
  | isWeakLoopBreaker occ_info  = False -- If it's a loop-breaker of any kind, don't inline
                                        -- because it might be referred to "earlier"
  | isExportedId bndr           = False
  | isStableUnfolding unfolding = False -- Note [Stable unfoldings and postInlineUnconditionally]
  | isTopLevel top_lvl          = False -- Note [Top level and postInlineUnconditionally]
  | exprIsTrivial rhs           = True
  | otherwise
  = case occ_info of
      -- The point of examining occ_info here is that for *non-values*
      -- that occur outside a lambda, the call-site inliner won't have
      -- a chance (because it doesn't know that the thing
      -- only occurs once).   The pre-inliner won't have gotten
      -- it either, if the thing occurs in more than one branch
      -- So the main target is things like
      --     let x = f y in
      --     case v of
      --        True  -> case x of ...
      --        False -> case x of ...
      -- This is very important in practice; e.g. wheel-seive1 doubles
      -- in allocation if you miss this out
      OneOcc in_lam _one_br int_cxt   -- OneOcc => no code-duplication issue
        ->     smallEnoughToInline dflags unfolding   -- Small enough to dup
                  -- ToDo: consider discount on smallEnoughToInline if int_cxt is true
                  --
                  -- NB: Do NOT inline arbitrarily big things, even if one_br is True
                  -- Reason: doing so risks exponential behaviour.  We simplify a big
                  --         expression, inline it, and simplify it again.  But if the
                  --         very same thing happens in the big expression, we get
                  --         exponential cost!
                  -- PRINCIPLE: when we've already simplified an expression once,
                  -- make sure that we only inline it if it's reasonably small.
           && (not in_lam ||
                  -- Outside a lambda, we want to be reasonably aggressive
                  -- about inlining into multiple branches of case
                  -- e.g. let x = <non-value>
                  --      in case y of { C1 -> ..x..; C2 -> ..x..; C3 -> ... }
                  -- Inlining can be a big win if C3 is the hot-spot, even if
                  -- the uses in C1, C2 are not 'interesting'
                  -- An example that gets worse if you add int_cxt here is 'clausify'
               (isCheapUnfolding unfolding && int_cxt))
                  -- isCheap => acceptable work duplication; in_lam may be true
                  -- int_cxt to prevent us inlining inside a lambda without some
                  -- good reason.  See the notes on int_cxt in preInlineUnconditionally
      IAmDead -> True   -- This happens; for example, the case_bndr during case of
                        -- known constructor:  case (a,b) of x { (p,q) -> ... }
                        -- Here x isn't mentioned in the RHS, so we don't want to
                        -- create the (dead) let-binding  let x = (a,b) in ...
      _ -> False
-- Here's an example that we don't handle well:
--      let f = if b then Left (\x.BIG) else Right (\y.BIG)
--      in \y. ....case f of {...} ....
-- Here f is used just once, and duplicating the case work is fine (exprIsCheap).
-- But
--  - We can't preInlineUnconditionally because that would invalidate
--    the occ info for b.
--  - We can't postInlineUnconditionally because the RHS is big, and
--    that risks exponential behaviour
--  - We can't call-site inline, because the rhs is big
-- Alas!
  where
    active = isActive (sm_phase (getMode env)) (idInlineActivation bndr)
      -- See Note [pre/postInlineUnconditionally in gentle mode]
{-
Note [Top level and postInlineUnconditionally]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We don't do postInlineUnconditionally for top-level things (even for
ones that are trivial):
* Doing so will inline top-level error expressions that have been
carefully floated out by FloatOut. More generally, it might
replace static allocation with dynamic.
* Even for trivial expressions there's a problem. Consider
{-# RULE "foo" forall (xs::[T]). reverse xs = ruggle xs #-}
blah xs = reverse xs
ruggle = sort
In one simplifier pass we might fire the rule, getting
blah xs = ruggle xs
but in *that* simplifier pass we must not do postInlineUnconditionally
on 'ruggle' because then we'll have an unbound occurrence of 'ruggle'
If the rhs is trivial it'll be inlined by callSiteInline, and then
the binding will be dead and discarded by the next use of OccurAnal
* There is less point, because the main goal is to get rid of local
bindings used in multiple case branches.
* The inliner should inline trivial things at call sites anyway.
Note [Stable unfoldings and postInlineUnconditionally]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Do not do postInlineUnconditionally if the Id has a stable unfolding,
otherwise we lose the unfolding. Example
-- f has stable unfolding with rhs (e |> co)
-- where 'e' is big
f = e |> co
Then there's a danger we'll optimise to
f' = e
f = f' |> co
and now postInlineUnconditionally, losing the stable unfolding on f. Now f'
won't inline because 'e' is too big.
c.f. Note [Stable unfoldings and preInlineUnconditionally]
************************************************************************
* *
Rebuilding a lambda
* *
************************************************************************
-}
mkLam :: [OutBndr] -> OutExpr -> SimplCont -> SimplM OutExpr
-- Rebuild a lambda from binders and a body, applying local
-- simplifications on the way.
-- mkLam tries three things
--      a) eta reduction, if that gives a trivial expression
--      b) eta expansion [only if there are some value lambdas]
mkLam [] body _cont
  = return body
mkLam bndrs body cont
  = do { dflags <- getDynFlags
       ; mkLam' dflags bndrs body }
  where
    mkLam' :: DynFlags -> [OutBndr] -> OutExpr -> SimplM OutExpr
    -- Float a cast outside the lambdas: see Note [Casts and lambdas].
    -- Only safe when no binder is a coercion variable free in the cast.
    mkLam' dflags bndrs (Cast body co)
      | not (any bad bndrs)
        -- Note [Casts and lambdas]
      = do { lam <- mkLam' dflags bndrs body
           ; return (mkCast lam (mkPiCos Representational bndrs co)) }
      where
        co_vars  = tyCoVarsOfCo co
        bad bndr = isCoVar bndr && bndr `elemVarSet` co_vars

    -- Gather directly-nested lambdas into a single binder group
    mkLam' dflags bndrs body@(Lam {})
      = mkLam' dflags (bndrs ++ bndrs1) body1
      where
        (bndrs1, body1) = collectBinders body

    -- Float a floatable tick outside the lambda group
    mkLam' dflags bndrs (Tick t expr)
      | tickishFloatable t
      = mkTick t <$> mkLam' dflags bndrs expr

    mkLam' dflags bndrs body
      | gopt Opt_DoEtaReduction dflags
      , Just etad_lam <- tryEtaReduce bndrs body
      = do { tick (EtaReduction (head bndrs))
           ; return etad_lam }

      | not (contIsRhs cont)   -- See Note [Eta-expanding lambdas]
      , gopt Opt_DoLambdaEtaExpansion dflags
      , any isRuntimeVar bndrs
      , let body_arity = exprEtaExpandArity dflags body
      , body_arity > 0
      = do { tick (EtaExpansion (head bndrs))
           ; let res = mkLams bndrs (etaExpand body_arity body)
           ; traceSmpl "eta expand" (vcat [ text "before" <+> ppr (mkLams bndrs body)
                                          , text "after" <+> ppr res ])
           ; return res }

      | otherwise
      = return (mkLams bndrs body)
{-
Note [Eta expanding lambdas]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
In general we *do* want to eta-expand lambdas. Consider
f (\x -> case x of (a,b) -> \s -> blah)
where 's' is a state token, and hence can be eta expanded. This
showed up in the code for GHC.IO.Handle.Text.hPutChar, a rather
important function!
The eta-expansion will never happen unless we do it now. (Well, it's
possible that CorePrep will do it, but CorePrep only has a half-baked
eta-expander that can't deal with casts. So it's much better to do it
here.)
However, when the lambda is let-bound, as the RHS of a let, we have a
better eta-expander (in the form of tryEtaExpandRhs), so we don't
bother to try expansion in mkLam in that case; hence the contIsRhs
guard.
Note [Casts and lambdas]
~~~~~~~~~~~~~~~~~~~~~~~~
Consider
(\x. (\y. e) `cast` g1) `cast` g2
There is a danger here that the two lambdas look separated, and the
full laziness pass might float an expression to between the two.
So this equation in mkLam' floats the g1 out, thus:
(\x. e `cast` g1) --> (\x.e) `cast` (tx -> g1)
where x:tx.
In general, this floats casts outside lambdas, where (I hope) they
might meet and cancel with some other cast:
\x. e `cast` co ===> (\x. e) `cast` (tx -> co)
/\a. e `cast` co ===> (/\a. e) `cast` (/\a. co)
/\g. e `cast` co ===> (/\g. e) `cast` (/\g. co)
(if not (g `in` co))
Notice that it works regardless of 'e'. Originally it worked only
if 'e' was itself a lambda, but in some cases that resulted in
fruitless iteration in the simplifier. A good example was when
compiling Text.ParserCombinators.ReadPrec, where we had a definition
like (\x. Get `cast` g)
where Get is a constructor with nonzero arity. Then mkLam eta-expanded
the Get, and the next iteration eta-reduced it, and then eta-expanded
it again.
Note also the side condition for the case of coercion binders.
It does not make sense to transform
/\g. e `cast` g ==> (/\g.e) `cast` (/\g.g)
because the latter is not well-kinded.
************************************************************************
* *
Eta expansion
* *
************************************************************************
-}
tryEtaExpandRhs :: SimplEnv -> OutId -> OutExpr -> SimplM (Arity, OutExpr)
-- Eta-expand the RHS of a let-binding when arity analysis says it is
-- worth it; returns the (possibly increased) arity with the new RHS.
-- See Note [Eta-expanding at let bindings]
tryEtaExpandRhs env bndr rhs
  = do { dflags <- getDynFlags
       ; (new_arity, new_rhs) <- try_expand dflags

       ; WARN( new_arity < old_id_arity,
               (text "Arity decrease:" <+> (ppr bndr <+> ppr old_id_arity
                <+> ppr old_arity <+> ppr new_arity) $$ ppr new_rhs) )
               -- Note [Arity decrease] in Simplify
         return (new_arity, new_rhs) }
  where
    try_expand dflags
      | exprIsTrivial rhs
      = return (exprArity rhs, rhs)

      | sm_eta_expand (getMode env)   -- Provided eta-expansion is on
      , let new_arity1 = findRhsArity dflags bndr rhs old_arity
            new_arity2 = idCallArity bndr
            new_arity  = max new_arity1 new_arity2
      , new_arity > old_arity   -- And the current manifest arity isn't enough
      = do { tick (EtaExpansion bndr)
           ; return (new_arity, etaExpand new_arity rhs) }

      | otherwise
      = return (old_arity, rhs)

    old_arity    = exprArity rhs -- See Note [Do not eta-expand PAPs]
    old_id_arity = idArity bndr
{-
Note [Eta-expanding at let bindings]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We now eta expand at let-bindings, which is where the payoff comes.
The most significant thing is that we can do a simple arity analysis
(in CoreArity.findRhsArity), which we can't do for free-floating lambdas
One useful consequence of not eta-expanding lambdas is this example:
genMap :: C a => ...
{-# INLINE genMap #-}
genMap f xs = ...
myMap :: D a => ...
{-# INLINE myMap #-}
myMap = genMap
Notice that 'genMap' should only inline if applied to two arguments.
In the stable unfolding for myMap we'll have the unfolding
(\d -> genMap Int (..d..))
We do not want to eta-expand to
(\d f xs -> genMap Int (..d..) f xs)
because then 'genMap' will inline, and it really shouldn't: at least
as far as the programmer is concerned, it's not applied to two
arguments!
Note [Do not eta-expand PAPs]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
We used to have old_arity = manifestArity rhs, which meant that we
would eta-expand even PAPs. But this gives no particular advantage,
and can lead to a massive blow-up in code size, exhibited by Trac #9020.
Suppose we have a PAP
foo :: IO ()
foo = returnIO ()
Then we can eta-expand to
foo = (\eta. (returnIO () |> sym g) eta) |> g
where
g :: IO () ~ State# RealWorld -> (# State# RealWorld, () #)
But there is really no point in doing this, and it generates masses of
coercions and whatnot that eventually disappear again. For T9020, GHC
allocated 6.6G before, and 0.8G afterwards; and residency dropped from
1.8G to 45M.
But note that this won't eta-expand, say
f = \g -> map g
Does it matter not eta-expanding such functions? I'm not sure. Perhaps
strictness analysis will have less to bite on?
************************************************************************
* *
\subsection{Floating lets out of big lambdas}
* *
************************************************************************
Note [Floating and type abstraction]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Consider this:
x = /\a. C e1 e2
We'd like to float this to
y1 = /\a. e1
y2 = /\a. e2
x = /\a. C (y1 a) (y2 a)
for the usual reasons: we want to inline x rather vigorously.
You may think that this kind of thing is rare. But in some programs it is
common. For example, if you do closure conversion you might get:
data a :-> b = forall e. (e -> a -> b) :$ e
f_cc :: forall a. a :-> a
f_cc = /\a. (\e. id a) :$ ()
Now we really want to inline that f_cc thing so that the
construction of the closure goes away.
So I have elaborated simplLazyBind to understand right-hand sides that look
like
/\ a1..an. body
and treat them specially. The real work is done in SimplUtils.abstractFloats,
but there is quite a bit of plumbing in simplLazyBind as well.
The same transformation is good when there are lets in the body:
/\abc -> let(rec) x = e in b
==>
let(rec) x' = /\abc -> let x = x' a b c in e
in
/\abc -> let x = x' a b c in b
This is good because it can turn things like:
let f = /\a -> letrec g = ... g ... in g
into
letrec g' = /\a -> ... g' a ...
in
let f = /\ a -> g' a
which is better. In effect, it means that big lambdas don't impede
let-floating.
This optimisation is CRUCIAL in eliminating the junk introduced by
desugaring mutually recursive definitions. Don't eliminate it lightly!
[May 1999] If we do this transformation *regardless* then we can
end up with some pretty silly stuff. For example,
let
st = /\ s -> let { x1=r1 ; x2=r2 } in ...
in ..
becomes
let y1 = /\s -> r1
y2 = /\s -> r2
st = /\s -> ...[y1 s/x1, y2 s/x2]
in ..
Unless the "..." is a WHNF there is really no point in doing this.
Indeed it can make things worse. Suppose x1 is used strictly,
and is of the form
x1* = case f y of { (a,b) -> e }
If we abstract this wrt the tyvar we then can't do the case inline
as we would normally do.
That's why the whole transformation is part of the same process that
floats let-bindings and constructor arguments out of RHSs. In particular,
it is guarded by the doFloatFromRhs call in simplLazyBind.
Note [Which type variables to abstract over]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Abstract only over the type variables free in the rhs wrt which the
new binding is abstracted. Note that
* The naive approach of abstracting wrt the
tyvars free in the Id's /type/ fails. Consider:
/\ a b -> let t :: (a,b) = (e1, e2)
x :: a = fst t
in ...
Here, b isn't free in x's type, but we must nevertheless
abstract wrt b as well, because t's type mentions b.
Since t is floated too, we'd end up with the bogus:
poly_t = /\ a b -> (e1, e2)
poly_x = /\ a -> fst (poly_t a *b*)
* We must do closeOverKinds. Example (Trac #10934):
f = /\k (f:k->*) (a:k). let t = AccFailure @ (f a) in ...
Here we want to float 't', but we must remember to abstract over
'k' as well, even though it is not explicitly mentioned in the RHS,
otherwise we get
t = /\ (f:k->*) (a:k). AccFailure @ (f a)
which is obviously bogus.
-}
abstractFloats :: [OutTyVar] -> SimplEnv -> OutExpr -> SimplM ([OutBind], OutExpr)
-- Abstract the floated bindings of body_env over the type variables
-- main_tvs, turning each float  x = e  into  poly_x = /\tvs. e  and
-- substituting (poly_x tvs) for x in the body and in later floats.
-- See Note [Floating and type abstraction].
abstractFloats main_tvs body_env body
  = ASSERT( notNull body_floats )
    do { (subst, float_binds) <- mapAccumLM abstract empty_subst body_floats
       ; return (float_binds, CoreSubst.substExpr (text "abstract_floats1") subst body) }
  where
    main_tv_set = mkVarSet main_tvs
    body_floats = getFloatBinds body_env
    empty_subst = CoreSubst.mkEmptySubst (seInScope body_env)

    abstract :: CoreSubst.Subst -> OutBind -> SimplM (CoreSubst.Subst, OutBind)
    abstract subst (NonRec id rhs)
      = do { (poly_id, poly_app) <- mk_poly tvs_here id
           ; let poly_rhs = mkLams tvs_here rhs'
                 subst'   = CoreSubst.extendIdSubst subst id poly_app
           ; return (subst', (NonRec poly_id poly_rhs)) }
      where
        rhs' = CoreSubst.substExpr (text "abstract_floats2") subst rhs

        -- tvs_here: see Note [Which type variables to abstract over]
        tvs_here = varSetElemsWellScoped $
                   intersectVarSet main_tv_set $
                   closeOverKinds $
                   exprSomeFreeVars isTyVar rhs'

    abstract subst (Rec prs)
      = do { (poly_ids, poly_apps) <- mapAndUnzipM (mk_poly tvs_here) ids
           ; let subst' = CoreSubst.extendSubstList subst (ids `zip` poly_apps)
                 poly_rhss = [ mkLams tvs_here (CoreSubst.substExpr (text "abstract_floats3") subst' rhs)
                             | rhs <- rhss ]
           ; return (subst', Rec (poly_ids `zip` poly_rhss)) }
      where
        (ids,rhss) = unzip prs
        -- For a recursive group, it's a bit of a pain to work out the minimal
        -- set of tyvars over which to abstract:
        --      /\ a b c.  let x = ...a... in
        --                 letrec { p = ...x...q...
        --                          q = .....p...b... } in
        --                 ...
        -- Since 'x' is abstracted over 'a', the {p,q} group must be abstracted
        -- over 'a' (because x is replaced by (poly_x a)) as well as 'b'.
        -- Since it's a pain, we just use the whole set, which is always safe
        --
        -- If you ever want to be more selective, remember this bizarre case too:
        --      x::a = x
        -- Here, we must abstract 'x' over 'a'.
        tvs_here = toposortTyVars main_tvs

    -- Make the polymorphic version of 'var', abstracted over tvs_here,
    -- together with the application of it to those tyvars.
    mk_poly tvs_here var
      = do { uniq <- getUniqueM
           ; let poly_name = setNameUnique (idName var) uniq     -- Keep same name
                 poly_ty   = mkInvForAllTys tvs_here (idType var) -- But new type of course
                 poly_id   = transferPolyIdInfo var tvs_here $    -- Note [transferPolyIdInfo] in Id.hs
                             mkLocalIdOrCoVar poly_name poly_ty
           ; return (poly_id, mkTyApps (Var poly_id) (mkTyVarTys tvs_here)) }
        -- In the olden days, it was crucial to copy the occInfo of the original var,
        -- because we were looking at occurrence-analysed but as yet unsimplified code!
        -- In particular, we mustn't lose the loop breakers.  BUT NOW we are looking
        -- at already simplified code, so it doesn't matter
        --
        -- It's even right to retain single-occurrence or dead-var info:
        -- Suppose we started with  /\a -> let x = E in B
        -- where x occurs once in B. Then we transform to:
        --      let x' = /\a -> E in /\a -> let x* = x' a in B
        -- where x* has an INLINE prag on it.  Now, once x* is inlined,
        -- the occurrences of x' will be just the occurrences originally
        -- pinned on x.
{-
Note [Abstract over coercions]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If a coercion variable (g :: a ~ Int) is free in the RHS, then so is the
type variable a. Rather than sort this mess out, we simply bale out and abstract
wrt all the type variables if any of them are coercion variables.
Historical note: if you use let-bindings instead of a substitution, beware of this:
-- Suppose we start with:
--
-- x = /\ a -> let g = G in E
--
-- Then we'll float to get
--
-- x = let poly_g = /\ a -> G
-- in /\ a -> let g = poly_g a in E
--
-- But now the occurrence analyser will see just one occurrence
-- of poly_g, not inside a lambda, so the simplifier will
-- PreInlineUnconditionally poly_g back into g! Back to square 1!
-- (I used to think that the "don't inline lone occurrences" stuff
-- would stop this happening, but since it's the *only* occurrence,
-- PreInlineUnconditionally kicks in first!)
--
-- Solution: put an INLINE note on g's RHS, so that poly_g seems
-- to appear many times. (NB: mkInlineMe eliminates
-- such notes on trivial RHSs, so do it manually.)
************************************************************************
* *
prepareAlts
* *
************************************************************************
prepareAlts tries these things:
1. Eliminate alternatives that cannot match, including the
DEFAULT alternative.
2. If the DEFAULT alternative can match only one possible constructor,
then make that constructor explicit.
e.g.
case e of x { DEFAULT -> rhs }
===>
case e of x { (a,b) -> rhs }
where the type is a single constructor type. This gives better code
when rhs also scrutinises x or e.
3. Returns a list of the constructors that cannot match in the
DEFAULT alternative (if there is one)
Here "cannot match" includes knowledge from GADTs
It's a good idea to do this stuff before simplifying the alternatives, to
avoid simplifying alternatives we know can't happen, and to come up with
the list of constructors that are handled, to put into the IdInfo of the
case binder, for use when simplifying the alternatives.
Eliminating the default alternative in (1) isn't so obvious, but it can
happen:
data Colour = Red | Green | Blue
f x = case x of
Red -> ..
Green -> ..
DEFAULT -> h x
h y = case y of
Blue -> ..
DEFAULT -> [ case y of ... ]
If we inline h into f, the default case of the inlined h can't happen.
If we don't notice this, we may end up filtering out *all* the cases
of the inner case y, which give us nowhere to go!
-}
prepareAlts :: OutExpr -> OutId -> [InAlt] -> SimplM ([AltCon], [InAlt])
-- Filter out impossible alternatives, refine the DEFAULT alternative,
-- and merge identical alternatives; see the prepareAlts notes above.
-- Also returns the constructors that cannot match the DEFAULT branch.
-- The returned alternatives can be empty, none are possible
prepareAlts scrut case_bndr' alts
  | Just (tc, tys) <- splitTyConApp_maybe (varType case_bndr')
      -- Case binder is needed just for its type. Note that as an
      -- OutId, it has maximum information; this is important.
      -- Test simpl013 is an example
  = do { us <- getUniquesM
       ; let (idcs1, alts1)        = filterAlts tc tys imposs_cons alts
             (yes2,  alts2)        = refineDefaultAlt us tc tys idcs1 alts1
             (yes3, idcs3, alts3)  = combineIdenticalAlts idcs1 alts2
             -- "idcs" stands for "impossible default data constructors"
             -- i.e. the constructors that can't match the default case
       ; when yes2 $ tick (FillInCaseDefault case_bndr')
       ; when yes3 $ tick (AltMerge case_bndr')
       ; return (idcs3, alts3) }

  | otherwise  -- Not a data type, so nothing interesting happens
  = return ([], alts)
  where
    -- Constructors already excluded by the scrutinee's unfolding
    imposs_cons = case scrut of
                    Var v -> otherCons (idUnfolding v)
                    _     -> []
{-
************************************************************************
* *
mkCase
* *
************************************************************************
mkCase tries these things
1. Merge Nested Cases
case e of b { ==> case e of b {
p1 -> rhs1 p1 -> rhs1
... ...
pm -> rhsm pm -> rhsm
_ -> case b of b' { pn -> let b'=b in rhsn
pn -> rhsn ...
... po -> let b'=b in rhso
po -> rhso _ -> let b'=b in rhsd
_ -> rhsd
}
which merges two cases in one case when -- the default alternative of
the outer case scrutinises the same variable as the outer case. This
transformation is called Case Merging. It avoids that the same
variable is scrutinised multiple times.
2. Eliminate Identity Case
case e of ===> e
True -> True;
False -> False
and similar friends.
-}
mkCase, mkCase1, mkCase2
   :: DynFlags
   -> OutExpr -> OutId
   -> OutType -> [OutAlt]   -- Alternatives in standard (increasing) order
   -> SimplM OutExpr
-- Build a case expression, applying the transformations described in the
-- mkCase notes above (case merging, identity-case elimination).

--------------------------------------------------
--      1. Merge Nested Cases
--------------------------------------------------

mkCase dflags scrut outer_bndr alts_ty ((DEFAULT, _, deflt_rhs) : outer_alts)
  | gopt Opt_CaseMerge dflags
  , (ticks, Case (Var inner_scrut_var) inner_bndr _ inner_alts)
       <- stripTicksTop tickishFloatable deflt_rhs
  , inner_scrut_var == outer_bndr   -- Inner case scrutinises the outer case binder
  = do { tick (CaseMerge outer_bndr)

       ; let wrap_alt (con, args, rhs) = ASSERT( outer_bndr `notElem` args )
                                         (con, args, wrap_rhs rhs)
               -- Simplifier's no-shadowing invariant should ensure
               -- that outer_bndr is not shadowed by the inner patterns
             wrap_rhs rhs = Let (NonRec inner_bndr (Var outer_bndr)) rhs
               -- The let is OK even for unboxed binders,

             wrapped_alts | isDeadBinder inner_bndr = inner_alts
                          | otherwise               = map wrap_alt inner_alts

             merged_alts = mergeAlts outer_alts wrapped_alts
               -- NB: mergeAlts gives priority to the left
               --      case x of
               --        A -> e1
               --        DEFAULT -> case x of
               --                      A -> e2
               --                      B -> e3
               -- When we merge, we must ensure that e1 takes
               -- precedence over e2 as the value for A!

       ; fmap (mkTicks ticks) $
         mkCase1 dflags scrut outer_bndr alts_ty merged_alts
       }
       -- Warning: don't call mkCase recursively!
       -- Firstly, there's no point, because inner alts have already had
       -- mkCase applied to them, so they won't have a case in their default
       -- Secondly, if you do, you get an infinite loop, because the bindCaseBndr
       -- in munge_rhs may put a case into the DEFAULT branch!

mkCase dflags scrut bndr alts_ty alts = mkCase1 dflags scrut bndr alts_ty alts
--------------------------------------------------
--      2. Eliminate Identity Case
--------------------------------------------------

-- If every alternative just returns the scrutinee (modulo a uniform
-- cast and floatable ticks), drop the case entirely.
mkCase1 _dflags scrut case_bndr _ alts@((_,_,rhs1) : _)   -- Identity case
  | all identity_alt alts
  = do { tick (CaseIdentity case_bndr)
       ; return (mkTicks ticks $ re_cast scrut rhs1) }
  where
    ticks = concatMap (stripTicksT tickishFloatable . thdOf3) (tail alts)
    identity_alt (con, args, rhs) = check_eq rhs con args

    -- check_eq rhs con args: is this alternative's RHS equivalent to
    -- re-building the scrutinised value?
    check_eq (Cast rhs co) con args
      = not (any (`elemVarSet` tyCoVarsOfCo co) args) && check_eq rhs con args
        -- See Note [RHS casts]
    check_eq (Lit lit) (LitAlt lit') _      = lit == lit'
    check_eq (Var v) _ _ | v == case_bndr   = True
    check_eq (Var v) (DataAlt con) []       = v == dataConWorkId con
                                              -- Optimisation only
    check_eq (Tick t e) alt args            = tickishFloatable t &&
                                              check_eq e alt args
    check_eq rhs (DataAlt con) args         = cheapEqExpr' tickishFloatable rhs $
                                              mkConApp con (arg_tys ++
                                                            varsToCoreExprs args)
    check_eq _ _ _ = False

    arg_tys = map Type (tyConAppArgs (idType case_bndr))

    -- Note [RHS casts]
    -- ~~~~~~~~~~~~~~~~
    -- We've seen this:
    --      case e of x { _ -> x `cast` c }
    -- And we definitely want to eliminate this case, to give
    --      e `cast` c
    -- So we throw away the cast from the RHS, and reconstruct
    -- it at the other end.  All the RHS casts must be the same
    -- if (all identity_alt alts) holds.
    --
    -- Don't worry about nested casts, because the simplifier combines them

    re_cast scrut (Cast rhs co) = Cast (re_cast scrut rhs) co
    re_cast scrut _             = scrut

mkCase1 dflags scrut bndr alts_ty alts = mkCase2 dflags scrut bndr alts_ty alts
--------------------------------------------------
--      Catch-all
--------------------------------------------------

-- No special transformation applies: build the Case node as given.
mkCase2 _dflags scrut bndr alts_ty alts
  = return (Case scrut bndr alts_ty alts)
{-
Note [Dead binders]
~~~~~~~~~~~~~~~~~~~~
Note that dead-ness is maintained by the simplifier, so that it is
accurate after simplification as well as before.
Note [Cascading case merge]
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Case merging should cascade in one sweep, because it
happens bottom-up
case e of a {
DEFAULT -> case a of b
DEFAULT -> case b of c {
DEFAULT -> e
A -> ea
B -> eb
C -> ec
==>
case e of a {
DEFAULT -> case a of b
DEFAULT -> let c = b in e
A -> let c = b in ea
B -> eb
C -> ec
==>
case e of a {
DEFAULT -> let b = a in let c = b in e
A -> let b = a in let c = b in ea
B -> let b = a in eb
C -> ec
However here's a tricky case that we still don't catch, and I don't
see how to catch it in one pass:
case x of c1 { I# a1 ->
case a1 of c2 ->
0 -> ...
DEFAULT -> case x of c3 { I# a2 ->
case a2 of ...
After occurrence analysis (and its binder-swap) we get this
case x of c1 { I# a1 ->
let x = c1 in -- Binder-swap addition
case a1 of c2 ->
0 -> ...
DEFAULT -> case x of c3 { I# a2 ->
case a2 of ...
When we simplify the inner case x, we'll see that
x=c1=I# a1. So we'll bind a2 to a1, and get
case x of c1 { I# a1 ->
case a1 of c2 ->
0 -> ...
DEFAULT -> case a1 of ...
This is correct, but we can't do a case merge in this sweep
because c2 /= a1. Reason: the binding c1=I# a1 went inwards
without getting changed to c1=I# c2.
I don't think this is worth fixing, even if I knew how. It'll
all come out in the next pass anyway.
-}
|
tjakway/ghcjvm
|
compiler/simplCore/SimplUtils.hs
|
Haskell
|
bsd-3-clause
| 79,475
|
module Q3Demo.Loader.Zip where
{-
Zip specification:
http://en.wikipedia.org/wiki/Zip_(file_format)
http://www.pkware.com/documents/casestudies/APPNOTE.TXT
-}
import Control.Applicative
import Data.Binary.Get
import Data.Bits
import Data.Word
import System.IO.MMap
import qualified Codec.Compression.Zlib.Raw as Zlib
import qualified Data.ByteString.Char8 as SB
import qualified Data.ByteString.Lazy as LB
-- | One file stored in the zip archive.
data Entry
= Entry
{ eFilePath :: String -- ^ path of the file within the archive
, eIsCompressed :: Bool -- ^ True when 'eData' is DEFLATE-compressed
, eData :: LB.ByteString -- ^ raw payload bytes (still compressed if flagged)
}
-- | An archive is simply the list of its entries.
type Archive = [Entry]
-- | Return an entry's payload, inflating it when it is stored compressed.
decompress' :: Entry -> LB.ByteString
decompress' (Entry _ False dat) = dat
decompress' (Entry _ True dat) = Zlib.decompress dat
-- | Strict variant of 'decompress'': inflate the entry's payload and
-- flatten the lazy chunks into a single strict ByteString.
-- (Uses 'LB.toStrict', the standard-library form of
-- @SB.concat . LB.toChunks@.)
decompress :: Entry -> SB.ByteString
decompress = LB.toStrict . decompress'
-- | Memory-map the named zip file and parse all of its local-file
-- entries.  NOTE(review): the mapping stays alive for as long as the
-- returned lazy ByteString chunks are referenced.
readArchive :: String -> IO Archive
readArchive n = runGet getArchive . LB.fromChunks . (:[]) <$> mmapFileByteString n Nothing
-- | Run the parser @a@ repeatedly for as long as the next 32-bit
-- little-endian word (peeked with 'lookAhead', so it is not consumed)
-- equals the signature @c@; collect the results in order.
chunks :: Word32 -> Get a -> Get [a]
chunks c a = do
  code <- lookAhead getWord32le
  if code == c
    then (:) <$> a <*> chunks c a
    else return []
-- | Parse the archive as a run of local-file records, each introduced by
-- the signature 0x04034b50.  Parsing stops at the first non-matching
-- word (e.g. the start of the central directory, which is not read).
getArchive :: Get Archive
getArchive = chunks 0x04034b50 $ do
-- local file header
skip 6 -- signature (4 bytes, still unconsumed) + version-needed (2)
flag <- getWord16le -- general-purpose bit flags
isComp <- getWord16le >>= \i -> case i of -- compression method
0 -> return False -- 0 = stored (no compression)
8 -> return True -- 8 = DEFLATE
_ -> fail "Unsupported compression method!"
skip 8 -- mod time (2) + mod date (2) + CRC-32 (4)
size <- getWord32le -- compressed size
skip 4 -- uncompressed size
nameLen <- getWord16le
extraLen <- getWord16le
name <- SB.unpack <$> getByteString (fromIntegral nameLen)
skip $ fromIntegral extraLen
-- Flag bit 3 means the sizes live in a trailing data descriptor,
-- which this parser does not support.
d <- if flag .&. 8 /= 0 then fail "Zip data descriptor is not supported!" else getLazyByteString $ fromIntegral size
return $ Entry name isComp d
|
csabahruska/q3demo
|
src/Q3Demo/Loader/Zip.hs
|
Haskell
|
bsd-3-clause
| 1,705
|
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE KindSignatures #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE PolyKinds #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeOperators #-}
{-# OPTIONS_GHC -Wall #-}
-- TODO: Complex Numbers
module Language.Fortran.Model.Op.Core.Match where
import Control.Monad ((>=>))
import Data.Typeable
import Control.Lens
import Data.Singletons
import Data.Singletons.Prelude.List
import Data.Vinyl hiding ((:~:), Field)
import Language.Fortran.Model.Op.Core.Core
import Language.Fortran.Model.Singletons
import Language.Fortran.Model.Types
import Language.Fortran.Model.Types.Match
import Language.Fortran.Model.Util
-- | Evidence that a type is a numeric primitive: packages the precision
-- and basic-type singletons, the numeric-kind witness and the primitive.
data MatchNumType a where
MatchNumType :: Sing p -> Sing k -> NumericBasicType k -> Prim p k a -> MatchNumType (PrimS a)
-- | Checks if the given type is numeric, and if so returns a proof of that
-- fact.
matchNumType :: D a -> Maybe (MatchNumType a)
matchNumType = matchPrimD >=> \case
MatchPrimD (MatchPrim sp SBTInt) p -> Just (MatchNumType sp SBTInt NBTInt p)
MatchPrimD (MatchPrim sp SBTReal) p -> Just (MatchNumType sp SBTReal NBTReal p)
_ -> Nothing -- logical, character, etc. are not numeric
-- | Evidence that two numeric primitives can be combined; the third
-- 'Prim' is the result at the joined precision and basic type.
data MatchNumR a b where
MatchNumR
:: NumericBasicType k1 -> NumericBasicType k2
-> Prim p1 k1 a -> Prim p2 k2 b
-> Prim (PrecMax p1 p2) (BasicTypeMax k1 k2) c
-> MatchNumR (PrimS a) (PrimS b)
-- | Checks if it is possible to perform a binary numeric operation on arguments
-- with the given respective types. If so, returns the type that would result
-- plus some more information about the types.
matchNumR :: D a -> D b -> Maybe (MatchNumR a b)
matchNumR = matchingWith2 matchNumType matchNumType $ \case
(MatchNumType sp1 sk1 nk1 prim1, MatchNumType sp2 sk2 nk2 prim2) ->
-- Join the two primitives at max precision / max basic type.
makePrim (sPrecMax sp1 sp2) (sBasicTypeMax sk1 sk2) <$$> \case
MakePrim prim3 -> MatchNumR nk1 nk2 prim1 prim2 prim3
-- | Compute the "ceiling" of two primitive types: the primitive (if one
-- exists) whose precision and basic type are the respective maxima of
-- the arguments'.
primCeil :: Prim p1 k1 a -> Prim p2 k2 b -> Maybe (MakePrim (PrecMax p1 p2) (BasicTypeMax k1 k2))
primCeil prim1 prim2 = case (matchPrim prim1, matchPrim prim2) of
(MatchPrim p1 k1, MatchPrim p2 k2) -> makePrim (sPrecMax p1 p2) (sBasicTypeMax k1 k2)
-- | Evidence that two primitive types may be compared with each other.
data MatchCompareR a b where
MatchCompareR :: ComparableBasicTypes k1 k2 -> Prim p1 k1 a -> Prim p2 k2 b -> MatchCompareR (PrimS a) (PrimS b)
-- | Checks if it is possible to perform a binary comparison (equality or
-- relational) operation on arguments with the given respective types. If so,
-- returns proof of that fact.
matchCompareR :: D a -> D b -> Maybe (MatchCompareR a b)
matchCompareR =
-- Any numeric pair is comparable ...
(matchingWithBoth matchNumR $ Just . \case
MatchNumR nk1 nk2 p1 p2 _ -> MatchCompareR (CBTNum nk1 nk2) p1 p2
) `altf2`
-- ... otherwise both operands must be logical, or both character.
(matchingWith2 matchPrimD matchPrimD $ \case
(MatchPrimD (MatchPrim _ SBTLogical) p1, MatchPrimD (MatchPrim _ SBTLogical) p2) ->
Just (MatchCompareR CBTBool p1 p2)
(MatchPrimD (MatchPrim _ SBTChar) p1, MatchPrimD (MatchPrim _ SBTChar) p2) ->
Just (MatchCompareR CBTChar p1 p2)
_ -> Nothing
)
--------------------------------------------------------------------------------
-- Matching on operator result types
--------------------------------------------------------------------------------
-- | Packages an operator-application witness ('OpSpec') together with
-- the type the application produces.
data MatchOpSpec ok args where
MatchOpSpec :: OpSpec ok args result -> D result -> MatchOpSpec ok args
-- | Checks if it is possible to apply the given operator to the given
-- arguments, and if so returns a proof of that fact, packaged with information
-- about the result of applying the operator.
matchOpSpec :: Op (Length args) ok -> Rec D args -> Maybe (MatchOpSpec ok args)
matchOpSpec operator argTypes =
-- Dispatch on argument arity first, then on the operator itself.
case argTypes of
RNil -> case operator of
OpLit -> Nothing -- literals are not handled here
d1 :& RNil -> case operator of
-- Unary numeric operators preserve the operand's primitive type.
OpNeg -> argsNumeric <$$> \case
MatchNumType _ _ nk p :& RNil -> MatchOpSpec (OSNum1 nk p p) d1
OpPos -> argsNumeric <$$> \case
MatchNumType _ _ nk p :& RNil -> MatchOpSpec (OSNum1 nk p p) d1
-- Logical negation requires a logical operand; result is 8-bit bool.
OpNot -> argsPrim >>= \case
MatchPrimD (MatchPrim _ SBTLogical) p :& RNil -> Just $ MatchOpSpec (OSLogical1 p PBool8) (DPrim PBool8)
_ -> Nothing
-- In the deref case, we don't have access to a particular field to
-- dereference, so there's nothing we can return.
OpDeref -> Nothing
d1 :& d2 :& RNil -> case operator of
-- Binary arithmetic: result joins precisions and basic types.
OpAdd -> matchNumR d1 d2 <$$> \case
MatchNumR nk1 nk2 p1 p2 p3 -> MatchOpSpec (OSNum2 nk1 nk2 p1 p2 p3) (DPrim p3)
OpSub -> matchNumR d1 d2 <$$> \case
MatchNumR nk1 nk2 p1 p2 p3 -> MatchOpSpec (OSNum2 nk1 nk2 p1 p2 p3) (DPrim p3)
OpMul -> matchNumR d1 d2 <$$> \case
MatchNumR nk1 nk2 p1 p2 p3 -> MatchOpSpec (OSNum2 nk1 nk2 p1 p2 p3) (DPrim p3)
OpDiv -> matchNumR d1 d2 <$$> \case
MatchNumR nk1 nk2 p1 p2 p3 -> MatchOpSpec (OSNum2 nk1 nk2 p1 p2 p3) (DPrim p3)
-- Logical connectives: both operands logical, result 8-bit bool.
OpAnd -> argsPrim >>= \case
MatchPrimD (MatchPrim _ SBTLogical) p1 :& MatchPrimD (MatchPrim _ SBTLogical) p2 :& RNil ->
Just $ MatchOpSpec (OSLogical2 p1 p2 PBool8) (DPrim PBool8)
_ -> Nothing
OpOr -> argsPrim >>= \case
MatchPrimD (MatchPrim _ SBTLogical) p1 :& MatchPrimD (MatchPrim _ SBTLogical) p2 :& RNil ->
Just $ MatchOpSpec (OSLogical2 p1 p2 PBool8) (DPrim PBool8)
_ -> Nothing
OpEquiv -> argsPrim >>= \case
MatchPrimD (MatchPrim _ SBTLogical) p1 :& MatchPrimD (MatchPrim _ SBTLogical) p2 :& RNil ->
Just $ MatchOpSpec (OSLogical2 p1 p2 PBool8) (DPrim PBool8)
_ -> Nothing
OpNotEquiv -> argsPrim >>= \case
MatchPrimD (MatchPrim _ SBTLogical) p1 :& MatchPrimD (MatchPrim _ SBTLogical) p2 :& RNil ->
Just $ MatchOpSpec (OSLogical2 p1 p2 PBool8) (DPrim PBool8)
_ -> Nothing
-- Comparisons: any comparable pair, result 8-bit bool.
OpEq -> matchCompareR d1 d2 <$$> \case
MatchCompareR cmp p1 p2 -> MatchOpSpec (OSEq cmp p1 p2 PBool8) (DPrim PBool8)
OpNE -> matchCompareR d1 d2 <$$> \case
MatchCompareR cmp p1 p2 -> MatchOpSpec (OSEq cmp p1 p2 PBool8) (DPrim PBool8)
OpLT -> matchCompareR d1 d2 <$$> \case
MatchCompareR cmp p1 p2 -> MatchOpSpec (OSRel cmp p1 p2 PBool8) (DPrim PBool8)
OpLE -> matchCompareR d1 d2 <$$> \case
MatchCompareR cmp p1 p2 -> MatchOpSpec (OSRel cmp p1 p2 PBool8) (DPrim PBool8)
OpGT -> matchCompareR d1 d2 <$$> \case
MatchCompareR cmp p1 p2 -> MatchOpSpec (OSRel cmp p1 p2 PBool8) (DPrim PBool8)
OpGE -> matchCompareR d1 d2 <$$> \case
MatchCompareR cmp p1 p2 -> MatchOpSpec (OSRel cmp p1 p2 PBool8) (DPrim PBool8)
-- Array lookup: index primitive must equal the array's index type.
OpLookup -> with (d1, d2) $ traverseOf _2 matchPrimD >=> \case
(DArray (Index pi1) av, MatchPrimD _ pi2) -> case eqPrim pi1 pi2 of
Just Refl -> Just $ MatchOpSpec (OSLookup d1) (dArrValue av)
_ -> Nothing
_ -> Nothing
_ -> Nothing -- no operator takes three or more arguments
where
argsNumeric = rtraverse matchNumType argTypes -- every argument numeric?
argsPrim = rtraverse matchPrimD argTypes -- every argument primitive?
|
dorchard/camfort
|
src/Language/Fortran/Model/Op/Core/Match.hs
|
Haskell
|
apache-2.0
| 7,394
|
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE FlexibleContexts #-}
module Pontarius.E2E.Message where
import Control.Monad
import Control.Monad.Except
import Control.Monad.State.Strict
import qualified Crypto.Random as CRandom
import qualified Data.ByteString as BS
import Data.Word (Word8)
import Pontarius.E2E.Monad
import Pontarius.E2E.Types
import Pontarius.E2E.Helpers
import Pontarius.E2E.Serialize
-- | Verify and decrypt a received data message.  Fails unless the
-- session is in the encrypted state, the MAC verifies, and the
-- recipient key ID matches our current (or next) DH key.
decryptDataMessage :: CRandom.CPRG g => DataMessage -> E2E g BS.ByteString
decryptDataMessage msg = do
s <- get
unless (isEncrypted $ msgState s) . throwError
$ WrongState "decryptDataMessage"
MK{ recvEncKey
, recvMacKey } <- makeMessageKeys (senderKeyID msg) (recipientKeyID msg)
check <- parameter paramCheckMac
-- The MAC is verified before any decryption is attempted.
protocolGuard MACFailure "message" $ check recvMacKey (encodeMessageBytes msg)
(messageMAC msg)
-- A message addressed to our *next* key triggers a local key rotation.
case () of () | recipientKeyID msg == ourKeyID s -> return ()
| recipientKeyID msg == ourKeyID s + 1 -> shiftKeys
| otherwise -> throwError $ ProtocolError WrongKeyID ""
pl <- decCtr recvEncKey (ctrHi msg) (messageEnc msg)
shiftTheirKeys (nextDHy msg) (senderKeyID msg)
return pl
where
isEncrypted MsgStateEncrypted{} = True
isEncrypted _ = False
-- Advance our DH keys: current -> previous, next -> current, and a
-- freshly generated pair becomes next; bump our key ID.
shiftKeys = do
newDH <- makeDHKeyPair
s <- get
put s{ ourPreviousKey = ourCurrentKey s
, ourCurrentKey = nextDH s
, nextDH = newDH
, ourKeyID = ourKeyID s + 1
}
-- Record the peer's newly announced DH public key when the sender
-- has caught up to our view of their key ID.
shiftTheirKeys newKey keyID = do
s <- get
when (keyID == theirKeyID s) $
put s{ theirPreviousKey = theirCurrentKey s
, theirCurrentKey = Just newKey
, theirKeyID = theirKeyID s + 1
}
-- | Derive the directional encryption and MAC keys for the given pair
-- of key IDs: @tKeyID@ selects the peer's DH key, @oKeyID@ selects ours.
makeMessageKeys :: Integer
-> Integer
-> E2E g MessageKeys
makeMessageKeys tKeyID oKeyID = do
s <- get
-- Select the peer's DH public key matching tKeyID (previous or current).
tck <- case ( tKeyID == theirKeyID s - 1
, tKeyID == theirKeyID s
, theirPreviousKey s
, theirCurrentKey s
) of
(True, _ , Just tpk , _ ) -> return tpk
(True, _ , Nothing , _ ) -> throwError NoPeerDHKey
(_ , True, _ , Just tck ) -> return tck
(_ , True, _ , Nothing ) -> throwError NoPeerDHKey
_ -> throwError
$ ProtocolError WrongKeyID ""
-- Select our DH key pair matching oKeyID (current or next).
ok <- case ( oKeyID == ourKeyID s
, oKeyID == ourKeyID s + 1
) of
(True, _) -> return $ ourCurrentKey s
(_, True) -> return $ nextDH s
_ -> throwError $ ProtocolError WrongKeyID ""
sharedSecret <- makeDHSharedSecret (priv ok) tck
let secBytes = encodeInteger sharedSecret
-- Distinct byte prefixes keep send and receive keys different; which
-- side gets which prefix is decided by comparing public values.
(sendByte, recvByte) = if tck <= pub ok
then (0x01, 0x02) :: (Word8, Word8)
else (0x02, 0x01)
let h1 b = hash (BS.singleton b `BS.append` secBytes)
-- TODO: Check against yabasta
sendEncKey <- h1 sendByte
sendMacKey <- hash sendEncKey -- MAC keys are hashes of the enc keys
recvEncKey <- h1 recvByte
recvMacKey <- hash recvEncKey
return MK{ sendEncKey
, sendMacKey
, recvEncKey
, recvMacKey
}
-- | Encrypt a payload as a data message and MAC it.  Fails unless the
-- session is in the encrypted state; increments the send counter.
encryptDataMessage :: BS.ByteString -> E2E g DataMessage
encryptDataMessage payload = do
s <- get
unless (isEncrypted $ msgState s) $ throwError (WrongState "encryptDataMessage")
mk <- makeMessageKeys (theirKeyID s) (ourKeyID s)
pl <- encCtr (sendEncKey mk) (encodeInteger $ counter s) payload
-- The MAC is computed over the message with an empty MAC field and
-- spliced in afterwards.
let msg = DM { senderKeyID = ourKeyID s
, recipientKeyID = theirKeyID s
, nextDHy = pub $ nextDH s
, ctrHi = encodeInteger $ counter s
, messageEnc = pl
, messageMAC = BS.empty
}
messageMAC <- mac (sendMacKey mk) (encodeMessageBytes msg)
put s{counter = counter s + 1}
return $ msg{messageMAC = messageMAC}
where
isEncrypted MsgStateEncrypted{} = True
isEncrypted _ = False
|
Philonous/pontarius-xmpp-e2e
|
source/Pontarius/E2E/Message.hs
|
Haskell
|
apache-2.0
| 4,327
|
{-# LANGUAGE DeriveGeneric #-}
--------------------------------------------------------------------
-- |
-- Copyright : (c) Dan Doel 2014
-- License : BSD2
-- Maintainer: Dan Doel <dan.doel@gmail.com>
-- Stability : experimental
-- Portability: non-portable
--------------------------------------------------------------------
module Ermine.Unification.Class
( ClassCheck(ClassCheck)
, instantiateClass
) where
import Bound
import Bound.Scope
import Bound.Var
import Control.Applicative
import Control.Lens
import Data.Map as Map
import Data.Text
import Data.Traversable
import Data.Void
import Ermine.Syntax.Class
import Ermine.Syntax.Global
import Ermine.Syntax.Hint
import Ermine.Syntax.Kind as Kind
import Ermine.Syntax.Type as Type
import Ermine.Syntax.Term as Term
import Ermine.Unification.Meta
import GHC.Generics
-- | Intermediate state produced when instantiating a class declaration
-- for checking.
data ClassCheck s = ClassCheck
{ _cctparams :: [(Hint, KindM s)] -- ^ type parameters with their kinds
, _cccxt :: [Scope Int (Type (KindM s)) Text] -- ^ superclass context
, _ccsigs :: Map Global (Type (KindM s) (Var Int Text)) -- ^ method signatures
, _ccdefs :: Map Global (Bodies (Annot Void Text) Void) -- ^ default method bodies
}
deriving (Eq, Show, Generic)
-- | Instantiate a class declaration for checking: its kind variables are
-- replaced by fresh shallow metavariables and each type parameter gets a
-- fresh kind metavariable.  Returns the class schema together with the
-- 'ClassCheck' state.
instantiateClass :: Class () Text -> M s (Schema (MetaK s), ClassCheck s)
instantiateClass cls = do
clazz@(Class ks ts cxt sigs defs) <- kindVars (\_ -> newShallowMeta 0 False Nothing) cls
mks <- for ks $ newMeta False
tks <- for ts $ \(h, _) -> (,) h . pure <$> newShallowMeta 0 False Nothing
return $ ( schema clazz
, ClassCheck
tks
-- Substitute the fresh kind metas for the bound kind variables.
(hoistScope (over kindVars $ pure . unvar (mks!!) id) <$> cxt)
(over kindVars (pure . unvar (mks!!) id) <$> sigs)
defs
)
|
PipocaQuemada/ermine
|
src/Ermine/Unification/Class.hs
|
Haskell
|
bsd-2-clause
| 1,742
|
{-# LANGUAGE OverloadedStrings #-}
module Formalize.Html
( formHtml
, pdfHtml
) where
import Control.Monad.IO.Class (MonadIO)
import Data.Text.Lazy as LT (Text)
import Formalize.Types
import System.FilePath
import Text.Hastache
import Text.Hastache.Context
-- | Render the HTML for the view containing the main form.
formHtml :: FormData -> IO LT.Text
formHtml formData = mustache "form.mustache" (mkGenericContext formData)

-- | Render the HTML used to produce the PDF.
pdfHtml :: FormData -> IO LT.Text
pdfHtml formData = mustache "pdf.mustache" (mkGenericContext formData)

-- | Directory containing the mustache view templates.
viewFolder :: FilePath
viewFolder = "web/view"

-- | Render the named mustache template from 'viewFolder' with the
-- supplied context.
mustache :: MonadIO m => FilePath -> MuContext m -> m LT.Text
mustache file = hastacheFile defaultConfig (viewFolder </> file)
|
Lepovirta/Crystallize
|
src/Formalize/Html.hs
|
Haskell
|
bsd-3-clause
| 844
|
{-# LANGUAGE GADTs #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeFamilyDependencies #-}
module Mimir.Types where
import Control.Lens.Lens (Lens')
import Control.Lens.TH
import Control.Monad.Except (ExceptT)
import Control.Monad.Reader (ReaderT)
import Network.HTTP.Nano (HttpCfg, HttpError)
-- | Environments @r@ that carry an exchange value of type @e@.
class HasExchange e r where
exchange :: Lens' r e
-- | An exchange together with the (injective) monad its operations run in.
class Exchange e where
type ExchangeM e = (m :: * -> *) | m -> e
-- | Exchanges that can supply ticker data.
class Exchange e => TickerP e where
type TickerT e :: *
ticker :: ExchangeM e (TickerT e)
-- | Exchanges that support spot trading.
class Exchange e => SpotP e where
type SpotBalancesT e :: *
type SpotOrderT e :: *
type SpotOrderIDT e :: *
spotBalances :: ExchangeM e (SpotBalancesT e)
currentSpotOrders :: ExchangeM e [SpotOrderT e]
placeSpotOrder :: SpotOrderT e -> ExchangeM e (SpotOrderIDT e)
cancelSpotOrder :: SpotOrderIDT e -> ExchangeM e ()
-- | Trading monad: reader over the context with trade errors, in IO.
type TradeM e = ReaderT (Ctx e) (ExceptT TradeError IO)
-- | Execution context: HTTP configuration plus the exchange value.
data Ctx e = Ctx {
_ctxHttpCfg :: HttpCfg,
_ctxExchange :: e
}
-- | Errors raised while trading.
data TradeError
= THttpError HttpError
| TLogicError String
deriving Show
---
--- Standard data types
---
-- | Market ticker snapshot (time in UTC milliseconds).
data Ticker = Ticker {
_tiTimeUTCMS :: Int,
_tiAsk :: Double,
_tiBid :: Double,
_tiLast :: Double
} deriving (Eq, Show)
type CandleInterval = Int
-- | OHLCV candle.
data Candle = Candle {
_caTimeUTC :: Int,
_caOpen :: Double,
_caClose :: Double,
_caHigh :: Double,
_caLow :: Double,
_caVolume :: Double
} deriving (Eq, Show)
-- | Order book: bid and ask sides.
data OrderBook = OrderBook {
_obBids :: [OrderBookEntry],
_obAsks :: [OrderBookEntry]
} deriving (Eq, Show)
data OrderBookEntry = OrderBookEntry {
_oeVolume :: Double,
_oePrice :: Double
} deriving (Eq, Show)
-- | An executed trade.
data Trade = Trade {
_trTimeUTCMS :: Int,
_trUnitPrice :: Double,
_trVolume :: Double,
_trType :: OrderType
} deriving (Eq, Show)
-- | An open order on the exchange.
data Order = Order {
_oType :: OrderType,
_oID :: Int,
_oTimeUTCMS :: Int,
_oVolume :: Double,
_oUnitPrice :: Double
} deriving (Eq, Show)
data OrderType
= LIMIT_BUY
| LIMIT_SELL
| MARKET_BUY
| MARKET_SELL
deriving (Eq, Read, Show)
data OrderResponse = OrderResponse String deriving (Eq, Show)
-- | Account balances: quote currency and commodity amounts.
data Balances = Balances {
_bCurrency :: Double,
_bCommodity :: Double
} deriving (Eq, Show)
-- Template-Haskell: generate lenses/prisms for the types above.
makeLenses ''Ctx
makeClassyPrisms ''TradeError
makeLenses ''Ticker
makeLenses ''Candle
makeLenses ''OrderBook
makeLenses ''OrderBookEntry
makeLenses ''Trade
makeLenses ''Order
makeLenses ''Balances
|
ralphmorton/Mimir
|
src/Mimir/Types.hs
|
Haskell
|
bsd-3-clause
| 2,550
|
module MediaWiki.API.Query.AllUsers.Import where
import MediaWiki.API.Types
import MediaWiki.API.Utils
import MediaWiki.API.Query.AllUsers
import Text.XML.Light.Types
import Control.Monad
import Data.Maybe
-- | Parse an API response string into an 'AllUsersResponse'.
stringXml :: String -> Either (String,[{-Error msg-}String]) AllUsersResponse
stringXml s = parseDoc xml s
-- | Decode the top-level @api@ element of an @allusers@ query response.
xml :: Element -> Maybe AllUsersResponse
xml e = do
guard (elName e == nsName "api")
let es1 = children e
p <- pNode "query" es1
let es = children p
ps <- fmap (mapMaybe xmlUser) (fmap children $ pNode "allusers" es)
-- Continuation token for fetching the next page, if present.
let cont = pNode "query-continue" es1 >>= xmlContinue "allusers" "aufrom"
return emptyAllUsersResponse{auUsers=ps,auContinue=cont}
-- | Decode one @u@ element into (user name, edit count, groups).
-- A missing @name@ attribute yields an empty name rather than failure;
-- an unparsable @editcount@ yields Nothing for the count.
-- (An unused read of the @ns@ attribute was removed: its value was
-- never returned to the caller.)
xmlUser :: Element -> Maybe (UserName,Maybe Int, Maybe String)
xmlUser e = do
  guard (elName e == nsName "u")
  let nm = fromMaybe "" $ pAttr "name" e
  let ec = pAttr "editcount" e >>= \ x -> case reads x of { ((v,_):_) -> Just v; _ -> Nothing}
  let grps = pAttr "groups" e
  return (nm,ec,grps)
|
HyperGainZ/neobot
|
mediawiki/MediaWiki/API/Query/AllUsers/Import.hs
|
Haskell
|
bsd-3-clause
| 1,042
|
{-# LANGUAGE QuasiQuotes #-}
module Main where
import Control.Monad
import System.Environment
import Language.C
import Language.C.System.GCC
import Text.Printf
import Text.PrettyPrint.HughesPJ
--import Here (here)
-- Emit a small C program exercising nested if/else statements inside
-- guard-variable loops; used as a pretty-printer round-trip test.
main = do
-- this is not the prettiest, but easiest solution
let depth = 2
putStrLn "#include <stdio.h>"
print $ pretty $ parseCExtDecl $ show $
text "int main(int argc, char**argv)" $+$
(braces $
stat_embed depth (stat1 depth) $+$
stat_embed depth (stat2 depth) $+$
text "return(0);")
-- | Parse a C statement from a string; parse errors are fatal.
parseCStat :: String -> CStat
parseCStat s = either (error.show) id $ execParser_ statementP (inputStreamFromString s) (initPos "<stdin>")
-- | Parse a C external declaration from a string; parse errors are fatal.
parseCExtDecl :: String -> CExtDecl
parseCExtDecl s = either (error.show) id $ execParser_ extDeclP (inputStreamFromString s) (initPos "<stdin>")
-- | Wrap a statement in a C block that declares @k@ guard variables,
-- iterates each over {0,1} with nested for-loops, runs the statement
-- and prints the resulting value of @r@ for every combination.
stat_embed :: Int -> CStat -> Doc
stat_embed k stat = braces $ nest 2 $
decls $+$
text "int r = 0;" $+$
iteropen $+$
(nest 2 stmt) $+$
(nest 2 $ text "printf(\"%d\\n\",r);") $+$
iterclose
where
stmt = pretty stat
decls = vcat $ map (\n -> text "int" <+> text(guardName n) <> semi) [1..k]
iteropen = vcat $ map (\n -> let gn = guardName n in text (printf "for(%s=0;%s<=1;%s++){" gn gn gn)) [1..k]
iterclose = vcat $ replicate k (char '}')
-- Name of the n-th guard variable (also used by stat1/stat2).
guardName n = "g_"++show n
-- | C statement assigning the literal @k@ to @r@.
setR :: Int -> CStat
setR k = parseCStat $ printf "r = %d;" k
-- | Nested if/else chain of the given depth over the guard variables;
-- the innermost level sets @r@ to 1 or 2.
-- (Removed the unused local binding @cexpr = CExpr . Just@.)
stat1 :: Int -> CStatement NodeInfo
stat1 depth = go depth
  where
    go n | n <= 1    = CIf (guard n) (setR 1) (Just $ setR 2) u
         | otherwise = CIf (guard n) (go (n-1)) Nothing u
    vexpr s = CVar (internalIdent s) u
    guard n = vexpr (guardName n)
    u = undefNode

-- | Like 'stat1' but the else-branch sits on the outermost if.
-- (Removed the unused local binding @cexpr = CExpr . Just@.)
stat2 :: Int -> CStatement NodeInfo
stat2 depth = CIf (guard depth) (go (depth-1)) (Just $ setR 2) u
  where
    go n | n == 0    = setR 1
         | otherwise = CIf (guard n) (go (n-1)) Nothing u
    vexpr s = CVar (internalIdent s) u
    guard n = vexpr (guardName n)
    u = undefNode
|
llelf/language-c
|
test/harness/bug31_pp_if_else/Test.hs
|
Haskell
|
bsd-3-clause
| 2,083
|
{-# LANGUAGE DeriveDataTypeable, TypeFamilies, TemplateHaskell #-}
module Distribution.Server.Features.PackageCandidates.State where
import Distribution.Server.Features.PackageCandidates.Types
import Distribution.Server.Framework.MemSize
import qualified Distribution.Server.Packages.PackageIndex as PackageIndex
import Distribution.Package
import Data.Acid (Query, Update, makeAcidic)
import Data.SafeCopy (deriveSafeCopy, base)
import Data.Typeable
import Control.Monad.Reader
import qualified Control.Monad.State as State
import Data.Monoid
---------------------------------- Index of candidate tarballs and metadata
-- boilerplate code based on PackagesState
-- | Acidic state: the index of all candidate packages.
data CandidatePackages = CandidatePackages {
candidateList :: !(PackageIndex.PackageIndex CandPkgInfo)
} deriving (Typeable, Show, Eq)
deriveSafeCopy 0 'base ''CandidatePackages
instance MemSize CandidatePackages where
memSize (CandidatePackages a) = memSize1 a
-- | Initial state: no candidates.
initialCandidatePackages :: CandidatePackages
initialCandidatePackages = CandidatePackages {
candidateList = mempty
}
-- | Replace every existing version of the package with this candidate.
replaceCandidate :: CandPkgInfo -> Update CandidatePackages ()
replaceCandidate pkg = State.modify $ \candidates -> candidates { candidateList = replaceVersions (candidateList candidates) }
where replaceVersions = PackageIndex.insert pkg . PackageIndex.deletePackageName (packageName pkg)
-- | Add one candidate version, keeping other versions intact.
addCandidate :: CandPkgInfo -> Update CandidatePackages ()
addCandidate pkg = State.modify $ \candidates -> candidates { candidateList = addVersion (candidateList candidates) }
where addVersion = PackageIndex.insert pkg
-- | Remove exactly the given package version.
deleteCandidate :: PackageId -> Update CandidatePackages ()
deleteCandidate pkg = State.modify $ \candidates -> candidates { candidateList = deleteVersion (candidateList candidates) }
where deleteVersion = PackageIndex.deletePackageId pkg
-- | Remove every candidate version of the named package.
deleteCandidates :: PackageName -> Update CandidatePackages ()
deleteCandidates pkg = State.modify $ \candidates -> candidates { candidateList = deleteVersions (candidateList candidates) }
where deleteVersions = PackageIndex.deletePackageName pkg
-- |Replace all existing packages and reports
replaceCandidatePackages :: CandidatePackages -> Update CandidatePackages ()
replaceCandidatePackages = State.put
-- | Read the entire candidate state.
getCandidatePackages :: Query CandidatePackages CandidatePackages
getCandidatePackages = ask
-- Generate acid-state event types for the operations above.
makeAcidic ''CandidatePackages ['getCandidatePackages
,'replaceCandidatePackages
,'replaceCandidate
,'addCandidate
,'deleteCandidate
,'deleteCandidates
]
|
mpickering/hackage-server
|
Distribution/Server/Features/PackageCandidates/State.hs
|
Haskell
|
bsd-3-clause
| 2,683
|
{- $Id: AFRPTestsCOC.hs,v 1.2 2003/11/10 21:28:58 antony Exp $
******************************************************************************
* A F R P *
* *
* Module: AFRPTestsCOC *
* Purpose: Test cases for collection-oriented combinators *
* Authors: Antony Courtney and Henrik Nilsson *
* *
* Copyright (c) Yale University, 2003 *
* *
******************************************************************************
-}
module AFRPTestsCOC (coc_tr, coc_trs) where
import FRP.Yampa
import AFRPTestsCommon
------------------------------------------------------------------------------
-- Test cases for collection-oriented combinators
------------------------------------------------------------------------------
-- Input signal: the sequence 0.0, 0.5, 1.0, ... sampled every 0.1s.
coc_inp1 = deltaEncode 0.1 [0.0, 0.5 ..]
coc_t0 :: [[Double]]
-- Run three signal functions in a parallel broadcast (constant 1.0,
-- identity, integral) and take the first 20 output samples.
coc_t0 = take 20 $ embed (parB [constant 1.0, identity, integral]) coc_inp1
-- Expected output; the integral column follows the left rectangle rule.
coc_t0r =
[[1.0, 0.0, 0.00],
[1.0, 0.5, 0.00],
[1.0, 1.0, 0.05],
[1.0, 1.5, 0.15],
[1.0, 2.0, 0.30],
[1.0, 2.5, 0.50],
[1.0, 3.0, 0.75],
[1.0, 3.5, 1.05],
[1.0, 4.0, 1.40],
[1.0, 4.5, 1.80],
[1.0, 5.0, 2.25],
[1.0, 5.5, 2.75],
[1.0, 6.0, 3.30],
[1.0, 6.5, 3.90],
[1.0, 7.0, 4.55],
[1.0, 7.5, 5.25],
[1.0, 8.0, 6.00],
[1.0, 8.5, 6.80],
[1.0, 9.0, 7.65],
[1.0, 9.5, 8.55]]
coc_trs =
[ coc_t0 ~= coc_t0r
]
-- True iff every collection-oriented-combinator test passes.
coc_tr = and coc_trs
|
meimisaki/Yampa
|
tests/AFRPTestsCOC.hs
|
Haskell
|
bsd-3-clause
| 1,770
|
--
-- Licensed to the Apache Software Foundation (ASF) under one
-- or more contributor license agreements. See the NOTICE file
-- distributed with this work for additional information
-- regarding copyright ownership. The ASF licenses this file
-- to you under the Apache License, Version 2.0 (the
-- "License"); you may not use this file except in compliance
-- with the License. You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing,
-- software distributed under the License is distributed on an
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-- KIND, either express or implied. See the License for the
-- specific language governing permissions and limitations
-- under the License.
--
module Thrift
( module Thrift.Transport
, module Thrift.Protocol
, AppExnType(..)
, AppExn(..)
, readAppExn
, writeAppExn
, ThriftException(..)
) where
import Control.Monad ( when )
import Control.Exception
import Data.Typeable ( Typeable )
import Thrift.Transport
import Thrift.Protocol
-- | Generic exception thrown by the Thrift runtime.
data ThriftException = ThriftException
deriving ( Show, Typeable )
instance Exception ThriftException
-- | Kinds of TApplicationException defined by the Thrift protocol.
data AppExnType
    = AE_UNKNOWN
    | AE_UNKNOWN_METHOD
    | AE_INVALID_MESSAGE_TYPE
    | AE_WRONG_METHOD_NAME
    | AE_BAD_SEQUENCE_ID
    | AE_MISSING_RESULT
    deriving ( Eq, Show, Typeable )

-- | Wire encoding of 'AppExnType' (TApplicationException type codes).
instance Enum AppExnType where
    toEnum 0 = AE_UNKNOWN
    toEnum 1 = AE_UNKNOWN_METHOD
    toEnum 2 = AE_INVALID_MESSAGE_TYPE
    toEnum 3 = AE_WRONG_METHOD_NAME
    toEnum 4 = AE_BAD_SEQUENCE_ID
    toEnum 5 = AE_MISSING_RESULT
    -- Robustness fix: readAppExnFields calls 'toEnum' on an i32 read
    -- straight off the wire, so an unrecognised code (e.g. from a newer
    -- peer) previously crashed with a pattern-match failure.  Map any
    -- unknown code to AE_UNKNOWN instead.
    toEnum _ = AE_UNKNOWN

    fromEnum AE_UNKNOWN              = 0
    fromEnum AE_UNKNOWN_METHOD       = 1
    fromEnum AE_INVALID_MESSAGE_TYPE = 2
    fromEnum AE_WRONG_METHOD_NAME    = 3
    fromEnum AE_BAD_SEQUENCE_ID      = 4
    fromEnum AE_MISSING_RESULT       = 5
-- | Haskell representation of TApplicationException: a type code plus a
-- human-readable message.
data AppExn = AppExn { ae_type :: AppExnType, ae_message :: String }
deriving ( Show, Typeable )
instance Exception AppExn
-- | Serialise an 'AppExn' as a TApplicationException struct.  The
-- message (field id 1) is written only when non-empty; the type code
-- (field id 2) is always written.
writeAppExn :: (Protocol p, Transport t) => p t -> AppExn -> IO ()
writeAppExn pt ae = do
writeStructBegin pt "TApplicationException"
when (ae_message ae /= "") $ do
writeFieldBegin pt ("message", T_STRING , 1)
writeString pt (ae_message ae)
writeFieldEnd pt
writeFieldBegin pt ("type", T_I32, 2);
writeI32 pt (fromEnum (ae_type ae))
writeFieldEnd pt
writeFieldStop pt
writeStructEnd pt
-- | Read a TApplicationException struct from the protocol.
-- NOTE(review): the seed record's fields are 'undefined', so a struct
-- missing either field yields a partially-undefined 'AppExn'.
readAppExn :: (Protocol p, Transport t) => p t -> IO AppExn
readAppExn pt = do
readStructBegin pt
rec <- readAppExnFields pt (AppExn {ae_type = undefined, ae_message = undefined})
readStructEnd pt
return rec
-- Field loop: read fields until T_STOP, filling in the accumulator.
-- Field 1 (T_STRING) is the message, field 2 (T_I32) the type code;
-- any other field, or a type mismatch, is skipped.
readAppExnFields pt rec = do
(n, ft, id) <- readFieldBegin pt -- NB: local 'id' shadows Prelude.id
if ft == T_STOP
then return rec
else case id of
1 -> if ft == T_STRING then
do s <- readString pt
readAppExnFields pt rec{ae_message = s}
else do skip pt ft
readAppExnFields pt rec
2 -> if ft == T_I32 then
do i <- readI32 pt
readAppExnFields pt rec{ae_type = (toEnum i)}
else do skip pt ft
readAppExnFields pt rec
_ -> do skip pt ft
readFieldEnd pt
readAppExnFields pt rec
|
ajayanandgit/mbunit-v3
|
tools/Thrift/src/lib/hs/src/Thrift.hs
|
Haskell
|
apache-2.0
| 3,468
|
{-# LANGUAGE CPP #-}
#ifdef TRUSTWORTHY
{-# LANGUAGE Trustworthy #-}
#endif
#ifndef MIN_VERSION_parallel
#define MIN_VERSION_parallel(x,y,z) (defined(__GLASGOW_HASKELL__) && __GLASGOW_HASKELL > 700)
#endif
-----------------------------------------------------------------------------
-- |
-- Module : Control.Parallel.Strategies.Lens
-- Copyright : (C) 2012-2015 Edward Kmett
-- License : BSD-style (see the file LICENSE)
-- Maintainer : Edward Kmett <ekmett@gmail.com>
-- Stability : provisional
-- Portability : portable
--
-- A 'Lens' or 'Traversal' can be used to take the role of 'Traversable' in
-- @Control.Parallel.Strategies@, enabling those combinators to work with
-- monomorphic containers.
----------------------------------------------------------------------------
module Control.Parallel.Strategies.Lens
( evalOf
, parOf
, after
, throughout
) where
import Control.Lens
import Control.Parallel.Strategies
-- | Evaluate the targets of a 'Lens' or 'Traversal' into a data structure
-- according to the given 'Strategy'.
--
-- @
-- 'evalTraversable' = 'evalOf' 'traverse' = 'traverse'
-- 'evalOf' = 'id'
-- @
--
-- @
-- 'evalOf' :: 'Lens'' s a -> 'Strategy' a -> 'Strategy' s
-- 'evalOf' :: 'Traversal'' s a -> 'Strategy' a -> 'Strategy' s
-- 'evalOf' :: (a -> 'Eval' a) -> s -> 'Eval' s) -> 'Strategy' a -> 'Strategy' s
-- @
evalOf :: LensLike' Eval s a -> Strategy a -> Strategy s
evalOf l = l -- a LensLike' over 'Eval' *is* a strategy transformer
{-# INLINE evalOf #-}
-- | Evaluate the targets of a 'Lens' or 'Traversal' according into a
-- data structure according to a given 'Strategy' in parallel.
--
-- @'parTraversable' = 'parOf' 'traverse'@
--
-- @
-- 'parOf' :: 'Lens'' s a -> 'Strategy' a -> 'Strategy' s
-- 'parOf' :: 'Traversal'' s a -> 'Strategy' a -> 'Strategy' s
-- 'parOf' :: ((a -> 'Eval' a) -> s -> 'Eval' s) -> 'Strategy' a -> 'Strategy' s
-- @
parOf :: LensLike' Eval s a -> Strategy a -> Strategy s
#if MIN_VERSION_parallel(3,2,0)
-- parallel >= 3.2 provides rparWith to spark a strategy per target
parOf l s = l (rparWith s)
#else
-- older parallel: compose rpar with the strategy via `dot`
parOf l s = l (rpar `dot` s)
#endif
{-# INLINE parOf #-}
-- | Transform a 'Lens', 'Fold', 'Getter', 'Setter' or 'Traversal' to
-- first evaluates its argument according to a given 'Strategy' /before/ proceeding.
--
-- @
-- 'after' 'rdeepseq' 'traverse' :: 'Traversable' t => 'Strategy' a -> 'Strategy' [a]
-- @
after :: Strategy s -> LensLike f s t a b -> LensLike f s t a b
after s l f = l f $| s -- '$|' applies the strategy before continuing
{-# INLINE after #-}
-- | Transform a 'Lens', 'Fold', 'Getter', 'Setter' or 'Traversal' to
-- evaluate its argument according to a given 'Strategy' /in parallel with/ evaluating.
--
-- @
-- 'throughout' 'rdeepseq' 'traverse' :: 'Traversable' t => 'Strategy' a -> 'Strategy' [a]
-- @
throughout :: Strategy s -> LensLike f s t a b -> LensLike f s t a b
throughout s l f = l f $|| s -- '$||' applies the strategy in parallel
{-# INLINE throughout #-}
|
rpglover64/lens
|
src/Control/Parallel/Strategies/Lens.hs
|
Haskell
|
bsd-3-clause
| 2,786
|
module Package06e where
import HsTypes
import UniqFM
|
urbanslug/ghc
|
testsuite/tests/package/package06e.hs
|
Haskell
|
bsd-3-clause
| 53
|
-- !!! Infix record constructor.
module ShouldCompile where
data Rec = (:<-:) { a :: Int, b :: Float }
|
urbanslug/ghc
|
testsuite/tests/parser/should_compile/read010.hs
|
Haskell
|
bsd-3-clause
| 104
|
{-# OPTIONS_GHC -funbox-strict-fields #-}
import Data.List
-- | Strict four-component float vector (fields unboxed via the
-- -funbox-strict-fields pragma at the top of the file).
data Vec4 = Vec4 !Float !Float !Float !Float
main :: IO ()
main = print traceList
-- Flattens the (r,g,b,a) of each sampled pixel into a single list;
-- regression test for unboxed strict fields (T1852).
traceList = concatMap (\(x,y) -> let (r,g,b,a) = getPixel (x,y) in [r,g,b,a])
[(0,0)]
where
getPixel (x,y) = (red,green,blue,alpha)
where
-- 'seq x' forces the coordinate before building the vector
Vec4 fr fg fb fa = seq x (Vec4 1 2 3 4)
red = round fr
green = round fg
blue = round fb
alpha = round fa
|
sdiehl/ghc
|
testsuite/tests/codeGen/should_run/T1852.hs
|
Haskell
|
bsd-3-clause
| 448
|
module Language.Go.Token where
import Language.Go.SrcLocation
-- | Lexical tokens of the Go language.
--
-- Every constructor shares the 'tokenSpan' selector, so it is total.
-- 'tokenLiteral', however, exists only on the identifier/literal
-- constructors: applying it to any other token is a runtime error
-- (a partial record field).
data Token
   = IdentifierToken { tokenLiteral :: !String, tokenSpan :: !SrcSpan }
     -- Literals
   | IntToken { tokenLiteral :: !String, tokenSpan :: !SrcSpan }
   | FloatToken { tokenLiteral :: !String, tokenSpan :: !SrcSpan }
   | ImaginaryToken { tokenLiteral :: !String, tokenSpan :: !SrcSpan }
   | RuneToken { tokenLiteral :: !String, tokenSpan :: !SrcSpan }
   | StringToken { tokenLiteral :: !String, tokenSpan :: !SrcSpan }
     -- Keywords
   | BreakToken { tokenSpan :: !SrcSpan }
   | CaseToken { tokenSpan :: !SrcSpan }
   | ChanToken { tokenSpan :: !SrcSpan }
   | ConstToken { tokenSpan :: !SrcSpan }
   | ContinueToken { tokenSpan :: !SrcSpan }
   | DefaultToken { tokenSpan :: !SrcSpan }
   | DeferToken { tokenSpan :: !SrcSpan }
   | ElseToken { tokenSpan :: !SrcSpan }
   | FallthroughToken { tokenSpan :: !SrcSpan }
   | ForToken { tokenSpan :: !SrcSpan }
   | FuncToken { tokenSpan :: !SrcSpan }
   | GoToken { tokenSpan :: !SrcSpan }
   | GotoToken { tokenSpan :: !SrcSpan }
   | IfToken { tokenSpan :: !SrcSpan }
   | ImportToken { tokenSpan :: !SrcSpan }
   | InterfaceToken { tokenSpan :: !SrcSpan }
   | MapToken { tokenSpan :: !SrcSpan }
   | PackageToken { tokenSpan :: !SrcSpan }
   | RangeToken { tokenSpan :: !SrcSpan }
   | ReturnToken { tokenSpan :: !SrcSpan }
   | SelectToken { tokenSpan :: !SrcSpan }
   | StructToken { tokenSpan :: !SrcSpan }
   | SwitchToken { tokenSpan :: !SrcSpan }
   | TypeToken { tokenSpan :: !SrcSpan }
   | VarToken { tokenSpan :: !SrcSpan }
     -- Operators and delimiters
   | PlusToken { tokenSpan :: !SrcSpan } -- '+'
   | MinusToken { tokenSpan :: !SrcSpan } -- '-'
   | MultToken { tokenSpan :: !SrcSpan } -- '*'
   | DivToken { tokenSpan :: !SrcSpan } -- '/'
   | ModuloToken { tokenSpan :: !SrcSpan } -- '%'
   | BinaryAndToken { tokenSpan :: !SrcSpan } -- '&'
   | BinaryOrToken { tokenSpan :: !SrcSpan } -- '|'
   | BinaryXorToken { tokenSpan :: !SrcSpan } -- '^'
   | BinaryShiftLeftToken { tokenSpan :: !SrcSpan } -- '<<'
   | BinaryShiftRightToken { tokenSpan :: !SrcSpan } -- '>>'
   | BinaryAndNotToken { tokenSpan :: !SrcSpan } -- '&^'
   | AndToken { tokenSpan :: !SrcSpan } -- '&&'
   | OrToken { tokenSpan :: !SrcSpan } -- '||'
   | ArrowToken { tokenSpan :: !SrcSpan } -- '<-'
   | IncToken { tokenSpan :: !SrcSpan } -- '++'
   | DecToken { tokenSpan :: !SrcSpan } -- '--'
   | EqualityToken { tokenSpan :: !SrcSpan } -- '=='
   | LessThanToken { tokenSpan :: !SrcSpan } -- '<'
   | GreaterThanToken { tokenSpan :: !SrcSpan } -- '>'
   | AssignToken { tokenSpan :: !SrcSpan } -- '='
   | NotToken { tokenSpan :: !SrcSpan } -- '!'
   | NotEqualsToken { tokenSpan :: !SrcSpan } -- '!='
   | LessThanEqualsToken { tokenSpan :: !SrcSpan } -- '<='
   | GreaterThanEqualsToken { tokenSpan :: !SrcSpan } -- '>='
   | DefineToken { tokenSpan :: !SrcSpan } -- ':='
   | EllipsisToken { tokenSpan :: !SrcSpan } -- '...'
   | LeftRoundBracketToken { tokenSpan :: !SrcSpan } -- '('
   | RightRoundBracketToken { tokenSpan :: !SrcSpan } -- ')'
   | LeftSquareBracketToken { tokenSpan :: !SrcSpan } -- '['
   | RightSquareBracketToken { tokenSpan :: !SrcSpan } -- ']'
   | LeftCurlyBracketToken { tokenSpan :: !SrcSpan } -- '{'
   | RightCurlyBracketToken { tokenSpan :: !SrcSpan } -- '}'
   | CommaToken { tokenSpan :: !SrcSpan } -- ','
   | DotToken { tokenSpan :: !SrcSpan } -- '.'
   | SemicolonToken { tokenSpan :: !SrcSpan } -- ';'
   | ColonToken { tokenSpan :: !SrcSpan } -- ':'
   | PlusAssignToken { tokenSpan :: !SrcSpan } -- '+='
   | MinusAssignToken { tokenSpan :: !SrcSpan } -- '-='
   | MultAssignToken { tokenSpan :: !SrcSpan } -- '*='
   | DivAssignToken { tokenSpan :: !SrcSpan } -- '/='
   | ModuloAssignToken { tokenSpan :: !SrcSpan } -- '%='
   | BinaryAndAssignToken { tokenSpan :: !SrcSpan } -- '&='
   | BinaryOrAssignToken { tokenSpan :: !SrcSpan } -- '|='
   | BinaryXorAssignToken { tokenSpan :: !SrcSpan } -- '^='
   | BinaryShiftLeftAssignToken { tokenSpan :: !SrcSpan } -- '<<='
   | BinaryShiftRightAssignToken { tokenSpan :: !SrcSpan } -- '>>='
   | BinaryAndNotAssignToken { tokenSpan :: !SrcSpan } -- '&^='
   deriving (Eq, Ord, Show)
|
codeq/language-go
|
src/Language/Go/Token.hs
|
Haskell
|
mit
| 4,170
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE CPP #-}
module Network.HTTP.ReverseProxy.Rewrite
( ReverseProxyConfig (..)
, RewriteRule (..)
, RPEntry (..)
, simpleReverseProxy
)
where
import Control.Applicative
import Control.Exception (bracket)
import Data.Function (fix)
import Data.Monoid ((<>))
import qualified Data.Set as Set
import Data.Set (Set)
import qualified Data.Map as Map
import Data.Map ( Map )
import Data.Array ((!))
import Data.Aeson
import Control.Monad (unless)
import qualified Data.ByteString as S
import qualified Data.Text as T
import Data.Text (Text)
import Data.Text.Encoding (encodeUtf8, decodeUtf8)
import qualified Data.CaseInsensitive as CI
import Blaze.ByteString.Builder (fromByteString)
import Keter.Types.Common
-- Configuration files
import Data.Default
-- Regular expression parsing, replacement, matching
import Data.Attoparsec.Text (string, takeWhile1, endOfInput, parseOnly, Parser)
import Text.Regex.TDFA (makeRegex, matchOnceText, MatchText)
import Text.Regex.TDFA.String (Regex)
import Data.Char (isDigit)
-- Reverse proxy apparatus
import qualified Network.Wai as Wai
import qualified Network.Wai.Internal as I
import Network.HTTP.Client.Conduit
import qualified Network.HTTP.Client as NHC
import Network.HTTP.Types
-- | A reverse-proxy table entry: the parsed configuration paired with the
-- HTTP manager used to issue the proxied requests.
data RPEntry = RPEntry
    { config :: ReverseProxyConfig
    , httpManager :: Manager
    }

-- 'Manager' has no Show instance, so only the configuration is rendered.
instance Show RPEntry where
  show (RPEntry cfg _) = "RPEntry { config = " ++ show cfg ++ " }"
-- | Extract the text of capture group @i@ from a regex match.
-- Partial: indexing outside the match's group bounds is an array error.
getGroup :: MatchText String -> Int -> String
getGroup matchGroups idx = fst (matchGroups ! idx)
-- | Apply a substitution template to a regex match.
--
-- @(before, match, after)@ has the shape produced by 'matchOnceText': the
-- text before the match, the capture groups, and the text after.  The
-- @replacement@ template supports backslash escapes: a double backslash
-- becomes one literal backslash, and a backslash followed by digits
-- splices in that capture group.  If the template fails to parse, the
-- whole original @input@ is returned unchanged.
rewrite :: (String, MatchText String, String) -> String -> String -> Text
rewrite (before, match, after) input replacement =
  case parseOnly parseSubstitute (T.pack replacement) of
    Left _ -> T.pack input
    Right result -> T.pack before <> result <> T.pack after
  where
    -- Consumes the template left to right, emitting Text as it goes.
    parseSubstitute :: Parser Text
    parseSubstitute =
        (endOfInput >> "")
        <|> do
          { _ <- string "\\\\"
          ; rest <- parseSubstitute
          ; return $ "\\" <> rest
          }
        <|> do
          { _ <- string "\\"
          -- 'read' is safe here: takeWhile1 isDigit guarantees digits.
          ; n <- (fmap (read . T.unpack) $ takeWhile1 isDigit) :: Parser Int
          ; rest <- parseSubstitute
          ; return $ T.pack (getGroup match n) <> rest
          }
        <|> do
          { text <- takeWhile1 (/= '\\')
          ; rest <- parseSubstitute
          ; return $ text <> rest
          }
-- | Rewrite a single header's value when a rule exists for its name;
-- headers with no matching rule pass through untouched.
rewriteHeader :: Map HeaderName RewriteRule -> Header -> Header
rewriteHeader rules hdr@(name, value) =
  maybe hdr (\rule -> (name, regexRewrite rule value)) (Map.lookup name rules)

-- | Apply 'rewriteHeader' across an entire header list.
rewriteHeaders :: Map HeaderName RewriteRule -> [Header] -> [Header]
rewriteHeaders ruleMap = fmap (rewriteHeader ruleMap)
-- | Apply one 'RewriteRule' to a raw header value.  The value is decoded
-- as UTF-8, matched against the rule's regex (first match only), and the
-- rule's replacement template is substituted via 'rewrite'.  If the regex
-- does not match, the input bytes are returned unchanged.
--
-- NOTE(review): 'decodeUtf8' is partial — a header value that is not
-- valid UTF-8 will throw; confirm callers guarantee the encoding.
regexRewrite :: RewriteRule -> S.ByteString -> S.ByteString
regexRewrite (RewriteRule _ regex' replacement) input =
  case matchOnceText regex strInput of
    Just match -> encodeUtf8 $ rewrite match strInput strReplacement
    Nothing -> input
  where
    strRegex = T.unpack regex'
    regex :: Regex
    regex = makeRegex strRegex
    strInput = T.unpack . decodeUtf8 $ input
    strReplacement = T.unpack replacement
-- | Drop headers the proxy must not forward verbatim: it recomputes
-- Transfer-Encoding, Content-Length and Host for the upstream request.
filterHeaders :: [Header] -> [Header]
filterHeaders = filter keep
  where
    keep (name, _) =
      name `notElem` ["Transfer-Encoding", "Content-Length", "Host"]
-- | Index a set of rewrite rules by their (case-insensitive) header name.
mkRuleMap :: Set RewriteRule -> Map HeaderName RewriteRule
mkRuleMap rules =
  Map.fromList
    [ (CI.mk (encodeUtf8 (ruleHeader rule)), rule)
    | rule <- Set.toList rules
    ]
-- | Translate an incoming WAI request into the upstream http-client
-- request described by the proxy configuration: same method, path, query,
-- version and (rewritten, filtered) headers, aimed at the configured
-- host/port, with redirects and decompression disabled so the response
-- passes through untouched.
mkRequest :: ReverseProxyConfig -> Wai.Request -> Request
mkRequest rpConfig request =
#if MIN_VERSION_http_client(0, 5, 0)
     -- http-client >= 0.5: status checking is disabled via checkResponse,
     -- and timeouts use the dedicated ResponseTimeout type.
     NHC.defaultRequest
      { NHC.checkResponse = \_ _ -> return ()
      , NHC.responseTimeout = maybe NHC.responseTimeoutNone NHC.responseTimeoutMicro $ reverseTimeout rpConfig
#else
     -- Older http-client: checkStatus returning Nothing means "never fail".
     def
      { NHC.checkStatus = \_ _ _ -> Nothing
      , NHC.responseTimeout = reverseTimeout rpConfig
#endif
      , method = Wai.requestMethod request
      , secure = reversedUseSSL rpConfig
      , host = encodeUtf8 $ reversedHost rpConfig
      , port = reversedPort rpConfig
      , path = Wai.rawPathInfo request
      , queryString = Wai.rawQueryString request
      , requestHeaders = filterHeaders $ rewriteHeaders reqRuleMap (Wai.requestHeaders request)
      , requestBody =
          -- Stream the body through without buffering; chunked vs. known
          -- length mirrors what the client sent.
          case Wai.requestBodyLength request of
            Wai.ChunkedBody -> RequestBodyStreamChunked ($ I.getRequestBodyChunk request)
            Wai.KnownLength n -> RequestBodyStream (fromIntegral n) ($ I.getRequestBodyChunk request)
      , decompress = const False
      , redirectCount = 0
      , cookieJar = Nothing
      , requestVersion = Wai.httpVersion request
      }
  where
    reqRuleMap = mkRuleMap $ rewriteRequestRules rpConfig
-- | WAI application that forwards every request to the configured upstream
-- and streams the response back, applying the response rewrite rules to
-- its headers.  'bracket' guarantees the upstream connection is closed
-- even if streaming fails.
simpleReverseProxy :: Manager -> ReverseProxyConfig -> Wai.Application
simpleReverseProxy mgr rpConfig request sendResponse = bracket
    (NHC.responseOpen proxiedRequest mgr)
    responseClose
    $ \res -> sendResponse $ Wai.responseStream
        (responseStatus res)
        (rewriteHeaders respRuleMap $ responseHeaders res)
        (sendBody $ responseBody res)
  where
    proxiedRequest = mkRequest rpConfig request
    respRuleMap = mkRuleMap $ rewriteResponseRules rpConfig
    -- Pump upstream body chunks to the client until an empty chunk
    -- signals end of stream.
    sendBody body send _flush = fix $ \loop -> do
      bs <- body
      unless (S.null bs) $ do
        () <- send $ fromByteString bs
        loop
-- | Configuration for one reverse-proxied host.
data ReverseProxyConfig = ReverseProxyConfig
    { reversedHost :: Text -- ^ upstream host requests are forwarded to
    , reversedPort :: Int -- ^ upstream port
    , reversedUseSSL :: Bool -- ^ use TLS when talking to the upstream
    , reversingHost :: Text -- ^ presumably the public vhost being proxied; not used in this module
    , reversingUseSSL :: !SSLConfig
    , reverseTimeout :: Maybe Int -- ^ upstream response timeout (microseconds in http-client >= 0.5)
    , rewriteResponseRules :: Set RewriteRule -- ^ header rewrites applied to upstream responses
    , rewriteRequestRules :: Set RewriteRule -- ^ header rewrites applied to client requests
    } deriving (Eq, Ord, Show)
-- | Parse a proxy stanza.  Required keys: @reversed-host@, @reversed-port@
-- and @reversing-host@; all other keys fall back to their defaults.
instance FromJSON ReverseProxyConfig where
    parseJSON (Object o) = ReverseProxyConfig
        <$> o .: "reversed-host"
        <*> o .: "reversed-port"
        -- "reversed-ssl" has a natural default, so the key must be
        -- optional.  The previous '.:' made it mandatory (only an explicit
        -- null received the False default), inconsistent with every other
        -- defaulted field below.
        <*> o .:? "reversed-ssl" .!= False
        <*> o .: "reversing-host"
        <*> o .:? "ssl" .!= SSLFalse
        <*> o .:? "timeout" .!= Nothing
        <*> o .:? "rewrite-response" .!= Set.empty
        <*> o .:? "rewrite-request" .!= Set.empty
    parseJSON _ = fail "Wanted an object"
-- | Serialise back to the same JSON shape the parser accepts.
instance ToJSON ReverseProxyConfig where
    toJSON cfg = object
        [ "reversed-host" .= reversedHost cfg
        , "reversed-port" .= reversedPort cfg
        , "reversed-ssl" .= reversedUseSSL cfg
        , "reversing-host" .= reversingHost cfg
        , "ssl" .= reversingUseSSL cfg
        , "timeout" .= reverseTimeout cfg
        , "rewrite-response" .= rewriteResponseRules cfg
        , "rewrite-request" .= rewriteRequestRules cfg
        ]
-- | Empty hosts and port 80; the optional fields match the fallbacks used
-- when parsing JSON (no SSL, no timeout, no rewrite rules).
instance Default ReverseProxyConfig where
    def = ReverseProxyConfig
        { reversedHost = ""
        , reversedPort = 80
        , reversedUseSSL = False
        , reversingHost = ""
        , reversingUseSSL = SSLFalse
        , reverseTimeout = Nothing
        , rewriteResponseRules = Set.empty
        , rewriteRequestRules = Set.empty
        }
-- | One header-rewriting rule: match 'ruleRegex' against the value of the
-- header named 'ruleHeader' and substitute 'ruleReplacement'.
data RewriteRule = RewriteRule
    { ruleHeader :: Text
    , ruleRegex :: Text
    , ruleReplacement :: Text
    } deriving (Eq, Ord, Show)
-- JSON shape: { "header": ..., "from": ..., "to": ... }; all keys required.
instance FromJSON RewriteRule where
    parseJSON (Object o) = RewriteRule
        <$> o .: "header"
        <*> o .: "from"
        <*> o .: "to"
    parseJSON _ = fail "Wanted an object"
instance ToJSON RewriteRule where
    toJSON RewriteRule {..} = object
        [ "header" .= ruleHeader
        , "from" .= ruleRegex
        , "to" .= ruleReplacement
        ]
|
snoyberg/keter
|
Network/HTTP/ReverseProxy/Rewrite.hs
|
Haskell
|
mit
| 7,638
|
{-# LANGUAGE OverloadedStrings #-}
-- import Text.HTML.TagSoup
-- import Text.HTML.TagSoup.Tree
import GHC.Int
import Data.Maybe
import Control.Applicative ( (<$>) )
import Network.HTTP.Conduit
import Data.String.Conversions (cs)
import Data.Aeson
import Data.Aeson.Types
import Data.ByteString.Lazy as BL
import Data.Text
-- | Tweet identifiers are 64-bit integers.
type TweetId = Int64

-- | Build the timeline endpoint URL for a user, optionally paging
-- backwards from a given maximum tweet id.
timelineURI :: String -> Maybe TweetId -> String
timelineURI tweep maxTweetId = base ++ paging
  where
    base = "https://twitter.com/i/profiles/show/" ++ tweep
           ++ "/timeline?include_entities=1"
    paging = case maxTweetId of
      Nothing  -> ""
      Just tid -> "&max_id=" ++ show tid
main :: IO ()
main = do
  -- Fetch the first timeline page (no max_id) and print its HTML payload;
  -- falls back to the empty string when the JSON lacks "items_html".
  body <- simpleHttp (timelineURI "drboolean" Nothing)
  let html = fromMaybe "" $ htmlPayload body
  Prelude.putStr $ cs html
-- | Pull the "items_html" string out of the timeline JSON response.
-- Yields 'Nothing' when the body is not JSON or lacks that key.
htmlPayload :: BL.ByteString -> Maybe Text
htmlPayload body = decode body >>= parseMaybe (.: "items_html")
|
begriffs/twittective
|
Main.hs
|
Haskell
|
mit
| 855
|
-- |
module Language.Imperative.C where
import Language.Imperative
import qualified Language.C as C
-- Type aliases specialising the generic imperative AST to language-c's
-- declaration/constant types.
type CType a = C.CDeclaration a
type CLit a = C.CConstant a
type CExpr e s a = Expr (CLit a) (CType a) e s a
type CStmt e s a = Statement (CLit a) (CType a) e s a
type CCase s a = Case (CLit a) s a
type CPat a = Pattern (CLit a) a
-- Knot-tying fixed points for the mutually recursive expression/statement
-- types.
newtype Fix f = Fix (f (Fix f))
newtype FCExpr a = FCExpr (CExpr (FCExpr a) (FCStmt a) a)
newtype FCStmt a = FCStmt (CStmt (FCExpr a) (FCStmt a) a)
fixS s = FCStmt s
fixE e = FCExpr e
-- | Translate a C statement.
-- NOTE(review): only 'CLabel' and 'CSwitch' are handled — every other
-- statement form is a runtime pattern-match failure.  Presumably a work
-- in progress; confirm before using on arbitrary input.
fromC :: C.CStatement a -> FCStmt a
fromC (C.CLabel i stmt attrs a) =
  fixS $
  SBlock a [ fixS $ SLabel a (Label $ C.identToString i)
           , fromC stmt
           ]
fromC (C.CSwitch expr stmt a) =
  fixS $ SSwitch a (fromCE expr) (toCases stmt) Nothing
-- | Translate a C expression.
-- NOTE(review): only 'CAssign' is handled, and the assignment operator is
-- dropped (see TODO below).
fromCE :: C.CExpression a -> FCExpr a
fromCE (C.CAssign assignOp e1 e2 a) =
  fixE $
  EAssign a (fromCE e1) (fromCE e2) -- TODO use assignOp
-- | Collect switch cases.
-- NOTE(review): requires each case's successor to be another 'CCase' —
-- the final case (and any statements inside cases) has no equation and
-- will crash at runtime.
toCases :: C.CStatement a -> [CCase (FCStmt a) a]
toCases (C.CCase expr next@(C.CCase{}) a) =
  (Case a (toPattern expr) (fixS $ SBlock a []))
  : toCases next
-- | Translate a case label expression; only constants are handled.
toPattern :: C.CExpression a -> CPat a
toPattern (C.CConst c) = Pattern (C.annotation c) c
|
sinelaw/imperative
|
src/Language/Imperative/C.hs
|
Haskell
|
mit
| 1,197
|
module Main (main) where
-- Imports from 'tasty'
import Test.Tasty (defaultMain, testGroup)
-- Imports from 'jupyter'
import Jupyter.Test.Client (clientTests)
import Jupyter.Test.Install (installTests)
import Jupyter.Test.Kernel (kernelTests)
import Jupyter.Test.ZeroMQ (zmqTests)
-- | Run all Haskell tests for the @jupyter@ package.
-- The four suites come from the Jupyter.Test.* modules imported above.
main :: IO ()
main =
  defaultMain $
  testGroup "Tests" [installTests, zmqTests, kernelTests, clientTests]
|
gibiansky/jupyter-haskell
|
tests/Test.hs
|
Haskell
|
mit
| 498
|
import Control.Arrow ((&&&))
import Data.List (transpose, sort, group)
-- | Order the distinct elements of a list from least to most frequent
-- (ties broken by the elements' own ordering, via the tuple Ord).
colFreq :: Ord a => [a] -> [a]
colFreq xs = [v | (_, v) <- sort tallies]
  where
    tallies = [(length run, head run) | run <- group (sort xs)]

-- | Decode a message column-wise: for each column of the transposed
-- input, pick one element from its frequency ordering with the selector.
decode :: Ord a => ([a] -> a) -> [[a]] -> [a]
decode pick rows = [pick (colFreq column) | column <- transpose rows]
main :: IO ()
main = do
  -- 'decode last' picks each column's most frequent character and
  -- 'decode head' its least frequent ('colFreq' sorts ascending by count).
  input <- lines <$> readFile "../input.txt"
  print (decode last input, decode head input)
|
mattvperry/AoC_2016
|
day06/haskell/day6.hs
|
Haskell
|
mit
| 399
|
module Language.Pal.Parser
( expr
) where
import Control.Applicative
import Text.Parsec.Char hiding (string)
import Text.Parsec.Combinator
import Text.Parsec.String
import Text.Parsec ((<?>))
import Language.Pal.Types
-- | @(e1 e2 ...)@ — a parenthesised, whitespace-separated expression list.
list :: Parser LValue
list = char '(' *> (List <$> (expr `sepBy` whitespaces)) <* char ')'
-- | A single whitespace character (space, newline or tab).
whitespace :: Parser Char
whitespace = oneOf " \n\t"
-- | One or more whitespace characters — despite the plural name, at least
-- one is required ('many1').
whitespaces :: Parser String
whitespaces = many1 whitespace
-- | Any expression.  'atom' is tried first; it cannot consume digits,
-- '(', '"', '#' or '\'' (see 'symbolChars'), so the later alternatives
-- stay reachable.
expr :: Parser LValue
expr = Atom <$> atom
   <|> list
   <|> Number <$> number
   <|> String <$> string
   <|> Bool <$> bool
   <|> quoted
   <?> "expression"
atom :: Parser LAtom
atom = many1 $ oneOf symbolChars
symbolChars :: String
symbolChars = ['a'..'z'] ++ ['A'..'Z'] ++ "+-*/_!?<>"
-- | A natural-number literal.  'read' is safe: the input is all digits.
number :: Parser LNumber
number = read <$> many1 digit
-- | A double-quoted string; no escape sequences are supported.
string :: Parser LString
string = char '"' *> many (noneOf "\"") <* char '"'
-- | @#t@ or @#f@.
bool :: Parser Bool
bool = char '#' *> ((char 't' *> pure True) <|> (char 'f' *> pure False))
-- | @'e@ desugars to @(quote e)@.
quoted :: Parser LValue
quoted = (List . (Atom "quote" :) . singleton) <$> (char '\'' *> expr)
  where singleton a = [a]
|
samstokes/pal
|
Language/Pal/Parser.hs
|
Haskell
|
mit
| 1,103
|
-- Harshad or Niven numbers
-- http://www.codewars.com/kata/54a0689443ab7271a90000c6/
module Codewars.Kata.Harshad where
import Control.Arrow ((&&&))
import Data.Char (digitToInt)
import Data.Maybe (fromMaybe)
-- | A Harshad (Niven) number is divisible by the sum of its digits.
isValid :: Integer -> Bool
isValid n = n `mod` digitSum == 0
  where
    digitSum = fromIntegral (sum (map digitToInt (show n)))

-- | The first Harshad number strictly greater than @n@.
getNext :: Integer -> Integer
getNext n = head [m | m <- [n + 1 ..], isValid m]

-- | The first @n@ Harshad numbers after the starting point (0 if absent).
getSerie :: Int -> Maybe Integer -> [Integer]
getSerie n start = take n (iterate getNext (getNext (fromMaybe 0 start)))
|
gafiatulin/codewars
|
src/6 kyu/Harshad.hs
|
Haskell
|
mit
| 522
|
module Main where
import Control.Monad
import Control.Monad.Except
import Control.Monad.IO.Class (liftIO)
import Data.Char
import System.IO
import Language.Janus.AST
import Language.Janus.Interp
import Language.Janus.Parser
main :: IO ()
main = do
  putStrLn "Janus REPL"
  putStrLn "type :q to quit"
  -- Run the prompt loop; interpreter-level errors are printed on exit.
  escInterp prompt
-- | Run an interpreter action, printing \"ERROR\" and the error value if
-- it fails.  The action's result is discarded either way.
escInterp :: InterpM a -> IO ()
escInterp m = do
  result <- runInterpM m
  case result of
    -- The trailing '>> return ()' in the original was redundant: 'print'
    -- already ends the chain at IO ().
    Left err -> putStrLn "ERROR" >> print err
    _        -> return ()
-- | The REPL loop: print a prompt, read one line, dispatch on it.
-- Empty lines re-prompt, ":q" exits, any other ":"-prefixed line is an
-- unknown meta command, and everything else is parsed and evaluated.
prompt :: InterpM ()
prompt = do
  liftIO (putStr ">>> " >> hFlush stdout)
  line <- trim <$> liftIO getLine
  case line of
    "" -> prompt
    ":q" -> return ()
    (':':_) -> do
      liftIO (putStrLn "unknown meta command" >> hFlush stdout)
      prompt
    _ -> do
      result <- runLine line
      liftIO (print result >> hFlush stdout)
      prompt
  where
    -- Parse and evaluate one statement; parse errors and evaluation
    -- errors are printed and collapsed to JUnit so the loop continues.
    runLine line = case parseStatement line of
      Left parseErr -> do
        liftIO (print parseErr >> hFlush stdout)
        return JUnit
      Right ast -> eval ast `catchError` \err -> do
        liftIO (print err >> hFlush stdout)
        return JUnit
    -- Strip leading and trailing whitespace, preserving interior runs.
    -- dropSpaceTail buffers whitespace (reversed) until it sees whether a
    -- non-space character follows.
    trim xs = dropSpaceTail "" $ dropWhile isSpace xs
      where
        dropSpaceTail maybeStuff "" = ""
        dropSpaceTail maybeStuff (x:xs)
          | isSpace x = dropSpaceTail (x:maybeStuff) xs
          | null maybeStuff = x : dropSpaceTail "" xs
          | otherwise = reverse maybeStuff ++ x : dropSpaceTail "" xs
|
mkaput/janus
|
repl/Main.hs
|
Haskell
|
mit
| 1,531
|
{-# htermination (fromEnumRatio :: Ratio MyInt -> MyInt) #-}
import qualified Prelude
-- NOTE(review): apparently machine-generated input for the htermination
-- termination analyser (see the pragma above) — the Prelude is shadowed by
-- hand-rolled Peano-style types on purpose.  Do not hand-edit the
-- equations; regenerate instead.
data MyBool = MyTrue | MyFalse
data List a = Cons a (List a) | Nil
data Tup2 a b = Tup2 a b ;
data Double = Double MyInt MyInt ;
data Float = Float MyInt MyInt ;
data Integer = Integer MyInt ;
data MyInt = Pos Nat | Neg Nat ;
data Nat = Succ Nat | Zero ;
data Ratio a = CnPc a a;
truncateM0 xu (Tup2 m vv) = m;
fromIntMyInt :: MyInt -> MyInt
fromIntMyInt x = x;
properFractionQ1 xv xw (Tup2 q vw) = q;
-- Deliberate non-termination on MyFalse; 'stop MyTrue' has no equation,
-- so 'error' below is a pattern-match failure by construction.
stop :: MyBool -> a;
stop MyFalse = stop MyFalse;
error :: a;
error = stop MyTrue;
primMinusNatS :: Nat -> Nat -> Nat;
primMinusNatS (Succ x) (Succ y) = primMinusNatS x y;
primMinusNatS Zero (Succ y) = Zero;
primMinusNatS x Zero = x;
primDivNatS0 x y MyTrue = Succ (primDivNatS (primMinusNatS x y) (Succ y));
primDivNatS0 x y MyFalse = Zero;
primGEqNatS :: Nat -> Nat -> MyBool;
primGEqNatS (Succ x) Zero = MyTrue;
primGEqNatS (Succ x) (Succ y) = primGEqNatS x y;
primGEqNatS Zero (Succ x) = MyFalse;
primGEqNatS Zero Zero = MyTrue;
primDivNatS :: Nat -> Nat -> Nat;
primDivNatS Zero Zero = error;
primDivNatS (Succ x) Zero = error;
primDivNatS (Succ x) (Succ y) = primDivNatS0 x y (primGEqNatS x y);
primDivNatS Zero (Succ x) = Zero;
primQuotInt :: MyInt -> MyInt -> MyInt;
primQuotInt (Pos x) (Pos (Succ y)) = Pos (primDivNatS x (Succ y));
primQuotInt (Pos x) (Neg (Succ y)) = Neg (primDivNatS x (Succ y));
primQuotInt (Neg x) (Pos (Succ y)) = Neg (primDivNatS x (Succ y));
primQuotInt (Neg x) (Neg (Succ y)) = Pos (primDivNatS x (Succ y));
primQuotInt ww wx = error;
primModNatS0 x y MyTrue = primModNatS (primMinusNatS x (Succ y)) (Succ (Succ y));
primModNatS0 x y MyFalse = Succ x;
primModNatS :: Nat -> Nat -> Nat;
primModNatS Zero Zero = error;
primModNatS Zero (Succ x) = Zero;
primModNatS (Succ x) Zero = error;
primModNatS (Succ x) (Succ Zero) = Zero;
primModNatS (Succ x) (Succ (Succ y)) = primModNatS0 x y (primGEqNatS x (Succ y));
primRemInt :: MyInt -> MyInt -> MyInt;
primRemInt (Pos x) (Pos (Succ y)) = Pos (primModNatS x (Succ y));
primRemInt (Pos x) (Neg (Succ y)) = Pos (primModNatS x (Succ y));
primRemInt (Neg x) (Pos (Succ y)) = Neg (primModNatS x (Succ y));
primRemInt (Neg x) (Neg (Succ y)) = Neg (primModNatS x (Succ y));
primRemInt vy vz = error;
primQrmInt :: MyInt -> MyInt -> Tup2 MyInt MyInt;
primQrmInt x y = Tup2 (primQuotInt x y) (primRemInt x y);
quotRemMyInt :: MyInt -> MyInt -> Tup2 MyInt MyInt
quotRemMyInt = primQrmInt;
properFractionVu30 xv xw = quotRemMyInt xv xw;
properFractionQ xv xw = properFractionQ1 xv xw (properFractionVu30 xv xw);
properFractionR0 xv xw (Tup2 vx r) = r;
properFractionR xv xw = properFractionR0 xv xw (properFractionVu30 xv xw);
properFractionRatio :: Ratio MyInt -> Tup2 MyInt (Ratio MyInt)
properFractionRatio (CnPc x y) = Tup2 (fromIntMyInt (properFractionQ x y)) (CnPc (properFractionR x y) y);
truncateVu6 xu = properFractionRatio xu;
truncateM xu = truncateM0 xu (truncateVu6 xu);
-- truncate = integral part of properFraction; fromEnum on a Ratio is
-- defined as truncation.
truncateRatio :: Ratio MyInt -> MyInt
truncateRatio x = truncateM x;
fromEnumRatio :: Ratio MyInt -> MyInt
fromEnumRatio = truncateRatio;
|
ComputationWithBoundedResources/ara-inference
|
doc/tpdb_trs/Haskell/basic_haskell/fromEnum_2.hs
|
Haskell
|
mit
| 3,161
|
module Main where
import Interpreter
import PowAST
import qualified Parse as P
import qualified Data.Map as M
import Text.ParserCombinators.Parsec (parse)
-- | Start the REPL with empty variable and function tables.
main :: IO ()
main = runMoreCode M.empty M.empty
-- | Read one line, parse it as an expression, evaluate it wrapped in a
-- Write statement, and loop with the (possibly updated) variable table.
-- Parse errors are printed and evaluation is skipped.
--
-- NOTE(review): the prompt is not flushed; with line-buffered stdout "> "
-- may not appear until after input — consider hFlush stdout (needs
-- System.IO).
runMoreCode :: SymTab -> FunTab -> IO ()
runMoreCode vtab ftab = do
  putStr "> "
  input <- getLine
  case parse P.expr "" input of
    Right ast -> do
      (_, vtab') <- evalExpr vtab ftab (Write ast)
      putStr "\n"
      runMoreCode vtab' ftab
    Left e -> do
      -- 'print' replaces the original 'putStrLn $ show e' (identical output).
      print e
      putStr "\n"
      runMoreCode vtab ftab
|
rloewe/petulant-octo-wallhack
|
Repl.hs
|
Haskell
|
mit
| 563
|
-- Project Euler 31: count the ways to make 200p from UK coins.
-- A state is a list of eight pence amounts, one per denomination
-- [200,100,50,20,10,5,2,1] — presumably the pence currently held in each
-- coin size; 'next' appears to step to the next representation, so
-- 'answer' counts all representations reachable from "one 200p coin".
-- TODO confirm the enumeration is exhaustive and duplicate-free.
answer = length $ combinations sums
-- NOTE(review): 'coins' is never used; kept as documentation of the
-- denomination order assumed by the index arithmetic in 'next'.
coins = [200,100,50,20,10,5,2,1]
sums = [200,0,0,0,0,0,0,0]
-- Enumerate states until 'next' yields [] (no further representation).
combinations :: [Int] -> [[Int]]
combinations ts
  | next ts == [] = [ts]
  | otherwise = ts : combinations(next ts)
-- Step to the next state.  Partial: the '!!' indexing assumes exactly
-- eight entries; shorter input crashes at runtime.
next :: [Int] -> [Int]
next ts
  | ts!!6>0 = fst' 6 ++ [ (ts!!6)-2, (ts!!7)+2]
  | ts!!5>0&&mod2 = fst' 5 ++ [ts!!5- 5, sum(snd' 6)+ 5, 0]
  | ts!!5>0 = fst' 5 ++ [ts!!5- 5, sum(snd' 6)+ 4, 1]
  | ts!!4>0 = fst' 4 ++ [ts!!4- 10, sum (snd' 5)+ 10, 0,0]
  | ts!!3>0 = fst' 3 ++ [ts!!3- 20, sum (snd' 4)+ 20, 0,0,0]
  | ts!!2>0&&mod20 = fst' 2 ++ [ts!!2- 50, sum (snd' 3)+ 50, 0,0,0,0]
  | ts!!2>0 = fst' 2 ++ [ts!!2- 50, sum (snd' 3)+ 40, 10,0,0,0]
  | ts!!1>0 = fst' 1 ++ [ts!!1-100, sum (snd' 2)+100, 0,0,0,0,0]
  | ts!!0>0 = [0, 200, 0,0,0,0,0,0]
  | otherwise = [] where
    -- fst'/snd' split the state at position n (prefix / suffix).
    fst' n = fst $ splitAt n ts
    snd' n = snd $ splitAt n ts
    mod2 = (sum(snd' 6)+ 5) `mod` 2 == 0
    mod20 = (sum(snd' 3)+50) `mod` 20 == 0
-- down' :: [Int] -> [Int]
-- down' ts
-- | ts!!6>0 = fst' 6 ++ [(ts!!6)-2, (ts!!7)+2]
-- | take 2 (snd' 5)==[5,0] = fst' 5 ++ [0, sum (snd' 5), 0]
-- | ts!!5>0 = fst' 5 ++ [ts!!5-5, ts!!6+4, ts!!7+1]
-- | take 3 (snd' 4)==[10,0,0] = fst' 4 ++ [0, sum (snd' 4), 0, 0]
-- | ts!!4>0 = fst' 4 ++ [ts!!4-10, ts!!5+10] ++ snd' 6
-- | take 4 (snd' 3)==[20,0,0,0] = fst' 3 ++ [0, sum (snd' 3), 0, 0, 0]
-- | ts!!3>0 = fst' 3 ++ [ts!!3-20, ts!!4+20] ++ snd' 5
-- | take 5 (snd' 2)==[50,0,0,0,0] = fst' 2 ++ [0, sum (snd' 2), 0, 0, 0, 0]
-- | ts!!2>0 = fst' 2 ++ [ts!!2-50, ts!!3+40, ts!!4+10]++ snd' 5
-- | take 6 (snd' 1)==[100,0,0,0,0,0] = fst' 1 ++ [0, sum (snd' 1), 0, 0, 0, 0, 0]
-- | ts!!1>0 = fst' 1 ++ [(ts!!1)-100,(ts!!2)+100] ++ snd' 3
-- | ts!!0==200 = [0, 200, 0, 0, 0, 0, 0, 0]
-- | otherwise = [] where
-- fst' n = fst $ splitAt n ts
-- snd' n = snd $ splitAt n ts
-- downs :: [Int] -> [[Int]]
-- downs ts | next == [] = ts
-- | otherwise = ts : downs next where
-- next = down ts
--
-- down :: [Int] -> [Int]
-- down ts | ts!!6 > 0 = [zipWith (+) ts [ 0, 0, 0, 0, 0, 0,-1, 2] | i<-[1..(ts!!6)]] -- 2p
-- | ts!!5 > 0 = [zipWith (+) ts [ 0, 0, 0, 0, 0,-1, 2, 1] | i<-[1..(ts!!6)]] -- 5p
-- | ts!!4 > 0 = [zipWith (+) ts [ 0, 0, 0, 0,-1, 2, 0, 0] | i<-[1..(ts!!6)]] -- 10p
-- | ts!!3 > 0 = [zipWith (+) ts [ 0, 0, 0,-1, 2, 0, 0, 0] | i<-[1..(ts!!6)]] -- 20p
-- | ts!!2 > 0 = [zipWith (+) ts [ 0, 0,-1, 2, 1, 0, 0, 0] | i<-[1..(ts!!6)]] -- 50p
-- | ts!!1 > 0 = [zipWith (+) ts [ 0,-1, 2, 0, 0, 0, 0, 0] | i<-[1..(ts!!6)]] -- 100p
-- | ts!!0 > 0 = [zipWith (+) ts [-1, 2, 0, 0, 0, 0, 0, 0] | i<-[1..(ts!!6)]] -- 200p
-- | otherwise = []
|
yuto-matsum/contest-util-hs
|
src/Euler/031.hs
|
Haskell
|
mit
| 3,090
|
module Options (
extractOptions,
isHelp,
isVerbose,
imgOptions,
eqInlineOptions,
fontSize,
outputFile,
packages,
showHelp
) where
import System.Console.GetOpt
import qualified Data.Text as T (pack, unpack, split)
-- | Command-line flags recognised by texerupter.  The String payloads
-- carry the corresponding option arguments verbatim.
data Flag = Help | Verbose | Package String | FontSize String | Output String
          | ImgOpts String | EqInlineOpts String deriving (Show,Eq)
-- | GetOpt descriptors mapping option syntax to 'Flag' values.
options :: [OptDescr Flag]
options =
 [ Option ['h'] ["help"] (NoArg Help) "Show this help and exit"
 , Option ['v'] ["verbose"] (NoArg Verbose) "Verbose and keep LaTeX files"
 , Option ['o'] ["output"] (ReqArg Output "FILE") "Output FILE"
 , Option ['f'] ["fontsize"] (ReqArg FontSize "FONTSIZE") "change the font size in LaTeX files"
 , Option ['p'] ["package"] (ReqArg Package "PACKAGES")
   "Comma separated list of packages to be included in the LaTeX"
 , Option [] ["img"] (ReqArg ImgOpts "STRING")
   "Attributes for <img>-tags generated from the <tex>-tags"
 , Option [] ["eq-inline"] (ReqArg EqInlineOpts "STRING")
   "Attributes for <img>-tags generated from the <$>-tags"
 ]
-- | Parse argv with GNU-style option permutation.  On any parse error,
-- fail with the accumulated messages followed by the usage text.
extractOptions :: [String] -> IO ([Flag], [String])
extractOptions argv =
  case getOpt Permute options argv of
    (flags, positional, []) -> return (flags, positional)
    (_, _, errs)            -> ioError (userError (concat errs ++ showHelp))
-- | Was @-h/--help@ given?
isHelp :: [Flag] -> Bool
isHelp = elem Help

-- | Was @-v/--verbose@ given?
isVerbose :: [Flag] -> Bool
isVerbose = elem Verbose
-- | Output file name: the first @-o@ argument, or the given default.
-- (Replaces four copies of hand-rolled "first matching flag" recursion
-- with pattern comprehensions; first occurrence still wins.)
outputFile :: String -> [Flag] -> String
outputFile defaultname flags =
  case [str | Output str <- flags] of
    (str:_) -> str
    []      -> defaultname

-- | Attributes for <img> tags generated from <$> tags: the first
-- @--eq-inline@ argument, or the empty string.
eqInlineOptions :: [Flag] -> String
eqInlineOptions flags =
  case [str | EqInlineOpts str <- flags] of
    (str:_) -> str
    []      -> ""

-- | Attributes for <img> tags generated from <tex> tags: the first
-- @--img@ argument, or the empty string.
imgOptions :: [Flag] -> String
imgOptions flags =
  case [str | ImgOpts str <- flags] of
    (str:_) -> str
    []      -> ""

-- | Font size for the LaTeX files: the first @-f@ argument, or the
-- given default.
fontSize :: String -> [Flag] -> String
fontSize sizedefault flags =
  case [str | FontSize str <- flags] of
    (str:_) -> str
    []      -> sizedefault
-- | All @-p@ package lists, comma-split and concatenated in argv order.
packages :: [Flag] -> [String]
packages flags = concatMap split [str | Package str <- flags]
  where
    -- Split one comma-separated argument into package names.
    split = map T.unpack . T.split (== ',') . T.pack
-- | Usage text derived from the 'options' table.
showHelp :: String
showHelp = usageInfo "USAGE: texerupter [-hv] [-o FILE] [-f FONTSIZE]\
 \ [-p PACKAGES] [--img=STRING] [--eq-inline=STRING] FILE \n" options
|
dino-r/TexErupter
|
src/Options.hs
|
Haskell
|
mit
| 2,863
|
import Network.HTTP.Server
import Network.HTTP.Server.Logger
import Network.HTTP.Server.HtmlForm as Form
import Network.URL as URL
import Text.JSON
import Text.JSON.String(runGetJSON)
import Text.XHtml
import Codec.Binary.UTF8.String
import Control.Exception(try,SomeException)
import System.FilePath
import Data.List(isPrefixOf)
-- | Demo HTTP server on port 8888.
-- GET serves the requested path from disk (".html" as HTML, anything else
-- as script), with an XHTML 404 page on read failure.  POST echoes the
-- body back, dispatching on Content-Type (urlencoded form, multipart
-- form, or JSON).  Any other method is rejected.
main :: IO ()
main = serverWith defaultConfig { srvLog = stdLogger, srvPort = 8888 }
  $ \_ url request ->
  case rqMethod request of
    GET ->
      do let ext = takeExtension (url_path url)
         mb_txt <- try (readFile (url_path url))
         case mb_txt of
           Right a -> return $ if ext == ".html"
                                  then sendHTML OK (primHtml a)
                                  else sendScript OK a
           Left e -> return $ sendHTML NotFound $
                     thehtml $ concatHtml
                       [ thead noHtml
                       , body $ concatHtml
                           [ toHtml "I could not find "
                           , toHtml $ exportURL url { url_type = HostRelative }
                           , toHtml ", so I made this with XHTML combinators. "
                           , toHtml $ hotlink "example.html" (toHtml "Try this instead.")
                           ]
                       ]
             -- Pin 'try' to SomeException without a type application.
             where _hack :: SomeException
                   _hack = e -- to specify the type
    POST ->
      return $
        case findHeader HdrContentType request of
          Just ty
            | "application/x-www-form-urlencoded" `isPrefixOf` ty ->
                case URL.importParams txt of
                  Just fields -> sendHTML OK $
                    toHtml "You posted a URL encoded form:" +++ br +++
                    toHtml (show fields) +++ br +++
                    hotlink "example.html" (toHtml "back")
                  Nothing -> sendHTML BadRequest $
                    toHtml "Could not understand URL encoded form data"
            | "multipart/form-data" `isPrefixOf` ty ->
                case Form.fromRequest request of
                  Just fields -> sendHTML OK $
                    toHtml "You posted a multipart form:" +++ br +++
                    toHtml (show (Form.toList fields)) +++ br +++
                    hotlink "example.html" (toHtml "back")
                  Nothing -> sendHTML BadRequest $
                    toHtml "Could not understand multipart form data"
            | "application/json" `isPrefixOf` ty ->
                case runGetJSON readJSValue txt of
                  Right val -> sendJSON OK $
                    JSObject $ toJSObject [("success", JSString $ toJSString "hello")]
                  Left err -> sendJSON BadRequest $
                    JSObject $ toJSObject [("error", JSString $ toJSString err)]
          x -> sendHTML BadRequest $
            toHtml $ "I don't know how to deal with POSTed content" ++
                     " of type " ++ show x
      -- we assume UTF8 encoding
      where txt = decodeString (rqBody request)
    _ -> return $ sendHTML BadRequest $ toHtml "I don't understand"
-- | Wrap a plain-text body: Content-Length is computed over the UTF-8
-- encoded bytes ('encodeString' yields one Char per byte, so 'length' is
-- the byte count).
--
-- NOTE(review): both header lines below use 'HdrContentEncoding', which
-- looks wrong — \"UTF-8\" is a charset (belongs in Content-Type) and
-- \"text/plain\" is a media type, so the second line almost certainly
-- meant 'HdrContentType'.  Left as-is because the callers below each add
-- their own Content-Type on top and a fix here could produce duplicate
-- headers; confirm 'insertHeader' semantics before changing.
sendText :: StatusCode -> String -> Response String
sendText s v = insertHeader HdrContentLength (show (length txt))
             $ insertHeader HdrContentEncoding "UTF-8"
             $ insertHeader HdrContentEncoding "text/plain"
             $ (respond s :: Response String) { rspBody = txt }
  where txt = encodeString v
-- | Send a rendered JSON value as application/json.
sendJSON :: StatusCode -> JSValue -> Response String
sendJSON s v = insertHeader HdrContentType "application/json"
             $ sendText s (showJSValue v "")
-- | Send rendered XHTML as text/html.
sendHTML :: StatusCode -> Html -> Response String
sendHTML s v = insertHeader HdrContentType "text/html"
             $ sendText s (renderHtml v)
-- | Send a JavaScript body as application/x-javascript.
sendScript :: StatusCode -> String -> Response String
sendScript s v = insertHeader HdrContentType "application/x-javascript"
             $ sendText s v
|
GaloisInc/http-server
|
example/SimpleWeb.hs
|
Haskell
|
mit
| 3,778
|
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE FlexibleContexts #-}
{- |
Module : Control.Lens.SemiIso
Description : Semi-isomorphisms.
Copyright : (c) Paweł Nowak
License : MIT
Maintainer : Paweł Nowak <pawel834@gmail.com>
Stability : experimental
Semi-isomorphisms were motivated by reversible parsing/pretty printing. For example
we can map a number 12 to a string "12" (and the other way around). But the isomorphism
is partial - we cannot map the string "forty-two" to a number.
Another example: when parsing a list of numbers like "12_53___42" we want to skip underscores
between numbers (and forget about them). During pretty printing we have to decide how many
underscores should we insert between numbers. Let's say we insert a single underscore. But now
@prettyPrint (parse "12_53___42") = "12_53_42"@ and not "12_53___42". We have to weaken
isomorphism laws to allow such semi-iso. Notice that
> parse (prettyPrint (parse "12_53___42")) = parse "12_53___42"
> prettyPrint (parse (prettyPrint [12, 53, 42])) = prettyPrint [12, 53, 42]
Our semi-isomorphisms will obey weakened laws:
> apply i >=> unapply i >=> apply i = apply i
> unapply i >=> apply i >=> unapply i = unapply i
When you see an "Either String a", the String is usually an error message.
Disclaimer: the name "semi-isomorphism" is fictitious and made up for this library.
Any resemblance to known mathematical objects of the same name is purely coincidental.
-}
module Control.Lens.SemiIso (
-- * Semi-isomorphism types.
SemiIso,
SemiIso',
ASemiIso,
ASemiIso',
-- * Patterns.
pattern SemiIso,
-- * Constructing semi-isos.
semiIso,
cloneSemiIso,
-- * Reified semi-isos.
ReifiedSemiIso'(..),
reifySemiIso,
-- * Consuming semi-isos.
apply,
unapply,
withSemiIso,
viewSemiIso,
-- * Common semi-isomorphisms and isomorphisms.
unit,
swapped,
associated,
constant,
exact,
bifiltered,
alwaysFailing,
-- * Semi-isos for numbers.
_Negative,
-- * Transforming semi-isos.
rev,
prod,
elimFirst,
elimSecond,
attempt,
attemptAp,
attemptUn,
attempt_,
attemptAp_,
attemptUn_,
-- * Bidirectional folds.
bifoldr,
bifoldr1,
bifoldl,
bifoldl1,
bifoldr_,
bifoldr1_,
bifoldl_,
bifoldl1_
) where
import Control.Arrow (Kleisli(..))
import Control.Category
import Control.Category.Structures
import Control.Lens.Internal.SemiIso
import Control.Lens.Iso
import Data.Foldable
import Data.Functor.Identity
import Data.Profunctor.Exposed
import Data.Traversable
import Prelude hiding (id, (.))
-- | A semi-isomorphism is a partial isomorphism with weakened laws.
--
-- Should satisfy laws:
--
-- > apply i >=> unapply i >=> apply i = apply i
-- > unapply i >=> apply i >=> unapply i = unapply i
--
-- Every 'Prism' is a 'SemiIso'.
-- Every 'Iso' is a 'Prism'.
type SemiIso s t a b = forall p f. (Exposed (Either String) p, Traversable f)
    => p a (f b) -> p s (f t)
-- | Non-polymorphic variant of 'SemiIso'.
type SemiIso' s a = SemiIso s s a a
-- | When you see this as an argument to a function, it expects a 'SemiIso'.
type ASemiIso s t a b = Retail a b a (Identity b) -> Retail a b s (Identity t)
-- | When you see this as an argument to a function, it expects a 'SemiIso''.
type ASemiIso' s a = ASemiIso s s a a
-- | A nice pattern synonym for SemiIso's. Gives you the two functions, just like
-- 'viewSemiIso' or 'fromSemiIso'.
--
-- Note: defined with @<-@ only, so it is match-only; it cannot be used to
-- construct a semi-iso.
pattern SemiIso sa bt <- (viewSemiIso -> (sa, bt))
-- | A semi-iso stored in a container.
newtype ReifiedSemiIso' s a = ReifiedSemiIso' { runSemiIso :: SemiIso' s a }
instance Category ReifiedSemiIso' where
id = ReifiedSemiIso' id
ReifiedSemiIso' f . ReifiedSemiIso' g = ReifiedSemiIso' (g . f)
instance Products ReifiedSemiIso' where
-- TODO: pattern synonyms dont work here for some reason
first (ReifiedSemiIso' ai) = withSemiIso ai $ \f g ->
ReifiedSemiIso' $ cloneSemiIso $
semiIso (runKleisli $ first $ Kleisli f)
(runKleisli $ first $ Kleisli g)
second (ReifiedSemiIso' ai) = withSemiIso ai $ \f g ->
ReifiedSemiIso' $ cloneSemiIso $
semiIso (runKleisli $ second $ Kleisli f)
(runKleisli $ second $ Kleisli g)
ReifiedSemiIso' ai *** ReifiedSemiIso' ai' = ReifiedSemiIso' $
withSemiIso ai $ \f g -> withSemiIso ai' $ \f' g' ->
semiIso (runKleisli $ Kleisli f *** Kleisli f')
(runKleisli $ Kleisli g *** Kleisli g')
instance Coproducts ReifiedSemiIso' where
left (ReifiedSemiIso' ai) = withSemiIso ai $ \f g ->
ReifiedSemiIso' $ cloneSemiIso $
semiIso (runKleisli $ left $ Kleisli f)
(runKleisli $ left $ Kleisli g)
right (ReifiedSemiIso' ai) = withSemiIso ai $ \f g ->
ReifiedSemiIso' $ cloneSemiIso $
semiIso (runKleisli $ right $ Kleisli f)
(runKleisli $ right $ Kleisli g)
ReifiedSemiIso' ai +++ ReifiedSemiIso' ai' = ReifiedSemiIso' $
withSemiIso ai $ \f g -> withSemiIso ai' $ \f' g' ->
semiIso (runKleisli $ Kleisli f +++ Kleisli f')
(runKleisli $ Kleisli g +++ Kleisli g')
instance CatPlus ReifiedSemiIso' where
cempty = ReifiedSemiIso' $ alwaysFailing "cempty"
ReifiedSemiIso' ai /+/ ReifiedSemiIso' ai' = ReifiedSemiIso' $
withSemiIso ai $ \f g -> withSemiIso ai' $ \f' g' ->
semiIso (runKleisli $ Kleisli f /+/ Kleisli f')
(runKleisli $ Kleisli g /+/ Kleisli g')
-- | Constructs a semi isomorphism from a pair of functions that can
-- fail with an error message.
--
-- @sa@ is the apply direction (@s -> Either String a@) and @bt@ the unapply
-- direction (@b -> Either String t@).  The pair is encoded as a profunctor
-- transformation: 'expose' surfaces the @Either String@ layer of the
-- underlying 'Exposed' profunctor, 'dimap' pre-composes @sa@ and
-- post-composes @bt@ (pushed under the traversable layer with
-- @sequenceA . fmap bt@), and 'merge' folds the 'Either' back in.
semiIso :: (s -> Either String a) -> (b -> Either String t) -> SemiIso s t a b
semiIso sa bt = merge . dimap sa (sequenceA . fmap bt) . expose
-- | Clones a semi-iso.
cloneSemiIso :: ASemiIso s t a b -> SemiIso s t a b
cloneSemiIso (SemiIso sa bt) = semiIso sa bt
-- | Applies the 'SemiIso'.
apply :: ASemiIso s t a b -> s -> Either String a
apply (SemiIso sa _) = sa
-- | Applies the 'SemiIso' in the opposite direction.
unapply :: ASemiIso s t a b -> b -> Either String t
unapply (SemiIso _ bt) = bt
-- | Extracts the two functions that characterize the 'SemiIso'.
--
-- Runs the semi-iso on a 'Retail' profunctor seeded with 'Right' in both
-- directions, which records the two component functions; the continuation
-- @k@ then receives the apply direction (@s -> Either String a@) and the
-- unapply direction (@b -> Either String t@, unwrapped from the 'Identity'
-- layer with @runIdentity . sequenceA@).
withSemiIso :: ASemiIso s t a b
            -> ((s -> Either String a) -> (b -> Either String t) -> r)
            -> r
withSemiIso ai k = case ai (Retail Right (Right . Identity)) of
    Retail sa bt -> k sa (rmap (runIdentity . sequenceA) bt)
-- | Extracts the two functions that characterize the 'SemiIso'.
viewSemiIso :: ASemiIso s t a b -> (s -> Either String a, b -> Either String t)
viewSemiIso ai = withSemiIso ai (,)
-- | Reifies a semi-iso.
reifySemiIso :: ASemiIso' s a -> ReifiedSemiIso' s a
reifySemiIso ai = ReifiedSemiIso' $ cloneSemiIso ai
-- | A trivial isomorphism between @a@ and @(a, ())@.
unit :: Iso' a (a, ())
unit = iso (\a -> (a, ())) fst
-- | Products are associative: regroup a right-nested pair as a left-nested one.
associated :: Iso' (a, (b, c)) ((a, b), c)
associated = iso assoc unassoc
  where
    assoc   (a, (b, c)) = ((a, b), c)
    unassoc ((a, b), c) = (a, (b, c))
-- | \-> Always returns the argument.
--
-- \<- Maps everything to a @()@.
--
-- Note that this isn't an @Iso'@ because
--
-- > unapply (constant x) >=> apply (constant x) /= id
--
-- But SemiIso laws do hold.
constant :: a -> SemiIso' () a
constant x = semiIso (const (Right x)) (const (Right ()))
-- | \-> Filters out all values not equal to the argument.
--
-- \<- Always returns the argument.
exact :: Eq a => a -> SemiIso' a ()
exact x = semiIso forward backward
  where
    forward y
      | x == y    = Right ()
      | otherwise = Left "exact: not equal"
    backward _ = Right x
-- | Like 'filtered' but checks the predicate in both ways.
bifiltered :: (a -> Bool) -> SemiIso' a a
bifiltered p = semiIso check check
  where
    check x =
        if p x
          then Right x
          else Left "bifiltered: predicate failed"
-- | A semi-iso that fails in both directions with the given message.
alwaysFailing :: String -> SemiIso s t a b
alwaysFailing msg = semiIso err err
  where
    -- A function binding (not a pattern binding) so it stays polymorphic
    -- enough to serve as both directions.
    err _ = Left msg
-- | \-> Matches only negative numbers, turns it into a positive one.
--
-- \<- Matches only positive numbers, turns it into a negative one.
_Negative :: Real a => SemiIso' a a
_Negative = semiIso toPositive toNegative
  where
    toPositive x
      | x < 0     = Right (negate x)
      | otherwise = Left "_Negative: apply expected a negative number"
    toNegative x
      | x >= 0    = Right (negate x)
      | otherwise = Left "_Negative: unapply expected a positive number"
-- | Reverses a 'SemiIso': swaps its apply and unapply directions.
rev :: ASemiIso s t a b -> SemiIso b a t s
rev ai = withSemiIso ai (flip semiIso)
-- | A product of semi-isos.
prod :: ASemiIso' s a -> ASemiIso' t b -> SemiIso' (s, t) (a, b)
prod a b = runSemiIso (reifySemiIso a *** reifySemiIso b)
-- | Uses an @SemiIso' a ()@ to construct a @SemiIso' (a, b) b@,
-- i.e. eliminates the first pair element.
elimFirst :: ASemiIso' s () -> SemiIso' (s, t) t
elimFirst ai = swapped . elimSecond ai
-- | Uses an @SemiIso b ()@ to construct a @SemiIso (a, b) a@,
-- i.e. eliminates the second pair element.
elimSecond :: ASemiIso' s () -> SemiIso' (t, s) t
elimSecond ai = runSemiIso (id *** reifySemiIso ai) . rev unit
-- | Transforms the semi-iso so that applying it in both directions never fails,
-- but instead catches any errors and returns them as an @Either String a@.
attempt :: ASemiIso s t a b -> SemiIso s (Either String t) (Either String a) b
attempt = attemptAp . attemptUn
-- | Transforms the semi-iso so that applying it in direction (->) never fails,
-- but instead catches any errors and returns them as an @Either String a@.
attemptAp :: ASemiIso s t a b -> SemiIso s t (Either String a) b
attemptAp (SemiIso sa bt) = semiIso (Right . sa) bt
-- | Transforms the semi-iso so that applying it in direction (<-) never fails,
-- but instead catches any errors and returns them as an @Either String a@.
attemptUn :: ASemiIso s t a b -> SemiIso s (Either String t) a b
attemptUn (SemiIso sa bt) = semiIso sa (Right . bt)
-- | Collapses an 'Either' to a 'Maybe', throwing away the error value.
discard :: Either a b -> Maybe b
discard (Left _)  = Nothing
discard (Right b) = Just b
-- | Transforms the semi-iso like 'attempt', but ignores the error message.
attempt_ :: ASemiIso s t a b -> SemiIso s (Maybe t) (Maybe a) b
attempt_ ai = rmap (fmap discard) . attempt ai . lmap discard
-- | Transforms the semi-iso like 'attemptAp', but ignores the error message.
--
-- Very useful when you want to bifold using a prism.
attemptAp_ :: ASemiIso s t a b -> SemiIso s t (Maybe a) b
attemptAp_ ai = attemptAp ai . lmap discard
-- | Transforms the semi-iso like 'attemptUn', but ignores the error message.
attemptUn_ :: ASemiIso s t a b -> SemiIso s (Maybe t) a b
attemptUn_ ai = rmap (fmap discard) . attemptUn ai
-- | Monadic counterpart of 'foldl1' (or non-empty list counterpart of 'foldlM').
--
-- Fails (via 'fail') on an empty list.
foldlM1 :: Monad m => (a -> a -> m a) -> [a] -> m a
foldlM1 _ []       = fail "foldlM1: empty list"
foldlM1 f (x : xs) = foldlM f x xs
-- | Monadic counterpart of 'foldr1' (or non-empty list counterpart of 'foldrM').
--
-- Fails (via 'fail') on an empty list.
foldrM1 :: Monad m => (a -> a -> m a) -> [a] -> m a
foldrM1 _ []       = fail "foldrM1: empty list"
foldrM1 _ [x]      = return x
foldrM1 f (x : xs) = f x =<< foldrM1 f xs
-- | Monadic counterpart of 'unfoldr'.
--
-- Repeatedly runs the step function until it yields 'Nothing', returning
-- the final seed together with the produced elements (in production order).
unfoldrM :: Monad m => (a -> m (Maybe (b, a))) -> a -> m (a, [b])
unfoldrM f = go
  where
    go seed = f seed >>= maybe (return (seed, [])) step
    step (b, seed') = do
        (finalSeed, bs) <- go seed'
        return (finalSeed, b : bs)
-- | A variant of 'unfoldrM' that always produces a non-empty list.
--
-- When the step function yields 'Nothing', the final seed itself becomes
-- the last element of the result.
unfoldrM1 :: Monad m => (a -> m (Maybe (a, a))) -> a -> m [a]
unfoldrM1 f = go
  where
    go seed = f seed >>= \r -> case r of
        Nothing         -> return [seed]
        Just (x, seed') -> fmap (x :) (go seed')
-- | Monadic counterpart of 'unfoldl'.
--
-- Like 'unfoldrM', but accumulates elements in reverse: each newly produced
-- element is prepended, so the first element produced ends up last.
unfoldlM :: Monad m => (a -> m (Maybe (a, b))) -> a -> m (a, [b])
unfoldlM f start = loop start []
  where
    loop seed acc = f seed >>= \r -> case r of
        Just (seed', b) -> loop seed' (b : acc)
        Nothing         -> return (seed, acc)
-- | A variant of 'unfoldlM' that always produces a non-empty list.
--
-- When the step function yields 'Nothing', the final seed is prepended to
-- the accumulated (reversed) elements.
unfoldlM1 :: Monad m => (a -> m (Maybe (a, a))) -> a -> m [a]
unfoldlM1 f start = loop start []
  where
    loop seed acc = f seed >>= \r -> case r of
        Just (seed', x) -> loop seed' (x : acc)
        Nothing         -> return (seed : acc)
-- | Constructs a bidirectional fold. Works with prisms.
--
-- \-> Right unfolds using the (->) part of the given semi-iso, until it fails.
--
-- \<- Right folds using the (<-) part of the given semi-iso.
--
-- Implemented by soft-failing the apply direction with 'attemptAp_' (errors
-- become 'Nothing', which terminates the unfold) and delegating to the
-- generic 'bifoldr_'.
bifoldr :: ASemiIso' a (b, a) -> SemiIso' a (a, [b])
bifoldr = bifoldr_ . attemptAp_
-- | Constructs a bidirectional fold. Works with prisms.
--
-- \-> Right unfolds using the (->) part of the given semi-iso, until it fails.
-- It should produce a non-empty list.
--
-- \<- Right folds a non-empty list using the (<-) part of the given semi-iso.
bifoldr1 :: ASemiIso' a (a, a) -> SemiIso' a [a]
bifoldr1 = bifoldr1_ . attemptAp_
-- | Constructs a bidirectional fold. Works with prisms.
--
-- \-> Left unfolds using the (->) part of the given semi-iso, until it fails.
--
-- \<- Left folds using the (<-) part of the given semi-iso.
bifoldl :: ASemiIso' a (a, b) -> SemiIso' a (a, [b])
bifoldl = bifoldl_ . attemptAp_
-- | Constructs a bidirectional fold. Works with prisms.
--
-- \-> Left unfolds using the (->) part of the given semi-iso, until it fails.
-- It should produce a non-empty list.
--
-- \<- Left folds a non-empty list using the (<-) part of the given semi-iso.
bifoldl1 :: ASemiIso' a (a, a) -> SemiIso' a [a]
bifoldl1 = bifoldl1_ . attemptAp_
-- | Constructs a bidirectional fold.
--
-- \-> Right unfolds using the (->) part of the given semi-iso.
--
-- \<- Right folds using the (<-) part of the given semi-iso.
bifoldr_ :: ASemiIso a a (Maybe (b, a)) (b, a) -> SemiIso' a (a, [b])
bifoldr_ ai = semiIso (uf ai) (f ai)
  where
    -- Fold back: 'unapply' yields (b, a) -> Either String a; 'curry' shapes
    -- it into the step function 'foldrM' expects, and 'uncurry' adapts the
    -- (seed, list) input pair.
    f = uncurry . foldrM . curry . unapply
    -- Unfold: 'apply' yields a -> Either String (Maybe (b, a)), which is
    -- exactly the step function 'unfoldrM' expects.
    uf = unfoldrM . apply
-- | Constructs a bidirectional fold.
--
-- \-> Right unfolds using the (->) part of the given semi-iso. It should
-- produce a non-empty list.
--
-- \<- Right folds a non-empty list using the (<-) part of the given semi-iso.
bifoldr1_ :: ASemiIso a a (Maybe (a, a)) (a, a) -> SemiIso' a [a]
bifoldr1_ ai = semiIso (uf ai) (f ai)
where
f = foldrM1 . curry . unapply
uf = unfoldrM1 . apply
-- | Constructs a bidirectional fold.
--
-- \-> Left unfolds using the (->) part of the given semi-iso.
--
-- \<- Left folds using the (<-) part of the given semi-iso.
bifoldl_ :: ASemiIso a a (Maybe (a, b)) (a, b) -> SemiIso' a (a, [b])
bifoldl_ ai = semiIso (uf ai) (f ai)
where
f = uncurry . foldlM . curry . unapply
uf = unfoldlM . apply
-- | Constructs a bidirectional fold.
--
-- \-> Left unfolds using the (->) part of the given semi-iso. It should
-- produce a non-empty list.
--
-- \<- Left folds a non-empty list using the (<-) part of the given semi-iso.
bifoldl1_ :: ASemiIso a a (Maybe (a, a)) (a, a) -> SemiIso' a [a]
bifoldl1_ ai = semiIso (uf ai) (f ai)
where
f = foldlM1 . curry . unapply
uf = unfoldlM1 . apply
|
pawel-n/semi-iso
|
Control/Lens/SemiIso.hs
|
Haskell
|
mit
| 15,262
|
{-# LANGUAGE CPP #-}
{-# LANGUAGE TemplateHaskell #-}
-- Copyright (C) 2012 John Millikin <jmillikin@gmail.com>
--
-- See license.txt for details
module OptionsTests.StringParsing
( suite_StringParsing
) where
import Control.Applicative
import Test.Chell
import Options
data StringOptions = StringOptions
{ optString :: String
, optString_defA :: String
, optString_defU :: String
}
instance Options StringOptions where
defineOptions = pure StringOptions
<*> simpleOption "string" "" ""
-- String, ASCII default
<*> simpleOption "string_defA" "a" ""
-- String, Unicode default
<*> simpleOption "string_defU" "\12354" ""
suite_StringParsing :: Suite
suite_StringParsing = suite "string-parsing"
[ test_Defaults
, test_Ascii
, test_UnicodeValid
, test_UnicodeInvalid
]
test_Defaults :: Test
test_Defaults = assertions "defaults" $ do
let opts = defaultOptions
$expect (equal (optString_defA opts) "a")
$expect (equal (optString_defU opts) "\12354")
test_Ascii :: Test
test_Ascii = assertions "ascii" $ do
let parsed = parseOptions ["--string=a"]
let Just opts = parsedOptions parsed
$expect (equal (optString opts) "a")
test_UnicodeValid :: Test
test_UnicodeValid = assertions "unicode-valid" $ do
#if defined(OPTIONS_ENCODING_UTF8)
let parsed = parseOptions ["--string=\227\129\130"]
#else
let parsed = parseOptions ["--string=\12354"]
#endif
let Just opts = parsedOptions parsed
$expect (equal (optString opts) "\12354")
test_UnicodeInvalid :: Test
test_UnicodeInvalid = assertions "unicode-invalid" $ do
#if __GLASGOW_HASKELL__ >= 704
let parsed = parseOptions ["--string=\56507"]
let expectedString = "\56507"
#elif __GLASGOW_HASKELL__ >= 702
let parsed = parseOptions ["--string=\61371"]
let expectedString = "\61371"
#else
let parsed = parseOptions ["--string=\187"]
let expectedString = "\56507"
#endif
let Just opts = parsedOptions parsed
$expect (equal (optString opts) expectedString)
|
jmillikin/haskell-options
|
tests/OptionsTests/StringParsing.hs
|
Haskell
|
mit
| 1,981
|
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module AEAD.XChaCha20Poly1305Properties (
testAEADXChaCha20
) where
import Util
import Crypto.Saltine.Core.AEAD.XChaCha20Poly1305
import Crypto.Saltine.Class (decode)
import Crypto.Saltine.Internal.AEAD.XChaCha20Poly1305 as Bytes
import qualified Data.ByteString as S
import Data.Maybe (fromJust)
import Test.Framework.Providers.QuickCheck2
import Test.Framework
import Test.QuickCheck (Property, (==>))
import Test.QuickCheck.Arbitrary
instance Arbitrary Nonce where
arbitrary =
do bs <- S.pack <$> vector Bytes.aead_xchacha20poly1305_ietf_npubbytes
pure $ fromJust (decode bs)
instance Arbitrary Key where
arbitrary =
do bs <- S.pack <$> vector Bytes.aead_xchacha20poly1305_ietf_keybytes
pure $ fromJust (decode bs)
-- | Ciphertext can be decrypted
rightInverseProp :: Key -> Nonce -> Message -> Message -> Bool
rightInverseProp k n (Message bs) (Message aad) =
Just bs == aeadOpen k n (aead k n bs aad) aad
-- | Detached ciphertext/tag can be decrypted
rightInverseDetachedProp :: Key -> Nonce -> Message -> Message -> Bool
rightInverseDetachedProp k n (Message bs) (Message aad) =
let (tag,ct) = aeadDetached k n bs aad
in Just bs == aeadOpenDetached k n tag ct aad
-- | Ciphertext cannot be decrypted if the ciphertext is perturbed
rightInverseFailureProp :: Key -> Nonce -> Message -> Message -> Perturb -> Property
rightInverseFailureProp k n (Message bs) (Message aad) p =
S.length bs /= 0 ==>
let ct = aead k n bs aad
fakeCT = perturb ct p
in fakeCT /= ct ==> Nothing == aeadOpen k n fakeCT aad
-- | Ciphertext cannot be decrypted if the aad is perturbed
rightInverseAADFailureProp :: Key -> Nonce -> Message -> Message -> Message -> Property
rightInverseAADFailureProp k n (Message bs) (Message aad) (Message aad2) =
aad /= aad2 ==> Nothing == aeadOpen k n (aead k n bs aad) aad2
-- | Ciphertext cannot be decrypted if the tag is perturbed
rightInverseTagFailureProp :: Key -> Nonce -> Message -> Message -> Message -> Property
rightInverseTagFailureProp k n (Message bs) (Message aad) (Message newTag) =
let (tag,ct) = aeadDetached k n bs aad
in newTag /= tag ==> Nothing == aeadOpenDetached k n newTag ct aad
-- | Ciphertext cannot be decrypted if the ciphertext is perturbed
rightInverseFailureDetachedProp :: Key -> Nonce -> Message -> Message -> Perturb -> Property
rightInverseFailureDetachedProp k n (Message bs) (Message aad) p@(Perturb pBytes) =
let (tag,ct) = aeadDetached k n bs aad
in S.length bs > length pBytes ==>
Nothing == aeadOpenDetached k n tag (perturb ct p) aad
-- | Ciphertext cannot be decrypted with a different key
cannotDecryptKeyProp :: Key -> Key -> Nonce -> Message -> Message -> Property
cannotDecryptKeyProp k1 k2 n (Message bs) (Message aad) =
let ct = aead k1 n bs aad
in k1 /= k2 ==> Nothing == aeadOpen k2 n ct aad
-- | Ciphertext cannot be decrypted with a different key
cannotDecryptKeyDetachedProp :: Key -> Key -> Nonce -> Message -> Message -> Property
cannotDecryptKeyDetachedProp k1 k2 n (Message bs) (Message aad) =
let (tag,ct) = aeadDetached k1 n bs aad
in k1 /= k2 ==> Nothing == aeadOpenDetached k2 n tag ct aad
-- | Ciphertext cannot be decrypted with a different nonce
cannotDecryptNonceProp :: Key -> Nonce -> Nonce -> Message -> Message -> Property
cannotDecryptNonceProp k n1 n2 (Message bs) (Message aad) =
n1 /= n2 ==> Nothing == aeadOpen k n2 (aead k n1 bs aad) aad
-- | Ciphertext cannot be decrypted with a different nonce
cannotDecryptNonceDetachedProp :: Key -> Nonce -> Nonce -> Message -> Message -> Property
cannotDecryptNonceDetachedProp k n1 n2 (Message bs) (Message aad) =
let (tag,ct) = aeadDetached k n1 bs aad
in n1 /= n2 ==> Nothing == aeadOpenDetached k n2 tag ct aad
testAEADXChaCha20 :: Test
testAEADXChaCha20 = buildTest $ do
return $ testGroup "...Internal.AEAD.XChaCha20Poly1305" [
testProperty "Can decrypt ciphertext"
$ rightInverseProp,
testProperty "Can decrypt ciphertext (detached)"
$ rightInverseDetachedProp,
testGroup "Cannot decrypt ciphertext when..." [
testProperty "... ciphertext is perturbed"
$ rightInverseFailureProp,
testProperty "... AAD is perturbed"
$ rightInverseAADFailureProp,
testProperty "... ciphertext is perturbed (detached)"
$ rightInverseFailureDetachedProp,
testProperty "... tag is perturbed (detached)"
$ rightInverseTagFailureProp,
testProperty "... using the wrong key"
$ cannotDecryptKeyProp,
testProperty "... using the wrong key (detached)"
$ cannotDecryptKeyDetachedProp,
testProperty "... using the wrong nonce"
$ cannotDecryptNonceProp,
testProperty "... using the wrong nonce (detached"
$ cannotDecryptNonceDetachedProp
]
]
|
tel/saltine
|
tests/AEAD/XChaCha20Poly1305Properties.hs
|
Haskell
|
mit
| 4,999
|
import Stomp
import System
-- | Entry point: @sender DESTINATION MESSAGE@ sends MESSAGE to the STOMP
-- destination on localhost:61613.
main = do
    args <- getArgs
    case args of
        -- Previously the arguments were read with (!!), which crashes with
        -- an unhelpful "index too large" error when arguments are missing.
        (dest : body : _) -> do
            client <- connect "localhost" 61613 []
            send client dest [] body
        _ -> do
            putStrLn "usage: sender DESTINATION MESSAGE"
            exitFailure
|
akisystems/stomp-hs
|
src/Sender.hs
|
Haskell
|
mit
| 144
|
module NestedRoots.A338271Spec (main, spec) where
import Test.Hspec
import NestedRoots.A338271 (a338271)
main :: IO ()
main = hspec spec
spec :: Spec
spec = describe "A338271" $
it "correctly computes the first 20 elements" $
map a338271 [1..20] `shouldBe` expectedValue where
expectedValue = [1,0,0,2,0,2,0,2,2,4,2,6,2,8,4,14,6,20,8,28]
|
peterokagey/haskellOEIS
|
test/NestedRoots/A338271Spec.hs
|
Haskell
|
apache-2.0
| 352
|
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE FunctionalDependencies #-}
{-# LANGUAGE FlexibleInstances #-}
import Data.Maybe (maybe)
import Control.Monad (liftM)
newtype Reader e a = Reader { runReader :: e -> a }
instance Functor (Reader e) where
fmap f (Reader r) = Reader $ \e -> f (r e)
instance Applicative (Reader e) where
pure a = Reader $ \_ -> a
(Reader rf) <*> (Reader rx) = Reader $ \e -> (rf e) (rx e)
instance Monad (Reader e) where
-- return :: a -> Reader e a
return a = Reader $ \_ -> a
-- (>>=) :: Reader e a -> (a -> Reader e b) -> Reader e b
(Reader r) >>= f = Reader $ \e -> runReader (f (r e)) e
class MonadReader e m | m -> e where
ask :: m e
local :: (e -> e) -> m a -> m a
instance MonadReader e (Reader e) where
ask = Reader id
local f (Reader r) = Reader $ \e -> r (f e)
asks :: (Monad m, MonadReader e m) => (e -> a) -> m a
asks sel = ask >>= return . sel
-- Text | Variable | Quote | Include | Compound
data Template = T String | V Template | Q Template | I Template [Definition] | C [Template] deriving Show
data Definition = D Template Template deriving Show
data Environment = Env {templates :: [(String,Template)],
variables :: [(String,String)]} deriving Show
lookupVar :: String -> Environment -> Maybe String
lookupVar name env = lookup name (variables env)
lookupTemplate :: String -> Environment -> Maybe Template
lookupTemplate name env = lookup name (templates env)
addDefs :: [(String, String)] -> Environment -> Environment
addDefs defs env = env { variables = defs ++ (variables env) }
resolveDef :: Definition -> Reader Environment (String,String)
resolveDef (D t d) = do name <- resolve t
value <- resolve d
return (name, value)
-- | Resolve a template to a string in the current environment.
resolve :: Template -> Reader Environment String
-- Literal text resolves to itself.
resolve (T s) = return s
-- Variable reference: resolve the name, then look it up among the
-- environment's variables; an unbound variable resolves to "".
resolve (V t) = do varName <- resolve t
                   varValue <- asks (lookupVar varName)
                   return $ maybe "" id varValue
-- Quote: look the named template up and render its syntax tree with 'show';
-- an unknown template resolves to "".
resolve (Q t) = do tmplName <- resolve t
                   body <- asks (lookupTemplate tmplName)
                   return $ maybe "" show body
-- Include: resolve the named template in an environment extended with the
-- resolved definitions; an unknown template resolves to "".
resolve (I t ds) = do tmplName <- resolve t
                      body <- asks (lookupTemplate tmplName)
                      case body of
                        Just t' -> do defs <- mapM resolveDef ds
                                      local (addDefs defs) (resolve t')
                        Nothing -> return""
-- Compound: resolve each part and concatenate the results.
resolve (C ts) = (liftM concat) (mapM resolve ts)
-- | An environment mapping names to integers.
type Envr = [(String, Int)]

-- | Find the value bound to a name in the environment, if any.
lookp :: String -> Envr -> Maybe Int
lookp = lookup
envr :: Envr
envr = [("abc",1), ("def",2), ("hij",3)]
|
egaburov/funstuff
|
Haskell/monads/readm.hs
|
Haskell
|
apache-2.0
| 2,794
|
-- Demonstrates of a possible solution to expression problem in Haskell.
module Main where
import Prelude hiding (print)
-- Classes
class Document a where
load :: a -> IO ()
save :: a -> IO ()
class Printable a where
print :: a -> IO ()
-- Text Document
data TextDocument = TextDocument String
-- Document Interface
instance Document TextDocument where
load (TextDocument a) = putStrLn ("Loading TextDocument(" ++ a ++ ")...")
save (TextDocument a) = putStrLn ("Saving TextDocument(" ++ a ++ ")...")
-- Printable Interface
instance Printable TextDocument where
print (TextDocument a) = putStrLn ("Printing TextDocument(" ++ a ++ ")")
-- Drawing Document
data DrawingDocument = DrawingDocument String
-- Document Interface
instance Document DrawingDocument where
load (DrawingDocument a) = putStrLn ("Loading DrawingDocument(" ++ a ++ ")...")
save (DrawingDocument a) = putStrLn ("Saving DrawingDocument(" ++ a ++ ")...")
-- Printable Interface
instance Printable DrawingDocument where
print (DrawingDocument a) = putStrLn ("Printing DrawingDocument(" ++ a ++ ")")
-- Demonstration
-- | Exercise every supported operation on a document: load, save, then print.
test a = load a >> save a >> print a
main = do
putStrLn ""
test (TextDocument "text")
putStrLn ""
test (DrawingDocument "text")
putStrLn ""
|
rizo/lambda-lab
|
expression-problem/haskell/expression-problem-1.hs
|
Haskell
|
apache-2.0
| 1,307
|
module PopVox
( module X
) where
import PopVox.OpenSecrets
import PopVox.OpenSecrets.Output
import PopVox.OpenSecrets.Types
import PopVox.OpenSecrets.Utils
import PopVox.Types
import qualified PopVox.OpenSecrets as X
import qualified PopVox.OpenSecrets.Output as X
import qualified PopVox.OpenSecrets.Types as X
import qualified PopVox.OpenSecrets.Utils as X
import qualified PopVox.Types as X
|
erochest/popvox-scrape
|
src/PopVox.hs
|
Haskell
|
apache-2.0
| 480
|
{-# LANGUAGE OverloadedStrings #-}
module Yesod.Session.Redis (
localRedisSessionBackend,
redisSessionBackend
) where
import qualified Web.RedisSession as R
import Yesod.Core
import qualified Network.Wai as W
import Web.Cookie
import Control.Monad.Trans (liftIO)
import Data.Maybe (fromMaybe)
import Data.Time (UTCTime, addUTCTime)
import Data.Conduit.Pool (Pool)
import Data.Binary
import Data.Text (Text)
import Data.Text.Encoding
import Control.Monad (liftM)
instance Binary Text where
put = put . encodeUtf8
get = liftM decodeUtf8 get
sessionName = "yesodSession"
loadRedisSession :: (Yesod master) => Pool R.Redis -> master -> W.Request -> UTCTime -> IO BackendSession
loadRedisSession pool _ req now = do
let val = do
raw <- lookup "Cookie" $ W.requestHeaders req
lookup sessionName $ parseCookies raw
case val of
Nothing -> return []
Just s -> fmap (fromMaybe []) $ liftIO $ R.getSession pool s
saveRedisSession :: (Yesod master) => Pool R.Redis -> Int -> master -> W.Request -> UTCTime -> BackendSession -> BackendSession -> IO [Header]
saveRedisSession pool timeout master req now _ sess = do
let val = do
raw <- lookup "Cookie" $ W.requestHeaders req
lookup sessionName $ parseCookies raw
key <- case val of
Nothing -> R.newKey
Just k -> return k
R.setSessionExpiring pool key sess timeout
return [AddCookie def {
setCookieName = sessionName,
setCookieValue = key,
setCookiePath = Just $ cookiePath master,
setCookieExpires = Just expires,
setCookieDomain = cookieDomain master,
setCookieHttpOnly = True
}]
where
expires = fromIntegral (timeout * 60) `addUTCTime` now
localRedisSessionBackend :: (Yesod master) => Int -> IO (SessionBackend master)
localRedisSessionBackend = sessionBackend R.makeRedisLocalConnectionPool
redisSessionBackend :: (Yesod master) => String -> String -> Int -> IO (SessionBackend master)
redisSessionBackend server port = sessionBackend (R.makeRedisConnectionPool server port)
sessionBackend :: (Yesod master) => IO (Pool R.Redis) -> Int -> IO (SessionBackend master)
sessionBackend mkPool timeout = do
pool <- mkPool
return $ SessionBackend {
sbSaveSession = saveRedisSession pool timeout,
sbLoadSession = loadRedisSession pool
}
|
scan/redissession
|
Yesod/Session/Redis.hs
|
Haskell
|
bsd-2-clause
| 2,239
|
module HEP.Physics.MSSM.Model.Common where
-- | The sign of a quantity, represented as a 'Bool' ('True' = positive).
newtype Sign = Sign Bool
             deriving (Show,Eq,Ord)

-- | The positive sign.
sgnplus :: Sign
sgnplus = Sign True

-- | The negative sign.
sgnminus :: Sign
sgnminus = Sign False

-- | @1@ for the positive sign, @-1@ for the negative one.
toInt :: Sign -> Int
toInt (Sign s) = if s then 1 else (-1)

-- | The sign of an integer; zero is treated as positive.
fromInt :: Int -> Sign
fromInt i = Sign (i >= 0)
|
wavewave/MSSMType
|
src/HEP/Physics/MSSM/Model/Common.hs
|
Haskell
|
bsd-2-clause
| 323
|
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
module Test.ZM.ADT.LeastSignificantFirst.K20ffacc8f8c9 (LeastSignificantFirst(..)) where
import qualified Prelude(Eq,Ord,Show)
import qualified GHC.Generics
import qualified Flat
import qualified Data.Model
newtype LeastSignificantFirst a = LeastSignificantFirst a
deriving (Prelude.Eq, Prelude.Ord, Prelude.Show, GHC.Generics.Generic, Flat.Flat)
instance ( Data.Model.Model a ) => Data.Model.Model ( LeastSignificantFirst a )
|
tittoassini/typed
|
test/Test/ZM/ADT/LeastSignificantFirst/K20ffacc8f8c9.hs
|
Haskell
|
bsd-3-clause
| 496
|
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE UndecidableInstances #-}
module FPNLA.Operations.BLAS.Strategies.GEMV (
) where
import FPNLA.Matrix (asColumn_vm, toCols_vm)
import FPNLA.Operations.BLAS (Elt(), GEMM (gemm), GEMV (gemv))
import FPNLA.Operations.Parameters (ResM, TransType (..), blasResultV,
getResultDataM)
instance (Elt e, GEMM s m v e) => GEMV s m v e where
gemv strat tmA vB alpha beta vC =
blasResultV . head . toCols_vm. getResultDataM $
call_gemm tmA pmB alpha beta pmC
where pmB = NoTrans $ asColumn_vm vB
pmC = asColumn_vm vC
call_gemm mA mB alpha beta mC = gemm strat mA mB alpha beta mC :: ResM s v m e
|
mauroblanco/fpnla-examples
|
src/FPNLA/Operations/BLAS/Strategies/GEMV.hs
|
Haskell
|
bsd-3-clause
| 873
|
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE DeriveGeneric #-}
module Web.ChatWork.Endpoints.My (
statusEndpoint
, MyStatus(..)
, Task(..)
, tasksEndpoint
) where
import Data.Aeson
import Data.ByteString.Char8
import GHC.Generics
import Web.ChatWork.Endpoints.Base
import Web.ChatWork.Internal as I
import Web.ChatWork.Endpoints.TaskAccount
data MyStatus = MyStatus {
unreadRoomNum :: Int
, mentionRoomNum :: Int
, mytaskRoomNum :: Int
, unreadNum :: Int
, mytaskNum :: Int
} deriving (Show, Generic)
instance FromJSON MyStatus where
parseJSON = I.parseJSON
statusEndpoint :: String
statusEndpoint = baseURL ++ "/my/status"
data Status = Open | Done
deriving (Show, Generic)
-- | Parse a task status from its JSON wire form (\"open\" or \"done\").
--
-- The catch-all case is required: without it, any other JSON value made the
-- parser crash with a non-exhaustive-pattern exception instead of reporting
-- an ordinary aeson parse failure.
instance FromJSON Status where
  parseJSON value = case value of
    String "open" -> return Open
    String "done" -> return Done
    _             -> fail "Status: expected \"open\" or \"done\""
instance ToJSON Status where
toJSON n = case n of
Open -> String "open"
Done -> String "done"
data Task = Task {
taskId :: Int
, room :: TaskRoom
, assignedByAccount :: TaskAccount
, messageId :: Int
, body :: String
, limitTime :: Int
, status :: Status
} deriving (Show, Generic)
instance FromJSON Task where
parseJSON = I.parseJSON
data TaskRoom = TaskRoom {
roomId :: Int
, name :: String
, iconPath :: String
} deriving (Show, Generic)
instance FromJSON TaskRoom where
parseJSON = I.parseJSON
tasksEndpoint :: String
tasksEndpoint = baseURL ++ "/my/tasks"
|
eiel/haskell-chatwork
|
src/Web/ChatWork/Endpoints/My.hs
|
Haskell
|
bsd-3-clause
| 1,452
|
module Examples where
import Lang.Lam.Syntax
import FP
import qualified FP.Pretty as P
import qualified Lang.Lam.Analyses as A
import Lang.Lam.Passes.B_CPSConvert
formatResults :: Doc -> Doc
formatResults = localSetL P.maxColumnWidthL 120 . localSetL P.maxRibbonWidthL 120
doConfig :: Exp -> [String] -> [String] -> [String] -> [String] -> [String] -> [String] -> [String] -> Doc
doConfig e modes gcs createClosures lexTimeFilter dynTimeFilter μs monads =
let (se, c) = stampCPS e
in P.vsep
[ P.heading "Source"
, localSetL P.maxRibbonWidthL 40 $ pretty e
, P.heading "Stamped"
, localSetL P.maxRibbonWidthL 40 $ pretty se
, P.heading "CPS"
, localSetL P.maxRibbonWidthL 40 $ pretty c
, P.vsep $ mapOn (A.allE modes gcs createClosures lexTimeFilter dynTimeFilter μs monads) $ uncurry $ \ n f -> P.vsep
[ P.heading n
, formatResults $ f c
]
]
simpleKCFA :: Exp
simpleKCFA =
llet "id" (lam "x" $ v "x") $
iif someBool
(v "id" $# int 1)
(v "id" $# int 2)
simpleMCFA :: Exp
simpleMCFA =
llet "g" (lam "x" $ lam "y" $
iif (gez (v "x")) (int 100) (int 200)) $
llet "ff" (lam "f" $ v "f" @# int 0) $
iif someBool
(v "ff" $# v "g" @# int 1)
(v "ff" $# v "g" @# int (-1))
simpleLexicalTime :: Exp
simpleLexicalTime =
llet "ff" (lam "f" $ lam "x" $ v "f" @# v "x") $
llet "g" (lam "x" $ gez $ v "x") $
llet "h" (lam "x" $ gez $ v "x") $
iif someBool
(v "ff" @# v "g" @# int 1)
(v "ff" @# v "h" @# int (-1))
examplesMain :: IO ()
examplesMain =
pprint $ P.vsep
[ return ()
-- , doConfig simpleKCFA ["abstract"] ["no"] ["link"] ["location"] ["location"] ["0-cfa", "1k-cfa"] ["fi"]
-- , doConfig simpleMCFA ["abstract"] ["no"] ["link", "copy"] ["location"] ["location"] ["1k-cfa"] ["fi"]
, doConfig simpleLexicalTime ["abstract"] ["no"] ["link"] ["app"] ["app"] ["1k-cfa", "1o-cfa"] ["fi"]
]
|
davdar/quals
|
src/Examples.hs
|
Haskell
|
bsd-3-clause
| 1,966
|
{-# LANGUAGE CPP #-}
-------------------------------------------------------------------------------
-- |
-- Copyright : (c) 2010 Eugene Kirpichov, Dmitry Astapov
-- License : BSD3
--
-- Maintainer : Eugene Kirpichov <ekirpichov@gmail.com>,
-- Dmitry Astapov <dastapov@gmail.com>
-- Stability : experimental
-- Portability : GHC only (STM, GHC.Conc for unsafeIOToSTM)
--
-- This module provides a binding to the greg distributed logger,
-- which provides a high-precision global time axis and is very performant.
--
-- See project home page at <http://code.google.com/p/greg> for an explanation
-- of how to use the server, the features, motivation and design.
--
module System.Log.Greg (
Configuration(..)
,logMessage
,withGregDo
,defaultConfiguration
) where
import System.Log.PreciseClock
import System.Posix.Clock
import Data.ByteString.Unsafe
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lazy.Char8 as L
import Data.Binary
import Data.Binary.Put
import Network
import Network.HostName (getHostName)
import System.UUID.V4
import System.IO
import Foreign
#ifdef DEBUG
import Debug.Trace
#endif
import qualified Control.Exception as E
import Control.Concurrent
import Control.Concurrent.STM
import GHC.Conc
import Control.Monad
{-
Messages are stored in TChan
1 thread performs calibration
1 'packer' thread takes messages from tchan and offloads them to sender thread(s).
1 'checking' thread keeps an eye on TChan size, initiates message dropping if necessary.
1 'sender' thread delivers the batch of messages to the server
-}
-- | One queued log entry: local capture time plus the raw message bytes.
data Record = Record {
  timestamp :: TimeSpec,    -- ^ high-precision local time captured by 'logMessage'
  message :: B.ByteString   -- ^ message payload as passed to 'logMessage'
}
-- | Global mutable state shared by all worker threads (see 'state').
data GregState = GregState {
  configuration :: Configuration,
  records :: TChan Record, -- FIFO for queued Records
  numRecords :: TVar Int, -- How many records are in FIFO
  isDropping :: TVar Bool, -- True if we are not adding records to the FIFO since there are more than 'maxBufferedRecords' of them
  packet :: TMVar [Record] -- Block of records we are currently trying to send
}
-- | Client configuration.
-- You probably only need to change @server@.
-- Field haddocks state the defaults supplied by 'defaultConfiguration'.
data Configuration = Configuration {
  server :: String -- ^ Server hostname (default @localhost@)
  ,port :: Int -- ^ Message port (default @5676@)
  ,calibrationPort :: Int -- ^ Calibration port (default @5677@)
  ,flushPeriodMs :: Int -- ^ How often to send message batches to server
                        -- (default @1000@)
  ,clientId :: String -- ^ Arbitrary identifier, will show up in logs.
                      -- For example, @\"DataService\"@
                      -- (default @\"unknown\"@)
  ,maxBufferedRecords :: Int -- ^ How many records to store between flushes
                             -- (more will be dropped) (default @100000@)
  ,useCompression :: Bool -- ^ Whether to use gzip compression
                          -- (default @False@, @True@ is unsupported)
  ,calibrationPeriodSec :: Int -- ^ How often to initiate calibration exchanges
                               -- (default @10@)
}
-- | Cached host name and a per-process UUID, computed once at first use.
-- NOINLINE is required: these are 'unsafePerformIO' CAFs and must remain
-- single shared thunks — if GHC inlined them, each use site could re-run
-- the IO action (e.g. yielding different UUIDs in different messages).
hostname, ourUuid :: B.ByteString
{-# NOINLINE hostname #-}
{-# NOINLINE ourUuid #-}
hostname = B.pack $ unsafePerformIO getHostName
ourUuid = repack . runPut . put $ unsafePerformIO uuid
-- | The default configuration, suitable for most needs.
defaultConfiguration :: Configuration
defaultConfiguration = Configuration {
  server = "localhost",
  port = 5676,
  calibrationPort = 5677,
  flushPeriodMs = 1000,
  clientId = "unknown",
  maxBufferedRecords = 100000,
  -- BUG FIX: was 'True', but the field's own documentation states the
  -- default is False and that True is unsupported.
  useCompression = False,
  calibrationPeriodSec = 10
}
-- | Perform an IO action with logging (will wait for all messages to flush).
-- Spawns four worker threads (calibrator, packer, queue-size checker,
-- sender), runs 'realMain', then shuts the pipeline down in order.
withGregDo :: Configuration -> IO () -> IO ()
withGregDo conf realMain = withSocketsDo $ do
  -- Install the caller's configuration into the global 'state'.
  st <- atomically $ do st <- readTVar state
                        let st' = st{configuration = conf}
                        writeTVar state $ st'
                        return st'
  -- Run 'action' forever on its own thread, every 'ms' milliseconds.
  let everyMs ms action = forkIO $ forever (action >> threadDelay (1000 * ms))
  -- Log-and-continue on any exception so a worker thread never dies silently.
  let safely action label = action `E.catch` \e -> putStrLnT ("Error in " ++ label ++ ": " ++ show (e::E.SomeException))
  let safelyEveryMs ms action label = everyMs ms (safely action label)
  -- Packer thread offloads records to sender thread
  -- Housekeeping thread keeps queue size at check
  calTID <- safelyEveryMs (1000*calibrationPeriodSec conf) (initiateCalibrationOnce st) "calibrator"
  packTID <- safelyEveryMs ( flushPeriodMs conf) (packRecordsOnce st) "packer"
  checkTID <- safelyEveryMs ( flushPeriodMs conf) (checkQueueSize st) "queue size checker"
  sendTID <- safelyEveryMs ( flushPeriodMs conf) (sendPacketOnce st) "sender"
  realMain
  putStrLnT "Flushing remaining messages"
  -- Shutdown. For now, just wait untill all messages are out of the queue
  -- 1. Stop reception of new messages (kill checker first so it cannot
  -- clear the flag, then raise the drop flag).
  killThread checkTID
  atomically $ writeTVar (isDropping st) True
  -- 2. Wait until all messages are sent
  let waitFlush = do
        numrs <- atomically $ readTVar (numRecords st)
        unless (numrs == 0) $ threadDelay (1000*flushPeriodMs conf) >> waitFlush
  waitFlush
  killThread packTID
  -- Hand the sender an empty batch so a blocked 'takeTMVar' can complete,
  -- then wait until it has been consumed.
  atomically $ putTMVar (packet st) []
  let waitSend = do
        sent <- atomically $ isEmptyTMVar (packet st)
        unless sent $ threadDelay (1000*flushPeriodMs conf) >> waitSend
  waitSend
  killThread sendTID
  killThread calTID
  putStrLnT "Shutdown finished."
-- | Housekeeping: flip the 'isDropping' flag according to the current queue
-- length versus the configured 'maxBufferedRecords' bound.
-- NOTE(review): the two 'atomically' reads are separate transactions, so the
-- snapshot is not perfectly consistent — acceptable for a periodic heuristic.
checkQueueSize :: GregState -> IO ()
checkQueueSize st = do
  currsize <- atomically $ readTVar (numRecords st)
  let maxrs = maxBufferedRecords (configuration st)
  droppingNow <- atomically $ readTVar (isDropping st)
  case (droppingNow, currsize > maxrs) of
    (True , True) -> putStrLnT ("Still dropping (queue " ++ show currsize ++ ")")
    (False, True) -> do putStrLnT ("Started to drop (queue " ++ show currsize ++ ")")
                        atomically $ writeTVar (isDropping st) True
    (True, False) -> do putStrLnT ("Stopped dropping (queue " ++ show currsize ++ ")")
                        atomically $ writeTVar (isDropping st) False
    (False, False) -> return () -- everything is OK
-- | Drain up to 10000 queued records and publish them as one batch for the
-- sender. Runs as a single STM transaction: if the sender has not yet taken
-- the previous batch, 'retry' rolls everything back (the reads from the
-- TChan are undone, so no records are lost) and blocks until it has.
-- 'putStrLnT' is usable here because it is polymorphic in any Monad
-- (a no-op outside DEBUG builds).
packRecordsOnce :: GregState -> IO ()
packRecordsOnce st = atomically $ do
  putStrLnT $ "Packing: reading all messages ..."
  rs <- readAtMost (10000::Int) -- Mandated by protocol
  putStrLnT $ "Packing: reading all messages done (" ++ show (length rs) ++ ")"
  unless (null rs) $ do
    putStrLnT $ "Packing " ++ show (length rs) ++ " records"
    atomModTVar (numRecords st) (\x -> x - length rs) -- decrease queue length
    senderAccepted <- tryPutTMVar (packet st) rs -- putting messages in the outbox
    unless senderAccepted retry
    putStrLnT "Packing done"
  where
    -- Pop at most n records off the FIFO without blocking.
    readAtMost 0 = return []
    readAtMost n = do
      empty <- isEmptyTChan (records st)
      if empty then return []
        else do r <- readTChan (records st)
                rest <- readAtMost (n-1)
                return (r:rest)
-- | Deliver the currently packed batch (if any) to the server.
-- Blocks (via STM retry on 'takeTMVar') until the packer publishes a batch.
sendPacketOnce :: GregState -> IO ()
sendPacketOnce st = atomically $ withWarning "Failed to pack/send records" $ do
  rs <- takeTMVar $ packet st
  unless (null rs) $ do
    let conf = configuration st
    putStrLnT "Pushing records"
    -- NOTE(review): real network I/O inside STM via 'unsafeIOToSTM' — side
    -- effects are NOT rolled back if the transaction retries; presumably
    -- tolerated because the transaction only takes from a TMVar. Confirm.
    unsafeIOToSTM $ E.bracket (connectTo (server conf) (PortNumber $ fromIntegral $ port conf)) hClose $ \hdl -> do
      putStrLnT "Pushing records - connected"
      let msg = formatRecords (configuration st) rs
      putStrLnT $ "Snapshotted " ++ show (length rs) ++ " records --> " ++ show (B.length msg) ++ " bytes"
      unsafeUseAsCStringLen msg $ \(ptr, len) -> hPutBuf hdl ptr len
      hFlush hdl
      putStrLnT $ "Pushing records - done"
  where
    -- Log any STM exception as a warning and commit as a no-op
    -- ('check False' aborts the branch, 'orElse' falls back to 'return ()').
    withWarning s t = (t `catchSTM` (\e -> putStrLnT (s ++ ": " ++ show (e::E.SomeException)) >> check False)) `orElse` return ()
-- | Serialise a batch into the wire format: sender UUID, a zero tag byte,
-- length-prefixed client id, the records (see 'putRecord'), then a zero
-- terminator word.
formatRecords :: Configuration -> [Record] -> B.ByteString
formatRecords conf records = repack . runPut $ do
  putByteString ourUuid
  putWord8 0
  putWord32le (fromIntegral $ length $ clientId conf)
  putByteString (B.pack $ clientId conf)
  mapM_ putRecord records
  putWord32le 0 -- end-of-records marker
-- | Serialise one record: tag word 1, nanosecond timestamp, then
-- length-prefixed hostname and length-prefixed message bytes.
putRecord :: Record -> Put
putRecord r = do
  putWord32le 1 -- record-present marker (0 terminates the stream)
  putWord64le (toNanos64 (timestamp r))
  putWord32le (fromIntegral $ B.length hostname)
  putByteString hostname
  putWord32le (fromIntegral $ B.length (message r))
  putByteString (message r)
-- | One calibration exchange: connect to the calibration port, send our
-- 16-byte UUID, then answer every 8-byte server timestamp with our own
-- nanosecond timestamp until the server stops sending (EOF ends the loop).
initiateCalibrationOnce :: GregState -> IO ()
initiateCalibrationOnce st = do
  putStrLnT "Initiating calibration"
  let conf = configuration st
  E.bracket (connectTo (server conf) (PortNumber $ fromIntegral $ calibrationPort conf)) hClose $ \hdl -> do
    hSetBuffering hdl NoBuffering
    putStrLnT "Calibration - connected"
    unsafeUseAsCString ourUuid $ \p -> hPutBuf hdl p 16
    allocaBytes 8 $ \pTheirTimestamp -> do
      -- loop while 'hSkipBytes' manages to consume a full 8-byte timestamp
      let whenM mp m = mp >>= \v -> when v m
          loop = whenM (hSkipBytes hdl 8 pTheirTimestamp) $ do
            ts <- preciseTimeSpec
            let pOurTimestamp = repack $ runPut $ putWord64le (toNanos64 ts)
            unsafeUseAsCString pOurTimestamp $ \ptr -> hPutBuf hdl ptr 8
            -- putStrLnT "Calibration - next loop iteration passed"
            loop
      loop
  putStrLnT "Calibration ended - sleeping"
-- | Global logger state, initialised lazily with 'defaultConfiguration'.
-- NOINLINE is required: with 'unsafePerformIO' this must stay a single
-- shared CAF — if GHC inlined it, each use site could allocate its own,
-- distinct TVar and the workers would no longer share state.
{-# NOINLINE state #-}
state :: TVar GregState
state = unsafePerformIO $ do rs <- newTChanIO
                             numrs <- newTVarIO 0
                             dropping <- newTVarIO False
                             pkt <- newEmptyTMVarIO
                             newTVarIO $ GregState defaultConfiguration rs numrs dropping pkt
-- | Log a message. The message will show up in server's output
-- annotated with a global timestamp (client's clock offset does
-- not matter).
logMessage :: String -> IO ()
logMessage s = do
  t <- preciseTimeSpec -- capture the time first, before any contention
  st <- atomically $ readTVar state
  -- NOTE(review): the drop flag is read in a separate transaction from the
  -- enqueue below, so a message can slip in just as dropping starts — benign.
  shouldDrop <- atomically $ readTVar (isDropping st)
  unless shouldDrop $ atomically $ do
    writeTChan (records st) (Record {timestamp = t, message = B.pack s})
    atomModTVar (numRecords st) (+1)
--------------------------------------------------------------------------
-- Utilities
-- | Collapse a seconds/nanoseconds pair into a single nanosecond count.
toNanos64 :: TimeSpec -> Word64
toNanos64 (TimeSpec secs nanos) = 1000000000 * fromIntegral secs + fromIntegral nanos
-- | Consume exactly @n@ bytes from the handle into the scratch buffer,
-- reading repeatedly until done. Returns False if EOF is hit first.
hSkipBytes :: Handle -> Int -> Ptr a -> IO Bool
hSkipBytes hdl remaining buf
  | remaining == 0 = return True
  | otherwise = do
      atEof <- hIsEOF hdl
      if atEof
        then return False
        else do
          got <- hGetBuf hdl buf remaining
          if got < 0
            then return False
            else hSkipBytes hdl (remaining - got) buf
-- | Flatten a lazy ByteString into one strict chunk.
repack :: L.ByteString -> B.ByteString
repack lbs = B.concat (L.toChunks lbs)
-- | Apply @f@ to the TVar's contents. The new value is forced to WHNF
-- ('$!') before the write: the lazy original accumulated thunk chains on
-- the 'numRecords' counter, a classic STM space leak.
atomModTVar :: TVar a -> (a -> a) -> STM ()
atomModTVar var f = do
  val <- readTVar var
  writeTVar var $! f val
-- | Trace helper: prints via 'Debug.Trace.trace' in DEBUG builds, no-op
-- otherwise. Polymorphic in the monad so it works in both IO and STM code.
putStrLnT :: (Monad m) => String -> m ()
#ifdef DEBUG
putStrLnT s = trace s $ return ()
#else
putStrLnT _ = return ()
#endif
#ifdef DEBUG
-- | Stress test (DEBUG builds only): log as fast as possible, forever.
testFlood :: IO ()
testFlood = withGregDo defaultConfiguration $ forever $ logMessage "Hello" -- >> threadDelay 1000
-- | Sequential test (DEBUG builds only): one numbered message every 100ms.
testSequence :: IO ()
testSequence = withGregDo defaultConfiguration $ mapM_ (\x -> logMessage (show x) >> threadDelay 100000) [1..]
#endif
|
jkff/greg
|
greg-clients/haskell/System/Log/Greg.hs
|
Haskell
|
bsd-3-clause
| 11,509
|
module Mask where
import Data.Char (toUpper)
import Data.Maybe (catMaybes, isJust)
import Data.List
-- | A guess template: one slot per character — @Just c@ pins a letter,
-- @Nothing@ marks an unknown position.
newtype Mask = Mask [Maybe Char]
  deriving (Eq, Show)
-- | Parse a pattern string: @.@ is an unknown slot, letters must be in
-- 'alphabet' (input is upcased first). Any other character fails the parse.
parseMask :: String -> Maybe Mask
parseMask str = Mask <$> traverse (classify . toUpper) str
  where
    classify '.' = Just Nothing
    classify ch
      | ch `elem` alphabet = Just (Just ch)
      | otherwise = Nothing
-- | Does the mask pin character @ch@ at any position?
maskElem :: Char -> Mask -> Bool
maskElem ch (Mask slots) = Just ch `elem` slots
-- | Blank out every slot except the last filled-in vowel (if any);
-- all other positions become unknown.
scrubMask :: Mask -> Mask
scrubMask (Mask slots) =
  Mask [ if s == lastVowel then s else Nothing | s <- slots ]
  where
    lastVowel = find isVowel (reverse (catMaybes slots))
-- | The distinct pinned letters of the mask, in ascending order.
-- ('head' is safe: 'group' never yields an empty group.)
maskLetters :: Mask -> [Char]
maskLetters (Mask slots) = map head (group (sort (catMaybes slots)))
-- | Number of trailing slots that cannot be a vowel; unknown slots
-- ('Nothing') count as consonants here.
trailingConsonants :: Mask -> Int
trailingConsonants (Mask slots) =
  length (takeWhile nonVowelSlot (reverse slots))
  where
    nonVowelSlot = maybe True (not . isVowel)
-- | Does the word fit the mask? Pinned slots must match exactly; unknown
-- slots accept any character not in the banned set; lengths must agree.
match :: Mask -> [Char] -> String -> Bool
match (Mask tmpl) banned = go tmpl
  where
    go (Just x : xs) (y : ys) = x == y && go xs ys
    go (Nothing : xs) (y : ys) = y `notElem` banned && go xs ys
    go [] [] = True
    go _ _ = False
-- | The letters a mask may contain (uppercase only).
alphabet :: String
alphabet = ['A'..'Z']
-- | Uppercase vowels used by the vowel-based heuristics.
vowels :: String
vowels = "AEIOU"
-- | True iff the (uppercase) character is a vowel.
isVowel :: Char -> Bool
isVowel x = x `elem` vowels
-- * Mask editing functions
-- | Extend a partial input against a template by appending @c@ and then the
-- template's fixed continuation.
-- Clause order in 'aux' is significant: the '[]' case fires only when the
-- template is exhausted (input too long → Nothing), and the final case only
-- when the input ran out first — the first clause already consumed every
-- position where both lists were non-empty.
extendMask :: Mask -> Maybe Char -> Mask -> Maybe Mask
extendMask (Mask template) c (Mask input) = fmap Mask (aux template input)
  where
    -- copy the existing input slots while they last
    aux (_ : xs) (y : ys) = fmap (y :) (aux xs ys)
    -- template exhausted: the input fills (or overfills) the template
    aux [] _ = Nothing
    -- input exhausted: append c, then the template's determined prefix
    aux (_ : xs) _ = Just (c : takeWhile isJust xs)
-- | The longest fully-determined (all-'Just') prefix of the template.
generateMaskPrefix :: Mask -> Mask
generateMaskPrefix (Mask tmpl) = Mask (takeWhile isJust tmpl)
|
glguy/vty-hangman-helper
|
Mask.hs
|
Haskell
|
bsd-3-clause
| 1,697
|
module Basil.Database.InMemory ( module Basil.Database.InMemory.Interface,
module Basil.Database.InMemory.Cache,
) where
import Basil.Database.InMemory.Cache
import Basil.Database.InMemory.Interface
|
chriseidhof/Basil
|
src/Basil/Database/InMemory.hs
|
Haskell
|
bsd-3-clause
| 266
|
module GrammarLexer
( GrammarLexeme(..)
, runLexer
) where
import Data.Char
import Data.Monoid ((<>))
import Control.Applicative ((<*>), (<*), (*>))
import Text.Parsec.String (Parser)
import Text.Parsec.Char
import Text.Parsec.Prim
import Text.Parsec.Combinator
import Text.Parsec.Error (ParseError)
import Text.Parsec.Pos (SourcePos)
-- | Tokens produced by the grammar lexer.
data GrammarLexeme = NonTerm String -- ^ lowercase-initial identifier
                   | Term String -- ^ uppercase-initial identifier
                   | Typename String -- ^ NOTE(review): no rule in 'grammarLexeme' produces this — confirm whether dead
                   | StringLiteral String -- ^ single-quoted literal (quotes stripped)
                   | RegexLiteral String -- ^ /-delimited regex (delimiters stripped)
                   | CodeBlock String -- ^ {...} block, outer braces stripped
                   | ParamsBlock String -- ^ (...) block, outer parens stripped
                   | Colon
                   | Semicolon
                   | Comma
                   | Divider
                   | ReturnArrow
                   | DoubleColon
                   | LeftSquare
                   | RightSquare
                   deriving Eq
-- Renders each lexeme as its surface text.
instance Show GrammarLexeme where
  show (NonTerm s) = s
  show (Term s) = s
  -- BUG FIX: the 'Typename' case was missing, so 'show' crashed with a
  -- pattern-match failure on any 'Typename' lexeme.
  show (Typename s) = s
  show (StringLiteral s) = s
  show (RegexLiteral s) = s
  show (CodeBlock s) = s
  show (ParamsBlock s) = s
  show Colon = ":"
  show Semicolon = ";"
  show Comma = ","
  show Divider = "|"
  show ReturnArrow = "->"
  show DoubleColon = "::"
  show LeftSquare = "["
  show RightSquare = "]"
-- NOTE(review): orphan instance — defined in neither the class's nor the
-- type's home module. It also lacks a matching 'Semigroup' instance, which
-- GHC >= 8.4 requires for 'Monoid'; confirm against the project's GHC bound.
instance Monoid a => Monoid (ParsecT s u m a) where
  mappend a b = mappend <$> a <*> b
  mempty = return mempty
-- | Run a parser, then consume any trailing whitespace.
skipSpaces :: Parser a -> Parser a
skipSpaces p = p <* spaces
-- | Run @q@ delimited on both sides by @p@, keeping only @q@'s result.
surround :: Parser a -> Parser b -> Parser b
surround p q = p *> q <* p
-- | Pair a parser's result with the source position where it began.
parsePos :: Parser a -> Parser (SourcePos, a)
parsePos p = (,) <$> getPosition <*> p
-- | Lex the whole input: leading whitespace, one or more position-tagged
-- lexemes, then end of input.
tokenize :: Parser [(SourcePos, GrammarLexeme)]
tokenize = spaces *> many1 (parsePos grammarLexeme) <* eof
-- | One lexeme. Each alternative is wrapped in 'try' so a partial match
-- backtracks cleanly; list order matters — 'dcolon' and 'arrow' are listed
-- before 'colon' so the two-character tokens win over their prefixes.
grammarLexeme :: Parser GrammarLexeme
grammarLexeme = choice $ map (try . skipSpaces)
  [ nonTerm
  , term
  , stringLiteral
  , regexLiteral
  , codeBlock
  , paramsBlock
  , dcolon
  , semicolon
  , comma
  , divider
  , arrow
  , lsq
  , rsq
  , colon
  , lineComment
  ]
-- | Skip a @//@ line comment, then lex the next real lexeme.
-- NOTE(review): recursing into 'grammarLexeme' means a comment at the very
-- end of input fails (nothing follows) rather than vanishing — confirm
-- intended.
lineComment :: Parser GrammarLexeme
lineComment = string "//" *> many (noneOf "\r\n") *> spaces *> grammarLexeme
-- | Nonterminal name: lowercase initial, then alphanumerics or '_'.
nonTerm :: Parser GrammarLexeme
nonTerm = NonTerm <$> ((:) <$> lower <*> many (try alphaNum <|> char '_'))
-- | Terminal name: uppercase initial, then alphanumerics or '_'.
term :: Parser GrammarLexeme
term = Term <$> ((:) <$> upper <*> many (try alphaNum <|> char '_'))
-- | Single-quoted literal; the quotes themselves are not in the payload.
stringLiteral :: Parser GrammarLexeme
stringLiteral = StringLiteral <$> surround (char '\'') (escapedString "'")
-- | Slash-delimited regex literal; body must be non-empty ('escapedString1').
regexLiteral :: Parser GrammarLexeme
regexLiteral = RegexLiteral <$> surround (char '/') (escapedString1 "/")
-- | Brace-delimited code block; nested braces are balanced via 'bracesText'.
codeBlock :: Parser GrammarLexeme
codeBlock = CodeBlock <$> (char '{' *> bracesText "{" "}" <* char '}')
-- | Paren-delimited parameter block, same nesting rules as 'codeBlock'.
paramsBlock :: Parser GrammarLexeme
paramsBlock = ParamsBlock <$> (char '(' *> bracesText "(" ")" <* char ')')
-- | Text with balanced nested delimiters, excluding the outermost pair
-- (the callers consume those). NOTE(review): assumes @lb@/@rb@ are
-- single-character strings — 'noneOf' treats them as character sets, so
-- multi-character delimiters would misbehave.
bracesText :: String -> String -> Parser String
bracesText lb rb = noBrace <> (concat <$> many (string lb <> bracesText lb rb <> string rb <> noBrace))
  where noBrace = many (noneOf (lb ++ rb))
-- Punctuation tokens: each consumes its surface text and yields the
-- corresponding constructor.
colon, semicolon, comma, divider, arrow, rsq, lsq, dcolon :: Parser GrammarLexeme
colon = char ':' *> pure Colon
semicolon = char ';' *> pure Semicolon
comma = char ',' *> pure Comma
divider = char '|' *> pure Divider
lsq = char '[' *> pure LeftSquare
rsq = char ']' *> pure RightSquare
arrow = string "->" *> pure ReturnArrow
dcolon = string "::" *> pure DoubleColon
-- | One escape: backslash followed by an escapable char (yields that char),
-- or a lone backslash when nothing escapable follows.
escapedChar :: String -> Parser Char
escapedChar s = try (char '\\' *> oneOf ('\\' : s))
  <|> char '\\'
-- | Literal body text: plain chars (neither backslash nor a delimiter from
-- @s@) interleaved with escapes; the '1' variant requires at least one char.
escapedString, escapedString1 :: String -> Parser String
escapedString s = many (noneOf ('\\' : s) <|> escapedChar s)
escapedString1 s = many1 (noneOf ('\\' : s) <|> escapedChar s)
-- | Tokenize an input string (source name left empty for error messages).
runLexer :: String -> Either ParseError [(SourcePos, GrammarLexeme)]
runLexer = parse tokenize ""
|
flyingleafe/parser-gen
|
src/GrammarLexer.hs
|
Haskell
|
bsd-3-clause
| 4,002
|
module TestTypechecker where
import Test.HUnit
import Parser
import Typechecker
-- Each assertion parses an expression and an expected type, then checks
-- that the expression typechecks at that type (see 'parseAndTypecheck').
intCheck :: Assertion
intCheck = parseAndTypecheck "60" "int"
if0Check :: Assertion
if0Check = parseAndTypecheck "if0 5 then <> else <>" "unit"
absCheck :: Assertion
absCheck = parseAndTypecheck "func [] (x:int) . x" "forall [] (int) -> int"
-- The following should fail, since we need a type annotation for the function
-- appCheck :: Assertion
-- appCheck = parseAndTypecheck "((func [] (x:int) . x) [] 12)" "int"
annotatedAppCheck :: Assertion
annotatedAppCheck = parseAndTypecheck "(((func [] (x:int) . x) : forall [] (int) -> int) [] (12))" "int"
tyAppCheck :: Assertion
tyAppCheck = parseAndTypecheck "(((func [α] (x:α) . x) : forall [α] (α) -> α) [int] (13))" "int"
handleCheck :: Assertion
handleCheck = parseAndTypecheck "handle (1; x.2)" "int"
raiseCheck :: Assertion
raiseCheck = parseAndTypecheck "raise [int] 1" "int"
letCheck :: Assertion
letCheck = parseAndTypecheck "let x = 1 in <>" "unit"
letAnnoCheck :: Assertion
letAnnoCheck = parseAndTypecheck "let x = (1:int) in <>" "unit"
-- | Parse an expression and a type, then assert the expression typechecks
-- at that type. An expression parse failure is reported before a type one.
parseAndTypecheck :: String -> String -> Assertion
parseAndTypecheck exprSrc typeSrc =
  case parseExpr exprSrc of
    Left err -> assertFailure (show err)
    Right e ->
      case parseType typeSrc of
        Left err -> assertFailure (show err)
        Right ty -> typeCheckProgram e ty @?= Right True
|
phillipm/mlish-to-llvm
|
test/TestTypechecker.hs
|
Haskell
|
bsd-3-clause
| 1,374
|
module Settings.Exception.Prettify.JSONException (
pJSONException
) where
import Control.Exception
import Network.HTTP.Simple
import Settings.Exception.Prettify.HttpException (getReqStringHost)
-- | Pretty-print an exception if (and only if) it is a 'JSONException';
-- other exception types yield 'Nothing'.
pJSONException :: SomeException -> Maybe String
pJSONException e = fmap extractor (fromException e)
-- | Human-readable description of a JSON failure; both variants share the
-- same sentence frame and differ only in the failure description.
extractor :: JSONException -> String
extractor exc =
  "Server responded with " ++ failureKind ++ " when we request \"" ++
  getReqStringHost (show req) ++ "\"."
  where
    (failureKind, req) = case exc of
      JSONParseException r _ _ -> ("ill-formatted JSON", r)
      JSONConversionException r _ _ -> ("JSON of unexpected format", r)
|
Evan-Zhao/FastCanvas
|
src/Settings/Exception/Prettify/JSONException.hs
|
Haskell
|
bsd-3-clause
| 682
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.