code
stringlengths
2
1.05M
repo_name
stringlengths
5
101
path
stringlengths
4
991
language
stringclasses
3 values
license
stringclasses
5 values
size
int64
2
1.05M
{-# LANGUAGE BangPatterns #-} {-# LANGUAGE CPP #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE DefaultSignatures #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE GADTs #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE NamedFieldPuns #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE PatternGuards #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TypeOperators #-} {-# LANGUAGE TypeSynonymInstances #-} {-# LANGUAGE UndecidableInstances #-} #if __GLASGOW_HASKELL__ >= 706 {-# LANGUAGE PolyKinds #-} #endif #include "overlapping-compat.h" -- TODO: Drop this when we remove support for Data.Attoparsec.Number {-# OPTIONS_GHC -fno-warn-deprecations #-} module Data.Aeson.Types.ToJSON ( -- * Core JSON classes ToJSON(..) -- * Liftings to unary and binary type constructors , ToJSON1(..) , toJSON1 , toEncoding1 , ToJSON2(..) , toJSON2 , toEncoding2 -- * Generic JSON classes , GToJSON(..) , GToEncoding(..) , ToArgs(..) , genericToJSON , genericToEncoding , genericLiftToJSON , genericLiftToEncoding -- * Classes and types for map keys , ToJSONKey(..) , ToJSONKeyFunction(..) , toJSONKeyText , contramapToJSONKeyFunction -- * Object key-value pairs , KeyValue(..) 
-- * Functions needed for documentation -- * Encoding functions , listEncoding , listValue ) where import Prelude () import Prelude.Compat import Control.Applicative (Const(..)) import Control.Monad.ST (ST) import Data.Aeson.Encoding (Encoding, Encoding', Series, dict, emptyArray_) import Data.Aeson.Encoding.Internal ((>*<), (><)) import Data.Aeson.Internal.Functions (mapHashKeyVal, mapKeyVal) import Data.Aeson.Types.Generic (AllNullary, False, IsRecord(..), One, ProductSize, Tagged2(..), True, Zero, productSize) import Data.Aeson.Types.Internal import Data.Attoparsec.Number (Number(..)) import Data.Bits (unsafeShiftR) import Data.DList (DList) import Data.Fixed (Fixed, HasResolution) import Data.Foldable (toList) import Data.Functor.Compose (Compose(..)) import Data.Functor.Identity (Identity(..)) import Data.Functor.Product (Product(..)) import Data.Functor.Sum (Sum(..)) import Data.Int (Int16, Int32, Int64, Int8) import Data.List (intersperse) import Data.List.NonEmpty (NonEmpty(..)) import Data.Monoid ((<>)) import Data.Proxy (Proxy(..)) import Data.Ratio (Ratio, denominator, numerator) import Data.Scientific (Scientific) import Data.Tagged (Tagged(..)) import Data.Text (Text, pack) import Data.Time (Day, LocalTime, NominalDiffTime, TimeOfDay, UTCTime, ZonedTime) import Data.Time.Format (FormatTime, formatTime) import Data.Time.Locale.Compat (defaultTimeLocale) import Data.Vector (Vector) import Data.Version (Version, showVersion) import Data.Word (Word16, Word32, Word64, Word8) import Foreign.Storable (Storable) import GHC.Generics import Numeric.Natural (Natural) import qualified Data.Aeson.Encoding as E import qualified Data.Aeson.Encoding.Internal as E (InArray, colon, comma, econcat, empty, retagEncoding, wrapObject) import qualified Data.ByteString as S import qualified Data.ByteString.Lazy as L import qualified Data.DList as DList import qualified Data.HashMap.Strict as H import qualified Data.HashSet as HashSet import qualified Data.IntMap as IntMap 
import qualified Data.IntSet as IntSet
import qualified Data.List.NonEmpty as NE
import qualified Data.Map as M
import qualified Data.Monoid as Monoid
import qualified Data.Scientific as Scientific
import qualified Data.Semigroup as Semigroup
import qualified Data.Sequence as Seq
import qualified Data.Set as Set
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.Lazy as LT
import qualified Data.Tree as Tree
import qualified Data.Vector as V
import qualified Data.Vector.Generic as VG
import qualified Data.Vector.Mutable as VM
import qualified Data.Vector.Primitive as VP
import qualified Data.Vector.Storable as VS
import qualified Data.Vector.Unboxed as VU

#if !(MIN_VERSION_bytestring(0,10,0))
import Foreign.ForeignPtr (withForeignPtr)
import Foreign.Marshal.Utils (copyBytes)
import Foreign.Ptr (plusPtr)
import qualified Data.ByteString.Internal as S
import qualified Data.ByteString.Lazy.Internal as L
#endif

-- | Encode both components of a pair with the supplied per-component
-- encoders, via the 'ToJSON2' instance for tuples.
toJSONPair :: (a -> Value) -> (b -> Value) -> (a, b) -> Value
toJSONPair a b = liftToJSON2 a (listValue a) b (listValue b)
{-# INLINE toJSONPair #-}

-- | Encode a 'RealFloat' value, mapping the non-representable values
-- (NaN and the infinities) to 'Null'.
realFloatToJSON :: RealFloat a => a -> Value
realFloatToJSON d
    | isNaN d || isInfinite d = Null
    | otherwise               = Number $ Scientific.fromFloatDigits d
{-# INLINE realFloatToJSON #-}

-------------------------------------------------------------------------------
-- Generics
-------------------------------------------------------------------------------

-- | Class of generic representation types that can be converted to
-- JSON.
class GToJSON arity f where
    -- | This method (applied to 'defaultOptions') is used as the
    -- default generic implementation of 'toJSON' (if the @arity@ is 'Zero')
    -- or 'liftToJSON' (if the @arity@ is 'One').
    gToJSON :: Options -> ToArgs Value arity a -> f a -> Value

-- | Class of generic representation types that can be converted to
-- a JSON 'Encoding'.
class GToEncoding arity f where
    -- | This method (applied to 'defaultOptions') can be used as the
    -- default generic implementation of 'toEncoding' (if the @arity@ is
    -- 'Zero') or 'liftToEncoding' (if the @arity@ is 'One').
    gToEncoding :: Options -> ToArgs Encoding arity a -> f a -> Encoding

-- | A 'ToArgs' value either stores nothing (for 'ToJSON') or it stores the
-- two function arguments that encode occurrences of the type parameter (for
-- 'ToJSON1').
data ToArgs res arity a where
    NoToArgs :: ToArgs res Zero a
    To1Args  :: (a -> res) -> ([a] -> res) -> ToArgs res One a

-- | A configurable generic JSON creator.  This function applied to
-- 'defaultOptions' is used as the default for 'toJSON' when the type
-- is an instance of 'Generic'.
genericToJSON :: (Generic a, GToJSON Zero (Rep a))
              => Options -> a -> Value
genericToJSON opts = gToJSON opts NoToArgs . from

-- | A configurable generic JSON creator.  This function applied to
-- 'defaultOptions' is used as the default for 'liftToJSON' when the type
-- is an instance of 'Generic1'.
genericLiftToJSON :: (Generic1 f, GToJSON One (Rep1 f))
                  => Options -> (a -> Value) -> ([a] -> Value)
                  -> f a -> Value
genericLiftToJSON opts tj tjl = gToJSON opts (To1Args tj tjl) . from1

-- | A configurable generic JSON encoder.  This function applied to
-- 'defaultOptions' is used as the default for 'toEncoding' when the type
-- is an instance of 'Generic'.
genericToEncoding :: (Generic a, GToEncoding Zero (Rep a))
                  => Options -> a -> Encoding
genericToEncoding opts = gToEncoding opts NoToArgs . from

-- | A configurable generic JSON encoder.  This function applied to
-- 'defaultOptions' is used as the default for 'liftToEncoding' when the
-- type is an instance of 'Generic1'.
genericLiftToEncoding :: (Generic1 f, GToEncoding One (Rep1 f))
                      => Options -> (a -> Encoding) -> ([a] -> Encoding)
                      -> f a -> Encoding
genericLiftToEncoding opts te tel = gToEncoding opts (To1Args te tel) . from1

-------------------------------------------------------------------------------
-- Class
-------------------------------------------------------------------------------

-- | A type that can be converted to JSON.
--
-- An example type and instance:
--
-- @
-- \-- Allow ourselves to write 'Text' literals.
-- {-\# LANGUAGE OverloadedStrings #-}
--
-- data Coord = Coord { x :: Double, y :: Double }
--
-- instance ToJSON Coord where
--   toJSON (Coord x y) = 'object' [\"x\" '.=' x, \"y\" '.=' y]
--
--   toEncoding (Coord x y) = 'pairs' (\"x\" '.=' x '<>' \"y\" '.=' y)
-- @
--
-- Instead of manually writing your 'ToJSON' instance, there are two options
-- to do it automatically:
--
-- * "Data.Aeson.TH" provides Template Haskell functions which will derive an
-- instance at compile time.  The generated instance is optimized for your
-- type so will probably be more efficient than the following two options:
--
-- * The compiler can provide a default generic implementation for
-- 'toJSON'.
--
-- To use the second, simply add a @deriving 'Generic'@ clause to your
-- datatype and declare a 'ToJSON' instance for your datatype without giving
-- definitions for 'toJSON' or 'toEncoding'.
--
-- For example, the previous example can be simplified to a more
-- minimal instance:
--
-- @
-- {-\# LANGUAGE DeriveGeneric \#-}
--
-- import "GHC.Generics"
--
-- data Coord = Coord { x :: Double, y :: Double } deriving 'Generic'
--
-- instance ToJSON Coord where
--   toEncoding = 'genericToEncoding' 'defaultOptions'
-- @
--
-- Why do we provide an implementation for 'toEncoding' here?  The
-- 'toEncoding' function is a relatively new addition to this class.
-- To allow users of older versions of this library to upgrade without
-- having to edit all of their instances or encounter surprising
-- incompatibilities, the default implementation of 'toEncoding' uses
-- 'toJSON'.  This produces correct results, but since it performs an
-- intermediate conversion to a 'Value', it will be less efficient
-- than directly emitting an 'Encoding'.  Our one-liner definition of
-- 'toEncoding' above bypasses the intermediate 'Value'.
--
-- If @DefaultSignatures@ doesn't give exactly the results you want,
-- you can customize the generic encoding with only a tiny amount of
-- effort, using 'genericToJSON' and 'genericToEncoding' with your
-- preferred 'Options':
--
-- @
-- instance ToJSON Coord where
--   toJSON     = 'genericToJSON' 'defaultOptions'
--   toEncoding = 'genericToEncoding' 'defaultOptions'
-- @
class ToJSON a where
    -- | Convert a Haskell value to a JSON-friendly intermediate type.
    toJSON :: a -> Value

    default toJSON :: (Generic a, GToJSON Zero (Rep a)) => a -> Value
    toJSON = genericToJSON defaultOptions

    -- | Encode a Haskell value as JSON.
    --
    -- The default implementation of this method creates an
    -- intermediate 'Value' using 'toJSON'.  This provides
    -- source-level compatibility for people upgrading from older
    -- versions of this library, but obviously offers no performance
    -- advantage.
    --
    -- To benefit from direct encoding, you /must/ provide an
    -- implementation for this method.  The easiest way to do so is by
    -- having your types implement 'Generic' using the @DeriveGeneric@
    -- extension, and then have GHC generate a method body as follows.
    --
    -- @
    -- instance ToJSON Coord where
    --   toEncoding = 'genericToEncoding' 'defaultOptions'
    -- @
    toEncoding :: a -> Encoding
    toEncoding = E.value . toJSON
    {-# INLINE toEncoding #-}

    toJSONList :: [a] -> Value
    toJSONList = listValue toJSON
    {-# INLINE toJSONList #-}

    toEncodingList :: [a] -> Encoding
    toEncodingList = listEncoding toEncoding
    {-# INLINE toEncodingList #-}

-------------------------------------------------------------------------------
-- Object key-value pairs
-------------------------------------------------------------------------------

-- | A key-value pair for encoding a JSON object.
class KeyValue kv where
    (.=) :: ToJSON v => Text -> v -> kv
    infixr 8 .=

instance KeyValue Series where
    name .= value = E.pair name (toEncoding value)
    {-# INLINE (.=) #-}

instance KeyValue Pair where
    name .= value = (name, toJSON value)
    {-# INLINE (.=) #-}

-------------------------------------------------------------------------------
-- Classes and types for map keys
-------------------------------------------------------------------------------

-- | Typeclass for types that can be used as the key of a map-like container
-- (like 'Map' or 'HashMap').  For example, since 'Text' has a 'ToJSONKey'
-- instance and 'Char' has a 'ToJSON' instance, we can encode a value of
-- type 'Map' 'Text' 'Char':
--
-- >>> LBC8.putStrLn $ encode $ Map.fromList [("foo" :: Text, 'a')]
-- {"foo":"a"}
--
-- Since 'Int' also has a 'ToJSONKey' instance, we can similarly write:
--
-- >>> LBC8.putStrLn $ encode $ Map.fromList [(5 :: Int, 'a')]
-- {"5":"a"}
--
-- JSON documents only accept strings as object keys.  For any type
-- from @base@ that has a natural textual representation, it can be
-- expected that its 'ToJSONKey' instance will choose that representation.
--
-- For data types that lack a natural textual representation, an alternative
-- is provided.  The map-like container is represented as a JSON array
-- instead of a JSON object.  Each value in the array is an array with
-- exactly two values.  The first is the key and the second is the value.
--
-- For example, values of type '[Text]' cannot be encoded to a
-- string, so a 'Map' with keys of type '[Text]' is encoded as follows:
--
-- >>> LBC8.putStrLn $ encode $ Map.fromList [(["foo","bar","baz" :: Text], 'a')]
-- [[["foo","bar","baz"],"a"]]
--
-- The default implementation of 'ToJSONKey' chooses this method of
-- encoding a key, using the 'ToJSON' instance of the type.
--
-- To use your own data type as the key in a map, all that is needed
-- is to write a 'ToJSONKey' (and possibly a 'FromJSONKey') instance
-- for it.  If the type cannot be trivially converted to and from 'Text',
-- it is recommended that 'ToJSONKeyValue' is used.  Since the default
-- implementations of the typeclass methods can build this from a
-- 'ToJSON' instance, there is nothing that needs to be written:
--
-- > data Foo = Foo { fooAge :: Int, fooName :: Text }
-- >   deriving (Eq,Ord,Generic)
-- > instance ToJSON Foo
-- > instance ToJSONKey Foo
--
-- That's it.  We can now write:
--
-- >>> let m = Map.fromList [(Foo 4 "bar",'a'),(Foo 6 "arg",'b')]
-- >>> LBC8.putStrLn $ encode m
-- [[{"fooName":"bar","fooAge":4},"a"],[{"fooName":"arg","fooAge":6},"b"]]
--
-- The next case to consider is if we have a type that is a
-- newtype wrapper around 'Text'.  The recommended approach is to use
-- generalized newtype deriving:
--
-- > newtype RecordId = RecordId { getRecordId :: Text}
-- >   deriving (Eq,Ord,ToJSONKey)
--
-- Then we may write:
--
-- >>> LBC8.putStrLn $ encode $ Map.fromList [(RecordId "abc",'a')]
-- {"abc":"a"}
--
-- Simple sum types are a final case worth considering.  Suppose we have:
--
-- > data Color = Red | Green | Blue
-- >   deriving (Show,Read,Eq,Ord)
--
-- It is possible to get the 'ToJSONKey' instance for free as we did
-- with 'Foo'.  However, in this case, we have a natural way to go to
-- and from 'Text' that does not require any escape sequences.  So, in
-- this example, 'ToJSONKeyText' will be used instead of 'ToJSONKeyValue'.
-- The 'Show' instance can be used to help write 'ToJSONKey':
--
-- > instance ToJSONKey Color where
-- >   toJSONKey = ToJSONKeyText f g
-- >     where f = Text.pack . show
-- >           g = text . Text.pack . show
-- >           -- text function is from Data.Aeson.Encoding
--
-- The situation of needing to turning function @a -> Text@ into
-- a 'ToJSONKeyFunction' is common enough that a special combinator
-- is provided for it.  The above instance can be rewritten as:
--
-- > instance ToJSONKey Color where
-- >   toJSONKey = toJSONKeyText (Text.pack . show)
--
-- The performance of the above instance can be improved by
-- not using 'String' as an intermediate step when converting to
-- 'Text'.  One option for improving performance would be to use
-- template haskell machinery from the @text-show@ package.  However,
-- even with the approach, the 'Encoding' (a wrapper around a bytestring
-- builder) is generated by encoding the 'Text' to a 'ByteString',
-- an intermediate step that could be avoided.  The fastest possible
-- implementation would be:
--
-- > -- Assuming that OverloadedStrings is enabled
-- > instance ToJSONKey Color where
-- >   toJSONKey = ToJSONKeyText f g
-- >     where f x = case x of {Red -> "Red";Green ->"Green";Blue -> "Blue"}
-- >           g x = case x of {Red -> text "Red";Green -> text "Green";Blue -> text "Blue"}
-- >           -- text function is from Data.Aeson.Encoding
--
-- This works because GHC can lift the encoded values out of the case
-- statements, which means that they are only evaluated once.  This
-- approach should only be used when there is a serious need to
-- maximize performance.
class ToJSONKey a where
    -- | Strategy for rendering the key for a map-like container.
    toJSONKey :: ToJSONKeyFunction a
    default toJSONKey :: ToJSON a => ToJSONKeyFunction a
    toJSONKey = ToJSONKeyValue toJSON toEncoding

    -- | This is similar in spirit to the 'showsList' method of 'Show'.
    -- It makes it possible to give 'String' keys special treatment
    -- without using @OverlappingInstances@.  End users should always
    -- be able to use the default implementation of this method.
    toJSONKeyList :: ToJSONKeyFunction [a]
    default toJSONKeyList :: ToJSON a => ToJSONKeyFunction [a]
    toJSONKeyList = ToJSONKeyValue toJSON toEncoding

data ToJSONKeyFunction a
    = ToJSONKeyText !(a -> Text) !(a -> Encoding' Text)
      -- ^ key is encoded to string, produces object
    | ToJSONKeyValue !(a -> Value) !(a -> Encoding)
      -- ^ key is encoded to value, produces array

-- | Helper for creating textual keys.
--
-- @
-- instance 'ToJSONKey' MyKey where
--     'toJSONKey' = 'toJSONKeyText' myKeyToText
--       where
--         myKeyToText = Text.pack . show -- or showt from text-show
-- @
toJSONKeyText :: (a -> Text) -> ToJSONKeyFunction a
toJSONKeyText f = ToJSONKeyText f (E.text . f)

-- | TODO: should this be exported?
toJSONKeyTextEnc :: (a -> Encoding' Text) -> ToJSONKeyFunction a
toJSONKeyTextEnc e = ToJSONKeyText tot e
  where
    -- TODO: dropAround is also used in stringEncoding, which is unfortunate atm
    tot = T.dropAround (== '"')
        . T.decodeLatin1
        . lazyToStrictByteString
        . E.encodingToLazyByteString
        . e

-- | Contravariant map, as 'ToJSONKeyFunction' is a contravariant functor.
contramapToJSONKeyFunction :: (b -> a) -> ToJSONKeyFunction a -> ToJSONKeyFunction b
contramapToJSONKeyFunction h x = case x of
    ToJSONKeyText  f g -> ToJSONKeyText  (f . h) (g . h)
    ToJSONKeyValue f g -> ToJSONKeyValue (f . h) (g . h)

-------------------------------------------------------------------------------
-- Lifings of FromJSON and ToJSON to unary and binary type constructors
-------------------------------------------------------------------------------

-- | Lifting of the 'ToJSON' class to unary type constructors.
--
-- Instead of manually writing your 'ToJSON1' instance, there are two options
-- to do it automatically:
--
-- * "Data.Aeson.TH" provides Template Haskell functions which will derive an
-- instance at compile time.  The generated instance is optimized for your
-- type so will probably be more efficient than the following two options:
--
-- * The compiler can provide a default generic implementation for
-- 'toJSON1'.
--
-- To use the second, simply add a @deriving 'Generic1'@ clause to your
-- datatype and declare a 'ToJSON1' instance for your datatype without giving
-- definitions for 'liftToJSON' or 'liftToEncoding'.
--
-- For example:
--
-- @
-- {-\# LANGUAGE DeriveGeneric \#-}
--
-- import "GHC.Generics"
--
-- data Pair = Pair { pairFst :: a, pairSnd :: b } deriving 'Generic1'
--
-- instance ToJSON a => ToJSON1 (Pair a)
-- @
--
-- If @DefaultSignatures@ doesn't give exactly the results you want,
-- you can customize the generic encoding with only a tiny amount of
-- effort, using 'genericLiftToJSON' and 'genericLiftToEncoding' with
-- your preferred 'Options':
--
-- @
-- instance ToJSON a => ToJSON1 (Pair a) where
--   liftToJSON     = 'genericLiftToJSON' 'defaultOptions'
--   liftToEncoding = 'genericLiftToEncoding' 'defaultOptions'
-- @
class ToJSON1 f where
    liftToJSON :: (a -> Value) -> ([a] -> Value) -> f a -> Value

    default liftToJSON :: (Generic1 f, GToJSON One (Rep1 f))
                       => (a -> Value) -> ([a] -> Value) -> f a -> Value
    liftToJSON = genericLiftToJSON defaultOptions

    liftToJSONList :: (a -> Value) -> ([a] -> Value) -> [f a] -> Value
    liftToJSONList f g = listValue (liftToJSON f g)

    liftToEncoding :: (a -> Encoding) -> ([a] -> Encoding) -> f a -> Encoding

    default liftToEncoding :: (Generic1 f, GToEncoding One (Rep1 f))
                           => (a -> Encoding) -> ([a] -> Encoding)
                           -> f a -> Encoding
    liftToEncoding = genericLiftToEncoding defaultOptions

    liftToEncodingList :: (a -> Encoding) -> ([a] -> Encoding) -> [f a] -> Encoding
    liftToEncodingList f g = listEncoding (liftToEncoding f g)

-- | Lift the standard 'toJSON' function through the type constructor.
toJSON1 :: (ToJSON1 f, ToJSON a) => f a -> Value
toJSON1 = liftToJSON toJSON toJSONList
{-# INLINE toJSON1 #-}

-- | Lift the standard 'toEncoding' function through the type constructor.
toEncoding1 :: (ToJSON1 f, ToJSON a) => f a -> Encoding
toEncoding1 = liftToEncoding toEncoding toEncodingList
{-# INLINE toEncoding1 #-}

-- | Lifting of the 'ToJSON' class to binary type constructors.
--
-- Instead of manually writing your 'ToJSON2' instance, "Data.Aeson.TH"
-- provides Template Haskell functions which will derive an instance at
-- compile time.
--
-- The compiler cannot provide a default generic implementation for
-- 'liftToJSON2', unlike 'toJSON' and 'liftToJSON'.
class ToJSON2 f where
    liftToJSON2 :: (a -> Value) -> ([a] -> Value)
                -> (b -> Value) -> ([b] -> Value)
                -> f a b -> Value

    liftToJSONList2 :: (a -> Value) -> ([a] -> Value)
                    -> (b -> Value) -> ([b] -> Value)
                    -> [f a b] -> Value
    liftToJSONList2 fa ga fb gb = listValue (liftToJSON2 fa ga fb gb)

    liftToEncoding2 :: (a -> Encoding) -> ([a] -> Encoding)
                    -> (b -> Encoding) -> ([b] -> Encoding)
                    -> f a b -> Encoding

    liftToEncodingList2 :: (a -> Encoding) -> ([a] -> Encoding)
                        -> (b -> Encoding) -> ([b] -> Encoding)
                        -> [f a b] -> Encoding
    liftToEncodingList2 fa ga fb gb = listEncoding (liftToEncoding2 fa ga fb gb)

-- | Lift the standard 'toJSON' function through the type constructor.
toJSON2 :: (ToJSON2 f, ToJSON a, ToJSON b) => f a b -> Value
toJSON2 = liftToJSON2 toJSON toJSONList toJSON toJSONList
{-# INLINE toJSON2 #-}

-- | Lift the standard 'toEncoding' function through the type constructor.
toEncoding2 :: (ToJSON2 f, ToJSON a, ToJSON b) => f a b -> Encoding
toEncoding2 = liftToEncoding2 toEncoding toEncodingList toEncoding toEncodingList
{-# INLINE toEncoding2 #-}

-------------------------------------------------------------------------------
-- Encoding functions
-------------------------------------------------------------------------------

-- | Helper function to use with 'liftToEncoding'.
-- Useful when writing own 'ToJSON1' instances.
--
-- @
-- newtype F a = F [a]
--
-- -- This instance encodes String as an array of chars
-- instance 'ToJSON1' F where
--     'liftToJSON'     tj _ (F xs) = 'liftToJSON'     tj ('listValue'    tj) xs
--     'liftToEncoding' te _ (F xs) = 'liftToEncoding' te ('listEncoding' te) xs
--
-- instance 'Data.Aeson.FromJSON.FromJSON1' F where
--     'Data.Aeson.FromJSON.liftParseJSON' p _ v = F \<$\> 'Data.Aeson.FromJSON.liftParseJSON' p ('Data.Aeson.FromJSON.listParser' p) v
-- @
listEncoding :: (a -> Encoding) -> [a] -> Encoding
listEncoding = E.list
{-# INLINE listEncoding #-}

-- | Helper function to use with 'liftToJSON', see 'listEncoding'.
listValue :: (a -> Value) -> [a] -> Value
listValue f = Array . V.fromList . map f
{-# INLINE listValue #-}

-------------------------------------------------------------------------------
-- [] instances
-------------------------------------------------------------------------------

-- These are needed for key-class default definitions

instance ToJSON1 [] where
    liftToJSON _ to' = to'
    {-# INLINE liftToJSON #-}

    liftToEncoding _ to' = to'
    {-# INLINE liftToEncoding #-}

instance (ToJSON a) => ToJSON [a] where
    toJSON = toJSON1
    {-# INLINE toJSON #-}

    toEncoding = toEncoding1
    {-# INLINE toEncoding #-}

-------------------------------------------------------------------------------
-- Generic toJSON / toEncoding
-------------------------------------------------------------------------------

--------------------------------------------------------------------------------
-- Generic toJSON

instance OVERLAPPABLE_ (GToJSON arity a) => GToJSON arity (M1 i c a) where
    -- Meta-information, which is not handled elsewhere, is ignored:
    gToJSON opts targs = gToJSON opts targs . unM1

instance (ToJSON a) => GToJSON arity (K1 i a) where
    -- Constant values are encoded using their ToJSON instance:
    gToJSON _opts _ = toJSON . unK1

instance GToJSON One Par1 where
    -- Direct occurrences of the last type parameter are encoded with the
    -- function passed in as an argument:
    gToJSON _opts (To1Args tj _) = tj . unPar1

instance (ToJSON1 f) => GToJSON One (Rec1 f) where
    -- Recursive occurrences of the last type parameter are encoded using
    -- their ToJSON1 instance:
    gToJSON _opts (To1Args tj tjl) = liftToJSON tj tjl . unRec1

instance GToJSON arity U1 where
    -- Empty constructors are encoded to an empty array:
    gToJSON _opts _ _ = emptyArray

instance (ConsToJSON arity a) => GToJSON arity (C1 c a) where
    -- Constructors need to be encoded differently depending on whether
    -- they're a record or not.  This distinction is made by 'consToJSON':
    gToJSON opts targs = consToJSON opts targs . unM1

instance ( WriteProduct arity a, WriteProduct arity b
         , ProductSize a, ProductSize b
         ) => GToJSON arity (a :*: b) where
    -- Products are encoded to an array.  Here we allocate a mutable vector
    -- of the same size as the product and write the product's elements to it
    -- using 'writeProduct':
    gToJSON opts targs p =
        Array $ V.create $ do
          mv <- VM.unsafeNew lenProduct
          writeProduct opts targs mv 0 lenProduct p
          return mv
      where
        lenProduct = (unTagged2 :: Tagged2 (a :*: b) Int -> Int)
                     productSize

instance ( AllNullary (a :+: b) allNullary
         , SumToJSON arity (a :+: b) allNullary
         ) => GToJSON arity (a :+: b) where
    -- If all constructors of a sum datatype are nullary and the
    -- 'allNullaryToStringTag' option is set they are encoded to
    -- strings.  This distinction is made by 'sumToJSON':
    gToJSON opts targs = (unTagged :: Tagged allNullary Value -> Value)
                       . sumToJSON opts targs

instance (ToJSON1 f, GToJSON One g) => GToJSON One (f :.: g) where
    -- If an occurrence of the last type parameter is nested inside two
    -- composed types, it is encoded by using the outermost type's ToJSON1
    -- instance to generically encode the innermost type:
    gToJSON opts targs =
        let gtj = gToJSON opts targs in
        liftToJSON gtj (listValue gtj) . unComp1

--------------------------------------------------------------------------------
-- Generic toEncoding

instance OVERLAPPABLE_ (GToEncoding arity a) => GToEncoding arity (M1 i c a) where
    -- Meta-information, which is not handled elsewhere, is ignored:
    gToEncoding opts targs = gToEncoding opts targs . unM1

instance (ToJSON a) => GToEncoding arity (K1 i a) where
    -- Constant values are encoded using their ToJSON instance:
    gToEncoding _opts _ = toEncoding . unK1

instance GToEncoding One Par1 where
    -- Direct occurrences of the last type parameter are encoded with the
    -- function passed in as an argument:
    gToEncoding _opts (To1Args te _) = te . unPar1

instance (ToJSON1 f) => GToEncoding One (Rec1 f) where
    -- Recursive occurrences of the last type parameter are encoded using
    -- their ToEncoding1 instance:
    gToEncoding _opts (To1Args te tel) = liftToEncoding te tel . unRec1

instance GToEncoding arity U1 where
    -- Empty constructors are encoded to an empty array:
    gToEncoding _opts _ _ = E.emptyArray_

instance (ConsToEncoding arity a) => GToEncoding arity (C1 c a) where
    -- Constructors need to be encoded differently depending on whether
    -- they're a record or not.  This distinction is made by 'consToEncoding':
    gToEncoding opts targs = consToEncoding opts targs . unM1

instance ( EncodeProduct arity a
         , EncodeProduct arity b
         ) => GToEncoding arity (a :*: b) where
    -- Products are encoded to an array.  Here we allocate a mutable vector
    -- of the same size as the product and write the product's elements to it
    -- using 'encodeProduct':
    gToEncoding opts targs p = E.list E.retagEncoding [encodeProduct opts targs p]

instance ( AllNullary (a :+: b) allNullary
         , SumToEncoding arity (a :+: b) allNullary
         ) => GToEncoding arity (a :+: b) where
    -- If all constructors of a sum datatype are nullary and the
    -- 'allNullaryToStringTag' option is set they are encoded to
    -- strings.  This distinction is made by 'sumToEncoding':
    gToEncoding opts targs = (unTagged :: Tagged allNullary Encoding -> Encoding)
                           . sumToEncoding opts targs

instance (ToJSON1 f, GToEncoding One g) => GToEncoding One (f :.: g) where
    -- If an occurrence of the last type parameter is nested inside two
    -- composed types, it is encoded by using the outermost type's ToJSON1
    -- instance to generically encode the innermost type:
    gToEncoding opts targs =
        let gte = gToEncoding opts targs in
        liftToEncoding gte (listEncoding gte) . unComp1

--------------------------------------------------------------------------------

class SumToJSON arity f allNullary where
    sumToJSON :: Options -> ToArgs Value arity a
              -> f a -> Tagged allNullary Value

instance ( GetConName f
         , TaggedObjectPairs arity f
         , ObjectWithSingleFieldObj arity f
         , TwoElemArrayObj arity f
         , UntaggedValueObj arity f
         ) => SumToJSON arity f True where
    sumToJSON opts targs
        | allNullaryToStringTag opts = Tagged . String . pack
                                     . constructorTagModifier opts . getConName
        | otherwise = Tagged . nonAllNullarySumToJSON opts targs

instance ( TwoElemArrayObj arity f
         , TaggedObjectPairs arity f
         , ObjectWithSingleFieldObj arity f
         , UntaggedValueObj arity f
         ) => SumToJSON arity f False where
    sumToJSON opts targs = Tagged . nonAllNullarySumToJSON opts targs

nonAllNullarySumToJSON :: ( TwoElemArrayObj arity f
                          , TaggedObjectPairs arity f
                          , ObjectWithSingleFieldObj arity f
                          , UntaggedValueObj arity f
                          ) => Options -> ToArgs Value arity a
                            -> f a -> Value
nonAllNullarySumToJSON opts targs =
    case sumEncoding opts of
      TaggedObject{..}      ->
          object . taggedObjectPairs opts targs tagFieldName contentsFieldName
      ObjectWithSingleField -> Object . objectWithSingleFieldObj opts targs
      TwoElemArray          -> Array .
twoElemArrayObj opts targs
      UntaggedValue         -> untaggedValueObj opts targs

--------------------------------------------------------------------------------

-- | Encode a generic sum type.  The @allNullary@ index records whether every
-- constructor is nullary, which is what makes the plain string-tag
-- representation possible.
class SumToEncoding arity f allNullary where
    sumToEncoding :: Options -> ToArgs Encoding arity a
                  -> f a -> Tagged allNullary Encoding

instance ( GetConName                     f
         , TaggedObjectEnc          arity f
         , ObjectWithSingleFieldEnc arity f
         , TwoElemArrayEnc          arity f
         , UntaggedValueEnc         arity f
         ) => SumToEncoding arity f True where
    sumToEncoding opts targs
        -- All constructors are nullary: encode as the (modified) tag string.
        | allNullaryToStringTag opts = Tagged . toEncoding .
                                       constructorTagModifier opts . getConName
        | otherwise = Tagged . nonAllNullarySumToEncoding opts targs

instance ( TwoElemArrayEnc          arity f
         , TaggedObjectEnc          arity f
         , ObjectWithSingleFieldEnc arity f
         , UntaggedValueEnc         arity f
         ) => SumToEncoding arity f False where
    sumToEncoding opts targs = Tagged . nonAllNullarySumToEncoding opts targs

-- | Dispatch on the 'sumEncoding' configured in 'Options'; used whenever the
-- all-nullary string-tag shortcut does not apply.
nonAllNullarySumToEncoding :: ( TwoElemArrayEnc          arity f
                              , TaggedObjectEnc          arity f
                              , ObjectWithSingleFieldEnc arity f
                              , UntaggedValueEnc         arity f
                              ) => Options -> ToArgs Encoding arity a
                                -> f a -> Encoding
nonAllNullarySumToEncoding opts targs =
    case sumEncoding opts of
      TaggedObject{..}      ->
        taggedObjectEnc opts targs tagFieldName contentsFieldName
      ObjectWithSingleField -> objectWithSingleFieldEnc opts targs
      TwoElemArray          -> twoElemArrayEnc opts targs
      UntaggedValue         -> untaggedValueEnc opts targs

--------------------------------------------------------------------------------

-- | Build the key\/value pairs of the 'TaggedObject' 'Value' representation.
class TaggedObjectPairs arity f where
    taggedObjectPairs :: Options -> ToArgs Value arity a
                      -> String -> String
                      -> f a -> [Pair]

instance ( TaggedObjectPairs arity a
         , TaggedObjectPairs arity b
         ) => TaggedObjectPairs arity (a :+: b) where
    taggedObjectPairs opts targs tagFieldName contentsFieldName (L1 x) =
        taggedObjectPairs opts targs tagFieldName contentsFieldName x
    taggedObjectPairs opts targs tagFieldName contentsFieldName (R1 x) =
        taggedObjectPairs opts targs tagFieldName contentsFieldName x

instance ( IsRecord                 a isRecord
         , TaggedObjectPairs' arity a isRecord
         , Constructor c
         ) => TaggedObjectPairs arity (C1 c a) where
    -- Prepend the tag pair, then defer to the record/non-record worker.
    taggedObjectPairs opts targs tagFieldName contentsFieldName =
        (pack tagFieldName .= constructorTagModifier opts
                                 (conName (undefined :: t c a p)) :) .
        (unTagged :: Tagged isRecord [Pair] -> [Pair]) .
        taggedObjectPairs' opts targs contentsFieldName . unM1

-- | Worker for 'TaggedObjectPairs'; the @isRecord@ index selects between
-- record fields and a single @contents@ pair.
class TaggedObjectPairs' arity f isRecord where
    taggedObjectPairs' :: Options -> ToArgs Value arity a
                       -> String -> f a -> Tagged isRecord [Pair]

instance OVERLAPPING_ TaggedObjectPairs' arity U1 False where
    -- A nullary constructor contributes no @contents@ pair at all.
    taggedObjectPairs' _ _ _ _ = Tagged []

instance (RecordToPairs arity f) => TaggedObjectPairs' arity f True where
    taggedObjectPairs' opts targs _ =
        Tagged . toList . recordToPairs opts targs

instance (GToJSON arity f) => TaggedObjectPairs' arity f False where
    taggedObjectPairs' opts targs contentsFieldName =
        Tagged . (:[]) . (pack contentsFieldName .=) . gToJSON opts targs

--------------------------------------------------------------------------------

-- | Direct-to-'Encoding' analogue of 'TaggedObjectPairs'.
class TaggedObjectEnc arity f where
    taggedObjectEnc :: Options -> ToArgs Encoding arity a
                    -> String -> String
                    -> f a -> Encoding

instance ( TaggedObjectEnc arity a
         , TaggedObjectEnc arity b
         ) => TaggedObjectEnc arity (a :+: b) where
    taggedObjectEnc opts targs tagFieldName contentsFieldName (L1 x) =
        taggedObjectEnc opts targs tagFieldName contentsFieldName x
    taggedObjectEnc opts targs tagFieldName contentsFieldName (R1 x) =
        taggedObjectEnc opts targs tagFieldName contentsFieldName x

instance ( IsRecord               a isRecord
         , TaggedObjectEnc' arity a isRecord
         , Constructor c
         ) => TaggedObjectEnc arity (C1 c a) where
    taggedObjectEnc opts targs tagFieldName contentsFieldName v =
        E.pairs (E.pair key val)
      where
        key :: Text
        key = pack tagFieldName
        -- Tag value followed by the (possibly empty) rest of the object;
        -- the worker supplies its own leading comma when non-empty.
        val = toEncoding (constructorTagModifier opts
                            (conName (undefined :: t c a p))) ><
              ((unTagged :: Tagged isRecord Encoding -> Encoding) .
               taggedObjectEnc' opts targs contentsFieldName . unM1 $ v)

-- | Worker for 'TaggedObjectEnc'; @isRecord@ selects the representation.
class TaggedObjectEnc' arity f isRecord where
    taggedObjectEnc' :: Options -> ToArgs Encoding arity a
                     -> String -> f a -> Tagged isRecord Encoding

instance OVERLAPPING_ TaggedObjectEnc' arity U1 False where
    taggedObjectEnc' _ _ _ _ = Tagged E.empty

instance (RecordToEncoding arity f) => TaggedObjectEnc' arity f True where
    taggedObjectEnc' opts targs _ =
        Tagged . (E.comma ><) . fst . recordToEncoding opts targs

instance (GToEncoding arity f) => TaggedObjectEnc' arity f False where
    taggedObjectEnc' opts targs contentsFieldName = Tagged .
        (\z -> E.comma >< toEncoding contentsFieldName >< E.colon >< z) .
        gToEncoding opts targs

--------------------------------------------------------------------------------

-- | Get the name of the constructor of a sum datatype.
class GetConName f where
    getConName :: f a -> String

instance (GetConName a, GetConName b) => GetConName (a :+: b) where
    getConName (L1 x) = getConName x
    getConName (R1 x) = getConName x

instance (Constructor c) => GetConName (C1 c a) where
    getConName = conName

--------------------------------------------------------------------------------

-- | Build the two-element @[tag, contents]@ array as a 'Value' vector.
class TwoElemArrayObj arity f where
    twoElemArrayObj :: Options -> ToArgs Value arity a
                    -> f a -> V.Vector Value

instance ( TwoElemArrayObj arity a
         , TwoElemArrayObj arity b
         ) => TwoElemArrayObj arity (a :+: b) where
    twoElemArrayObj opts targs (L1 x) = twoElemArrayObj opts targs x
    twoElemArrayObj opts targs (R1 x) = twoElemArrayObj opts targs x

instance ( GToJSON    arity a
         , ConsToJSON arity a
         , Constructor c
         ) => TwoElemArrayObj arity (C1 c a) where
    twoElemArrayObj opts targs x = V.create $ do
      mv <- VM.unsafeNew 2
      VM.unsafeWrite mv 0 $ String $ pack $ constructorTagModifier opts
                                   $ conName (undefined :: t c a p)
      VM.unsafeWrite mv 1 $ gToJSON opts targs x
      return mv

--------------------------------------------------------------------------------

-- | Direct-to-'Encoding' analogue of 'TwoElemArrayObj'.
class TwoElemArrayEnc arity f where
    twoElemArrayEnc :: Options -> ToArgs Encoding arity a -> f a -> Encoding

instance (
           TwoElemArrayEnc arity a
         , TwoElemArrayEnc arity b
         ) => TwoElemArrayEnc arity (a :+: b) where
    twoElemArrayEnc opts targs (L1 x) = twoElemArrayEnc opts targs x
    twoElemArrayEnc opts targs (R1 x) = twoElemArrayEnc opts targs x

instance ( GToEncoding    arity a
         , ConsToEncoding arity a
         , Constructor c
         ) => TwoElemArrayEnc arity (C1 c a) where
    twoElemArrayEnc opts targs x = E.list id
      [ toEncoding (constructorTagModifier opts (conName (undefined :: t c a p)))
      , gToEncoding opts targs x
      ]

--------------------------------------------------------------------------------

-- | Encode a single constructor (record or plain product) as a 'Value'.
class ConsToJSON arity f where
    consToJSON :: Options -> ToArgs Value arity a -> f a -> Value

-- | Worker for 'ConsToJSON'; @isRecord@ picks object vs. positional form.
class ConsToJSON' arity f isRecord where
    consToJSON' :: Options -> ToArgs Value arity a
                -> Bool -- ^ Are we a record with one field?
                -> f a -> Tagged isRecord Value

instance ( IsRecord          f isRecord
         , ConsToJSON' arity f isRecord
         ) => ConsToJSON arity f where
    consToJSON opts targs =
        (unTagged :: Tagged isRecord Value -> Value)
      . consToJSON' opts targs (isUnary (undefined :: f a))

instance (RecordToPairs arity f) => ConsToJSON' arity f True where
    -- A one-field record may be unwrapped to just the field's value when
    -- 'unwrapUnaryRecords' is set.
    consToJSON' opts targs isUn f = let
      vals = toList $ recordToPairs opts targs f
      in case (unwrapUnaryRecords opts,isUn,vals) of
        (True,True,[(_,val)]) -> Tagged val
        _ -> Tagged $ object vals

instance GToJSON arity f => ConsToJSON' arity f False where
    consToJSON' opts targs _ = Tagged . gToJSON opts targs

--------------------------------------------------------------------------------

-- | Direct-to-'Encoding' analogue of 'ConsToJSON'.
class ConsToEncoding arity f where
    consToEncoding :: Options -> ToArgs Encoding arity a -> f a -> Encoding

class ConsToEncoding' arity f isRecord where
    consToEncoding' :: Options -> ToArgs Encoding arity a
                    -> Bool -- ^ Are we a record with one field?
                    -> f a -> Tagged isRecord Encoding

instance ( IsRecord              f isRecord
         , ConsToEncoding' arity f isRecord
         ) => ConsToEncoding arity f where
    consToEncoding opts targs =
        (unTagged :: Tagged isRecord Encoding -> Encoding)
      . consToEncoding' opts targs (isUnary (undefined :: f a))

instance (RecordToEncoding arity f) => ConsToEncoding' arity f True where
    consToEncoding' opts targs isUn x =
      let (enc, mbVal) = recordToEncoding opts targs x
      in case (unwrapUnaryRecords opts, isUn, mbVal) of
           (True, True, Just val) -> Tagged val
           _ -> Tagged $ E.wrapObject enc

instance GToEncoding arity f => ConsToEncoding' arity f False where
    consToEncoding' opts targs _ = Tagged . gToEncoding opts targs

--------------------------------------------------------------------------------

-- | Collect the fields of a record constructor as a difference list of pairs.
class RecordToPairs arity f where
    recordToPairs :: Options -> ToArgs Value arity a
                  -> f a -> DList Pair

instance ( RecordToPairs arity a
         , RecordToPairs arity b
         ) => RecordToPairs arity (a :*: b) where
    recordToPairs opts targs (a :*: b) = recordToPairs opts targs a <>
                                         recordToPairs opts targs b

instance (Selector s, GToJSON arity a) => RecordToPairs arity (S1 s a) where
    recordToPairs = fieldToPair

instance OVERLAPPING_ (Selector s, ToJSON a) =>
  RecordToPairs arity (S1 s (K1 i (Maybe a))) where
    -- 'Nothing' fields are dropped entirely when 'omitNothingFields' is set.
    recordToPairs opts _ (M1 k1) | omitNothingFields opts
                                 , K1 Nothing <- k1 = DList.empty
    recordToPairs opts targs m1 = fieldToPair opts targs m1

-- | Turn one record selector into its @(key, value)@ pair.
fieldToPair :: (Selector s, GToJSON arity a)
            => Options -> ToArgs Value arity p
            -> S1 s a p -> DList Pair
fieldToPair opts targs m1 = pure ( pack $ fieldLabelModifier opts $ selName m1
                                 , gToJSON opts targs (unM1 m1)
                                 )

--------------------------------------------------------------------------------

class RecordToEncoding arity f where
    -- 1st element: whole thing
    -- 2nd element: in case the record has only 1 field, just the value
    -- of the field (without the key); 'Nothing' otherwise
    recordToEncoding :: Options -> ToArgs Encoding arity a
                     -> f a -> (Encoding, Maybe Encoding)

instance ( RecordToEncoding arity a
         , RecordToEncoding arity b
         ) => RecordToEncoding arity (a :*: b) where
    -- When omitting 'Nothing' fields, drop empty encodings before
    -- interspersing commas so no stray separators appear.
    recordToEncoding opts targs (a :*: b) | omitNothingFields opts =
      (E.econcat $ intersperse E.comma $
        filter (not . E.nullEncoding)
        [ fst (recordToEncoding opts targs a)
        , fst (recordToEncoding opts targs b) ]
      , Nothing)
    recordToEncoding opts targs (a :*: b) =
      (fst (recordToEncoding opts targs a) >< E.comma ><
       fst (recordToEncoding opts targs b),
       Nothing)

instance (Selector s, GToEncoding arity a) =>
  RecordToEncoding arity (S1 s a) where
    recordToEncoding = fieldToEncoding

instance OVERLAPPING_ (Selector s, ToJSON a) =>
  RecordToEncoding arity (S1 s (K1 i (Maybe a))) where
    recordToEncoding opts _ (M1 k1) | omitNothingFields opts
                                    , K1 Nothing <- k1 = (E.empty, Nothing)
    recordToEncoding opts targs m1 = fieldToEncoding opts targs m1

-- | Encode one selector; also return the bare value, which is reused when a
-- unary record is unwrapped.
fieldToEncoding :: (Selector s, GToEncoding arity a)
                => Options -> ToArgs Encoding arity p
                -> S1 s a p -> (Encoding, Maybe Encoding)
fieldToEncoding opts targs m1 =
  let keyBuilder   = toEncoding (fieldLabelModifier opts $ selName m1)
      valueBuilder = gToEncoding opts targs (unM1 m1)
  in  (keyBuilder >< E.colon >< valueBuilder, Just valueBuilder)

--------------------------------------------------------------------------------

-- | Write the components of a product into a pre-allocated mutable vector,
-- halving the index range at each @:*:@ node.
class WriteProduct arity f where
    writeProduct :: Options
                 -> ToArgs Value arity a
                 -> VM.MVector s Value
                 -> Int -- ^ index
                 -> Int -- ^ length
                 -> f a
                 -> ST s ()

instance ( WriteProduct arity a
         , WriteProduct arity b
         ) => WriteProduct arity (a :*: b) where
    writeProduct opts targs mv ix len (a :*: b) = do
      writeProduct opts targs mv ix  lenL a
      writeProduct opts targs mv ixR lenR b
        where
          lenL = len `unsafeShiftR` 1
          lenR = len - lenL
          ixR  = ix  + lenL

instance OVERLAPPABLE_ (GToJSON arity a) => WriteProduct arity a where
    writeProduct opts targs mv ix _ =
      VM.unsafeWrite mv ix . gToJSON opts targs

--------------------------------------------------------------------------------

-- | Encode a product directly as the interior of a JSON array.
class EncodeProduct arity f where
    encodeProduct :: Options -> ToArgs Encoding arity a
                  -> f a -> Encoding' E.InArray

instance ( EncodeProduct arity a
         , EncodeProduct arity b
         ) => EncodeProduct arity (a :*: b) where
    encodeProduct opts targs (a :*: b) | omitNothingFields opts =
        E.econcat $ intersperse E.comma $
        filter (not . E.nullEncoding)
        [encodeProduct opts targs a, encodeProduct opts targs b]
    encodeProduct opts targs (a :*: b) =
      encodeProduct opts targs a >*<
      encodeProduct opts targs b

instance OVERLAPPABLE_ (GToEncoding arity a) => EncodeProduct arity a where
    encodeProduct opts targs a = E.retagEncoding $ gToEncoding opts targs a

--------------------------------------------------------------------------------

-- | Build the @{tag: contents}@ single-field object as a 'Value'.
class ObjectWithSingleFieldObj arity f where
    objectWithSingleFieldObj :: Options -> ToArgs Value arity a
                             -> f a -> Object

instance ( ObjectWithSingleFieldObj arity a
         , ObjectWithSingleFieldObj arity b
         ) => ObjectWithSingleFieldObj arity (a :+: b) where
    objectWithSingleFieldObj opts targs (L1 x) =
      objectWithSingleFieldObj opts targs x
    objectWithSingleFieldObj opts targs (R1 x) =
      objectWithSingleFieldObj opts targs x

instance ( GToJSON    arity a
         , ConsToJSON arity a
         , Constructor c
         ) => ObjectWithSingleFieldObj arity (C1 c a) where
    objectWithSingleFieldObj opts targs =
      H.singleton typ .
      gToJSON opts targs
      where
        typ = pack $ constructorTagModifier opts $
                     conName (undefined :: t c a p)

--------------------------------------------------------------------------------

-- | Direct-to-'Encoding' analogue of 'ObjectWithSingleFieldObj'.
class ObjectWithSingleFieldEnc arity f where
    objectWithSingleFieldEnc :: Options -> ToArgs Encoding arity a
                             -> f a -> Encoding

instance ( ObjectWithSingleFieldEnc arity a
         , ObjectWithSingleFieldEnc arity b
         ) => ObjectWithSingleFieldEnc arity (a :+: b) where
    objectWithSingleFieldEnc opts targs (L1 x) =
      objectWithSingleFieldEnc opts targs x
    objectWithSingleFieldEnc opts targs (R1 x) =
      objectWithSingleFieldEnc opts targs x

instance ( GToEncoding    arity a
         , ConsToEncoding arity a
         , Constructor c
         ) => ObjectWithSingleFieldEnc arity (C1 c a) where
    objectWithSingleFieldEnc opts targs v =
      E.pairs (E.pair key val)
      where
        key :: Text
        key = pack (constructorTagModifier opts (conName (undefined :: t c a p)))
        val :: Encoding' Value
        val = gToEncoding opts targs v

--------------------------------------------------------------------------------

-- | 'UntaggedValue' representation as a 'Value': just the contents, except
-- that nullary constructors encode as their (modified) name.
class UntaggedValueObj arity f where
    untaggedValueObj :: Options -> ToArgs Value arity a -> f a -> Value

instance
    ( UntaggedValueObj arity a
    , UntaggedValueObj arity b
    ) => UntaggedValueObj arity (a :+: b)
  where
    untaggedValueObj opts targs (L1 x) = untaggedValueObj opts targs x
    untaggedValueObj opts targs (R1 x) = untaggedValueObj opts targs x

instance OVERLAPPABLE_
    ( GToJSON    arity a
    , ConsToJSON arity a
    ) => UntaggedValueObj arity (C1 c a) where
    untaggedValueObj = gToJSON

instance OVERLAPPING_
    ( Constructor c )
    => UntaggedValueObj arity (C1 c U1) where
    -- A nullary constructor has no contents; fall back to its tag string.
    untaggedValueObj opts _ _ = toJSON $
        constructorTagModifier opts $ conName (undefined :: t c U1 p)

--------------------------------------------------------------------------------

-- | Direct-to-'Encoding' analogue of 'UntaggedValueObj'.
class UntaggedValueEnc arity f where
    untaggedValueEnc :: Options -> ToArgs Encoding arity a -> f a -> Encoding

instance
    ( UntaggedValueEnc arity a
    , UntaggedValueEnc arity b
    ) => UntaggedValueEnc arity (a :+: b)
  where
    untaggedValueEnc opts targs (L1 x) = untaggedValueEnc opts targs x
    untaggedValueEnc opts targs (R1 x) = untaggedValueEnc opts targs x

instance OVERLAPPABLE_
    ( GToEncoding    arity a
    , ConsToEncoding arity a
    ) => UntaggedValueEnc arity (C1 c a) where
    untaggedValueEnc = gToEncoding

instance OVERLAPPING_
    ( Constructor c )
    => UntaggedValueEnc arity (C1 c U1) where
    untaggedValueEnc opts _ _ = toEncoding $
        constructorTagModifier opts $ conName (undefined :: t c U1 p)

-------------------------------------------------------------------------------
-- Instances
-------------------------------------------------------------------------------

-------------------------------------------------------------------------------
-- base
-------------------------------------------------------------------------------

instance ToJSON2 Const where
    liftToJSON2 t _ _ _ (Const x) = t x
    {-# INLINE liftToJSON2 #-}
    liftToEncoding2 t _ _ _ (Const x) = t x
    {-# INLINE liftToEncoding2 #-}

instance ToJSON a => ToJSON1 (Const a) where
    liftToJSON _ _ (Const x) = toJSON x
    {-# INLINE liftToJSON #-}
    liftToEncoding _ _ (Const x) = toEncoding x
    {-# INLINE liftToEncoding #-}

instance ToJSON a => ToJSON (Const a b) where
    toJSON (Const x) = toJSON x
    {-# INLINE toJSON #-}
    toEncoding (Const x) = toEncoding x
    {-# INLINE toEncoding #-}

instance ToJSON1 Maybe where
    liftToJSON t _ (Just a) = t a
    liftToJSON _ _ Nothing  = Null
    {-# INLINE liftToJSON #-}

    liftToEncoding t _ (Just a) = t a
    liftToEncoding _ _ Nothing  = E.null_
    {-# INLINE liftToEncoding #-}

instance (ToJSON a) => ToJSON (Maybe a) where
    toJSON = toJSON1
    {-# INLINE toJSON #-}

    toEncoding = toEncoding1
    {-# INLINE toEncoding #-}

instance ToJSON2 Either where
    liftToJSON2  toA _ _toB _ (Left a)  = Object $ H.singleton "Left"  (toA a)
    liftToJSON2 _toA _  toB _ (Right b) = Object $ H.singleton "Right" (toB b)
    {-# INLINE liftToJSON2 #-}

    liftToEncoding2  toA _ _toB _ (Left a)  = E.pairs $ E.pair "Left" $ toA a
    liftToEncoding2 _toA _  toB _ (Right b) = E.pairs $ E.pair "Right" $ toB b
    {-# INLINE
liftToEncoding2 #-}

instance (ToJSON a) => ToJSON1 (Either a) where
    liftToJSON = liftToJSON2 toJSON toJSONList
    {-# INLINE liftToJSON #-}

    liftToEncoding = liftToEncoding2 toEncoding toEncodingList
    {-# INLINE liftToEncoding #-}

instance (ToJSON a, ToJSON b) => ToJSON (Either a b) where
    toJSON = toJSON2
    {-# INLINE toJSON #-}

    toEncoding = toEncoding2
    {-# INLINE toEncoding #-}

instance ToJSON Bool where
    toJSON = Bool
    {-# INLINE toJSON #-}

    toEncoding = E.bool
    {-# INLINE toEncoding #-}

instance ToJSONKey Bool where
    toJSONKey = toJSONKeyText $ \x -> if x then "true" else "false"

instance ToJSON Ordering where
  toJSON     = toJSON     . orderingToText
  toEncoding = toEncoding . orderingToText

-- | Render an 'Ordering' as its constructor name.
orderingToText :: Ordering -> T.Text
orderingToText o = case o of
                     LT -> "LT"
                     EQ -> "EQ"
                     GT -> "GT"

instance ToJSON () where
    toJSON _ = emptyArray
    {-# INLINE toJSON #-}

    toEncoding _ = emptyArray_
    {-# INLINE toEncoding #-}

instance ToJSON Char where
    toJSON = String . T.singleton
    {-# INLINE toJSON #-}

    -- A @[Char]@ (i.e. 'String') encodes as one JSON string, not an array.
    toJSONList = String . T.pack
    {-# INLINE toJSONList #-}

    toEncoding = E.string . (:[])
    {-# INLINE toEncoding #-}

    toEncodingList = E.string
    {-# INLINE toEncodingList #-}

instance ToJSON Double where
    toJSON = realFloatToJSON
    {-# INLINE toJSON #-}

    toEncoding = E.double
    {-# INLINE toEncoding #-}

instance ToJSONKey Double where
    toJSONKey = toJSONKeyTextEnc E.doubleText
    {-# INLINE toJSONKey #-}

instance ToJSON Number where
    toJSON (D d) = toJSON d
    toJSON (I i) = toJSON i
    {-# INLINE toJSON #-}

    toEncoding (D d) = toEncoding d
    toEncoding (I i) = toEncoding i
    {-# INLINE toEncoding #-}

instance ToJSON Float where
    toJSON = realFloatToJSON
    {-# INLINE toJSON #-}

    toEncoding = E.float
    {-# INLINE toEncoding #-}

instance ToJSONKey Float where
    toJSONKey = toJSONKeyTextEnc E.floatText
    {-# INLINE toJSONKey #-}

instance (ToJSON a, Integral a) => ToJSON (Ratio a) where
    toJSON r = object [ "numerator"   .= numerator   r
                      , "denominator" .= denominator r
                      ]
    {-# INLINE toJSON #-}

    toEncoding r = E.pairs $
        "numerator" .= numerator r <>
        "denominator" .= denominator r
    {-# INLINE toEncoding #-}

instance HasResolution a => ToJSON (Fixed a) where
    toJSON = Number . realToFrac
    {-# INLINE toJSON #-}

    toEncoding = E.scientific . realToFrac
    {-# INLINE toEncoding #-}

instance HasResolution a => ToJSONKey (Fixed a) where
    toJSONKey = toJSONKeyTextEnc (E.scientificText . realToFrac)
    {-# INLINE toJSONKey #-}

instance ToJSON Int where
    toJSON = Number . fromIntegral
    {-# INLINE toJSON #-}

    toEncoding = E.int
    {-# INLINE toEncoding #-}

instance ToJSONKey Int where
    toJSONKey = toJSONKeyTextEnc E.intText
    {-# INLINE toJSONKey #-}

instance ToJSON Integer where
    toJSON = Number . fromInteger
    {-# INLINE toJSON #-}

    toEncoding = E.integer
    {-# INLINE toEncoding #-}

instance ToJSONKey Integer where
    toJSONKey = toJSONKeyTextEnc E.integerText
    {-# INLINE toJSONKey #-}

instance ToJSON Natural where
    toJSON = toJSON . toInteger
    {-# INLINE toJSON #-}

    toEncoding = toEncoding . toInteger
    {-# INLINE toEncoding #-}

instance ToJSONKey Natural where
    toJSONKey = toJSONKeyTextEnc (E.integerText . toInteger)
    {-# INLINE toJSONKey #-}

instance ToJSON Int8 where
    toJSON = Number . fromIntegral
    {-# INLINE toJSON #-}

    toEncoding = E.int8
    {-# INLINE toEncoding #-}

instance ToJSONKey Int8 where
    toJSONKey = toJSONKeyTextEnc E.int8Text
    {-# INLINE toJSONKey #-}

instance ToJSON Int16 where
    toJSON = Number . fromIntegral
    {-# INLINE toJSON #-}

    toEncoding = E.int16
    {-# INLINE toEncoding #-}

instance ToJSONKey Int16 where
    toJSONKey = toJSONKeyTextEnc E.int16Text
    {-# INLINE toJSONKey #-}

instance ToJSON Int32 where
    toJSON = Number . fromIntegral
    {-# INLINE toJSON #-}

    toEncoding = E.int32
    {-# INLINE toEncoding #-}

instance ToJSONKey Int32 where
    toJSONKey = toJSONKeyTextEnc E.int32Text
    {-# INLINE toJSONKey #-}

instance ToJSON Int64 where
    toJSON = Number . fromIntegral
    {-# INLINE toJSON #-}

    toEncoding = E.int64
    {-# INLINE toEncoding #-}

instance ToJSONKey Int64 where
    toJSONKey = toJSONKeyTextEnc E.int64Text
    {-# INLINE toJSONKey #-}

instance ToJSON Word where
    toJSON = Number . fromIntegral
    {-# INLINE toJSON #-}

    toEncoding = E.word
    {-# INLINE toEncoding #-}

instance ToJSONKey Word where
    toJSONKey = toJSONKeyTextEnc E.wordText
    {-# INLINE toJSONKey #-}

instance ToJSON Word8 where
    toJSON = Number . fromIntegral
    {-# INLINE toJSON #-}

    toEncoding = E.word8
    {-# INLINE toEncoding #-}

instance ToJSONKey Word8 where
    toJSONKey = toJSONKeyTextEnc E.word8Text
    {-# INLINE toJSONKey #-}

instance ToJSON Word16 where
    toJSON = Number . fromIntegral
    {-# INLINE toJSON #-}

    toEncoding = E.word16
    {-# INLINE toEncoding #-}

instance ToJSONKey Word16 where
    toJSONKey = toJSONKeyTextEnc E.word16Text
    {-# INLINE toJSONKey #-}

instance ToJSON Word32 where
    toJSON = Number . fromIntegral
    {-# INLINE toJSON #-}

    toEncoding = E.word32
    {-# INLINE toEncoding #-}

instance ToJSONKey Word32 where
    toJSONKey = toJSONKeyTextEnc E.word32Text
    {-# INLINE toJSONKey #-}

instance ToJSON Word64 where
    toJSON = Number . fromIntegral
    {-# INLINE toJSON #-}

    toEncoding = E.word64
    {-# INLINE toEncoding #-}

instance ToJSONKey Word64 where
    toJSONKey = toJSONKeyTextEnc E.word64Text
    {-# INLINE toJSONKey #-}

instance ToJSON Text where
    toJSON = String
    {-# INLINE toJSON #-}

    toEncoding = E.text
    {-# INLINE toEncoding #-}

instance ToJSONKey Text where
    toJSONKey = toJSONKeyText id
    {-# INLINE toJSONKey #-}

instance ToJSON LT.Text where
    toJSON = String . LT.toStrict
    {-# INLINE toJSON #-}

    toEncoding = E.lazyText
    {-# INLINE toEncoding #-}

instance ToJSONKey LT.Text where
    toJSONKey = toJSONKeyText LT.toStrict

instance ToJSON Version where
    toJSON = toJSON . showVersion
    {-# INLINE toJSON #-}

    toEncoding = toEncoding . showVersion
    {-# INLINE toEncoding #-}

instance ToJSONKey Version where
    toJSONKey = toJSONKeyText (T.pack . showVersion)

-------------------------------------------------------------------------------
-- semigroups NonEmpty
-------------------------------------------------------------------------------

instance ToJSON1 NonEmpty where
    liftToJSON t _ = listValue t . NE.toList
    {-# INLINE liftToJSON #-}

    liftToEncoding t _ = listEncoding t . NE.toList
    {-# INLINE liftToEncoding #-}

instance (ToJSON a) => ToJSON (NonEmpty a) where
    toJSON = toJSON1
    {-# INLINE toJSON #-}

    toEncoding = toEncoding1
    {-# INLINE toEncoding #-}

-------------------------------------------------------------------------------
-- scientific
-------------------------------------------------------------------------------

instance ToJSON Scientific where
    toJSON = Number
    {-# INLINE toJSON #-}

    toEncoding = E.scientific
    {-# INLINE toEncoding #-}

instance ToJSONKey Scientific where
    toJSONKey = toJSONKeyTextEnc E.scientificText

-------------------------------------------------------------------------------
-- DList
-------------------------------------------------------------------------------

instance ToJSON1 DList.DList where
    liftToJSON t _ = listValue t . toList
    {-# INLINE liftToJSON #-}

    liftToEncoding t _ = listEncoding t .
                         toList
    {-# INLINE liftToEncoding #-}

instance (ToJSON a) => ToJSON (DList.DList a) where
    toJSON = toJSON1
    {-# INLINE toJSON #-}

    toEncoding = toEncoding1
    {-# INLINE toEncoding #-}

-------------------------------------------------------------------------------
-- transformers - Functors
-------------------------------------------------------------------------------

instance ToJSON1 Identity where
    liftToJSON t _ (Identity a) = t a
    {-# INLINE liftToJSON #-}

    liftToJSONList _ tl xs = tl (map runIdentity xs)
    {-# INLINE liftToJSONList #-}

    liftToEncoding t _ (Identity a) = t a
    {-# INLINE liftToEncoding #-}

    liftToEncodingList _ tl xs = tl (map runIdentity xs)
    {-# INLINE liftToEncodingList #-}

instance (ToJSON a) => ToJSON (Identity a) where
    toJSON = toJSON1
    {-# INLINE toJSON #-}

    toJSONList = liftToJSONList toJSON toJSONList
    {-# INLINE toJSONList #-}

    toEncoding = toEncoding1
    {-# INLINE toEncoding #-}

    toEncodingList = liftToEncodingList toEncoding toEncodingList
    {-# INLINE toEncodingList #-}

instance (ToJSONKey a, ToJSON a) => ToJSONKey (Identity a) where
    toJSONKey = contramapToJSONKeyFunction runIdentity toJSONKey
    toJSONKeyList = contramapToJSONKeyFunction (map runIdentity) toJSONKeyList

instance (ToJSON1 f, ToJSON1 g) => ToJSON1 (Compose f g) where
    -- Encode through the composed functors by lifting twice.
    liftToJSON tv tvl (Compose x) = liftToJSON g gl x
      where
        g  = liftToJSON tv tvl
        gl = liftToJSONList tv tvl
    {-# INLINE liftToJSON #-}

    liftToJSONList te tel xs = liftToJSONList g gl (map getCompose xs)
      where
        g  = liftToJSON te tel
        gl = liftToJSONList te tel
    {-# INLINE liftToJSONList #-}

    liftToEncoding te tel (Compose x) = liftToEncoding g gl x
      where
        g  = liftToEncoding te tel
        gl = liftToEncodingList te tel
    {-# INLINE liftToEncoding #-}

    liftToEncodingList te tel xs = liftToEncodingList g gl (map getCompose xs)
      where
        g  = liftToEncoding te tel
        gl = liftToEncodingList te tel
    {-# INLINE liftToEncodingList #-}

instance (ToJSON1 f, ToJSON1 g, ToJSON a) => ToJSON (Compose f g a) where
    toJSON = toJSON1
    {-# INLINE toJSON #-}

    toJSONList = liftToJSONList toJSON toJSONList
    {-# INLINE toJSONList #-}

    toEncoding = toEncoding1
    {-# INLINE toEncoding #-}

    toEncodingList = liftToEncodingList toEncoding toEncodingList
    {-# INLINE toEncodingList #-}

instance (ToJSON1 f, ToJSON1 g) => ToJSON1 (Product f g) where
    -- A 'Pair' encodes as the two-element tuple of its components.
    liftToJSON tv tvl (Pair x y) = liftToJSON2 tx txl ty tyl (x, y)
      where
        tx = liftToJSON tv tvl
        txl = liftToJSONList tv tvl
        ty = liftToJSON tv tvl
        tyl = liftToJSONList tv tvl

    liftToEncoding te tel (Pair x y) = liftToEncoding2 tx txl ty tyl (x, y)
      where
        tx = liftToEncoding te tel
        txl = liftToEncodingList te tel
        ty = liftToEncoding te tel
        tyl = liftToEncodingList te tel

instance (ToJSON1 f, ToJSON1 g, ToJSON a) => ToJSON (Product f g a) where
    toJSON = toJSON1
    {-# INLINE toJSON #-}

    toEncoding = toEncoding1
    {-# INLINE toEncoding #-}

instance (ToJSON1 f, ToJSON1 g) => ToJSON1 (Sum f g) where
    liftToJSON tv tvl (InL x) = Object $ H.singleton "InL" (liftToJSON tv tvl x)
    liftToJSON tv tvl (InR y) = Object $ H.singleton "InR" (liftToJSON tv tvl y)

    liftToEncoding te tel (InL x) = E.pairs $ E.pair "InL" $ liftToEncoding te tel x
    liftToEncoding te tel (InR y) = E.pairs $ E.pair "InR" $ liftToEncoding te tel y

instance (ToJSON1 f, ToJSON1 g, ToJSON a) => ToJSON (Sum f g a) where
    toJSON = toJSON1
    {-# INLINE toJSON #-}

    toEncoding = toEncoding1
    {-# INLINE toEncoding #-}

-------------------------------------------------------------------------------
-- containers
-------------------------------------------------------------------------------

instance ToJSON1 Seq.Seq where
    liftToJSON t _ = listValue t . toList
    {-# INLINE liftToJSON #-}

    liftToEncoding t _ = listEncoding t . toList
    {-# INLINE liftToEncoding #-}

instance (ToJSON a) => ToJSON (Seq.Seq a) where
    toJSON = toJSON1
    {-# INLINE toJSON #-}

    toEncoding = toEncoding1
    {-# INLINE toEncoding #-}

instance ToJSON1 Set.Set where
    liftToJSON t _ = listValue t . Set.toList
    {-# INLINE liftToJSON #-}

    liftToEncoding t _ = listEncoding t . Set.toList
    {-# INLINE liftToEncoding #-}

instance (ToJSON a) => ToJSON (Set.Set a) where
    toJSON = toJSON1
    {-# INLINE toJSON #-}

    toEncoding = toEncoding1
    {-# INLINE toEncoding #-}

instance ToJSON IntSet.IntSet where
    toJSON = toJSON . IntSet.toList
    {-# INLINE toJSON #-}

    toEncoding = toEncoding . IntSet.toList
    {-# INLINE toEncoding #-}

instance ToJSON1 IntMap.IntMap where
    -- Encoded as a list of @(key, value)@ pairs, via the pair instances.
    liftToJSON t tol = liftToJSON to' tol' . IntMap.toList
      where
        to'  = liftToJSON2     toJSON toJSONList t tol
        tol' = liftToJSONList2 toJSON toJSONList t tol
    {-# INLINE liftToJSON #-}

    liftToEncoding t tol = liftToEncoding to' tol' . IntMap.toList
      where
        to'  = liftToEncoding2     toEncoding toEncodingList t tol
        tol' = liftToEncodingList2 toEncoding toEncodingList t tol
    {-# INLINE liftToEncoding #-}

instance ToJSON a => ToJSON (IntMap.IntMap a) where
    toJSON = toJSON1
    {-# INLINE toJSON #-}

    toEncoding = toEncoding1
    {-# INLINE toEncoding #-}

instance ToJSONKey k => ToJSON1 (M.Map k) where
    -- Text-renderable keys yield a JSON object; otherwise an array of pairs.
    liftToJSON g _ = case toJSONKey of
        ToJSONKeyText f _ -> Object . mapHashKeyVal f g
        ToJSONKeyValue f _ -> Array . V.fromList . map (toJSONPair f g) . M.toList
    {-# INLINE liftToJSON #-}

    liftToEncoding g _ = case toJSONKey of
        ToJSONKeyText _ f -> dict f g M.foldrWithKey
        ToJSONKeyValue _ f -> listEncoding (pairEncoding f) . M.toList
          where pairEncoding f (a, b) = E.list id [f a, g b]
    {-# INLINE liftToEncoding #-}

instance (ToJSON v, ToJSONKey k) => ToJSON (M.Map k v) where
    toJSON = toJSON1
    {-# INLINE toJSON #-}

    toEncoding = toEncoding1
    {-# INLINE toEncoding #-}

instance ToJSON1 Tree.Tree where
    -- A node is the pair @(root, branches)@, tied recursively via 'go'.
    liftToJSON t tol = go
      where
        go (Tree.Node root branches) = liftToJSON2 t tol to' tol' (root, branches)
        to' = liftToJSON go (listValue go)
        tol' = liftToJSONList go (listValue go)
    {-# INLINE liftToJSON #-}

    liftToEncoding t tol = go
      where
        go (Tree.Node root branches) = liftToEncoding2 t tol to' tol' (root, branches)
        to' = liftToEncoding go (listEncoding go)
        tol' = liftToEncodingList go (listEncoding go)
    {-# INLINE liftToEncoding #-}

instance (ToJSON v) => ToJSON (Tree.Tree v) where
    toJSON = toJSON1
    {-# INLINE toJSON #-}

    toEncoding = toEncoding1
    {-# INLINE toEncoding #-}

-------------------------------------------------------------------------------
-- vector
-------------------------------------------------------------------------------

instance ToJSON1 Vector where
    liftToJSON t _ = Array . V.map t
    {-# INLINE liftToJSON #-}

    liftToEncoding t _ = listEncoding t . V.toList
    {-# INLINE liftToEncoding #-}

instance (ToJSON a) => ToJSON (Vector a) where
    toJSON = toJSON1
    {-# INLINE toJSON #-}

    toEncoding = toEncoding1
    {-# INLINE toEncoding #-}

-- | Encode any generic vector as a JSON array.
encodeVector :: (ToJSON a, VG.Vector v a) => v a -> Encoding
encodeVector = listEncoding toEncoding . VG.toList
{-# INLINE encodeVector #-}

-- | Convert any generic vector to a JSON array 'Value'.
vectorToJSON :: (VG.Vector v a, ToJSON a) => v a -> Value
vectorToJSON = Array . V.map toJSON . V.convert
{-# INLINE vectorToJSON #-}

instance (Storable a, ToJSON a) => ToJSON (VS.Vector a) where
    toJSON = vectorToJSON
    {-# INLINE toJSON #-}

    toEncoding = encodeVector
    {-# INLINE toEncoding #-}

instance (VP.Prim a, ToJSON a) => ToJSON (VP.Vector a) where
    toJSON = vectorToJSON
    {-# INLINE toJSON #-}

    toEncoding = encodeVector
    {-# INLINE toEncoding #-}

instance (VG.Vector VU.Vector a, ToJSON a) => ToJSON (VU.Vector a) where
    toJSON = vectorToJSON
    {-# INLINE toJSON #-}

    toEncoding = encodeVector
    {-# INLINE toEncoding #-}

-------------------------------------------------------------------------------
-- unordered-containers
-------------------------------------------------------------------------------

instance ToJSON1 HashSet.HashSet where
    liftToJSON t _ = listValue t . HashSet.toList
    {-# INLINE liftToJSON #-}

    liftToEncoding t _ = listEncoding t . HashSet.toList
    {-# INLINE liftToEncoding #-}

instance (ToJSON a) => ToJSON (HashSet.HashSet a) where
    toJSON = toJSON1
    {-# INLINE toJSON #-}

    toEncoding = toEncoding1
    {-# INLINE toEncoding #-}

instance ToJSONKey k => ToJSON1 (H.HashMap k) where
    -- Same key dispatch as the 'M.Map' instance above.
    liftToJSON g _ = case toJSONKey of
        ToJSONKeyText f _ -> Object . mapKeyVal f g
        ToJSONKeyValue f _ -> Array . V.fromList . map (toJSONPair f g) . H.toList
    {-# INLINE liftToJSON #-}

    -- liftToEncoding :: forall a. (a -> Encoding) -> ([a] -> Encoding) -> H.HashMap k a -> Encoding
    liftToEncoding g _ = case toJSONKey of
        ToJSONKeyText _ f -> dict f g H.foldrWithKey
        ToJSONKeyValue _ f -> listEncoding (pairEncoding f) .
                              H.toList
          where pairEncoding f (a, b) = E.list id [f a, g b]
    {-# INLINE liftToEncoding #-}

instance (ToJSON v, ToJSONKey k) => ToJSON (H.HashMap k v) where
    toJSON = toJSON1
    {-# INLINE toJSON #-}

    toEncoding = toEncoding1
    {-# INLINE toEncoding #-}

-------------------------------------------------------------------------------
-- aeson
-------------------------------------------------------------------------------

instance ToJSON Value where
    toJSON a = a
    {-# INLINE toJSON #-}

    toEncoding = E.value
    {-# INLINE toEncoding #-}

instance ToJSON DotNetTime where
    toJSON = toJSON . dotNetTime

    toEncoding = toEncoding . dotNetTime

-- | Render a 'DotNetTime' in Microsoft's @\/Date(…)\/@ wire format.
dotNetTime :: DotNetTime -> String
dotNetTime (DotNetTime t) = secs ++ formatMillis t ++ ")/"
  where secs  = formatTime defaultTimeLocale "/Date(%s" t

-- | Take the first three digits of the picoseconds field: milliseconds.
formatMillis :: (FormatTime t) => t -> String
formatMillis = take 3 . formatTime defaultTimeLocale "%q"

-------------------------------------------------------------------------------
-- time
-------------------------------------------------------------------------------

instance ToJSON Day where
    toJSON     = stringEncoding . E.day
    toEncoding = E.day

instance ToJSONKey Day where
    toJSONKey = toJSONKeyTextEnc E.day

instance ToJSON TimeOfDay where
    toJSON     = stringEncoding . E.timeOfDay
    toEncoding = E.timeOfDay

instance ToJSONKey TimeOfDay where
    toJSONKey = toJSONKeyTextEnc E.timeOfDay

instance ToJSON LocalTime where
    toJSON     = stringEncoding . E.localTime
    toEncoding = E.localTime

instance ToJSONKey LocalTime where
    toJSONKey = toJSONKeyTextEnc E.localTime

instance ToJSON ZonedTime where
    toJSON     = stringEncoding . E.zonedTime
    toEncoding = E.zonedTime

instance ToJSONKey ZonedTime where
    toJSONKey = toJSONKeyTextEnc E.zonedTime

instance ToJSON UTCTime where
    toJSON     = stringEncoding . E.utcTime
    toEncoding = E.utcTime

instance ToJSONKey UTCTime where
    toJSONKey = toJSONKeyTextEnc E.utcTime

-- | Encode something to a JSON string.
stringEncoding :: Encoding' Text -> Value
stringEncoding = String
    . T.dropAround (== '"')
    . T.decodeLatin1
    . lazyToStrictByteString
    . E.encodingToLazyByteString
{-# INLINE stringEncoding #-}

instance ToJSON NominalDiffTime where
    toJSON = Number . realToFrac
    {-# INLINE toJSON #-}

    toEncoding = E.scientific . realToFrac
    {-# INLINE toEncoding #-}

-------------------------------------------------------------------------------
-- base Monoid/Semigroup
-------------------------------------------------------------------------------

instance ToJSON1 Monoid.Dual where
    liftToJSON t _ = t . Monoid.getDual
    {-# INLINE liftToJSON #-}

    liftToEncoding t _ = t . Monoid.getDual
    {-# INLINE liftToEncoding #-}

instance ToJSON a => ToJSON (Monoid.Dual a) where
    toJSON = toJSON1
    {-# INLINE toJSON #-}

    toEncoding = toEncoding1
    {-# INLINE toEncoding #-}

instance ToJSON1 Monoid.First where
    -- Delegates to the 'Maybe' lifting for its wrapped value.
    liftToJSON t to' = liftToJSON t to' . Monoid.getFirst
    {-# INLINE liftToJSON #-}

    liftToEncoding t to' = liftToEncoding t to' . Monoid.getFirst
    {-# INLINE liftToEncoding #-}

instance ToJSON a => ToJSON (Monoid.First a) where
    toJSON = toJSON1
    {-# INLINE toJSON #-}

    toEncoding = toEncoding1
    {-# INLINE toEncoding #-}

instance ToJSON1 Monoid.Last where
    liftToJSON t to' = liftToJSON t to' . Monoid.getLast
    {-# INLINE liftToJSON #-}

    liftToEncoding t to' = liftToEncoding t to' . Monoid.getLast
    {-# INLINE liftToEncoding #-}

instance ToJSON a => ToJSON (Monoid.Last a) where
    toJSON = toJSON1
    {-# INLINE toJSON #-}

    toEncoding = toEncoding1
    {-# INLINE toEncoding #-}

instance ToJSON1 Semigroup.Min where
    liftToJSON t _ (Semigroup.Min x) = t x
    {-# INLINE liftToJSON #-}

    liftToEncoding t _ (Semigroup.Min x) = t x
    {-# INLINE liftToEncoding #-}

instance ToJSON a => ToJSON (Semigroup.Min a) where
    toJSON = toJSON1
    {-# INLINE toJSON #-}

    toEncoding = toEncoding1
    {-# INLINE toEncoding #-}

instance ToJSON1 Semigroup.Max where
    liftToJSON t _ (Semigroup.Max x) = t x
    {-# INLINE liftToJSON #-}

    liftToEncoding t _ (Semigroup.Max x) = t x
    {-# INLINE liftToEncoding #-}

instance ToJSON a => ToJSON (Semigroup.Max a) where
    toJSON = toJSON1
    {-# INLINE toJSON #-}

    toEncoding = toEncoding1
    {-# INLINE toEncoding #-}

instance ToJSON1 Semigroup.First where
    liftToJSON t _ (Semigroup.First x) = t x
    {-# INLINE liftToJSON #-}

    liftToEncoding t _ (Semigroup.First x) = t x
    {-# INLINE liftToEncoding #-}

instance ToJSON a => ToJSON (Semigroup.First a) where
    toJSON = toJSON1
    {-# INLINE toJSON #-}

    toEncoding = toEncoding1
    {-# INLINE toEncoding #-}

instance ToJSON1 Semigroup.Last where
    liftToJSON t _ (Semigroup.Last x) = t x
    {-# INLINE liftToJSON #-}

    liftToEncoding t _ (Semigroup.Last x) = t x
    {-# INLINE liftToEncoding #-}

instance ToJSON a => ToJSON (Semigroup.Last a) where
    toJSON = toJSON1
    {-# INLINE toJSON #-}

    toEncoding = toEncoding1
    {-# INLINE toEncoding #-}

instance ToJSON1 Semigroup.WrappedMonoid where
    liftToJSON t _ (Semigroup.WrapMonoid x) = t x
    {-# INLINE liftToJSON #-}

    liftToEncoding t _ (Semigroup.WrapMonoid x) = t x
    {-# INLINE liftToEncoding #-}

instance ToJSON a => ToJSON (Semigroup.WrappedMonoid a) where
    toJSON = toJSON1
    {-# INLINE toJSON #-}

    toEncoding = toEncoding1
    {-# INLINE toEncoding #-}

instance ToJSON1 Semigroup.Option where
    liftToJSON t to' = liftToJSON t to' . Semigroup.getOption
    {-# INLINE liftToJSON #-}

    liftToEncoding t to' = liftToEncoding t to' . Semigroup.getOption
    {-# INLINE liftToEncoding #-}

instance ToJSON a => ToJSON (Semigroup.Option a) where
    toJSON = toJSON1
    {-# INLINE toJSON #-}

    toEncoding = toEncoding1
    {-# INLINE toEncoding #-}

-------------------------------------------------------------------------------
-- tagged
-------------------------------------------------------------------------------

instance ToJSON (Proxy a) where
    toJSON _ = Null
    {-# INLINE toJSON #-}

    toEncoding _ = E.null_
    {-# INLINE toEncoding #-}

instance ToJSON1 (Tagged a) where
    liftToJSON t _ (Tagged x) = t x
    {-# INLINE liftToJSON #-}

    liftToEncoding t _ (Tagged x) = t x
    {-# INLINE liftToEncoding #-}

instance ToJSON b => ToJSON (Tagged a b) where
    toJSON = toJSON1
    {-# INLINE toJSON #-}

    toEncoding = toEncoding1
    {-# INLINE toEncoding #-}

instance ToJSONKey b => ToJSONKey (Tagged a b) where
    toJSONKey = contramapToJSONKeyFunction unTagged toJSONKey
    toJSONKeyList = contramapToJSONKeyFunction (fmap unTagged) toJSONKeyList

-------------------------------------------------------------------------------
-- Instances for converting to map keys
-------------------------------------------------------------------------------

instance (ToJSON a, ToJSON b) => ToJSONKey (a,b)
instance (ToJSON a, ToJSON b, ToJSON c) => ToJSONKey (a,b,c)
instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d) => ToJSONKey (a,b,c,d)

instance ToJSONKey Char where
    toJSONKey = ToJSONKeyText T.singleton (E.string . (:[]))
    -- A list of 'Char' (a 'String') keys as a single text, not a list.
    toJSONKeyList = toJSONKeyText T.pack

instance (ToJSONKey a, ToJSON a) => ToJSONKey [a] where
    toJSONKey = toJSONKeyList

-------------------------------------------------------------------------------
-- Tuple instances
-------------------------------------------------------------------------------

instance ToJSON2 (,) where
    liftToJSON2 toA _ toB _ (a, b) = Array $ V.create $ do
        mv <- VM.unsafeNew 2
        VM.unsafeWrite mv 0 (toA a)
        VM.unsafeWrite mv 1 (toB b)
        return mv
    {-# INLINE liftToJSON2 #-}

    liftToEncoding2 toA _ toB _ (a, b) = E.list id [toA a, toB b]
    {-# INLINE liftToEncoding2 #-}

instance (ToJSON a) => ToJSON1 ((,) a) where
    liftToJSON = liftToJSON2 toJSON toJSONList
    {-# INLINE liftToJSON #-}

    liftToEncoding = liftToEncoding2 toEncoding toEncodingList
    {-# INLINE liftToEncoding #-}

instance (ToJSON a, ToJSON b) => ToJSON (a, b) where
    toJSON = toJSON2
    {-# INLINE toJSON #-}

    toEncoding = toEncoding2
    {-# INLINE toEncoding #-}

instance (ToJSON a) => ToJSON2 ((,,) a) where
    liftToJSON2 toB _ toC _ (a, b, c) = Array $ V.create $ do
        mv <- VM.unsafeNew 3
        VM.unsafeWrite mv 0 (toJSON a)
        VM.unsafeWrite mv 1 (toB b)
        VM.unsafeWrite mv 2 (toC c)
        return mv
    {-# INLINE liftToJSON2 #-}

    liftToEncoding2 toB _ toC _ (a, b, c) = E.list id
      [ toEncoding a
      , toB b
      , toC c
      ]
    {-# INLINE liftToEncoding2 #-}

instance (ToJSON a, ToJSON b) => ToJSON1 ((,,) a b) where
    liftToJSON = liftToJSON2 toJSON toJSONList
    {-# INLINE liftToJSON #-}

    liftToEncoding = liftToEncoding2 toEncoding toEncodingList
    {-# INLINE liftToEncoding #-}

instance (ToJSON a, ToJSON b, ToJSON c) => ToJSON (a, b, c) where
    toJSON = toJSON2
    {-# INLINE toJSON #-}

    toEncoding = toEncoding2
    {-# INLINE toEncoding #-}

instance (ToJSON a, ToJSON b) => ToJSON2 ((,,,) a b) where
    liftToJSON2 toC _ toD _ (a, b, c, d) = Array $ V.create $ do
        mv <- VM.unsafeNew 4
        VM.unsafeWrite mv 0 (toJSON a)
        VM.unsafeWrite mv 1 (toJSON b)
        VM.unsafeWrite mv 2 (toC c)
        VM.unsafeWrite mv 3 (toD d)
        return mv
    {-# INLINE liftToJSON2 #-}

    liftToEncoding2 toC _ toD _
(a, b, c, d) = E.list id [ toEncoding a , toEncoding b , toC c , toD d ] {-# INLINE liftToEncoding2 #-} instance (ToJSON a, ToJSON b, ToJSON c) => ToJSON1 ((,,,) a b c) where liftToJSON = liftToJSON2 toJSON toJSONList {-# INLINE liftToJSON #-} liftToEncoding = liftToEncoding2 toEncoding toEncodingList {-# INLINE liftToEncoding #-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d) => ToJSON (a, b, c, d) where toJSON = toJSON2 {-# INLINE toJSON #-} toEncoding = toEncoding2 {-# INLINE toEncoding #-} instance (ToJSON a, ToJSON b, ToJSON c) => ToJSON2 ((,,,,) a b c) where liftToJSON2 toD _ toE _ (a, b, c, d, e) = Array $ V.create $ do mv <- VM.unsafeNew 5 VM.unsafeWrite mv 0 (toJSON a) VM.unsafeWrite mv 1 (toJSON b) VM.unsafeWrite mv 2 (toJSON c) VM.unsafeWrite mv 3 (toD d) VM.unsafeWrite mv 4 (toE e) return mv {-# INLINE liftToJSON2 #-} liftToEncoding2 toD _ toE _ (a, b, c, d, e) = E.list id [ toEncoding a , toEncoding b , toEncoding c , toD d , toE e ] {-# INLINE liftToEncoding2 #-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d) => ToJSON1 ((,,,,) a b c d) where liftToJSON = liftToJSON2 toJSON toJSONList {-# INLINE liftToJSON #-} liftToEncoding = liftToEncoding2 toEncoding toEncodingList {-# INLINE liftToEncoding #-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e) => ToJSON (a, b, c, d, e) where toJSON = toJSON2 {-# INLINE toJSON #-} toEncoding = toEncoding2 {-# INLINE toEncoding #-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d) => ToJSON2 ((,,,,,) a b c d) where liftToJSON2 toE _ toF _ (a, b, c, d, e, f) = Array $ V.create $ do mv <- VM.unsafeNew 6 VM.unsafeWrite mv 0 (toJSON a) VM.unsafeWrite mv 1 (toJSON b) VM.unsafeWrite mv 2 (toJSON c) VM.unsafeWrite mv 3 (toJSON d) VM.unsafeWrite mv 4 (toE e) VM.unsafeWrite mv 5 (toF f) return mv {-# INLINE liftToJSON2 #-} liftToEncoding2 toE _ toF _ (a, b, c, d, e, f) = E.list id [ toEncoding a , toEncoding b , toEncoding c , toEncoding d , toE e , toF f ] {-# INLINE liftToEncoding2 #-} instance (ToJSON a, 
ToJSON b, ToJSON c, ToJSON d, ToJSON e) => ToJSON1 ((,,,,,) a b c d e) where liftToJSON = liftToJSON2 toJSON toJSONList {-# INLINE liftToJSON #-} liftToEncoding = liftToEncoding2 toEncoding toEncodingList {-# INLINE liftToEncoding #-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f) => ToJSON (a, b, c, d, e, f) where toJSON = toJSON2 {-# INLINE toJSON #-} toEncoding = toEncoding2 {-# INLINE toEncoding #-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e) => ToJSON2 ((,,,,,,) a b c d e) where liftToJSON2 toF _ toG _ (a, b, c, d, e, f, g) = Array $ V.create $ do mv <- VM.unsafeNew 7 VM.unsafeWrite mv 0 (toJSON a) VM.unsafeWrite mv 1 (toJSON b) VM.unsafeWrite mv 2 (toJSON c) VM.unsafeWrite mv 3 (toJSON d) VM.unsafeWrite mv 4 (toJSON e) VM.unsafeWrite mv 5 (toF f) VM.unsafeWrite mv 6 (toG g) return mv {-# INLINE liftToJSON2 #-} liftToEncoding2 toF _ toG _ (a, b, c, d, e, f, g) = E.list id [ toEncoding a , toEncoding b , toEncoding c , toEncoding d , toEncoding e , toF f , toG g ] {-# INLINE liftToEncoding2 #-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f) => ToJSON1 ((,,,,,,) a b c d e f) where liftToJSON = liftToJSON2 toJSON toJSONList {-# INLINE liftToJSON #-} liftToEncoding = liftToEncoding2 toEncoding toEncodingList {-# INLINE liftToEncoding #-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g) => ToJSON (a, b, c, d, e, f, g) where toJSON = toJSON2 {-# INLINE toJSON #-} toEncoding = toEncoding2 {-# INLINE toEncoding #-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f) => ToJSON2 ((,,,,,,,) a b c d e f) where liftToJSON2 toG _ toH _ (a, b, c, d, e, f, g, h) = Array $ V.create $ do mv <- VM.unsafeNew 8 VM.unsafeWrite mv 0 (toJSON a) VM.unsafeWrite mv 1 (toJSON b) VM.unsafeWrite mv 2 (toJSON c) VM.unsafeWrite mv 3 (toJSON d) VM.unsafeWrite mv 4 (toJSON e) VM.unsafeWrite mv 5 (toJSON f) VM.unsafeWrite mv 6 (toG g) VM.unsafeWrite mv 7 (toH h) return mv {-# INLINE 
liftToJSON2 #-} liftToEncoding2 toG _ toH _ (a, b, c, d, e, f, g, h) = E.list id [ toEncoding a , toEncoding b , toEncoding c , toEncoding d , toEncoding e , toEncoding f , toG g , toH h ] {-# INLINE liftToEncoding2 #-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g) => ToJSON1 ((,,,,,,,) a b c d e f g) where liftToJSON = liftToJSON2 toJSON toJSONList {-# INLINE liftToJSON #-} liftToEncoding = liftToEncoding2 toEncoding toEncodingList {-# INLINE liftToEncoding #-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h) => ToJSON (a, b, c, d, e, f, g, h) where toJSON = toJSON2 {-# INLINE toJSON #-} toEncoding = toEncoding2 {-# INLINE toEncoding #-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g) => ToJSON2 ((,,,,,,,,) a b c d e f g) where liftToJSON2 toH _ toI _ (a, b, c, d, e, f, g, h, i) = Array $ V.create $ do mv <- VM.unsafeNew 9 VM.unsafeWrite mv 0 (toJSON a) VM.unsafeWrite mv 1 (toJSON b) VM.unsafeWrite mv 2 (toJSON c) VM.unsafeWrite mv 3 (toJSON d) VM.unsafeWrite mv 4 (toJSON e) VM.unsafeWrite mv 5 (toJSON f) VM.unsafeWrite mv 6 (toJSON g) VM.unsafeWrite mv 7 (toH h) VM.unsafeWrite mv 8 (toI i) return mv {-# INLINE liftToJSON2 #-} liftToEncoding2 toH _ toI _ (a, b, c, d, e, f, g, h, i) = E.list id [ toEncoding a , toEncoding b , toEncoding c , toEncoding d , toEncoding e , toEncoding f , toEncoding g , toH h , toI i ] {-# INLINE liftToEncoding2 #-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h) => ToJSON1 ((,,,,,,,,) a b c d e f g h) where liftToJSON = liftToJSON2 toJSON toJSONList {-# INLINE liftToJSON #-} liftToEncoding = liftToEncoding2 toEncoding toEncodingList {-# INLINE liftToEncoding #-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i) => ToJSON (a, b, c, d, e, f, g, h, i) where toJSON = toJSON2 {-# INLINE toJSON #-} toEncoding = toEncoding2 {-# INLINE toEncoding 
#-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h) => ToJSON2 ((,,,,,,,,,) a b c d e f g h) where liftToJSON2 toI _ toJ _ (a, b, c, d, e, f, g, h, i, j) = Array $ V.create $ do mv <- VM.unsafeNew 10 VM.unsafeWrite mv 0 (toJSON a) VM.unsafeWrite mv 1 (toJSON b) VM.unsafeWrite mv 2 (toJSON c) VM.unsafeWrite mv 3 (toJSON d) VM.unsafeWrite mv 4 (toJSON e) VM.unsafeWrite mv 5 (toJSON f) VM.unsafeWrite mv 6 (toJSON g) VM.unsafeWrite mv 7 (toJSON h) VM.unsafeWrite mv 8 (toI i) VM.unsafeWrite mv 9 (toJ j) return mv {-# INLINE liftToJSON2 #-} liftToEncoding2 toI _ toJ _ (a, b, c, d, e, f, g, h, i, j) = E.list id [ toEncoding a , toEncoding b , toEncoding c , toEncoding d , toEncoding e , toEncoding f , toEncoding g , toEncoding h , toI i , toJ j ] {-# INLINE liftToEncoding2 #-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i) => ToJSON1 ((,,,,,,,,,) a b c d e f g h i) where liftToJSON = liftToJSON2 toJSON toJSONList {-# INLINE liftToJSON #-} liftToEncoding = liftToEncoding2 toEncoding toEncodingList {-# INLINE liftToEncoding #-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j) => ToJSON (a, b, c, d, e, f, g, h, i, j) where toJSON = toJSON2 {-# INLINE toJSON #-} toEncoding = toEncoding2 {-# INLINE toEncoding #-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i) => ToJSON2 ((,,,,,,,,,,) a b c d e f g h i) where liftToJSON2 toJ _ toK _ (a, b, c, d, e, f, g, h, i, j, k) = Array $ V.create $ do mv <- VM.unsafeNew 11 VM.unsafeWrite mv 0 (toJSON a) VM.unsafeWrite mv 1 (toJSON b) VM.unsafeWrite mv 2 (toJSON c) VM.unsafeWrite mv 3 (toJSON d) VM.unsafeWrite mv 4 (toJSON e) VM.unsafeWrite mv 5 (toJSON f) VM.unsafeWrite mv 6 (toJSON g) VM.unsafeWrite mv 7 (toJSON h) VM.unsafeWrite mv 8 (toJSON i) VM.unsafeWrite mv 9 (toJ j) VM.unsafeWrite mv 10 (toK k) return mv {-# INLINE liftToJSON2 
#-} liftToEncoding2 toJ _ toK _ (a, b, c, d, e, f, g, h, i, j, k) = E.list id [ toEncoding a , toEncoding b , toEncoding c , toEncoding d , toEncoding e , toEncoding f , toEncoding g , toEncoding h , toEncoding i , toJ j , toK k ] {-# INLINE liftToEncoding2 #-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j) => ToJSON1 ((,,,,,,,,,,) a b c d e f g h i j) where liftToJSON = liftToJSON2 toJSON toJSONList {-# INLINE liftToJSON #-} liftToEncoding = liftToEncoding2 toEncoding toEncodingList {-# INLINE liftToEncoding #-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k) => ToJSON (a, b, c, d, e, f, g, h, i, j, k) where toJSON = toJSON2 {-# INLINE toJSON #-} toEncoding = toEncoding2 {-# INLINE toEncoding #-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j) => ToJSON2 ((,,,,,,,,,,,) a b c d e f g h i j) where liftToJSON2 toK _ toL _ (a, b, c, d, e, f, g, h, i, j, k, l) = Array $ V.create $ do mv <- VM.unsafeNew 12 VM.unsafeWrite mv 0 (toJSON a) VM.unsafeWrite mv 1 (toJSON b) VM.unsafeWrite mv 2 (toJSON c) VM.unsafeWrite mv 3 (toJSON d) VM.unsafeWrite mv 4 (toJSON e) VM.unsafeWrite mv 5 (toJSON f) VM.unsafeWrite mv 6 (toJSON g) VM.unsafeWrite mv 7 (toJSON h) VM.unsafeWrite mv 8 (toJSON i) VM.unsafeWrite mv 9 (toJSON j) VM.unsafeWrite mv 10 (toK k) VM.unsafeWrite mv 11 (toL l) return mv {-# INLINE liftToJSON2 #-} liftToEncoding2 toK _ toL _ (a, b, c, d, e, f, g, h, i, j, k, l) = E.list id [ toEncoding a , toEncoding b , toEncoding c , toEncoding d , toEncoding e , toEncoding f , toEncoding g , toEncoding h , toEncoding i , toEncoding j , toK k , toL l ] {-# INLINE liftToEncoding2 #-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k) => ToJSON1 ((,,,,,,,,,,,) a b c d e f g h i j k) where liftToJSON = liftToJSON2 toJSON 
toJSONList {-# INLINE liftToJSON #-} liftToEncoding = liftToEncoding2 toEncoding toEncodingList {-# INLINE liftToEncoding #-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k, ToJSON l) => ToJSON (a, b, c, d, e, f, g, h, i, j, k, l) where toJSON = toJSON2 {-# INLINE toJSON #-} toEncoding = toEncoding2 {-# INLINE toEncoding #-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k) => ToJSON2 ((,,,,,,,,,,,,) a b c d e f g h i j k) where liftToJSON2 toL _ toM _ (a, b, c, d, e, f, g, h, i, j, k, l, m) = Array $ V.create $ do mv <- VM.unsafeNew 13 VM.unsafeWrite mv 0 (toJSON a) VM.unsafeWrite mv 1 (toJSON b) VM.unsafeWrite mv 2 (toJSON c) VM.unsafeWrite mv 3 (toJSON d) VM.unsafeWrite mv 4 (toJSON e) VM.unsafeWrite mv 5 (toJSON f) VM.unsafeWrite mv 6 (toJSON g) VM.unsafeWrite mv 7 (toJSON h) VM.unsafeWrite mv 8 (toJSON i) VM.unsafeWrite mv 9 (toJSON j) VM.unsafeWrite mv 10 (toJSON k) VM.unsafeWrite mv 11 (toL l) VM.unsafeWrite mv 12 (toM m) return mv {-# INLINE liftToJSON2 #-} liftToEncoding2 toL _ toM _ (a, b, c, d, e, f, g, h, i, j, k, l, m) = E.list id [ toEncoding a , toEncoding b , toEncoding c , toEncoding d , toEncoding e , toEncoding f , toEncoding g , toEncoding h , toEncoding i , toEncoding j , toEncoding k , toL l , toM m ] {-# INLINE liftToEncoding2 #-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k, ToJSON l) => ToJSON1 ((,,,,,,,,,,,,) a b c d e f g h i j k l) where liftToJSON = liftToJSON2 toJSON toJSONList {-# INLINE liftToJSON #-} liftToEncoding = liftToEncoding2 toEncoding toEncodingList {-# INLINE liftToEncoding #-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k, ToJSON l, ToJSON m) => ToJSON (a, b, c, d, e, f, g, h, i, j, k, l, m) where toJSON = toJSON2 {-# INLINE toJSON #-} 
toEncoding = toEncoding2 {-# INLINE toEncoding #-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k, ToJSON l) => ToJSON2 ((,,,,,,,,,,,,,) a b c d e f g h i j k l) where liftToJSON2 toM _ toN _ (a, b, c, d, e, f, g, h, i, j, k, l, m, n) = Array $ V.create $ do mv <- VM.unsafeNew 14 VM.unsafeWrite mv 0 (toJSON a) VM.unsafeWrite mv 1 (toJSON b) VM.unsafeWrite mv 2 (toJSON c) VM.unsafeWrite mv 3 (toJSON d) VM.unsafeWrite mv 4 (toJSON e) VM.unsafeWrite mv 5 (toJSON f) VM.unsafeWrite mv 6 (toJSON g) VM.unsafeWrite mv 7 (toJSON h) VM.unsafeWrite mv 8 (toJSON i) VM.unsafeWrite mv 9 (toJSON j) VM.unsafeWrite mv 10 (toJSON k) VM.unsafeWrite mv 11 (toJSON l) VM.unsafeWrite mv 12 (toM m) VM.unsafeWrite mv 13 (toN n) return mv {-# INLINE liftToJSON2 #-} liftToEncoding2 toM _ toN _ (a, b, c, d, e, f, g, h, i, j, k, l, m, n) = E.list id [ toEncoding a , toEncoding b , toEncoding c , toEncoding d , toEncoding e , toEncoding f , toEncoding g , toEncoding h , toEncoding i , toEncoding j , toEncoding k , toEncoding l , toM m , toN n ] {-# INLINE liftToEncoding2 #-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k, ToJSON l, ToJSON m) => ToJSON1 ((,,,,,,,,,,,,,) a b c d e f g h i j k l m) where liftToJSON = liftToJSON2 toJSON toJSONList {-# INLINE liftToJSON #-} liftToEncoding = liftToEncoding2 toEncoding toEncodingList {-# INLINE liftToEncoding #-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k, ToJSON l, ToJSON m, ToJSON n) => ToJSON (a, b, c, d, e, f, g, h, i, j, k, l, m, n) where toJSON = toJSON2 {-# INLINE toJSON #-} toEncoding = toEncoding2 {-# INLINE toEncoding #-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k, ToJSON l, ToJSON m) => ToJSON2 ((,,,,,,,,,,,,,,) a b c d e f g h i j k l m) where 
liftToJSON2 toN _ toO _ (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o) = Array $ V.create $ do mv <- VM.unsafeNew 15 VM.unsafeWrite mv 0 (toJSON a) VM.unsafeWrite mv 1 (toJSON b) VM.unsafeWrite mv 2 (toJSON c) VM.unsafeWrite mv 3 (toJSON d) VM.unsafeWrite mv 4 (toJSON e) VM.unsafeWrite mv 5 (toJSON f) VM.unsafeWrite mv 6 (toJSON g) VM.unsafeWrite mv 7 (toJSON h) VM.unsafeWrite mv 8 (toJSON i) VM.unsafeWrite mv 9 (toJSON j) VM.unsafeWrite mv 10 (toJSON k) VM.unsafeWrite mv 11 (toJSON l) VM.unsafeWrite mv 12 (toJSON m) VM.unsafeWrite mv 13 (toN n) VM.unsafeWrite mv 14 (toO o) return mv {-# INLINE liftToJSON2 #-} liftToEncoding2 toN _ toO _ (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o) = E.list id [ toEncoding a , toEncoding b , toEncoding c , toEncoding d , toEncoding e , toEncoding f , toEncoding g , toEncoding h , toEncoding i , toEncoding j , toEncoding k , toEncoding l , toEncoding m , toN n , toO o ] {-# INLINE liftToEncoding2 #-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k, ToJSON l, ToJSON m, ToJSON n) => ToJSON1 ((,,,,,,,,,,,,,,) a b c d e f g h i j k l m n) where liftToJSON = liftToJSON2 toJSON toJSONList {-# INLINE liftToJSON #-} liftToEncoding = liftToEncoding2 toEncoding toEncodingList {-# INLINE liftToEncoding #-} instance (ToJSON a, ToJSON b, ToJSON c, ToJSON d, ToJSON e, ToJSON f, ToJSON g, ToJSON h, ToJSON i, ToJSON j, ToJSON k, ToJSON l, ToJSON m, ToJSON n, ToJSON o) => ToJSON (a, b, c, d, e, f, g, h, i, j, k, l, m, n, o) where toJSON = toJSON2 {-# INLINE toJSON #-} toEncoding = toEncoding2 {-# INLINE toEncoding #-} ------------------------------------------------------------------------------- -- pre-bytestring-0.10 compatibility ------------------------------------------------------------------------------- {-# INLINE lazyToStrictByteString #-} lazyToStrictByteString :: L.ByteString -> S.ByteString #if MIN_VERSION_bytestring(0,10,0) lazyToStrictByteString = L.toStrict #else 
lazyToStrictByteString = packChunks -- packChunks is taken from the blaze-builder package. -- | Pack the chunks of a lazy bytestring into a single strict bytestring. packChunks :: L.ByteString -> S.ByteString packChunks lbs = do S.unsafeCreate (fromIntegral $ L.length lbs) (copyChunks lbs) where copyChunks !L.Empty !_pf = return () copyChunks !(L.Chunk (S.PS fpbuf o l) lbs') !pf = do withForeignPtr fpbuf $ \pbuf -> copyBytes pf (pbuf `plusPtr` o) l copyChunks lbs' (pf `plusPtr` l) #endif
tolysz/prepare-ghcjs
spec-lts8/aeson/Data/Aeson/Types/ToJSON.hs
Haskell
bsd-3-clause
97,291
{- (c) The University of Glasgow 2006 (c) The GRASP/AQUA Project, Glasgow University, 1993-1998 \section[IdInfo]{@IdInfos@: Non-essential information about @Ids@} (And a pretty good illustration of quite a few things wrong with Haskell. [WDP 94/11]) -} module IdInfo ( -- * The IdDetails type IdDetails(..), pprIdDetails, coVarDetails, isCoVarDetails, RecSelParent(..), -- * The IdInfo type IdInfo, -- Abstract vanillaIdInfo, noCafIdInfo, -- ** The OneShotInfo type OneShotInfo(..), oneShotInfo, noOneShotInfo, hasNoOneShotInfo, setOneShotInfo, -- ** Zapping various forms of Info zapLamInfo, zapFragileInfo, zapDemandInfo, zapUsageInfo, -- ** The ArityInfo type ArityInfo, unknownArity, arityInfo, setArityInfo, ppArityInfo, callArityInfo, setCallArityInfo, -- ** Demand and strictness Info strictnessInfo, setStrictnessInfo, demandInfo, setDemandInfo, pprStrictness, -- ** Unfolding Info unfoldingInfo, setUnfoldingInfo, setUnfoldingInfoLazily, -- ** The InlinePragInfo type InlinePragInfo, inlinePragInfo, setInlinePragInfo, -- ** The OccInfo type OccInfo(..), isDeadOcc, isStrongLoopBreaker, isWeakLoopBreaker, occInfo, setOccInfo, InsideLam, OneBranch, insideLam, notInsideLam, oneBranch, notOneBranch, -- ** The RuleInfo type RuleInfo(..), emptyRuleInfo, isEmptyRuleInfo, ruleInfoFreeVars, ruleInfoRules, setRuleInfoHead, ruleInfo, setRuleInfo, -- ** The CAFInfo type CafInfo(..), ppCafInfo, mayHaveCafRefs, cafInfo, setCafInfo, -- ** Tick-box Info TickBoxOp(..), TickBoxId, ) where import CoreSyn import Class import {-# SOURCE #-} PrimOp (PrimOp) import Name import VarSet import BasicTypes import DataCon import TyCon import {-# SOURCE #-} PatSyn import ForeignCall import Outputable import Module import Demand -- infixl so you can say (id `set` a `set` b) infixl 1 `setRuleInfo`, `setArityInfo`, `setInlinePragInfo`, `setUnfoldingInfo`, `setOneShotInfo`, `setOccInfo`, `setCafInfo`, `setStrictnessInfo`, `setDemandInfo` {- 
************************************************************************ * * IdDetails * * ************************************************************************ -} -- | The 'IdDetails' of an 'Id' give stable, and necessary, -- information about the Id. data IdDetails = VanillaId -- | The 'Id' for a record selector | RecSelId { sel_tycon :: RecSelParent , sel_naughty :: Bool -- True <=> a "naughty" selector which can't actually exist, for example @x@ in: -- data T = forall a. MkT { x :: a } } -- See Note [Naughty record selectors] in TcTyClsDecls | DataConWorkId DataCon -- ^ The 'Id' is for a data constructor /worker/ | DataConWrapId DataCon -- ^ The 'Id' is for a data constructor /wrapper/ -- [the only reasons we need to know is so that -- a) to support isImplicitId -- b) when desugaring a RecordCon we can get -- from the Id back to the data con] | ClassOpId Class -- ^ The 'Id' is a superclass selector, -- or class operation of a class | PrimOpId PrimOp -- ^ The 'Id' is for a primitive operator | FCallId ForeignCall -- ^ The 'Id' is for a foreign call | TickBoxOpId TickBoxOp -- ^ The 'Id' is for a HPC tick box (both traditional and binary) | DFunId Bool -- ^ A dictionary function. -- Bool = True <=> the class has only one method, so may be -- implemented with a newtype, so it might be bad -- to be strict on this dictionary | CoVarId -- ^ A coercion variable data RecSelParent = RecSelData TyCon | RecSelPatSyn PatSyn deriving Eq -- Either `TyCon` or `PatSyn` depending -- on the origin of the record selector. -- For a data type family, this is the -- /instance/ 'TyCon' not the family 'TyCon' instance Outputable RecSelParent where ppr p = case p of RecSelData ty_con -> ppr ty_con RecSelPatSyn ps -> ppr ps -- | Just a synonym for 'CoVarId'. Written separately so it can be -- exported in the hs-boot file. coVarDetails :: IdDetails coVarDetails = CoVarId -- | Check if an 'IdDetails' says 'CoVarId'. 
isCoVarDetails :: IdDetails -> Bool isCoVarDetails CoVarId = True isCoVarDetails _ = False instance Outputable IdDetails where ppr = pprIdDetails pprIdDetails :: IdDetails -> SDoc pprIdDetails VanillaId = empty pprIdDetails other = brackets (pp other) where pp VanillaId = panic "pprIdDetails" pp (DataConWorkId _) = text "DataCon" pp (DataConWrapId _) = text "DataConWrapper" pp (ClassOpId {}) = text "ClassOp" pp (PrimOpId _) = text "PrimOp" pp (FCallId _) = text "ForeignCall" pp (TickBoxOpId _) = text "TickBoxOp" pp (DFunId nt) = text "DFunId" <> ppWhen nt (text "(nt)") pp (RecSelId { sel_naughty = is_naughty }) = brackets $ text "RecSel" <> ppWhen is_naughty (text "(naughty)") pp CoVarId = text "CoVarId" {- ************************************************************************ * * \subsection{The main IdInfo type} * * ************************************************************************ -} -- | An 'IdInfo' gives /optional/ information about an 'Id'. If -- present it never lies, but it may not be present, in which case there -- is always a conservative assumption which can be made. -- -- Two 'Id's may have different info even though they have the same -- 'Unique' (and are hence the same 'Id'); for example, one might lack -- the properties attached to the other. -- -- Most of the 'IdInfo' gives information about the value, or definition, of -- the 'Id', independent of its usage. Exceptions to this -- are 'demandInfo', 'occInfo', 'oneShotInfo' and 'callArityInfo'. 
data IdInfo = IdInfo { arityInfo :: !ArityInfo, -- ^ 'Id' arity ruleInfo :: RuleInfo, -- ^ Specialisations of the 'Id's function which exist -- See Note [Specialisations and RULES in IdInfo] unfoldingInfo :: Unfolding, -- ^ The 'Id's unfolding cafInfo :: CafInfo, -- ^ 'Id' CAF info oneShotInfo :: OneShotInfo, -- ^ Info about a lambda-bound variable, if the 'Id' is one inlinePragInfo :: InlinePragma, -- ^ Any inline pragma atached to the 'Id' occInfo :: OccInfo, -- ^ How the 'Id' occurs in the program strictnessInfo :: StrictSig, -- ^ A strictness signature demandInfo :: Demand, -- ^ ID demand information callArityInfo :: !ArityInfo -- ^ How this is called. -- n <=> all calls have at least n arguments } -- Setters setRuleInfo :: IdInfo -> RuleInfo -> IdInfo setRuleInfo info sp = sp `seq` info { ruleInfo = sp } setInlinePragInfo :: IdInfo -> InlinePragma -> IdInfo setInlinePragInfo info pr = pr `seq` info { inlinePragInfo = pr } setOccInfo :: IdInfo -> OccInfo -> IdInfo setOccInfo info oc = oc `seq` info { occInfo = oc } -- Try to avoid spack leaks by seq'ing setUnfoldingInfoLazily :: IdInfo -> Unfolding -> IdInfo setUnfoldingInfoLazily info uf -- Lazy variant to avoid looking at the = -- unfolding of an imported Id unless necessary info { unfoldingInfo = uf } -- (In this case the demand-zapping is redundant.) setUnfoldingInfo :: IdInfo -> Unfolding -> IdInfo setUnfoldingInfo info uf = -- We don't seq the unfolding, as we generate intermediate -- unfoldings which are just thrown away, so evaluating them is a -- waste of time. 
-- seqUnfolding uf `seq` info { unfoldingInfo = uf } setArityInfo :: IdInfo -> ArityInfo -> IdInfo setArityInfo info ar = info { arityInfo = ar } setCallArityInfo :: IdInfo -> ArityInfo -> IdInfo setCallArityInfo info ar = info { callArityInfo = ar } setCafInfo :: IdInfo -> CafInfo -> IdInfo setCafInfo info caf = info { cafInfo = caf } setOneShotInfo :: IdInfo -> OneShotInfo -> IdInfo setOneShotInfo info lb = {-lb `seq`-} info { oneShotInfo = lb } setDemandInfo :: IdInfo -> Demand -> IdInfo setDemandInfo info dd = dd `seq` info { demandInfo = dd } setStrictnessInfo :: IdInfo -> StrictSig -> IdInfo setStrictnessInfo info dd = dd `seq` info { strictnessInfo = dd } -- | Basic 'IdInfo' that carries no useful information whatsoever vanillaIdInfo :: IdInfo vanillaIdInfo = IdInfo { cafInfo = vanillaCafInfo, arityInfo = unknownArity, ruleInfo = emptyRuleInfo, unfoldingInfo = noUnfolding, oneShotInfo = NoOneShotInfo, inlinePragInfo = defaultInlinePragma, occInfo = NoOccInfo, demandInfo = topDmd, strictnessInfo = nopSig, callArityInfo = unknownArity } -- | More informative 'IdInfo' we can use when we know the 'Id' has no CAF references noCafIdInfo :: IdInfo noCafIdInfo = vanillaIdInfo `setCafInfo` NoCafRefs -- Used for built-in type Ids in MkId. {- ************************************************************************ * * \subsection[arity-IdInfo]{Arity info about an @Id@} * * ************************************************************************ For locally-defined Ids, the code generator maintains its own notion of their arities; so it should not be asking... (but other things besides the code-generator need arity info!) -} -- | An 'ArityInfo' of @n@ tells us that partial application of this -- 'Id' to up to @n-1@ value arguments does essentially no work. -- -- That is not necessarily the same as saying that it has @n@ leading -- lambdas, because coerces may get in the way. 
-- -- The arity might increase later in the compilation process, if -- an extra lambda floats up to the binding site. type ArityInfo = Arity -- | It is always safe to assume that an 'Id' has an arity of 0 unknownArity :: Arity unknownArity = 0 :: Arity ppArityInfo :: Int -> SDoc ppArityInfo 0 = empty ppArityInfo n = hsep [text "Arity", int n] {- ************************************************************************ * * \subsection{Inline-pragma information} * * ************************************************************************ -} -- | Tells when the inlining is active. -- When it is active the thing may be inlined, depending on how -- big it is. -- -- If there was an @INLINE@ pragma, then as a separate matter, the -- RHS will have been made to look small with a Core inline 'Note' -- -- The default 'InlinePragInfo' is 'AlwaysActive', so the info serves -- entirely as a way to inhibit inlining until we want it type InlinePragInfo = InlinePragma {- ************************************************************************ * * Strictness * * ************************************************************************ -} pprStrictness :: StrictSig -> SDoc pprStrictness sig = ppr sig {- ************************************************************************ * * RuleInfo * * ************************************************************************ Note [Specialisations and RULES in IdInfo] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Generally speaking, a GlobalId has an *empty* RuleInfo. All their RULES are contained in the globally-built rule-base. In principle, one could attach the to M.f the RULES for M.f that are defined in M. But we don't do that for instance declarations and so we just treat them all uniformly. The EXCEPTION is PrimOpIds, which do have rules in their IdInfo. That is jsut for convenience really. However, LocalIds may have non-empty RuleInfo. 
We treat them differently because: a) they might be nested, in which case a global table won't work b) the RULE might mention free variables, which we use to keep things alive In TidyPgm, when the LocalId becomes a GlobalId, its RULES are stripped off and put in the global list. -} -- | Records the specializations of this 'Id' that we know about -- in the form of rewrite 'CoreRule's that target them data RuleInfo = RuleInfo [CoreRule] DVarSet -- Locally-defined free vars of *both* LHS and RHS -- of rules. I don't think it needs to include the -- ru_fn though. -- Note [Rule dependency info] in OccurAnal -- | Assume that no specilizations exist: always safe emptyRuleInfo :: RuleInfo emptyRuleInfo = RuleInfo [] emptyDVarSet isEmptyRuleInfo :: RuleInfo -> Bool isEmptyRuleInfo (RuleInfo rs _) = null rs -- | Retrieve the locally-defined free variables of both the left and -- right hand sides of the specialization rules ruleInfoFreeVars :: RuleInfo -> DVarSet ruleInfoFreeVars (RuleInfo _ fvs) = fvs ruleInfoRules :: RuleInfo -> [CoreRule] ruleInfoRules (RuleInfo rules _) = rules -- | Change the name of the function the rule is keyed on on all of the 'CoreRule's setRuleInfoHead :: Name -> RuleInfo -> RuleInfo setRuleInfoHead fn (RuleInfo rules fvs) = RuleInfo (map (setRuleIdName fn) rules) fvs {- ************************************************************************ * * \subsection[CG-IdInfo]{Code generator-related information} * * ************************************************************************ -} -- CafInfo is used to build Static Reference Tables (see simplStg/SRT.hs). -- | Records whether an 'Id' makes Constant Applicative Form references data CafInfo = MayHaveCafRefs -- ^ Indicates that the 'Id' is for either: -- -- 1. A function or static constructor -- that refers to one or more CAFs, or -- -- 2. A real live CAF | NoCafRefs -- ^ A function or static constructor -- that refers to no CAFs. 
deriving (Eq, Ord) -- | Assumes that the 'Id' has CAF references: definitely safe vanillaCafInfo :: CafInfo vanillaCafInfo = MayHaveCafRefs mayHaveCafRefs :: CafInfo -> Bool mayHaveCafRefs MayHaveCafRefs = True mayHaveCafRefs _ = False instance Outputable CafInfo where ppr = ppCafInfo ppCafInfo :: CafInfo -> SDoc ppCafInfo NoCafRefs = text "NoCafRefs" ppCafInfo MayHaveCafRefs = empty {- ************************************************************************ * * \subsection{Bulk operations on IdInfo} * * ************************************************************************ -} -- | This is used to remove information on lambda binders that we have -- setup as part of a lambda group, assuming they will be applied all at once, -- but turn out to be part of an unsaturated lambda as in e.g: -- -- > (\x1. \x2. e) arg1 zapLamInfo :: IdInfo -> Maybe IdInfo zapLamInfo info@(IdInfo {occInfo = occ, demandInfo = demand}) | is_safe_occ occ && is_safe_dmd demand = Nothing | otherwise = Just (info {occInfo = safe_occ, demandInfo = topDmd}) where -- The "unsafe" occ info is the ones that say I'm not in a lambda -- because that might not be true for an unsaturated lambda is_safe_occ (OneOcc in_lam _ _) = in_lam is_safe_occ _other = True safe_occ = case occ of OneOcc _ once int_cxt -> OneOcc insideLam once int_cxt _other -> occ is_safe_dmd dmd = not (isStrictDmd dmd) -- | Remove all demand info on the 'IdInfo' zapDemandInfo :: IdInfo -> Maybe IdInfo zapDemandInfo info = Just (info {demandInfo = topDmd}) -- | Remove usage (but not strictness) info on the 'IdInfo' zapUsageInfo :: IdInfo -> Maybe IdInfo zapUsageInfo info = Just (info {demandInfo = zapUsageDemand (demandInfo info)}) zapFragileInfo :: IdInfo -> Maybe IdInfo -- ^ Zap info that depends on free variables zapFragileInfo info = Just (info `setRuleInfo` emptyRuleInfo `setUnfoldingInfo` noUnfolding `setOccInfo` zapFragileOcc occ) where occ = occInfo info {- 
************************************************************************ * * \subsection{TickBoxOp} * * ************************************************************************ -} type TickBoxId = Int -- | Tick box for Hpc-style coverage data TickBoxOp = TickBox Module {-# UNPACK #-} !TickBoxId instance Outputable TickBoxOp where ppr (TickBox mod n) = text "tick" <+> ppr (mod,n)
oldmanmike/ghc
compiler/basicTypes/IdInfo.hs
Haskell
bsd-3-clause
18,548
-- | An indexed environment (reader) monad transformer in the style of the
-- local "MT" transformer library.  'WithEnv' threads a read-only value @e@
-- through an underlying monad @m@; the @ix@ indices ('Z', 'S') select which
-- environment layer a 'getEnv'/'inModEnv' refers to when transformers are
-- stacked.  All comments below describe only what is visible here; the
-- classes themselves live in the MT module.
module IxEnvMT (HasEnv(..), MT(..), at, Z, S, Top, Under, WithEnv, withEnv, mapEnv) where

import MT
import Control_Monad_Fix
import Control.Monad(liftM,MonadPlus(..))

-- | A computation that needs an environment @e@ before it can run in @m@.
newtype WithEnv e m a = E { unE :: e -> m a }

-- | Run a 'WithEnv' computation by supplying its environment.
withEnv :: e -> WithEnv e m a -> m a
withEnv e (E f) = f e

-- | Contravariantly adapt the environment (note the argument order:
-- the function maps the NEW environment to the OLD one).
mapEnv :: Monad m => (e2 -> e1) -> WithEnv e1 m a -> WithEnv e2 m a
mapEnv f (E m) = E (\e -> m (f e))

--------------------------------------------------------------------------------

-- Pre-AMP style instances: Functor is derived from the Monad via liftM.
instance Monad m => Functor (WithEnv e m) where
  fmap = liftM

instance Monad m => Monad (WithEnv e m) where
  return = lift . return
  -- Pass the same environment to both halves of the bind.
  E m >>= f = E (\e -> do x <- m e; unE (f x) e)
  E m >> n = E (\e -> m e >> withEnv e n)
  fail = lift . fail

instance MT (WithEnv e) where
  -- A lifted action simply ignores the environment.
  lift = E . const

instance MonadPlus m => MonadPlus (WithEnv e m) where
  mzero = lift mzero
  -- Both alternatives see the same environment.
  E a `mplus` E b = E (\e -> a e `mplus` b e)

--------------------------------------------------------------------------------
-- Features --------------------------------------------------------------------

-- Index Z: this layer's own environment.
instance Monad m => HasEnv (WithEnv e m) Z e where
  getEnv _ = E return
  inModEnv _ = mapEnv

-- Index (S ix): delegate to an environment deeper in the stack.
instance HasEnv m ix e => HasEnv (WithEnv e' m) (S ix) e where
  getEnv (Next ix) = lift (getEnv ix)
  inModEnv (Next ix) f m = E (\e -> inModEnv ix f (withEnv e m))

-- The remaining instances lift the other MT "features" through this
-- transformer unchanged.
instance HasState m ix s => HasState (WithEnv e m) ix s where
  updSt ix = lift . updSt ix

instance HasOutput m ix o => HasOutput (WithEnv e m) ix o where
  outputTree ix = lift . outputTree ix

instance HasExcept m x => HasExcept (WithEnv e m) x where
  raise = lift . raise
  -- The handler runs under the same environment as the handled action.
  handle h (E m) = E (\e -> handle (withEnv e . h) (m e))

instance HasCont m => HasCont (WithEnv e m) where
  -- The captured continuation is lifted so it can be invoked from
  -- within WithEnv code.
  callcc f = E (\e -> callcc (\k -> withEnv e $ f (lift . k)))

instance MonadFix m => MonadFix (WithEnv e m) where
  mfix f = E (\e -> mfix (withEnv e . f))

instance HasBaseMonad m n => HasBaseMonad (WithEnv e m) n where
  inBase = lift . inBase

instance HasRefs m r => HasRefs (WithEnv e m) r where
  newRef = lift . newRef
  readRef = lift . readRef
  writeRef r = lift . writeRef r
mpickering/HaRe
old/tools/base/lib/Monads/IxEnvMT.hs
Haskell
bsd-3-clause
2,210
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE NoImplicitPrelude #-}

-----------------------------------------------------------------------------
-- |
-- Module      :  Data.Functor
-- Copyright   :  (c) The University of Glasgow 2001
-- License     :  BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer  :  libraries@haskell.org
-- Stability   :  provisional
-- Portability :  portable
--
-- Functors: uniform action over a parameterized type, generalizing the
-- 'Data.List.map' function on lists.

module Data.Functor
    (
      Functor(fmap),
      (<$),
      ($>),
      (<$>),
      (<&>),
      void,
    ) where

import GHC.Base ( Functor(..), flip )

infixl 4 <$>
infixl 4 $>
infixl 1 <&>

-- | An infix synonym for 'fmap': apply a pure function underneath any
-- 'Functor'.  The name is an allusion to @('$')@ — where @('$')@ is plain
-- function application, @('<$>')@ is application lifted over a 'Functor':
--
-- > ($)   ::              (a -> b) ->   a ->   b
-- > (<$>) :: Functor f => (a -> b) -> f a -> f b
--
-- >>> show <$> Just 3
-- Just "3"
(<$>) :: Functor f => (a -> b) -> f a -> f b
f <$> x = fmap f x

-- | Flipped version of '<$>'; the functorial value comes first, which
-- reads naturally in left-to-right pipelines.
--
-- @
-- ('<&>') = 'flip' 'fmap'
-- @
--
-- @since 4.11.0.0
--
-- >>> Just 2 <&> (+1)
-- Just 3
(<&>) :: Functor f => f a -> (a -> b) -> f b
(<&>) = flip fmap

-- | Flipped version of '<$': keep the functorial shape of the first
-- argument but replace every result with the given constant.
--
-- @since 4.7.0.0
--
-- >>> Just 90210 $> "foo"
-- Just "foo"
($>) :: Functor f => f a -> b -> f b
x $> b = b <$ x

-- | @'void' value@ discards or ignores the result of evaluation (such as
-- the return value of an 'System.IO.IO' action), keeping only the shape.
--
-- >>> void (Just 3)
-- Just ()
void :: Functor f => f a -> f ()
void = (<$) ()
rahulmutt/ghcvm
libraries/base/Data/Functor.hs
Haskell
bsd-3-clause
3,644
{-# LANGUAGE MultiParamTypeClasses, FunctionalDependencies, FlexibleInstances, UndecidableInstances, FlexibleContexts #-}
-- UndecidableInstances now needed because the Coverage Condition fails

-- GHC testsuite case (tcfail093): exercises functional-dependency
-- improvement.  Do not "fix" the code — the point is how it typechecks.
module ShouldFail where

-- A stripped down functional-dependency
-- example that causes GHC 4.08.1 to crash with:
-- "basicTypes/Var.lhs:194: Non-exhaustive patterns in function readMutTyVar"
-- Reported by Thomas Hallgren Nov 00

-- July 07: I'm changing this from "should fail" to "should succeed"
-- See Note [Important subtlety in oclose] in FunDeps

primDup :: Int -> IO Int
primDup = undefined

dup () = call primDup

-- call :: Call c h => c -> h
--
-- call primDup :: {Call (Int -> IO Int) h} => h with
-- Using the instance decl gives
-- call primDup :: {Call (IO Int) h'} => Int -> h'
-- The functional dependency means that h must be constant
-- Hence program is rejected because it can't find an instance
-- for {Call (IO Int) h'}

class Call c h | c -> h where
    call :: c -> h

instance Call c h => Call (Int->c) (Int->h) where
    call f = call . f
ezyang/ghc
testsuite/tests/typecheck/should_fail/tcfail093.hs
Haskell
bsd-3-clause
1,102
-- GHCi debugger print test (print029).  The definitions are deliberately
-- minimal; the accompanying script inspects how the debugger prints them.

-- A newtype wrapping a list of optional values.
newtype MkT2 a = MkT2 [Maybe a] deriving Show

-- Identity, restricted to containers of Int.
f :: t Int -> t Int
f x = x

-- Pair up two containers of Int, unchanged.
f2 :: t Int -> t Int -> (t Int, t Int)
f2 x y = (x,y)
urbanslug/ghc
testsuite/tests/ghci.debugger/scripts/print029.hs
Haskell
bsd-3-clause
129
{-# LANGUAGE LambdaCase #-}

-- | Client wrappers for the Wykop @profile/*@ API endpoints.  Each function
-- builds the endpoint path for a login and delegates the request to
-- 'get' from WykopUtils.
module WykopProfile
    ( indexProfile
    , addedProfile
    , publishedProfile
    , commentedProfile
    , diggedProfile
    , buriedProfile
    , observeProfile
    , unobserveProfile
    , blockProfile
    , unblockProfile
    , followersProfile
    , followedProfile
    , favoritesProfile
    , module WykopTypes
    ) where

import WykopTypes
import WykopUtils

-- | Build the @profile/\<action\>/\<login\>@ resource path shared by every
-- endpoint in this module.
profilePath :: String -> String -> String
profilePath action login = "profile/" ++ action ++ "/" ++ login

-- | Fetch a user's profile.
indexProfile :: Keys -> String -> IO (Maybe Profile)
indexProfile k login = get k [] [] (profilePath "index" login)

-- | Links added by the user, optionally paged.
addedProfile :: Keys -> String -> Maybe Int -> IO (Maybe [Link])
addedProfile k login page = get k [] (mPageToGet page) (profilePath "added" login)

-- | Links published by the user, optionally paged.
publishedProfile :: Keys -> String -> Maybe Int -> IO (Maybe [Link])
publishedProfile k login page = get k [] (mPageToGet page) (profilePath "published" login)

-- | Links the user commented on, optionally paged.
commentedProfile :: Keys -> String -> Maybe Int -> IO (Maybe [Link])
commentedProfile k login page = get k [] (mPageToGet page) (profilePath "commented" login)

-- | Links the user dug, optionally paged.
diggedProfile :: Keys -> String -> Maybe Int -> IO (Maybe [Link])
diggedProfile k login page = get k [] (mPageToGet page) (profilePath "digged" login)

-- | Links the user buried; requires the user's key, optionally paged.
buriedProfile :: Keys -> String -> Userkey -> Maybe Int -> IO (Maybe [Link])
buriedProfile k login userKey page =
    get k [] (mPageToGet page ++ toGet userKey) (profilePath "buried" login)

-- | Start observing a profile.
observeProfile :: Keys -> String -> Userkey -> IO (Maybe [Bool])
observeProfile k login userKey = get k [] (toGet userKey) (profilePath "observe" login)

-- | Stop observing a profile.
unobserveProfile :: Keys -> String -> Userkey -> IO (Maybe [Bool])
unobserveProfile k login userKey = get k [] (toGet userKey) (profilePath "unobserve" login)

-- | Block a profile.
blockProfile :: Keys -> String -> Userkey -> IO (Maybe [Bool])
blockProfile k login userKey = get k [] (toGet userKey) (profilePath "block" login)

-- | Unblock a profile.
unblockProfile :: Keys -> String -> Userkey -> IO (Maybe [Bool])
unblockProfile k login userKey = get k [] (toGet userKey) (profilePath "unblock" login)

-- | Profiles following the user; key and page are both optional.
followersProfile :: Keys -> String -> Maybe Userkey -> Maybe Int -> IO (Maybe [Profile])
followersProfile k login userKey page =
    get k [] (mPageToGet page ++ mToGet userKey) (profilePath "followers" login)

-- | Profiles the user follows; key and page are both optional.
followedProfile :: Keys -> String -> Maybe Userkey -> Maybe Int -> IO (Maybe [Profile])
followedProfile k login userKey page =
    get k [] (mPageToGet page ++ mToGet userKey) (profilePath "followed" login)

-- | The user's favorite links; an optional list id is appended to the path.
favoritesProfile :: Keys -> String -> Maybe Int -> Maybe Int -> IO (Maybe [Link])
favoritesProfile k login favId page =
    get k [] (mPageToGet page) (profilePath "favorites" login ++ suffix)
  where
    suffix = maybe "" (\x -> "/" ++ show x) favId
mikusp/hwykop
WykopProfile.hs
Haskell
mit
3,055
{-# LANGUAGE OverloadedStrings #-}

-- | Health classification of a Passenger status snapshot for a Nagios check.
module System.PassengerCheck.Health (queuedRequests, status) where

import System.PassengerCheck.Types
import System.Nagios.Plugin (CheckStatus(..))
import Data.Text (Text)

-- | Total number of queued requests: the top-level queue plus the sum of
-- all per-process local queues.
queuedRequests :: PassengerStatus -> Integer
queuedRequests stat =
  requestsInTopLevelQueue stat + sum (requestsInLocalQueues stat)

-- | Classify queue fullness (queued requests relative to the pool size)
-- into a Nagios check status with a human-readable message.
status :: PassengerStatus -> (CheckStatus, Text)
status stat = classify fullness
  where
    fullness :: Double
    fullness = fromIntegral (queuedRequests stat) / fromIntegral (maxPoolSize stat)
    classify ratio
      | ratio >= 0.9 = (Critical, "Queue is at or above 90% full")
      | ratio >= 0.5 = (Warning, "Queue is at or above 50% full")
      | otherwise    = (OK, "Queue is less than 50% full")
stackbuilders/passenger-check
src/System/PassengerCheck/Health.hs
Haskell
mit
755
--This script orients sequences according to a PWM. The sequences must
--be in the first column of the input file
--Author: Tristan Bepler (tbepler@gmail.com)

import qualified Data.Map as Map
import Data.List (foldl')
import System.Environment
import System.Exit
import Debug.Trace

-- | Orientation of a sequence relative to the PWM.
data Orientation = Fwd | Rvs

main = getArgs >>= parse >>= putStr

-- | Command-line dispatch: -h prints help; two args are treated as the
-- sequence file and the PWM file; anything else prints usage.
parse ["-h"] = help >> exit
parse [x,y] = do
    input <- readFile x
    pwm <- readFile y
    return $ assign (input, pwm)
parse xs = usage >> exit

usage = putStrLn "Usage: orientseqs [-h] Seqs_File PWM_File"
help = do
    putStrLn "This script takes a file containing sequences in the first column and orients them according to the given PWM.\nThe input file should not have a header.\n"
    usage
exit = exitWith ExitSuccess
die = exitWith (ExitFailure 1)

-- | Re-orient every row of the input according to the PWM.
-- FIX: rows with no words are dropped *before* processing.  Previously
-- blank lines were filtered only after 'process', which calls 'head' on
-- the row's words and therefore crashed on an empty/whitespace-only line.
assign (inputFile, pwmFile) =
    unlines $ map (process pwm) $ filter (not . null . words) $ lines inputFile
    where
        pwm :: (Map.Map Char [Double])
        pwm = readPWM pwmFile

-- | Replace the first column of a row with its oriented form, keeping the
-- remaining columns unchanged.  Assumes the row has at least one word
-- (guaranteed by the filter in 'assign').
process pwm row = unwords $ (str orientation):rest
    where
        entries = words row
        rest = tail $ entries
        orientation = orient pwm $ head $ entries
        str Fwd = head $ entries
        str Rvs = rvsCompliment $ head $ entries

-- | Variant used for rows carrying precomputed per-orientation scores in
-- columns 3/4: emit sequence, name, orientation label and the selected score.
selectScore pwm [] = []
selectScore pwm row = unwords [str,name,(orientAsString orientation),score]
    where
        entries = words row
        str = head entries
        name = entries !! 1
        score = entries !! (index orientation)
        orientation = orient pwm str
        index Fwd = 3
        index Rvs = 4
        orientAsString Fwd = "Fwd"
        orientAsString Rvs = "Rvs"

-- | Pick the orientation whose centered PWM score is higher (ties go Fwd).
orient pwm str = if score < rvsScore then Rvs else Fwd
    where
        score = scoreCenter pwm str
        rvsScore = scoreCenter pwm rvscomp
        rvscomp = rvsCompliment str

-- | Score the centered substring of 'str' whose length equals the PWM width.
scoreCenter pwm str = score pwm center
    where
        center = take pwmLen $ drop flank str
        flank = (strLen - pwmLen) `div` 2
        strLen = length str
        pwmLen = pwmLength pwm

-- | Best score over all substrings of PWM width (currently unused here).
maxScore pwm str = maximum $ map (score pwm) substrs
    where
        substrs = map ((take len).flip drop str) [0..((length str)-len)]
        len = pwmLength pwm

-- | Reverse complement of a DNA string; errors on non-ACGT characters.
rvsCompliment str = reverse $ map (comp) str
    where
        comp 'A' = 'T'
        comp 'T' = 'A'
        comp 'G' = 'C'
        comp 'C' = 'G'
        comp x = error ("Unknown base: " ++ [x])

-- | Width of the PWM: the shortest row (all rows should be equal length).
pwmLength pwm = minimum $ map (length) $ Map.elems pwm

-- | Sum of per-position weights for each character of 'str'.
-- Uses a strict left fold to avoid building a long thunk chain.
score pwm str = foldl' (add') 0 $ zip [0..] str
    where
        add' s t = (score' t) + s
        score' (i,c) = score'' i $ Map.lookup c pwm
            where
                score'' i (Just xs) = xs !! i
                score'' i (Nothing) = error ("Character "++[c]++" not scorable by PWM: " ++ (show pwm))

-- | Parse a PWM from text: rows labelled "A:", "C:", "G:", "T:" followed by
-- whitespace-separated weights.
readPWM input = foldl (insert) Map.empty rows
    where
        rows = extractPWMRows $ lines input
        insert m x = Map.insert (key row) (values row) m
            where
                row = words x
                key xs = head $ head xs
                values xs = map (read) $ tail xs

-- | Find the four consecutive PWM rows in the file.
-- FIX: an explicit error for a file with no PWM rows, instead of a bare
-- non-exhaustive-pattern crash.
extractPWMRows [] = error "No PWM rows (A:/C:/G:/T:) found in PWM file"
extractPWMRows (x:xs) = if matches row then map (unwords.extract.words) (take 4 (x:xs)) else extractPWMRows xs
    where
        row = words x
        matches ("A:":ys) = True
        matches ("C:":ys) = True
        matches ("G:":ys) = True
        matches ("T:":ys) = True
        matches ys = False
        extract ("A:":ys) = "A" : ys
        extract ("C:":ys) = "C" : ys
        extract ("G:":ys) = "G" : ys
        extract ("T:":ys) = "T" : ys
        extract ys = ys
tbepler/PBM-Analysis
orientseqs.hs
Haskell
mit
3,061
-- | Empty placeholder module for the DeviceProximityEvent DOM interface.
-- It exports nothing; presumably the generated bindings were removed or are
-- unsupported on this backend — TODO confirm against the bindings generator.
module GHCJS.DOM.DeviceProximityEvent (
  ) where
manyoo/ghcjs-dom
ghcjs-dom-webkit/src/GHCJS/DOM/DeviceProximityEvent.hs
Haskell
mit
50
-- | Keyboard input handling for the sliding-puzzle board.
module Inputs (getInput) where

import qualified Data.Map as M

import Board (Coords, Board, getBlankCoords, getNeighbors)

-- | Map a key to the neighbor of the blank tile it selects.  Returns an
-- empty map when the board has no blank tile.
getKeyMoveMapping :: Board -> M.Map Char Coords
getKeyMoveMapping board =
    case getBlankCoords board of
        Just coords -> buildMap coords
        Nothing -> M.empty
    where
        buildMap (x,y) = M.fromList $ map (mapF (x,y)) (getNeighbors (x,y))
        -- Each neighbor differs from the blank in exactly one coordinate,
        -- so exactly one guard fires.  NOTE(review): the guard set is
        -- non-exhaustive by design — it relies on getNeighbors never
        -- returning the blank's own coordinates; confirm in Board.
        mapF (x,y) (x',y')
            | x' > x = ('a', (x',y'))
            | x' < x = ('d', (x',y'))
            | y' > y = ('w', (x',y'))
            | y' < y = ('s', (x',y'))

-- | Read one key press and translate it into the coordinates of the tile
-- to move, or 'Nothing' for an unmapped key.
getInput :: Board -> IO (Maybe Coords)
getInput board = do
    key <- getChar
    -- Fix: bind the pure mapping with 'let' instead of the
    -- 'x <- return ...' anti-pattern.
    let keyMap = getKeyMoveMapping board
    return (M.lookup key keyMap)
lshemesh/Slide
src/Inputs.hs
Haskell
mit
674
-- | JSON-decodable types describing a Gazelle tracker artist.
module Network.Gazelle.Types.Artist (
    Artist(..),
    ArtistEntry(..),
    ArtistStatistics(..),
    Tag(..)
) where

import Network.Gazelle.Types.Gazelle
import Network.Gazelle.Types.Id

import Data.Aeson
import Data.Scientific
import Data.Text (Text)

import Network.API.Builder

-- | A tag attached to an artist, with how often it was applied.
data Tag = Tag {
    tagName :: Text,
    tagCount :: Integer
} deriving Show

-- Decoded from the JSON keys "name" and "count".
instance FromJSON Tag where
    parseJSON = withObject "Tag" $ \o -> Tag
        <$> o .: "name"
        <*> o .: "count"

-- | Aggregate torrent statistics for an artist.
data ArtistStatistics = ArtistStatistics {
    asNumGroups :: Integer,
    asNumTorrents :: Integer,
    asNumSeeders :: Integer,
    asNumLeechers :: Integer,
    asNumSnatches :: Integer
} deriving Show

instance FromJSON ArtistStatistics where
    parseJSON = withObject "ArtistStatistics" $ \o -> ArtistStatistics
        <$> o .: "numGroups"
        <*> o .: "numTorrents"
        <*> o .: "numSeeders"
        <*> o .: "numLeechers"
        <*> o .: "numSnatches"

-- | A "similar artist" entry as returned alongside an 'Artist'.
data ArtistEntry = ArtistEntry {
    aeId :: Integer,
    aeName :: Text,
    aeScore :: Integer,
    aeSimilarId :: SimilarId
} deriving Show

instance FromJSON ArtistEntry where
    parseJSON = withObject "ArtistEntry" $ \o -> ArtistEntry
        <$> o .: "artistId"
        <*> o .: "name"
        <*> o .: "score"
        <*> o .: "similarId"

-- | A full artist record: identity, profile content, flags, tags,
-- similar artists and statistics.
data Artist = Artist {
    aId :: ArtistID,
    aName :: Text,
    aNotificationsEnabled :: Bool,
    aHasBookmarked :: Bool,
    aImage :: Text,
    aBody :: Text,
    aVanityHouse :: Bool,
    aTags :: [Tag],
    aSimilarArtists :: [ArtistEntry],
    aStatistics :: ArtistStatistics
} deriving Show

instance FromJSON Artist where
    parseJSON = withObject "Artist" $ \o -> Artist
        <$> o .: "id"
        <*> o .: "name"
        <*> o .: "notificationsEnabled"
        <*> o .: "hasBookmarked"
        <*> o .: "image"
        <*> o .: "body"
        <*> o .: "vanityHouse"
        <*> o .: "tags"
        <*> o .: "similarArtists"
        <*> o .: "statistics"

-- Allows api-builder to decode an Artist straight from a response body.
instance Receivable Artist where
    receive = useResponseFromJSON
mr/gazelle
src/Network/Gazelle/Types/Artist.hs
Haskell
mit
2,047
-- | Test helpers for constructing IR 'Definition's, with and without
-- meaningful source locations ('FromSource' wraps a value with a span).
module Glucose.Test.IR.Core where

import Control.Comonad
import Data.Text (Text)
import Glucose.Identifier (Identifier (..))
import Glucose.IR
import Glucose.Parser.Source
import Glucose.Test.Source

-- * Without source locations

-- | A named constant definition at an arbitrary ("anywhere") location.
constantAnywhere :: Text -> Literal -> FromSource (Definition ann)
constantAnywhere name lit = definitionAnywhere name (Literal lit)

-- | A constructor definition (type name, constructor name, index) at an
-- arbitrary location.
constructorAnywhere :: Text -> Text -> Int -> FromSource (Definition ann)
constructorAnywhere ty ctor index = definitionAnywhere ctor $ Constructor (fromSource $ Identifier ty) index

-- | Wrap a name and expression in arbitrary locations and build a definition.
definitionAnywhere :: Text -> Expression ann -> FromSource (Definition ann)
definitionAnywhere name value = definition (fromSource name) (fromSource value)

-- * With source locations

-- | A constant definition keeping the given source spans.
constant :: FromSource Text -> FromSource Literal -> FromSource (Definition ann)
constant name lit = definition name (Literal <$> lit)

-- | A constructor definition; the expression reuses the constructor
-- name's span (via '$>').
constructor :: FromSource Text -> FromSource Text -> Int -> FromSource (Definition ann)
constructor ty ctor index = definition ctor $ ctor $> Constructor (Identifier <$> ty) index

-- | Build a 'Definition'; 'duplicate' keeps the sub-spans available on the
-- name and value inside the outer span.
definition :: FromSource Text -> FromSource (Expression ann) -> FromSource (Definition ann)
definition name value = Definition <$> duplicate (Identifier <$> name) <*> duplicate value

-- | A reference expression carrying the name's source span.
reference :: RefKind ann -> FromSource Text -> Type ann -> FromSource (Expression ann)
reference kind name ty = (\n -> Reference kind (Identifier n) ty) <$> name
sardonicpresence/glucose
test/Glucose/Test/IR/Core.hs
Haskell
mit
1,431
module Main where

import System.Environment
import PropDoc.Core
import PropDoc.Input.JavaProps

-- IO
-- | Read the properties file named by the first command-line argument,
-- parse its definitions and print them.
main = do
    args <- getArgs
    case args of
        -- Fix: pattern-match instead of the partial 'head args' (which
        -- crashed with an unhelpful message when no argument was given)
        -- and drop the 'name <- return ...' anti-pattern.
        [] -> error "usage: propdoc <properties-file>"
        (name:_) -> do
            contents <- readFile name
            let defs = props nameValuePair Nothing [] (lines contents)
            -- Equivalent to the original right fold with (acc ++ show a):
            -- the definitions are printed in reverse order.
            putStr $ concatMap show (reverse defs)
chrislewis/PropDoc
src/Main.hs
Haskell
mit
302
{-# LANGUAGE OverloadedStrings #-}
module Data.IP.Extra where

import Data.IP

-- Return a List of AddrRanges within a given AddrRange
-- that have the given netmask.  If the given netmask is
-- *smaller* (i.e. coarser) than the netmask of the original
-- AddrRange, the request does not describe sub-ranges of the
-- original, and an empty List is returned.
-- (The previous version of this comment had the condition inverted:
-- the guard below rejects mask < m, not mask > m.)
ranges :: AddrRange IPv4 -> Int -> [AddrRange IPv4]
ranges addrRange mask
    | m > mask = []
    | otherwise = [makeAddrRange (intToIPv4 i) mask | i <- [first,first+step..last]]
    where
        -- r: the range's network address; m: its prefix length.
        (r, m) = addrRangePair addrRange
        first = iPv4ToInt r
        -- NOTE(review): 'last' shadows Prelude.last within this clause.
        last = first+(2^(32-m))-1
        -- Size of each produced sub-range, in addresses.
        step = 2^(32-mask)

-- Pack the four octets of an IPv4 address into one Int (big-endian).
iPv4ToInt :: IPv4 -> Int
iPv4ToInt i =
    let (o1:o2:o3:o4:_) = fromIPv4 i
        oct n pow = n*((256::Int)^(pow::Int))
    in (oct o1 3) + (oct o2 2) + (oct o3 1) + o4

-- Inverse of 'iPv4ToInt': split an Int back into four octets.
intToIPv4 :: Int -> IPv4
intToIPv4 i =
    let (i', o4) = i `divMod` 256
        (i'', o3) = i' `divMod` 256
        (o1, o2) = i'' `divMod` 256
    in toIPv4 [o1, o2, o3, o4]
rjosephwright/awstemplate
src/Data/IP/Extra.hs
Haskell
mit
909
module Main where

import Prelude
import qualified Hasql.Connection as A
import qualified Hasql.Session as B
import qualified Hasql.Transaction as C
import qualified Hasql.Transaction.Sessions as G
import qualified Main.Statements as D
import qualified Main.Transactions as E
import qualified Control.Concurrent.Async as F

-- | Conflict test-suite driver: open two connections, (re)create the
-- schema, run every test, then exit with success iff all tests passed.
main = bracket acquire release use
  where
    -- Acquire two independent connections; the tests race them against
    -- each other.  NOTE(review): the nested 'acquire' bindings
    -- deliberately shadow one another.
    acquire = (,) <$> acquire <*> acquire
      where
        acquire = join $ fmap (either (fail . show) return) $ A.acquire connectionSettings
          where
            connectionSettings = A.settings "localhost" 5432 "postgres" "" "postgres"
    -- Drop the schema, then close both connections.
    release (connection1, connection2) = do
      transaction connection1 E.dropSchema
      A.release connection1
      A.release connection2
    use (connection1, connection2) = do
      -- Best-effort drop: the schema may not exist yet, so failures
      -- are swallowed via 'try'.
      try (transaction connection1 E.dropSchema) :: IO (Either SomeException ())
      transaction connection1 E.createSchema
      success <- fmap and (traverse runTest tests)
      if success then exitSuccess else exitFailure
      where
        runTest test = test connection1 connection2
        tests = [readAndWriteTransactionsTest, transactionsTest, transactionAndQueryTest]

-- Run a session on a connection, turning any session error into an IO
-- failure.  NOTE(review): the 'session' parameter shadows the function name.
session connection session = B.run session connection >>= either (fail . show) return

-- Run a transaction at RepeatableRead/Write isolation through 'session'.
transaction connection transaction = session connection (G.transaction G.RepeatableRead G.Write transaction)

-- A test receives the two racing connections and reports success.
type Test = A.Connection -> A.Connection -> IO Bool

-- Two writers transfer concurrently on separate connections; the final
-- balances must account for all 2000 transfers.
transactionsTest :: Test
transactionsTest connection1 connection2 = do
  id1 <- session connection1 (B.statement 0 D.createAccount)
  id2 <- session connection1 (B.statement 0 D.createAccount)
  async1 <- F.async (replicateM_ 1000 (transaction connection1 (E.transfer id1 id2 1)))
  async2 <- F.async (replicateM_ 1000 (transaction connection2 (E.transfer id1 id2 1)))
  F.wait async1
  F.wait async2
  balance1 <- session connection1 (B.statement id1 D.getBalance)
  balance2 <- session connection1 (B.statement id2 D.getBalance)
  traceShowM balance1
  traceShowM balance2
  return (balance1 == Just 2000 && balance2 == Just (-2000))

-- A writer races a read-only transaction; the reads must not disturb
-- the writer's 1000 transfers.
readAndWriteTransactionsTest :: Test
readAndWriteTransactionsTest connection1 connection2 = do
  id1 <- session connection1 (B.statement 0 D.createAccount)
  id2 <- session connection1 (B.statement 0 D.createAccount)
  async1 <- F.async (replicateM_ 1000 (transaction connection1 (E.transfer id1 id2 1)))
  async2 <- F.async (replicateM_ 1000 (transaction connection2 (C.statement id1 D.getBalance)))
  F.wait async1
  F.wait async2
  balance1 <- session connection1 (B.statement id1 D.getBalance)
  balance2 <- session connection1 (B.statement id2 D.getBalance)
  traceShowM balance1
  traceShowM balance2
  return (balance1 == Just 1000 && balance2 == Just (-1000))

-- A multi-statement transaction races plain (non-transactional)
-- statements issued on the other connection.
transactionAndQueryTest :: Test
transactionAndQueryTest connection1 connection2 = do
  id1 <- session connection1 (B.statement 0 D.createAccount)
  id2 <- session connection1 (B.statement 0 D.createAccount)
  async1 <- F.async (transaction connection1 (E.transferTimes 200 id1 id2 1))
  async2 <- F.async (session connection2 (replicateM_ 200 (B.statement (id1, 1) D.modifyBalance)))
  F.wait async1
  F.wait async2
  balance1 <- session connection1 (B.statement id1 D.getBalance)
  balance2 <- session connection1 (B.statement id2 D.getBalance)
  traceShowM balance1
  traceShowM balance2
  return (balance1 == Just 400 && balance2 == Just (-200))
nikita-volkov/hasql-transaction
conflicts-test/Main.hs
Haskell
mit
3,586
-- | Tests for 'formatSearchResult': binary files, single-line results,
-- and multi-line (with context) results.
module HsSearch.SearchResultTest
  ( getBinaryFileSearchResultTests
  , getMultiLineSearchResultTests
  , getSingleLineSearchResultTests
  ) where

import qualified Data.ByteString.Char8 as BC

import HsSearch.Config
import HsSearch.SearchResult
import HsSearch.SearchSettings

import Test.Framework
import Test.Framework.Providers.HUnit
import Test.HUnit hiding (Test)

-- Shared fixture: a match on line 10, columns [15:23], of a C# source line.
testFileLineNum = 10
testFileMatchStartIndex = 15
testFileMatchEndIndex = 23
testFileLine = BC.pack "\tpublic class Searcher\n"

-- | Binary files have no line text, so the result is formatted as
-- "<path> matches at [0:0]".
getBinaryFileSearchResultTests :: IO [Test]
getBinaryFileSearchResultTests = do
  xsearchPath <- getXsearchPath
  let binaryFilePath = xsearchPath ++ "/csharp/CsSearch/CsSearch/Searcher.exe"
  let binaryFileSearchResult = blankSearchResult { filePath=binaryFilePath
                                                 , lineNum=0
                                                 , matchStartIndex=0
                                                 , matchEndIndex=0
                                                 }
  let settings = defaultSearchSettings
  let formattedResult = formatSearchResult settings binaryFileSearchResult
  let expectedFormat = binaryFilePath ++ " matches at [0:0]"
  return [testCase "binaryFileSearchResult" (formattedResult @?= expectedFormat)]

-- | Without context lines the result is one line:
-- "<path>: <line>: [<start>:<end>]: <trimmed line text>".
getSingleLineSearchResultTests :: IO [Test]
getSingleLineSearchResultTests = do
  xsearchPath <- getXsearchPath
  let testFilePath = xsearchPath ++ "/csharp/CsSearch/CsSearch/Searcher.cs"
  let singleLineSearchResult = blankSearchResult { filePath=testFilePath
                                                 , lineNum=testFileLineNum
                                                 , matchStartIndex=testFileMatchStartIndex
                                                 , matchEndIndex=testFileMatchEndIndex
                                                 , line=testFileLine
                                                 }
  -- colorize is disabled so the expected string carries no escape codes.
  let settings = defaultSearchSettings { colorize=False }
  let formattedResult = formatSearchResult settings singleLineSearchResult
  let expectedFormat = testFilePath ++ ": " ++ show testFileLineNum ++ ": [" ++
        show testFileMatchStartIndex ++ ":" ++ show testFileMatchEndIndex ++ "]: " ++
        trimLeadingWhitespace (BC.unpack testFileLine)
  return [testCase "singleLineSearchResult" (formattedResult @?= expectedFormat)]

-- | With linesBefore/linesAfter set, the result is a banner, a header
-- line, and a numbered context listing with the match marked by ">".
getMultiLineSearchResultTests :: IO [Test]
getMultiLineSearchResultTests = do
  xsearchPath <- getXsearchPath
  let testFilePath = xsearchPath ++ "/csharp/CsSearch/CsSearch/Searcher.cs"
  let lb = [ BC.pack "namespace CsSearch\n"
           , BC.pack "{\n" ]
  let la = [ BC.pack "\t{\n"
           , BC.pack "\t\tprivate readonly FileTypes _fileTypes;\n" ]
  let multiLineSearchResult = blankSearchResult { filePath=testFilePath
                                                , lineNum=testFileLineNum
                                                , matchStartIndex=testFileMatchStartIndex
                                                , matchEndIndex=testFileMatchEndIndex
                                                , line=testFileLine
                                                , beforeLines=lb
                                                , afterLines=la
                                                }
  let settings = defaultSearchSettings { colorize=False, linesBefore=2, linesAfter=2 }
  let formattedResult = formatSearchResult settings multiLineSearchResult
  let expectedFormat = replicate 80 '=' ++ "\n" ++
        testFilePath ++ ": " ++ show testFileLineNum ++ ": [" ++
        show testFileMatchStartIndex ++ ":" ++ show testFileMatchEndIndex ++ "]\n" ++
        replicate 80 '-' ++ "\n" ++
        " 8 | namespace CsSearch\n" ++
        " 9 | {\n" ++
        "> 10 | \tpublic class Searcher\n" ++
        " 11 | \t{\n" ++
        " 12 | \t\tprivate readonly FileTypes _fileTypes;\n"
  return [testCase "multiLineSearchResult" (formattedResult @?= expectedFormat)]
clarkcb/xsearch
haskell/hssearch/test/HsSearch/SearchResultTest.hs
Haskell
mit
4,148
-- | Top-level wiring of the log shipper: fan events in from all configured
-- inputs over one bounded queue, then fan them out to one bounded queue
-- per output.
module Shipper (
    startShipper,
    Event,
    Input(..),
    Output(..),
) where

import Shipper.Inputs
import Shipper.Outputs
import Shipper.Types
import Shipper.Event (maxPacketSize)
import Control.Concurrent.STM (atomically)
import Control.Concurrent.STM.TBQueue
import Control.Concurrent
import Control.Monad
import System.ZMQ4 (curveKeyPair)

-- How long inputs must sleep when there is no more input to read
waitTime :: Int
waitTime = 1000000 -- 1s

-- Bound for every event queue, in events.
queueSize :: Int
queueSize = maxPacketSize

-- | Start one thread per configured input and output, then pump events
-- from the shared input queue to every output queue forever.
-- Errors out if the configuration has no inputs or no outputs.
startShipper :: [ConfigSegment] -> IO ()
startShipper segments = do
    -- Events read from inputs come through this 'channel'
    in_ch <- atomically $ newTBQueue queueSize

    when (null inputSegments) $ error "No inputs specified"
    when (null outputSegments) $ error "No outputs specified"

    -- Do something useful for each input segment, we hand all inputs the same
    -- channel to stream events over
    forM_ inputSegments $ \(InputSegment i) -> case i of
        FileInput _ _ -> forkIO $ startFileInput in_ch i waitTime
        ZMQ4Input _ _ -> forkIO $ startZMQ4Input in_ch i waitTime

    -- One CURVE key pair, shared by all ZMQ4 outputs.
    k <- curveKeyPair

    -- Output segments however, each get their own channel. This is so that
    -- inputs all block when any given output blocks. That way we don't leak
    -- any memory and outputs don't get out of sync when a single output dies.
    out_chs <- forM outputSegments $ \(OutputSegment o) -> do
        out_chan <- atomically $ newTBQueue queueSize
        case o of
            Debug -> forkIO $ startDebugOutput out_chan waitTime
            ZMQ4Output _ _ _ -> forkIO $ startZMQ4Output out_chan waitTime o k
            Redis _ _ _ _ -> forkIO $ startRedisOutput out_chan waitTime o
        return out_chan

    forever $ do
        -- For every event that comes in, try to send it to every output
        -- channel. This way, if an output gets clogged we can block all the
        -- way back to every input magically, and no output should get more
        -- than one event more than another.
        event <- atomically $ readTBQueue in_ch
        forM_ out_chs $ \ch -> atomically $ writeTBQueue ch event
  where
    -- Partition the configuration into input and output segments.
    isInputSegment (InputSegment _) = True
    isInputSegment _ = False
    isOutputSegment (OutputSegment _) = True
    isOutputSegment _ = False

    inputSegments = filter isInputSegment segments
    outputSegments = filter isOutputSegment segments
christian-marie/pill-bug
Shipper.hs
Haskell
mit
2,463
-- | Build a table mapping each number of a list to its divisors drawn
-- from that same list.
module FactorTable (computeForList, toList) where

import qualified Data.Map as M
import qualified Data.List as L
import qualified Data.Set as S

import Types

-- | View the table as an association list of (number, divisors).
toList :: FactorTable -> [(Int, [Int])]
toList = M.toList

-- | Build the table for a list of numbers: duplicates are removed and the
-- values are inserted in ascending order so each insertion only needs to
-- inspect previously inserted (smaller) keys.
computeForList :: [Int] -> FactorTable
computeForList ns = L.foldl' insert empty (L.sort (unique ns))

-- | Insert one number, recording which existing keys divide it.
-- Zero is rejected: it cannot act as a divisor.
insert :: FactorTable -> Int -> FactorTable
insert _ 0 = error "0 is not a valid divisor"
insert table n = M.insert n divisors table
  where
    -- Note: Because the numbers we insert are coming in
    -- already sorted, we only have to check prior keys.
    divisors = filter (`divides` n) (M.keys table)

-- | The empty table.
empty :: FactorTable
empty = M.empty

-- | Does the first number evenly divide the second?
divides :: Int -> Int -> Bool
x `divides` y = y `mod` x == 0

-- | Deduplicate via a Set; the result comes back in ascending order.
unique :: Ord a => [a] -> [a]
unique = S.elems . S.fromList

----------
-- Note: These functions aren't used. They're simply illustrations
-- of how to invert the factoring procedure so that divisors point to
-- divisees rather than the other way around. As you can see, they're
-- very simple modifications of `computeForList` and `insert` above.

computeForList' ns = L.foldl' insert' empty (reverse (L.sort (unique ns)))

insert' table n = M.insert n divisees table
  where
    divisees = filter (n `divides`) (M.keys table)
mg50avant/factorizer
src/FactorTable.hs
Haskell
mit
1,305
module ChatCore.Util.Error where

import Control.Exception
import Control.Monad

-- | Runs the given IO action and wraps the return value in Nothing if an
-- exception is raised.  Any synchronous exception (every 'SomeException')
-- is swallowed.
tryMaybe :: IO a -> IO (Maybe a)
tryMaybe action = handle (\(SomeException _) -> return Nothing) (fmap Just action)
Forkk/ChatCore
ChatCore/Util/Error.hs
Haskell
mit
355
module Exercise where

-- Arithmetic operators.
data OA = Add | Mul

-- Comparison operators.
data OB = EQu | GTh | LTh

-- | Arithmetic expressions: literals, variables, binary arithmetic, and
-- conditionals guarded by a boolean expression.
data ExprA
  = Const Int
  | Var Char
  | OpA OA ExprA ExprA
  | If ExprB ExprA ExprA

-- | Boolean expressions: a comparison of two arithmetic expressions.
data ExprB = OpB OB ExprA ExprA

-- | Evaluate an arithmetic expression to an Int.
-- Note: 'Var' has no case (no environment exists), matching the
-- original's behavior of failing on variables.
evalA :: ExprA -> Int
evalA expr = case expr of
  Const n       -> n
  OpA op l r    -> arith op (evalA l) (evalA r)
  If cond t e   -> if evalB cond then evalA t else evalA e
  where
    arith Add = (+)
    arith Mul = (*)

-- | Evaluate a boolean expression by comparing its two operands.
evalB :: ExprB -> Bool
evalB (OpB op l r) = rel op (evalA l) (evalA r)
  where
    rel EQu = (==)
    rel GTh = (>)
    rel LTh = (<)
tcoenraad/functioneel-programmeren
2012/opg2a.hs
Haskell
mit
683
{-# OPTIONS_GHC -Wall -}
{-
 - Module about terms. Used after parsing eMOD
 -
 -
 - Copyright 2013 -- name removed for blind review, all rights reserved! Please push a git request to receive author's name! --
 - Licensed under the Apache License, Version 2.0 (the "License");
 - you may not use this file except in compliance with the License.
 - You may obtain a copy of the License at
 -
 -      http://www.apache.org/licenses/LICENSE-2.0
 -
 - Unless required by applicable law or agreed to in writing, software
 - distributed under the License is distributed on an "AS IS" BASIS,
 - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 - See the License for the specific language governing permissions and
 - limitations under the License.
 -}
module Terms(Term(..),(.+.)) where

import qualified Data.Set as Set

-- | Abstract syntax of boolean terms produced after parsing eMOD.
-- The first group are the usual connectives; the string/int-carrying
-- constructors presumably name channel signals and flop values — their
-- exact semantics are defined by the consumer, not here (TODO confirm).
data Term = T_AND Term Term
          | T_OR Term Term
          | T_NOT Term
          | T_XOR Term Term
          | T_ITE Term Term Term   -- if-then-else
          | T_QTRDY String
          | T_QIRDY String
          | T_QDATA String Int
          | T_OTRDY String
          | T_IIRDY String
          | T_IDATA String Int
          | T_FLOPV String
          | T_RESET
          | T_UNKNOWN Int
          | T_INPUT String -- same as unknown, only named
          | T_VALUE Bool
          deriving (Eq,Ord,Show)

-- | A set of terms, ordered by the derived 'Ord'.
type TermSet = Set.Set Term

-- | Union of two term sets.
(.+.) :: TermSet -> TermSet -> TermSet
(.+.) = Set.union
DatePaper616/code
Terms.hs
Haskell
apache-2.0
1,324
-- NOTE(review): this record's original newlines have been collapsed into a
-- few very long lines, so the text below is kept byte-identical rather than
-- restyled; line breaks must be re-established before it can compile.
-- Purpose (from the visible code): walks a Drasil 'DocDesc' using generic
-- multiplates ('preorderFold'/'appendPlate'), extracting every expression
-- ('egetDocDesc'), every sentence ('getDocDesc'), and every concept
-- instance ('ciGetDocDesc') from the document's sections; 'sentencePlate'
-- is the reusable fold over all sentence-bearing constructors.
{-# LANGUAGE LambdaCase, Rank2Types #-} module Drasil.ExtractDocDesc (getDocDesc, egetDocDesc, ciGetDocDesc, sentencePlate) where import Control.Lens((^.)) import Drasil.DocumentLanguage.Core import Drasil.Sections.SpecificSystemDescription (inDataConstTbl, outDataConstTbl) import Language.Drasil hiding (Manual, Vector, Verb) import Theory.Drasil (Theory(..)) import Data.List(transpose) import Data.Functor.Constant (Constant(Constant)) import Data.Generics.Multiplate (appendPlate, foldFor, purePlate, preorderFold) secConPlate :: Monoid b => (forall a. HasContents a => [a] -> b) -> ([Section] -> b) -> DLPlate (Constant b) secConPlate mCon mSec = preorderFold $ purePlate { refSec = Constant <$> \(RefProg c _) -> mCon [c], introSub = Constant <$> \case (IOrgSec _ _ s _) -> mSec [s] _ -> mempty, --gsdSec = Constant <$> \case -- (GSDProg _) -> mempty, gsdSub = Constant <$> \case (SysCntxt c) -> mCon c (UsrChars c) -> mCon c (SystCons c s) -> mCon c `mappend` mSec s, pdSec = Constant <$> \(PDProg _ s _) -> mSec s, pdSub = Constant <$> \case (TermsAndDefs _ _) -> mempty (PhySysDesc _ _ lc c) -> mCon [lc] `mappend` mCon c (Goals _ _) -> mempty, scsSub = Constant <$> \case (Constraints _ c) -> mCon [inDataConstTbl c] (CorrSolnPpties c cs) -> mCon [outDataConstTbl c] `mappend` mCon cs _ -> mempty, reqSub = Constant <$> \case (FReqsSub' _ c) -> mCon c (FReqsSub _ c) -> mCon c (NonFReqsSub _) -> mempty, offShelfSec = Constant <$> \(OffShelfSolnsProg c) -> mCon c, appendSec = Constant <$> \(AppndxProg c) -> mCon c } exprPlate :: DLPlate (Constant [Expr]) exprPlate = sentencePlate (concatMap sentToExp) `appendPlate` secConPlate (concatMap egetCon') (concatMap egetSec) `appendPlate` (preorderFold $ purePlate { scsSub = Constant <$> \case (TMs _ _ t) -> let r = concatMap (\x -> x ^. invariants ++ defExp (x ^. defined_quant ++ x ^. defined_fun) ++ r (x ^. 
valid_context)) in r t (DDs _ _ d _) -> map sy d ++ defExp d (GDs _ _ g _) -> expRel g (IMs _ _ i _) -> expRel i _ -> [], auxConsSec = Constant <$> \(AuxConsProg _ qdef) -> defExp qdef })where defExp :: DefiningExpr a => [a] -> [Expr] defExp = map (^. defnExpr) expRel :: ExprRelat a => [a] -> [Expr] expRel = map (^. relat) sentToExp :: Sentence -> [Expr] sentToExp ((:+:) s1 s2) = sentToExp s1 ++ sentToExp s2 sentToExp (E e) = [e] sentToExp _ = [] fmGetDocDesc :: DLPlate (Constant [a]) -> DocDesc -> [a] fmGetDocDesc p = concatMap (foldFor docSec p) egetDocDesc :: DocDesc -> [Expr] egetDocDesc = fmGetDocDesc exprPlate egetSec :: Section -> [Expr] egetSec (Section _ sc _ ) = concatMap egetSecCon sc egetSecCon :: SecCons -> [Expr] egetSecCon (Sub s) = egetSec s egetSecCon (Con c) = egetCon' c egetCon' :: HasContents a => a -> [Expr] egetCon' = egetCon . (^. accessContents) egetCon :: RawContent -> [Expr] egetCon (EqnBlock e) = [e] egetCon (Defini _ []) = [] egetCon (Defini dt (hd:tl)) = concatMap egetCon' (snd hd) ++ egetCon (Defini dt tl) egetCon _ = [] sentencePlate :: Monoid a => ([Sentence] -> a) -> DLPlate (Constant a) sentencePlate f = appendPlate (secConPlate (f . concatMap getCon') $ f . concatMap getSec) $ preorderFold $ purePlate { introSec = Constant . f <$> \(IntroProg s1 s2 _) -> [s1, s2], introSub = Constant . f <$> \case (IPurpose s) -> [s] (IScope s) -> [s] (IChar s1 s2 s3) -> concat [s1, s2, s3] (IOrgSec s1 _ _ s2) -> [s1, s2], stkSub = Constant . f <$> \case (Client _ s) -> [s] (Cstmr _) -> [], pdSec = Constant . f <$> \(PDProg s _ _) -> [s], pdSub = Constant . f <$> \case (TermsAndDefs Nothing cs) -> def cs (TermsAndDefs (Just s) cs) -> s : def cs (PhySysDesc _ s _ _) -> s (Goals s c) -> s ++ def c, scsSub = Constant . f <$> \case (Assumptions c) -> def c (TMs s _ t) -> let r = mappend s . concatMap (\x -> def (x ^. operations) ++ def (x ^. defined_quant) ++ notes [x] ++ r (x ^. 
valid_context)) in r t (DDs s _ d _) -> s ++ der d ++ notes d (GDs s _ d _) -> def d ++ s ++ der d ++ notes d (IMs s _ d _) -> s ++ der d ++ notes d (Constraints s _) -> [s] (CorrSolnPpties _ _) -> [], reqSub = Constant . f <$> \case (FReqsSub' c _) -> def c (FReqsSub c _) -> def c (NonFReqsSub c) -> def c, lcsSec = Constant . f <$> \(LCsProg c) -> def c, ucsSec = Constant . f <$> \(UCsProg c) -> def c, traceSec = Constant . f <$> \(TraceabilityProg progs) -> concatMap (\(TraceConfig _ ls s _ _) -> s : ls) progs, auxConsSec = Constant . f <$> \(AuxConsProg _ qdef) -> def qdef } where def :: Definition a => [a] -> [Sentence] def = map (^. defn) der :: HasDerivation a => [a] -> [Sentence] der = concatMap (getDerivSent . (^. derivations)) getDerivSent :: Maybe Derivation -> [Sentence] getDerivSent Nothing = [] getDerivSent (Just (Derivation h s)) = h : s notes :: HasAdditionalNotes a => [a] -> [Sentence] notes = concatMap (^. getNotes) getDocDesc :: DocDesc -> [Sentence] getDocDesc = fmGetDocDesc (sentencePlate id) getSec :: Section -> [Sentence] getSec (Section t sc _ ) = t : concatMap getSecCon sc getSecCon :: SecCons -> [Sentence] getSecCon (Sub s) = getSec s getSecCon (Con c) = getCon' c getCon' :: HasContents a => a -> [Sentence] getCon' = getCon . (^. accessContents) getCon :: RawContent -> [Sentence] getCon (Table s1 s2 t _) = isVar (s1, transpose s2) ++ [t] getCon (Paragraph s) = [s] getCon EqnBlock{} = [] getCon (DerivBlock h d) = h : concatMap getCon d getCon (Enumeration lst) = getLT lst getCon (Figure l _ _) = [l] getCon (Bib bref) = getBib bref getCon (Graph [(s1, s2)] _ _ l) = [s1, s2, l] getCon Graph{} = [] getCon (Defini _ []) = [] getCon (Defini dt (hd:fs)) = concatMap getCon' (snd hd) ++ getCon (Defini dt fs) -- This function is used in collecting sentence from table. -- Since only the table's first Column titled "Var" should be collected, -- this function is used to filter out only the first Column of Sentence. 
isVar :: ([Sentence], [[Sentence]]) -> [Sentence] isVar (S "Var" : _, hd1 : _) = hd1 isVar (_ : tl, _ : tl1) = isVar (tl, tl1) isVar ([], _) = [] isVar (_, []) = [] getBib :: (HasFields c) => [c] -> [Sentence] getBib a = map getField $ concatMap (^. getFields) a getField :: CiteField -> Sentence getField (Address s) = S s getField Author{} = EmptyS getField (BookTitle s) = S s getField Chapter{} = EmptyS getField Edition{} = EmptyS getField Editor{} = EmptyS getField HowPublished{} = EmptyS getField (Institution s) = S s getField (Journal s) = S s getField Month{} = EmptyS getField (Note s) = S s getField Number{} = EmptyS getField (Organization s) = S s getField Pages{} = EmptyS getField (Publisher s) = S s getField (School s) = S s getField (Series s) = S s getField (Title s) = S s getField (Type s) = S s getField Volume{} = EmptyS getField Year{} = EmptyS getLT :: ListType -> [Sentence] getLT (Bullet it) = concatMap (getIL . fst) it getLT (Numeric it) = concatMap (getIL . fst) it getLT (Simple lp) = concatMap getLP lp getLT (Desc lp) = concatMap getLP lp getLT (Definitions lp) = concatMap getLP lp getLP :: ListTuple -> [Sentence] getLP (t, it, _) = t : getIL it getIL :: ItemType -> [Sentence] getIL (Flat s) = [s] getIL (Nested h lt) = h : getLT lt ciPlate :: DLPlate (Constant [CI]) ciPlate = preorderFold $ purePlate { introSub = Constant <$> \case (IOrgSec _ ci _ _) -> [ci] _ -> [], stkSub = Constant <$> \case (Client ci _) -> [ci] (Cstmr ci) -> [ci], auxConsSec = Constant <$> \(AuxConsProg ci _) -> [ci] } ciGetDocDesc :: DocDesc -> [CI] ciGetDocDesc = fmGetDocDesc ciPlate
JacquesCarette/literate-scientific-software
code/drasil-docLang/Drasil/ExtractDocDesc.hs
Haskell
bsd-2-clause
7,905
-- | Hspec suite for 'Permissions.Onping.Internal'.  Reads a local
-- @config.yml@ and talks to the database it names, so this is an
-- integration test rather than a pure unit test.
module Permissions.Onping.InternalSpec (main, spec) where

import Test.Hspec
import Permissions.Onping.Internal
import Database.Persist
import Persist.Mongo.Settings
import Data.Aeson
import Control.Applicative
import qualified Data.Yaml as Y
import Data.Traversable
import Data.Maybe

-- | Standalone entry point so the spec can run on its own.
main :: IO ()
main = hspec spec

-- | Decode the DB configuration, fetch the first user, and check that a
-- super-user list can be retrieved for that user's key.
spec :: Spec
spec =
  describe "getSuperUserList" $
    it "should pull a super user list from the supplied runDB with config.yml" $ do
      decoded <- Y.decodeFileEither "config.yml"
      case decoded of
        Left _ -> do
          -- Config failed to parse: report it and force the expectation to fail.
          print "error decoding config.yml"
          False `shouldBe` True
        Right conf -> do
          firstUser <- runDBConf conf $ selectFirst [] [Asc UserId]
          result <- traverse getSuperUserList (entityKey <$> firstUser)
          isJust result `shouldBe` True
smurphy8/onping-permissions
test/Permissions/Onping/InternalSpec.hs
Haskell
bsd-3-clause
920
-- NOTE(review): original newlines have been collapsed into a few very long
-- lines; the code below is kept byte-identical rather than restyled (the
-- update machinery deliberately avoids reallocation and its behavior depends
-- on exact clause order), so line breaks must be restored before compiling.
-- Purpose (from the visible code): a zipper ('TermPath'/'BinderPath') over
-- Idris proof terms, so tactics can be applied at a named hole ('atHole',
-- 'refocus', 'goal') without rebuilding the whole term; 'updates' caches
-- pending hole solutions that are replayed by 'updateSolvedPath'.
{-# LANGUAGE MultiParamTypeClasses, FlexibleInstances, PatternGuards #-} {- Implements a proof state, some primitive tactics for manipulating proofs, and some high level commands for introducing new theorems, evaluation/checking inside the proof system, etc. --} module Idris.Core.ProofTerm(ProofTerm, Goal(..), mkProofTerm, getProofTerm, updateSolved, updateSolvedTerm, updateSolvedTerm', bound_in, bound_in_term, refocus, Hole, RunTactic', goal, atHole) where import Idris.Core.Typecheck import Idris.Core.Evaluate import Idris.Core.TT import Control.Monad.State.Strict import Data.List import Debug.Trace data TermPath = Top | AppL TermPath Term | AppR Term TermPath | InBind Name BinderPath Term | InScope Name (Binder Term) TermPath deriving Show data BinderPath = Binder (Binder TermPath) | LetT TermPath Term | LetV Term TermPath | GuessT TermPath Term | GuessV Term TermPath deriving Show replaceTop :: TermPath -> TermPath -> TermPath replaceTop p Top = p replaceTop p (AppL l t) = AppL (replaceTop p l) t replaceTop p (AppR t r) = AppR t (replaceTop p r) replaceTop p (InBind n bp sc) = InBind n (replaceTopB p bp) sc where replaceTopB p (Binder b) = Binder (fmap (replaceTop p) b) replaceTopB p (LetT t v) = LetT (replaceTop p t) v replaceTopB p (LetV t v) = LetV t (replaceTop p v) replaceTopB p (GuessT t v) = GuessT (replaceTop p t) v replaceTopB p (GuessV t v) = GuessV t (replaceTop p v) replaceTop p (InScope n b sc) = InScope n b (replaceTop p sc) rebuildTerm :: Term -> TermPath -> Term rebuildTerm tm Top = tm rebuildTerm tm (AppL p a) = App (rebuildTerm tm p) a rebuildTerm tm (AppR f p) = App f (rebuildTerm tm p) rebuildTerm tm (InScope n b p) = Bind n b (rebuildTerm tm p) rebuildTerm tm (InBind n bp sc) = Bind n (rebuildBinder tm bp) sc rebuildBinder :: Term -> BinderPath -> Binder Term rebuildBinder tm (Binder p) = fmap (rebuildTerm tm) p rebuildBinder tm (LetT p t) = Let (rebuildTerm tm p) t rebuildBinder tm (LetV v p) = Let v (rebuildTerm tm p) rebuildBinder tm 
(GuessT p t) = Guess (rebuildTerm tm p) t rebuildBinder tm (GuessV v p) = Guess v (rebuildTerm tm p) findHole :: Name -> Env -> Term -> Maybe (TermPath, Env, Term) findHole n env t = fh' env Top t where fh' env path tm@(Bind x h sc) | hole h && n == x = Just (path, env, tm) fh' env path (App f a) | Just (p, env', tm) <- fh' env path a = Just (AppR f p, env', tm) | Just (p, env', tm) <- fh' env path f = Just (AppL p a, env', tm) fh' env path (Bind x b sc) | Just (bp, env', tm) <- fhB env path b = Just (InBind x bp sc, env', tm) | Just (p, env', tm) <- fh' ((x,b):env) path sc = Just (InScope x b p, env', tm) fh' _ _ _ = Nothing fhB env path (Let t v) | Just (p, env', tm) <- fh' env path t = Just (LetT p v, env', tm) | Just (p, env', tm) <- fh' env path v = Just (LetV t p, env', tm) fhB env path (Guess t v) | Just (p, env', tm) <- fh' env path t = Just (GuessT p v, env', tm) | Just (p, env', tm) <- fh' env path v = Just (GuessV t p, env', tm) fhB env path b | Just (p, env', tm) <- fh' env path (binderTy b) = Just (Binder (fmap (\_ -> p) b), env', tm) fhB _ _ _ = Nothing data ProofTerm = PT { -- wholeterm :: Term, path :: TermPath, subterm_env :: Env, subterm :: Term, updates :: [(Name, Term)] } deriving Show type RunTactic' a = Context -> Env -> Term -> StateT a TC Term type Hole = Maybe Name -- Nothing = default hole, first in list in proof state refocus :: Hole -> ProofTerm -> ProofTerm refocus h t = let res = refocus' h t in res -- trace ("OLD: " ++ show t ++ "\n" ++ -- "REFOCUSSED " ++ show h ++ ": " ++ show res) res refocus' (Just n) pt@(PT path env tm ups) | Just (p', env', tm') <- findHole n env tm = PT (replaceTop p' path) env' tm' ups | Just (p', env', tm') <- findHole n [] (rebuildTerm tm (updateSolvedPath ups path)) = PT p' env' tm' [] | otherwise = pt refocus' _ pt = pt data Goal = GD { premises :: Env, goalType :: Binder Term } mkProofTerm :: Term -> ProofTerm mkProofTerm tm = PT Top [] tm [] getProofTerm :: ProofTerm -> Term getProofTerm (PT path _ sub 
ups) = rebuildTerm sub (updateSolvedPath ups path) same :: Eq a => Maybe a -> a -> Bool same Nothing n = True same (Just x) n = x == n hole :: Binder b -> Bool hole (Hole _) = True hole (Guess _ _) = True hole _ = False updateSolvedTerm :: [(Name, Term)] -> Term -> Term updateSolvedTerm xs x = fst $ updateSolvedTerm' xs x updateSolvedTerm' [] x = (x, False) updateSolvedTerm' xs x = -- updateSolved' xs x where -- This version below saves allocations, because it doesn't need to reallocate -- the term if there are no updates to do. -- The Bool is ugly, and probably 'Maybe' would be less ugly, but >>= is -- the wrong combinator. Feel free to tidy up as long as it's still as cheap :). updateSolved' xs x where updateSolved' [] x = (x, False) updateSolved' xs (Bind n (Hole ty) t) | Just v <- lookup n xs = case xs of [_] -> (subst n v t, True) -- some may be Vs! Can't assume -- explicit names _ -> let (t', _) = updateSolved' xs t in (subst n v t', True) updateSolved' xs tm@(Bind n b t) | otherwise = let (t', ut) = updateSolved' xs t (b', ub) = updateSolvedB' xs b in if ut || ub then (Bind n b' t', True) else (tm, False) updateSolved' xs t@(App f a) = let (f', uf) = updateSolved' xs f (a', ua) = updateSolved' xs a in if uf || ua then (App f' a', True) else (t, False) updateSolved' xs t@(P _ n@(MN _ _) _) | Just v <- lookup n xs = (v, True) updateSolved' xs t = (t, False) updateSolvedB' xs b@(Let t v) = let (t', ut) = updateSolved' xs t (v', uv) = updateSolved' xs v in if ut || uv then (Let t' v', True) else (b, False) updateSolvedB' xs b@(Guess t v) = let (t', ut) = updateSolved' xs t (v', uv) = updateSolved' xs v in if ut || uv then (Guess t' v', True) else (b, False) updateSolvedB' xs b = let (ty', u) = updateSolved' xs (binderTy b) in if u then (b { binderTy = ty' }, u) else (b, False) noneOf ns (P _ n _) | n `elem` ns = False noneOf ns (App f a) = noneOf ns a && noneOf ns f noneOf ns (Bind n (Hole ty) t) = n `notElem` ns && noneOf ns ty && noneOf ns t noneOf ns (Bind n 
b t) = noneOf ns t && noneOfB ns b where noneOfB ns (Let t v) = noneOf ns t && noneOf ns v noneOfB ns (Guess t v) = noneOf ns t && noneOf ns v noneOfB ns b = noneOf ns (binderTy b) noneOf ns _ = True updateEnv [] e = e updateEnv ns [] = [] updateEnv ns ((n, b) : env) = (n, fmap (updateSolvedTerm ns) b) : updateEnv ns env updateSolvedPath [] t = t updateSolvedPath ns Top = Top updateSolvedPath ns (AppL p r) = AppL (updateSolvedPath ns p) (updateSolvedTerm ns r) updateSolvedPath ns (AppR l p) = AppR (updateSolvedTerm ns l) (updateSolvedPath ns p) updateSolvedPath ns (InBind n b sc) = InBind n (updateSolvedPathB b) (updateSolvedTerm ns sc) where updateSolvedPathB (Binder b) = Binder (fmap (updateSolvedPath ns) b) updateSolvedPathB (LetT p v) = LetT (updateSolvedPath ns p) (updateSolvedTerm ns v) updateSolvedPathB (LetV v p) = LetV (updateSolvedTerm ns v) (updateSolvedPath ns p) updateSolvedPathB (GuessT p v) = GuessT (updateSolvedPath ns p) (updateSolvedTerm ns v) updateSolvedPathB (GuessV v p) = GuessV (updateSolvedTerm ns v) (updateSolvedPath ns p) updateSolvedPath ns (InScope n (Hole ty) t) | Just v <- lookup n ns = case ns of [_] -> updateSolvedPath [(n,v)] t _ -> updateSolvedPath ns $ updateSolvedPath [(n,v)] t updateSolvedPath ns (InScope n b sc) = InScope n (fmap (updateSolvedTerm ns) b) (updateSolvedPath ns sc) updateSolved :: [(Name, Term)] -> ProofTerm -> ProofTerm updateSolved xs pt@(PT path env sub ups) = PT path -- (updateSolvedPath xs path) (updateEnv xs (filter (\(n, t) -> n `notElem` map fst xs) env)) (updateSolvedTerm xs sub) (ups ++ xs) goal :: Hole -> ProofTerm -> TC Goal goal h pt@(PT path env sub ups) -- | OK ginf <- g env sub = return ginf | otherwise = g [] (rebuildTerm sub (updateSolvedPath ups path)) where g :: Env -> Term -> TC Goal g env (Bind n b@(Guess _ _) sc) | same h n = return $ GD env b | otherwise = gb env b `mplus` g ((n, b):env) sc g env (Bind n b sc) | hole b && same h n = return $ GD env b | otherwise = g ((n, b):env) sc `mplus` 
gb env b g env (App f a) = g env a `mplus` g env f g env t = fail "Can't find hole" gb env (Let t v) = g env v `mplus` g env t gb env (Guess t v) = g env v `mplus` g env t gb env t = g env (binderTy t) atHole :: Hole -> RunTactic' a -> Context -> Env -> ProofTerm -> StateT a TC (ProofTerm, Bool) atHole h f c e pt -- @(PT path env sub) = do let PT path env sub ups = refocus h pt (tm, u) <- atH f c env sub return (PT path env tm ups, u) -- if u then return (PT path env tm ups, u) -- else do let PT path env sub ups = refocus h pt -- (tm, u) <- atH f c env sub -- return (PT path env tm ups, u) where updated o = do o' <- o return (o', True) ulift2 f c env op a b = do (b', u) <- atH f c env b if u then return (op a b', True) else do (a', u) <- atH f c env a return (op a' b', u) -- Search the things most likely to contain the binding first! atH :: RunTactic' a -> Context -> Env -> Term -> StateT a TC (Term, Bool) atH f c env binder@(Bind n b@(Guess t v) sc) | same h n = updated (f c env binder) | otherwise = do -- binder first (b', u) <- ulift2 f c env Guess t v if u then return (Bind n b' sc, True) else do (sc', u) <- atH f c ((n, b) : env) sc return (Bind n b' sc', u) atH f c env binder@(Bind n b sc) | hole b && same h n = updated (f c env binder) | otherwise -- scope first = do (sc', u) <- atH f c ((n, b) : env) sc if u then return (Bind n b sc', True) else do (b', u) <- atHb f c env b return (Bind n b' sc', u) atH tac c env (App f a) = ulift2 tac c env App f a atH tac c env t = return (t, False) atHb f c env (Let t v) = ulift2 f c env Let t v atHb f c env (Guess t v) = ulift2 f c env Guess t v atHb f c env t = do (ty', u) <- atH f c env (binderTy t) return (t { binderTy = ty' }, u) bound_in :: ProofTerm -> [Name] bound_in (PT path _ tm ups) = bound_in_term (rebuildTerm tm (updateSolvedPath ups path)) bound_in_term :: Term -> [Name] bound_in_term (Bind n b sc) = n : bi b ++ bound_in_term sc where bi (Let t v) = bound_in_term t ++ bound_in_term v bi (Guess t v) = 
bound_in_term t ++ bound_in_term v bi b = bound_in_term (binderTy b) bound_in_term (App f a) = bound_in_term f ++ bound_in_term a bound_in_term _ = []
andyarvanitis/Idris-dev
src/Idris/Core/ProofTerm.hs
Haskell
bsd-3-clause
12,408
-- NOTE(review): original newlines have been collapsed into a few very long
-- lines; kept byte-identical (depends on the project's 'Config' record from
-- Juno.Types, so a restyle cannot be verified here).  Restore line breaks
-- before compiling.
-- Purpose (from the visible code): interactively asks for cluster/client
-- node counts, generates an Ed25519 keypair per node, splits the key maps
-- into cluster vs. client, and writes one @<port>-cluster.yaml@ or
-- @<port>-client.yaml@ per node under @run/conf@.
module Main (main) where import Control.Arrow import Crypto.Random import Data.Ratio import Crypto.Ed25519.Pure import Text.Read import Data.Thyme.Clock import System.IO import System.FilePath import qualified Data.Yaml as Y import qualified Data.Set as Set import Data.Map.Strict (Map) import qualified Data.Map.Strict as Map import Juno.Types configDir :: String configDir = "run/conf" nodes :: [NodeID] nodes = iterate (\n@(NodeID h p _) -> n {_port = p + 1, _fullAddr = "tcp://" ++ h ++ ":" ++ show (p+1)}) (NodeID "127.0.0.1" 10000 "tcp://127.0.0.1:10000") makeKeys :: CryptoRandomGen g => Int -> g -> [(PrivateKey,PublicKey)] makeKeys 0 _ = [] makeKeys n g = case generateKeyPair g of Left err -> error $ show err Right (p,priv,g') -> (p,priv) : makeKeys (n-1) g' keyMaps :: [(PrivateKey,PublicKey)] -> (Map NodeID PrivateKey, Map NodeID PublicKey) keyMaps ls = (Map.fromList $ zip nodes (fst <$> ls), Map.fromList $ zip nodes (snd <$> ls)) main :: IO () main = do putStrLn "Number of cluster nodes?" hFlush stdout mn <- fmap readMaybe getLine putStrLn "Number of client nodes?" hFlush stdout cn <- fmap readMaybe getLine putStrLn "Enable logging for Followers (True/False)?" 
hFlush stdout debugFollower <- fmap readMaybe getLine case (mn,cn,debugFollower) of (Just n,Just c,Just df)-> do g <- newGenIO :: IO SystemRandom let keyMaps' = keyMaps $ makeKeys (n+c) g let clientIds = take c $ drop n nodes let isAClient nid _ = Set.member nid (Set.fromList clientIds) let isNotAClient nid _ = not $ Set.member nid (Set.fromList clientIds) let clusterKeyMaps = (Map.filterWithKey isNotAClient *** Map.filterWithKey isNotAClient) keyMaps' let clientKeyMaps = (Map.filterWithKey isAClient *** Map.filterWithKey isAClient) keyMaps' let clusterConfs = createClusterConfig df clusterKeyMaps (snd clientKeyMaps) <$> take n nodes let clientConfs = createClientConfig df (snd clusterKeyMaps) clientKeyMaps <$> clientIds mapM_ (\c' -> Y.encodeFile (configDir </> show (_port $ _nodeId c') ++ "-cluster.yaml") c') clusterConfs mapM_ (\c' -> Y.encodeFile (configDir </> show (_port $ _nodeId c') ++ "-client.yaml") c') clientConfs _ -> putStrLn "Failed to read either input into a number, please try again" createClusterConfig :: Bool -> (Map NodeID PrivateKey, Map NodeID PublicKey) -> Map NodeID PublicKey -> NodeID -> Config createClusterConfig debugFollower (privMap, pubMap) clientPubMap nid = Config { _otherNodes = Set.delete nid $ Map.keysSet pubMap , _nodeId = nid , _publicKeys = pubMap , _clientPublicKeys = Map.union pubMap clientPubMap -- NOTE: [2016 04 26] all nodes are client (support API signing) , _myPrivateKey = privMap Map.! nid , _myPublicKey = pubMap Map.! nid , _electionTimeoutRange = (3000000,6000000) , _heartbeatTimeout = 1500000 -- seems like a while... 
, _batchTimeDelta = fromSeconds' (1%100) -- default to 10ms , _enableDebug = True , _clientTimeoutLimit = 50000 , _dontDebugFollower = not debugFollower , _apiPort = 8000 } createClientConfig :: Bool -> Map NodeID PublicKey -> (Map NodeID PrivateKey, Map NodeID PublicKey) -> NodeID -> Config createClientConfig debugFollower clusterPubMap (privMap, pubMap) nid = Config { _otherNodes = Map.keysSet clusterPubMap , _nodeId = nid , _publicKeys = clusterPubMap , _clientPublicKeys = pubMap , _myPrivateKey = privMap Map.! nid , _myPublicKey = pubMap Map.! nid , _electionTimeoutRange = (3000000,6000000) , _heartbeatTimeout = 1500000 , _batchTimeDelta = fromSeconds' (1%100) -- default to 10ms , _enableDebug = False , _clientTimeoutLimit = 50000 , _dontDebugFollower = not debugFollower , _apiPort = 8000 }
haroldcarr/juno
app/GenerateConfigFiles.hs
Haskell
bsd-3-clause
3,940
{-# LANGUAGE DeriveDataTypeable #-}

-- | Core DOM-like types: a tree of elements and text nodes, the supported
-- element vocabulary, and key\/value attribute maps.  The 'Show' instances
-- render a tree back to HTML-ish markup.
module Internal.Types
  ( DTree (..)
  , DElemType (..)
  , DAttr (..)
  ) where

import qualified Data.Text as T
import Data.Data
import qualified Data.Map as M

-- | DOM tree: either a text node or an element with attributes and children.
data DTree
  = DText T.Text
  | DElem DElemType DAttr [DTree]
  deriving (Eq)

-- Rendering: @<tag attrs>children</tag>@; a text node is just its text.
instance Show DTree where
  show (DText txt) = T.unpack txt
  show (DElem tag attrs kids) =
    concat ["<", show tag, attrStr, ">", concatMap show kids, "</", show tag, ">"]
    where
      -- Prefix a single space only when there is at least one attribute.
      attrStr = case show attrs of
        ""       -> ""
        rendered -> ' ' : rendered

-- | Supported HTML elements.
data DElemType = DHtml | DBody | DDiv | DH1 | DP
  deriving (Eq, Ord, Data, Typeable)

-- Lower-case tag names, exactly as they appear in markup.
instance Show DElemType where
  show DHtml = "html"
  show DBody = "body"
  show DDiv  = "div"
  show DH1   = "h1"
  show DP    = "p"

-- | HTML element attributes (key-value pairs).
newtype DAttr = DAttr { attributes :: M.Map T.Text T.Text }
  deriving (Eq)

-- Rendered as @key="value"@ pairs separated by single spaces
-- (empty string when the map is empty).
instance Show DAttr where
  show (DAttr kvs) =
    unwords [ T.unpack k ++ "=\"" ++ T.unpack v ++ "\"" | (k, v) <- M.toList kvs ]
qnnguyen/howser
src/Internal/Types.hs
Haskell
bsd-3-clause
1,354
-- | Rendering of 'Mail' values to wire format, plus transfer-encoding of
-- binary and text bodies (quoted-printable, base64, 7bit/8bit).
--
-- Fixes applied in review (quotedPrintable):
--   * the "=XY" hex escape used @shiftR 8@ / @.&. 0xFF@, so the high digit
--     was always '0' and the low "digit" could index past the 16-character
--     hex table; RFC 2045 requires the two nibbles (@shiftR 4@ / @.&. 0xF@).
--   * literals allowed unescaped ran up to '\166'; RFC 2045 limits raw
--     literals to octets 33-126 (minus '='), so the bound is now '\126'.
--     Escaping more octets is always valid quoted-printable output.
module Network.Email.Render where

import Data.Monoid
import Data.Maybe
import Control.Monad
import Data.Bits
import qualified Data.Map as M
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lazy.Char8 as BL
import qualified Data.ByteString.Builder as BL
import qualified Data.Text as T
import qualified Data.Text.Lazy as TL
import qualified Data.Text.Lazy.Builder as TB
import qualified Data.Text.Lazy.Encoding as TL
import qualified Data.ByteString.Base64.Lazy as B64
import Control.Monad.Catch

import Network.Email.Header.Types
import qualified Network.Email.Header.Read as H
import qualified Network.Email.Header.Render as R
import qualified Network.Email.Charset as C
import Network.Email.Types

-- | Serialize a 'Mail' (headers, body, and any MIME sub-parts) to a lazy
-- 'BL.ByteString'.  Sub-parts are emitted between the boundary declared in
-- the headers, terminated by the @--boundary--@ closing marker.
mail :: Mail -> BL.ByteString
mail = BL.toLazyByteString . mailPart
  where
    mailPart ml = hdrs <> BL.byteString "\r\n" <> body <> parts
      where
        hdrs = BL.lazyByteString $ TL.encodeUtf8 $ TB.toLazyText $ R.outputHeaders $ mailHeaders ml
        body = BL.lazyByteString $ mailBody ml
        -- Only multipart messages (headers carrying a boundary) have parts.
        parts = case H.boundary (mailHeaders ml) of
          Nothing -> mempty
          Just x ->
            let bnd = BL.byteString "\r\n--" <> BL.byteString x
                ps  = mconcat $ map (\p -> bnd <> mailPart p) $ mailParts ml
            in ps <> bnd <> BL.byteString "--\r\n"

-- | Encode a binary body according to the Content-Transfer-Encoding header
-- (default @8bit@), returning a 'Mail'.  Throws 'UnknownEncoding' for an
-- unsupported encoding.
encodeBinary :: MonadThrow m => Headers -> BL.ByteString -> m Mail
encodeBinary hdrs body = do
  enc <- encoder
  return SimpleMail
    { mailHeaders = hdrs
    , mailBody    = BL.toLazyByteString $ enc body
    }
  where
    encoding = fromMaybe "8bit" $ H.contentTransferEncoding hdrs
    encoder = case encoding of
      "quoted-printable" -> return quotedPrintable
      "base64"           -> return $ BL.lazyByteString . B64.encode
      "8bit"             -> return BL.lazyByteString
      "7bit"             -> return BL.lazyByteString
      _                  -> throwM $ UnknownEncoding encoding

-- | Quoted-printable encode a lazy ByteString (RFC 2045 section 6.7).
--
-- NOTE(review): CR/LF pass through untouched (hard line breaks are assumed
-- to be intentional), and the 76-character soft-line-break limit is not
-- enforced here -- TODO confirm callers guarantee line length.
quotedPrintable :: BL.ByteString -> BL.Builder
quotedPrintable = mconcat . map qp . BL.unpack
  where
    qp c
      | isGood c  = BL.char8 c
      | otherwise = BL.char8 '=' <> hex c
    -- Raw literals: printable US-ASCII 33..126, except '='; line breaks kept.
    isGood c = c /= '=' && (c == '\r' || c == '\n' || (c >= '\33' && c <= '\126'))
    -- Two uppercase hex digits of the octet: high nibble then low nibble.
    hex ch =
      let c = fromEnum ch
      in table (c `shiftR` 4) <> table (c .&. 0xF)
    table n = BL.char8 $ "0123456789ABCDEF" `B.index` n

-- | Encode a text body: check the MIME type is @text/*@, convert from
-- Unicode using the declared charset (default @us-ascii@), normalize line
-- endings to CRLF, then apply the transfer encoding via 'encodeBinary'.
-- Throws 'NotAText' or 'UnknownCharset' on bad headers.
encodeText :: MonadThrow m => Headers -> TL.Text -> m Mail
encodeText hdrs body = do
  unless (typ == "text") $ throwM NotAText
  cs <- maybe (throwM $ UnknownCharset charset) return $ C.lookupCharset charset
  let bin = toCrlf $ C.fromUnicode cs $ TL.toStrict body
  encodeBinary hdrs bin
  where
    (MimeType typ _, pars) = fromMaybe (MimeType "text" "plain", M.empty) $ H.contentType hdrs
    charset = maybe "us-ascii" T.unpack $ M.lookup "charset" pars

-- | Replace each bare '\n' with "\r\n".
--
-- NOTE(review): input already containing CRLF would come out as CR CR LF --
-- presumably upstream only ever produces bare LF; confirm before reuse.
toCrlf :: B.ByteString -> BL.ByteString
toCrlf = BL.toLazyByteString . mconcat . map conv . B.unpack
  where
    conv '\n' = BL.byteString "\r\n"
    conv a    = BL.char8 a
abbradar/email
src/Network/Email/Render.hs
Haskell
bsd-3-clause
3,202
-- | Groups: monoids in which every element has an additive inverse.
-- Re-exports the underlying monoid structure for convenience.
module Algebra.Structures.Group
  ( module Algebra.Structures.Monoid
  , Group(..)
  , (<->)
  ) where

import Algebra.Structures.Monoid

-- | A group is a monoid equipped with negation (inverse).
class Monoid a => Group a where
  neg :: a -> a

-- | Subtraction, defined as adding the inverse of the right operand.
(<->) :: Group a => a -> a -> a
x <-> y = x <+> neg y

infixl 6 <->
Alex128/abstract-math
src/Algebra/Structures/Group.hs
Haskell
bsd-3-clause
268
-- NOTE(review): original newlines have been collapsed into a few very long
-- lines; this auto-generated amazonka-style request/response module is kept
-- byte-identical (it is machine-derived AWS boilerplate and depends on the
-- amazonka-core vocabulary), with line breaks to be restored before
-- compiling.  It defines the PutObjectACL request record, its smart
-- constructor and lenses, the AWSRequest/ToHeaders/ToPath/ToQuery/ToElement
-- instances, and the corresponding response record.
{-# LANGUAGE DeriveDataTypeable #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE TypeFamilies #-} {-# OPTIONS_GHC -fno-warn-unused-imports #-} {-# OPTIONS_GHC -fno-warn-unused-binds #-} {-# OPTIONS_GHC -fno-warn-unused-matches #-} -- Derived from AWS service descriptions, licensed under Apache 2.0. -- | -- Module : Network.AWS.S3.PutObjectACL -- Copyright : (c) 2013-2015 Brendan Hay -- License : Mozilla Public License, v. 2.0. -- Maintainer : Brendan Hay <brendan.g.hay@gmail.com> -- Stability : auto-generated -- Portability : non-portable (GHC extensions) -- -- uses the acl subresource to set the access control list (ACL) -- permissions for an object that already exists in a bucket -- -- /See:/ <http://docs.aws.amazon.com/AmazonS3/latest/API/PutObjectACL.html AWS API Reference> for PutObjectACL. module Mismi.S3.Patch.PutObjectACL ( -- * Creating a Request putObjectACL , PutObjectACL -- * Request Lenses , poaGrantReadACP , poaRequestPayer , poaGrantWriteACP , poaGrantRead , poaGrantFullControl , poaContentMD5 , poaAccessControlPolicy , poaGrantWrite , poaACL , poaBucket , poaKey -- * Destructuring the Response , putObjectACLResponse , PutObjectACLResponse -- * Response Lenses , poarsRequestCharged , poarsResponseStatus ) where import Control.Lens (Lens', lens) import Network.AWS.Prelude import Network.AWS.Request import Network.AWS.Response import Network.AWS.S3.Types -- | /See:/ 'putObjectACL' smart constructor. 
data PutObjectACL = PutObjectACL' { _poaGrantReadACP :: !(Maybe Text) , _poaRequestPayer :: !(Maybe RequestPayer) , _poaGrantWriteACP :: !(Maybe Text) , _poaGrantRead :: !(Maybe Text) , _poaGrantFullControl :: !(Maybe Text) , _poaContentMD5 :: !(Maybe Text) , _poaAccessControlPolicy :: !(Maybe AccessControlPolicy) , _poaGrantWrite :: !(Maybe Text) , _poaACL :: !(Maybe ObjectCannedACL) , _poaBucket :: !BucketName , _poaKey :: !ObjectKey } deriving (Eq,Read,Show,Data,Typeable,Generic) -- | Creates a value of 'PutObjectACL' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'poaGrantReadACP' -- -- * 'poaRequestPayer' -- -- * 'poaGrantWriteACP' -- -- * 'poaGrantRead' -- -- * 'poaGrantFullControl' -- -- * 'poaContentMD5' -- -- * 'poaAccessControlPolicy' -- -- * 'poaGrantWrite' -- -- * 'poaACL' -- -- * 'poaBucket' -- -- * 'poaKey' putObjectACL :: BucketName -- ^ 'poaBucket' -> ObjectKey -- ^ 'poaKey' -> PutObjectACL putObjectACL pBucket_ pKey_ = PutObjectACL' { _poaGrantReadACP = Nothing , _poaRequestPayer = Nothing , _poaGrantWriteACP = Nothing , _poaGrantRead = Nothing , _poaGrantFullControl = Nothing , _poaContentMD5 = Nothing , _poaAccessControlPolicy = Nothing , _poaGrantWrite = Nothing , _poaACL = Nothing , _poaBucket = pBucket_ , _poaKey = pKey_ } -- | Allows grantee to read the bucket ACL. poaGrantReadACP :: Lens' PutObjectACL (Maybe Text) poaGrantReadACP = lens _poaGrantReadACP (\ s a -> s{_poaGrantReadACP = a}); -- | Undocumented member. poaRequestPayer :: Lens' PutObjectACL (Maybe RequestPayer) poaRequestPayer = lens _poaRequestPayer (\ s a -> s{_poaRequestPayer = a}); -- | Allows grantee to write the ACL for the applicable bucket. poaGrantWriteACP :: Lens' PutObjectACL (Maybe Text) poaGrantWriteACP = lens _poaGrantWriteACP (\ s a -> s{_poaGrantWriteACP = a}); -- | Allows grantee to list the objects in the bucket. 
poaGrantRead :: Lens' PutObjectACL (Maybe Text) poaGrantRead = lens _poaGrantRead (\ s a -> s{_poaGrantRead = a}); -- | Allows grantee the read, write, read ACP, and write ACP permissions on -- the bucket. poaGrantFullControl :: Lens' PutObjectACL (Maybe Text) poaGrantFullControl = lens _poaGrantFullControl (\ s a -> s{_poaGrantFullControl = a}); -- | Undocumented member. poaContentMD5 :: Lens' PutObjectACL (Maybe Text) poaContentMD5 = lens _poaContentMD5 (\ s a -> s{_poaContentMD5 = a}); -- | Undocumented member. poaAccessControlPolicy :: Lens' PutObjectACL (Maybe AccessControlPolicy) poaAccessControlPolicy = lens _poaAccessControlPolicy (\ s a -> s{_poaAccessControlPolicy = a}); -- | Allows grantee to create, overwrite, and delete any object in the -- bucket. poaGrantWrite :: Lens' PutObjectACL (Maybe Text) poaGrantWrite = lens _poaGrantWrite (\ s a -> s{_poaGrantWrite = a}); -- | The canned ACL to apply to the object. poaACL :: Lens' PutObjectACL (Maybe ObjectCannedACL) poaACL = lens _poaACL (\ s a -> s{_poaACL = a}); -- | Undocumented member. poaBucket :: Lens' PutObjectACL BucketName poaBucket = lens _poaBucket (\ s a -> s{_poaBucket = a}); -- | Undocumented member. poaKey :: Lens' PutObjectACL ObjectKey poaKey = lens _poaKey (\ s a -> s{_poaKey = a}); instance AWSRequest PutObjectACL where type Rs PutObjectACL = PutObjectACLResponse request = put s3 response = receiveEmpty (\ s h x -> PutObjectACLResponse' <$> (h .#? "x-amz-request-charged") <*> (pure (fromEnum s))) instance ToElement PutObjectACL where toElement = mkElement "{http://s3.amazonaws.com/doc/2006-03-01/}AccessControlPolicy" . 
_poaAccessControlPolicy instance ToHeaders PutObjectACL where toHeaders PutObjectACL'{..} = mconcat ["x-amz-grant-read-acp" =# _poaGrantReadACP, "x-amz-request-payer" =# _poaRequestPayer, "x-amz-grant-write-acp" =# _poaGrantWriteACP, "x-amz-grant-read" =# _poaGrantRead, "x-amz-grant-full-control" =# _poaGrantFullControl, "Content-MD5" =# _poaContentMD5, "x-amz-grant-write" =# _poaGrantWrite, "x-amz-acl" =# _poaACL] instance ToPath PutObjectACL where toPath PutObjectACL'{..} = mconcat ["/", toBS _poaBucket, "/", toBS _poaKey] instance ToQuery PutObjectACL where toQuery = const (mconcat ["acl"]) -- | /See:/ 'putObjectACLResponse' smart constructor. data PutObjectACLResponse = PutObjectACLResponse' { _poarsRequestCharged :: !(Maybe RequestCharged) , _poarsResponseStatus :: !Int } deriving (Eq,Read,Show,Data,Typeable,Generic) -- | Creates a value of 'PutObjectACLResponse' with the minimum fields required to make a request. -- -- Use one of the following lenses to modify other fields as desired: -- -- * 'poarsRequestCharged' -- -- * 'poarsResponseStatus' putObjectACLResponse :: Int -- ^ 'poarsResponseStatus' -> PutObjectACLResponse putObjectACLResponse pResponseStatus_ = PutObjectACLResponse' { _poarsRequestCharged = Nothing , _poarsResponseStatus = pResponseStatus_ } -- | Undocumented member. poarsRequestCharged :: Lens' PutObjectACLResponse (Maybe RequestCharged) poarsRequestCharged = lens _poarsRequestCharged (\ s a -> s{_poarsRequestCharged = a}); -- | The response status code. poarsResponseStatus :: Lens' PutObjectACLResponse Int poarsResponseStatus = lens _poarsResponseStatus (\ s a -> s{_poarsResponseStatus = a});
ambiata/mismi
mismi-s3/src/Mismi/S3/Patch/PutObjectACL.hs
Haskell
bsd-3-clause
7,476
{-# LANGUAGE CPP #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}

-- | AST node for a synchronous Tower channel: an integer label paired
-- with the Ivory type of the values carried on the channel.
module Ivory.Tower.AST.SyncChan where

#if MIN_VERSION_mainland_pretty(0,6,0)
import Text.PrettyPrint.Mainland.Class
#endif
import Text.PrettyPrint.Mainland

import qualified Ivory.Language.Syntax.Type as I

data SyncChan = SyncChan
  { sync_chan_label :: Integer   -- channel label, printed by 'ppr'
  , sync_chan_type  :: I.Type    -- Ivory type of the channel's messages
  } deriving (Eq, Show, Ord)

-- Rendered as, e.g., @42 :: <type>@.
instance Pretty SyncChan where
  ppr SyncChan{..} = integer sync_chan_label <+> "::" <+> text (show sync_chan_type)
GaloisInc/tower
tower/src/Ivory/Tower/AST/SyncChan.hs
Haskell
bsd-3-clause
543
import System.GlobalLock
import Control.Concurrent.MVar
import Criterion.Main

-- | Compare the cost of three ways of running a trivial action:
-- a bare 'IO' no-op, the same no-op under an 'MVar' lock, and the same
-- no-op under the process-wide global lock.
main :: IO ()
main = do
  baton <- newMVar ()
  let benchmarks =
        [ bench "bare"   (return () :: IO ())
        , bench "MVar"   (withMVar baton (\_ -> return ()))
        , bench "global" (lock (return ()))
        ]
  defaultMain benchmarks
kmcallister/global-lock
test/bench.hs
Haskell
bsd-3-clause
298
-- | Chapter 1 of "Modern Compiler Implementation": a straight-line
-- programming language, its interpreter, and the binary-search-tree
-- exercises.
module Chapter01 where

import Control.Monad ( liftM2 )
import Control.Monad.Trans.Class ( lift )
import Control.Monad.Trans.State ( StateT, evalStateT, modify, gets )
import Data.Maybe ( fromMaybe )

--------------------------------------------------------------------------------

-- | Identifiers of the straight-line language.
type Id = String

-- | Binary arithmetic operators.
data BinOp = Plus | Minus | Times | Div deriving ( Show )

-- | Statements: sequencing, assignment, and printing a list of expressions.
data Stm = CompoundStm Stm Stm
         | AssignStm Id Exp
         | PrintStm [Exp]
  deriving ( Show )

-- | Expressions: variables, integer literals, binary operations, and a
-- statement executed for effect before an expression ('EseqExp').
data Exp = IdExp Id
         | NumExp Int
         | OpExp Exp BinOp Exp
         | EseqExp Stm Exp
  deriving ( Show )

-- | The book's example program:
--
-- > a := 5 + 3 ; b := ( print ( a , a - 1 ) , 10 * a ) ; print ( b )
prog :: Stm
prog =
  CompoundStm
    (AssignStm "a" (OpExp (NumExp 5) Plus (NumExp 3)))
    (CompoundStm
       (AssignStm "b"
          (EseqExp (PrintStm [IdExp "a", OpExp (IdExp "a") Minus (NumExp 1)])
                   (OpExp (NumExp 10) Times (IdExp "a"))))
       (PrintStm [IdExp "b"]))

--------------------------------------------------------------------------------

-- | Maximum number of arguments of any 'PrintStm' inside a statement.
maxArgs :: Stm -> Int
maxArgs (CompoundStm s1 s2) = maxArgs s1 `max` maxArgs s2
maxArgs (AssignStm _ e)     = maxArgsExp e
maxArgs (PrintStm es)       = length es

-- | Maximum number of 'PrintStm' arguments inside an expression.
maxArgsExp :: Exp -> Int
maxArgsExp (IdExp _)       = 0
maxArgsExp (NumExp _)      = 0
maxArgsExp (OpExp e1 _ e2) = maxArgsExp e1 `max` maxArgsExp e2
maxArgsExp (EseqExp s e)   = maxArgs s `max` maxArgsExp e

--------------------------------------------------------------------------------

-- Note: Changed to a more convenient argument order for some functions below.

-- | An environment mapping identifiers to integer values; newer bindings
-- shadow older ones because 'lookup' finds the first match.
type Table = [(Id, Int)]

-- | The empty environment.
emptyTable :: Table
emptyTable = []

-- | Bind @i@ to @v@, shadowing any previous binding.
update :: Id -> Int -> Table -> Table
update i v = ((i, v) :)

-- | Look up @i@; calls 'error' for unbound identifiers (book semantics).
lookUp :: Id -> Table -> Int
lookUp i = fromMaybe (error ("unbound identifier " ++ i)) . lookup i

--------------------------------------------------------------------------------

-- | Interpreter monad: a mutable 'Table' over 'IO' (for printing).
type Eval = StateT Table IO

-- | Run an interpreter action starting from the empty environment.
evalE :: Eval a -> IO a
evalE = flip evalStateT emptyTable

-- | Monadic variable lookup.
lookUpE :: Id -> Eval Int
lookUpE = gets . lookUp

-- | Monadic variable update.
updateE :: Id -> Int -> Eval ()
updateE i = modify . update i

-- | Print a line from inside 'Eval'.
putStrLnE :: String -> Eval ()
putStrLnE = lift . putStrLn

--------------------------------------------------------------------------------

-- | Interpret a statement, starting from an empty environment.
interp :: Stm -> IO ()
interp = evalE . interpStm

interpStm :: Stm -> Eval ()
interpStm (CompoundStm s1 s2) = interpStm s1 >> interpStm s2
interpStm (AssignStm i e)     = interpExp e >>= updateE i
interpStm (PrintStm es)       = mapM interpExp es >>= putStrLnE . format
  -- 'unwords' is the stdlib idiom for (concat . intersperse " ").
  where format = unwords . map show

interpExp :: Exp -> Eval Int
interpExp (IdExp i)       = lookUpE i
interpExp (NumExp v)      = return v
interpExp (OpExp e1 op e2) = liftM2 (funForOp op) (interpExp e1) (interpExp e2)
interpExp (EseqExp s e)   = interpStm s >> interpExp e

-- | The arithmetic function denoted by an operator ('div' is integer
-- division, matching 'Int').
funForOp :: BinOp -> Int -> Int -> Int
funForOp Plus  = (+)
funForOp Minus = (-)
funForOp Times = (*)
funForOp Div   = div

--------------------------------------------------------------------------------
-- Exercise 1.1a

type Key = String

-- | Unbalanced binary search tree of keys.
data Tree = Leaf
          | Node Tree Key Tree
  deriving ( Show )

empty :: Tree
empty = Leaf

-- | Insert a key; inserting an existing key leaves the shape unchanged.
insert :: Key -> Tree -> Tree
insert key Leaf = Node Leaf key Leaf
insert key (Node l k r)
  | key < k   = Node (insert key l) k r
  | key > k   = Node l k (insert key r)
  | otherwise = Node l key r

-- | Membership test.
member :: Key -> Tree -> Bool
member _ Leaf = False
member key (Node l k r)
  | key < k   = member key l
  | key > k   = member key r
  | otherwise = True

--------------------------------------------------------------------------------
-- Exercise 1.1b

-- | Search tree with a value attached to each key.
data Tree' a = Leaf'
             | Node' (Tree' a) Key a (Tree' a)
  deriving ( Show )

empty' :: Tree' a
empty' = Leaf'

-- | Insert a key/value pair; an existing key's value is replaced.
insert' :: Key -> a -> Tree' a -> Tree' a
insert' key val Leaf' = Node' Leaf' key val Leaf'
insert' key val (Node' l k v r)
  | key < k   = Node' (insert' key val l) k v r
  | key > k   = Node' l k v (insert' key val r)
  | otherwise = Node' l key v r

-- | Look up a key's value; calls 'error' for a missing key (book semantics).
lookup' :: Key -> Tree' a -> a
lookup' key Leaf' = error (key ++ " not found")
lookup' key (Node' l k v r)
  | key < k   = lookup' key l
  | key > k   = lookup' key r
  | otherwise = v

--------------------------------------------------------------------------------
-- Exercise 1.1c

-- (a): Basically a list hanging to the left:
--
--           t
--          /
--         s
--        /
--       p
--      /
--     i
--    /
--   f
--  /
-- b
--
-- (b): Basically a list hanging to the right:
--
-- a
--  \
--   b
--    \
--     c
--      \
--       d
--        \
--         e
--          \
--           f
--            \
--             g
--              \
--               h
--                \
--                 i

--------------------------------------------------------------------------------
-- Exercise 1.1d
--
-- One could use AVL trees or red-black trees.
svenpanne/tiger
chapter01/SLP.hs
Haskell
bsd-3-clause
4,884
{-# LANGUAGE CPP #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE TupleSections #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE PatternGuards #-} {- | Module : Verifier.SAW.Term.Pretty Copyright : Galois, Inc. 2012-2015 License : BSD3 Maintainer : huffman@galois.com Stability : experimental Portability : non-portable (language extensions) -} module Verifier.SAW.Term.Pretty ( SawDoc , renderSawDoc , SawStyle(..) , PPOpts(..) , defaultPPOpts , depthPPOpts , ppNat , ppTerm , ppTermInCtx , showTerm , scPrettyTerm , scPrettyTermInCtx , ppTermDepth , ppTermWithNames , showTermWithNames , PPModule(..), PPDecl(..) , ppPPModule , scTermCount , OccurrenceMap , shouldMemoizeTerm , ppName ) where import Data.Char (intToDigit) import Data.Maybe (isJust) import Control.Monad.Reader import Control.Monad.State.Strict as State #if !MIN_VERSION_base(4,8,0) import Data.Foldable (Foldable) #endif import qualified Data.Foldable as Fold import qualified Data.Text as Text import qualified Data.Text.Lazy as Text.Lazy import qualified Data.Map as Map import qualified Data.Vector as V import Numeric (showIntAtBase) import Prettyprinter import Prettyprinter.Render.Terminal import Text.URI import Data.IntMap.Strict (IntMap) import qualified Data.IntMap.Strict as IntMap import Verifier.SAW.Name import Verifier.SAW.Term.Functor import Verifier.SAW.Utils (panic) import Verifier.SAW.Recognizer -------------------------------------------------------------------------------- -- * Doc annotations data SawStyle = PrimitiveStyle | ConstantStyle | ExtCnsStyle | LocalVarStyle | DataTypeStyle | CtorAppStyle | RecursorStyle | FieldNameStyle -- TODO: assign colors for more styles colorStyle :: SawStyle -> AnsiStyle colorStyle = \case PrimitiveStyle -> mempty ConstantStyle -> colorDull Blue ExtCnsStyle -> colorDull Red LocalVarStyle -> colorDull Green 
DataTypeStyle -> mempty CtorAppStyle -> mempty RecursorStyle -> mempty FieldNameStyle -> mempty type SawDoc = Doc SawStyle -------------------------------------------------------------------------------- -- * Pretty-Printing Options and Precedences -------------------------------------------------------------------------------- -- | Global options for pretty-printing data PPOpts = PPOpts { ppBase :: Int , ppColor :: Bool , ppShowLocalNames :: Bool , ppMaxDepth :: Maybe Int , ppMinSharing :: Int } -- | Default options for pretty-printing defaultPPOpts :: PPOpts defaultPPOpts = PPOpts { ppBase = 10, ppColor = False, ppShowLocalNames = True, ppMaxDepth = Nothing, ppMinSharing = 2 } -- | Options for printing with a maximum depth depthPPOpts :: Int -> PPOpts depthPPOpts max_d = defaultPPOpts { ppMaxDepth = Just max_d } -- | Test if a depth is "allowed", meaning not greater than the max depth depthAllowed :: PPOpts -> Int -> Bool depthAllowed (PPOpts { ppMaxDepth = Just max_d }) d = d < max_d depthAllowed _ _ = True -- | Precedence levels, each of which corresponds to a parsing nonterminal data Prec = PrecCommas -- ^ Nonterminal @sepBy(Term, \',\')@ | PrecTerm -- ^ Nonterminal @Term@ | PrecLambda -- ^ Nonterminal @LTerm@ | PrecProd -- ^ Nonterminal @ProdTerm@ | PrecApp -- ^ Nonterminal @AppTerm@ | PrecArg -- ^ Nonterminal @AtomTerm@ deriving (Eq, Ord) -- | Test if the first precedence "contains" the second, meaning that terms at -- the latter precedence level can be printed in the context of the former -- without parentheses. precContains :: Prec -> Prec -> Bool precContains x y = x <= y -- | Optionally print parentheses around a document, iff the incoming, outer -- precedence (listed first) contains (as in 'precContains') the required -- precedence (listed second) for printing the given document. 
-- -- Stated differently: @ppParensPrec p1 p2 d@ means we are pretty-printing in a -- term context that requires precedence @p1@, but @d@ was pretty-printed at -- precedence level @p2@. If @p1@ does not contain @p2@ (e.g., if @p1@ is -- 'PrecArg', meaning we are pretty-printing the argument of an application, and -- @p2@ is 'PrecLambda', meaning the construct we are pretty-printing is a -- lambda or pi abstraction) then add parentheses. ppParensPrec :: Prec -> Prec -> SawDoc -> SawDoc ppParensPrec p1 p2 d | precContains p1 p2 = d | otherwise = parens $ align d ---------------------------------------------------------------------- -- * Local Variable Namings ---------------------------------------------------------------------- -- | Local variable namings, which map each deBruijn index in scope to a unique -- string to be used to print it. This mapping is given by position in a list. newtype VarNaming = VarNaming [LocalName] -- | The empty local variable context emptyVarNaming :: VarNaming emptyVarNaming = VarNaming [] -- | Look up a string to use for a variable, if the first argument is 'True', or -- just print the variable number if the first argument is 'False' lookupVarName :: Bool -> VarNaming -> DeBruijnIndex -> LocalName lookupVarName True (VarNaming names) i | i >= length names = Text.pack ('!' : show (i - length names)) lookupVarName True (VarNaming names) i = names!!i lookupVarName False _ i = Text.pack ('!' : show i) -- | Generate a fresh name from a base name that does not clash with any names -- already in a given list, unless it is "_", in which case return it as is freshName :: [LocalName] -> LocalName -> LocalName freshName used name | name == "_" = name | elem name used = freshName used (name <> "'") | otherwise = name -- | Add a new variable with the given base name to the local variable list, -- returning both the fresh name actually used and the new variable list. As a -- special case, if the base name is "_", it is not modified. 
consVarNaming :: VarNaming -> LocalName -> (LocalName, VarNaming) consVarNaming (VarNaming names) name = let nm = freshName names name in (nm, VarNaming (nm : names)) -------------------------------------------------------------------------------- -- * Pretty-printing monad -------------------------------------------------------------------------------- -- | Memoization variables, which are like deBruijn index variables but for -- terms that we are memoizing during printing type MemoVar = Int -- | The local state used by pretty-printing computations data PPState = PPState { -- | The global pretty-printing options ppOpts :: PPOpts, -- | The current depth of printing ppDepth :: Int, -- | The current naming for the local variables ppNaming :: VarNaming, -- | The top-level naming environment ppNamingEnv :: SAWNamingEnv, -- | The next "memoization variable" to generate ppNextMemoVar :: MemoVar, -- | Memoization table for the global, closed terms, mapping term indices to -- "memoization variables" that are in scope ppGlobalMemoTable :: IntMap MemoVar, -- | Memoization table for terms at the current binding level, mapping term -- indices to "memoization variables" that are in scope ppLocalMemoTable :: IntMap MemoVar } emptyPPState :: PPOpts -> SAWNamingEnv -> PPState emptyPPState opts ne = PPState { ppOpts = opts, ppDepth = 0, ppNaming = emptyVarNaming, ppNamingEnv = ne, ppNextMemoVar = 1, ppGlobalMemoTable = IntMap.empty, ppLocalMemoTable = IntMap.empty } -- | The pretty-printing monad newtype PPM a = PPM (Reader PPState a) deriving (Functor, Applicative, Monad) -- | Run a pretty-printing computation in a top-level, empty context runPPM :: PPOpts -> SAWNamingEnv -> PPM a -> a runPPM opts ne (PPM m) = runReader m $ emptyPPState opts ne instance MonadReader PPState PPM where ask = PPM ask local f (PPM m) = PPM $ local f m -- | Look up the given local variable by deBruijn index to get its name varLookupM :: DeBruijnIndex -> PPM LocalName varLookupM idx = lookupVarName <$> 
(ppShowLocalNames <$> ppOpts <$> ask) <*> (ppNaming <$> ask) <*> return idx -- | Test if a given term index is memoized, returning its memoization variable -- if so and otherwise returning 'Nothing' memoLookupM :: TermIndex -> PPM (Maybe MemoVar) memoLookupM idx = do s <- ask return $ case (IntMap.lookup idx (ppGlobalMemoTable s), IntMap.lookup idx (ppLocalMemoTable s)) of (res@(Just _), _) -> res (_, res@(Just _)) -> res _ -> Nothing -- | Run a pretty-printing computation at the next greater depth, returning the -- default value if the max depth has been exceeded atNextDepthM :: a -> PPM a -> PPM a atNextDepthM dflt m = do s <- ask let new_depth = ppDepth s + 1 if depthAllowed (ppOpts s) new_depth then local (\_ -> s { ppDepth = new_depth }) m else return dflt -- | Run a pretty-printing computation in the context of a new bound variable, -- also erasing the local memoization table (which is no longer valid in an -- extended variable context) during that computation. Return the result of the -- computation and also the name that was actually used for the bound variable. withBoundVarM :: LocalName -> PPM a -> PPM (LocalName, a) withBoundVarM basename m = do st <- ask let (var, naming) = consVarNaming (ppNaming st) basename ret <- local (\_ -> st { ppNaming = naming, ppLocalMemoTable = IntMap.empty }) m return (var, ret) -- | Run a computation in the context of a fresh "memoization variable" that is -- bound to the given term index, passing the new memoization variable to the -- computation. If the flag is true, use the global table, otherwise use the -- local table. 
withMemoVar :: Bool -> TermIndex -> (MemoVar -> PPM a) -> PPM a withMemoVar global_p idx f = do memo_var <- ppNextMemoVar <$> ask local (\s -> add_to_table global_p memo_var s) (f memo_var) where add_to_table True v st = st { ppNextMemoVar = v + 1, ppGlobalMemoTable = IntMap.insert idx v (ppGlobalMemoTable st) } add_to_table False v st = st { ppNextMemoVar = v + 1, ppLocalMemoTable = IntMap.insert idx v (ppLocalMemoTable st) } -------------------------------------------------------------------------------- -- * The Pretty-Printing of Specific Constructs -------------------------------------------------------------------------------- -- | Pretty-print an identifier ppIdent :: Ident -> SawDoc ppIdent = viaShow -- | Pretty-print an integer in the correct base ppNat :: PPOpts -> Integer -> SawDoc ppNat (PPOpts{..}) i | ppBase > 36 = pretty i | otherwise = prefix <> pretty value where prefix = case ppBase of 2 -> "0b" 8 -> "0o" 10 -> mempty 16 -> "0x" _ -> "0" <> pretty '<' <> pretty ppBase <> pretty '>' value = showIntAtBase (toInteger ppBase) (digits !!) i "" digits = "0123456789abcdefghijklmnopqrstuvwxyz" -- | Pretty-print a memoization variable ppMemoVar :: MemoVar -> SawDoc ppMemoVar mv = "x@" <> pretty mv -- | Pretty-print a type constraint (also known as an ascription) @x : tp@ ppTypeConstraint :: SawDoc -> SawDoc -> SawDoc ppTypeConstraint x tp = hang 2 $ group $ vsep [annotate LocalVarStyle x, ":" <+> tp] -- | Pretty-print an application to 0 or more arguments at the given precedence ppAppList :: Prec -> SawDoc -> [SawDoc] -> SawDoc ppAppList _ f [] = f ppAppList p f args = ppParensPrec p PrecApp $ group $ hang 2 $ vsep (f : args) -- | Pretty-print "let x1 = t1 ... 
xn = tn in body" ppLetBlock :: [(MemoVar, SawDoc)] -> SawDoc -> SawDoc ppLetBlock defs body = vcat [ "let" <+> lbrace <+> align (vcat (map ppEqn defs)) , indent 4 rbrace , " in" <+> body ] where ppEqn (var,d) = ppMemoVar var <+> pretty '=' <+> d -- | Pretty-print pairs as "(x, y)" ppPair :: Prec -> SawDoc -> SawDoc -> SawDoc ppPair prec x y = ppParensPrec prec PrecCommas (group (vcat [x <> pretty ',', y])) -- | Pretty-print pair types as "x * y" ppPairType :: Prec -> SawDoc -> SawDoc -> SawDoc ppPairType prec x y = ppParensPrec prec PrecProd (x <+> pretty '*' <+> y) -- | Pretty-print records (if the flag is 'False') or record types (if the flag -- is 'True'), where the latter are preceded by the string @#@, either as: -- -- * @(val1, val2, .., valn)@, if the record represents a tuple; OR -- -- * @{ fld1 op val1, ..., fldn op valn }@ otherwise, where @op@ is @::@ for -- types and @=@ for values. ppRecord :: Bool -> [(FieldName, SawDoc)] -> SawDoc ppRecord type_p alist = (if type_p then (pretty '#' <>) else id) $ encloseSep lbrace rbrace comma $ map ppField alist where ppField (fld, rhs) = group (nest 2 (vsep [pretty fld <+> op_str, rhs])) op_str = if type_p then ":" else "=" -- | Pretty-print a projection / selector "x.f" ppProj :: FieldName -> SawDoc -> SawDoc ppProj sel doc = doc <> pretty '.' 
<> pretty sel -- | Pretty-print an array value @[v1, ..., vn]@ ppArrayValue :: [SawDoc] -> SawDoc ppArrayValue = list -- | Pretty-print a lambda abstraction as @\(x :: tp) -> body@, where the -- variable name to use for @x@ is bundled with @body@ ppLambda :: SawDoc -> (LocalName, SawDoc) -> SawDoc ppLambda tp (name, body) = group $ hang 2 $ vsep ["\\" <> parens (ppTypeConstraint (pretty name) tp) <+> "->", body] -- | Pretty-print a pi abstraction as @(x :: tp) -> body@, or as @tp -> body@ if -- @x == "_"@ ppPi :: SawDoc -> (LocalName, SawDoc) -> SawDoc ppPi tp (name, body) = vsep [lhs, "->" <+> body] where lhs = if name == "_" then tp else parens (ppTypeConstraint (pretty name) tp) -- | Pretty-print a definition @d :: tp = body@ ppDef :: SawDoc -> SawDoc -> Maybe SawDoc -> SawDoc ppDef d tp Nothing = ppTypeConstraint d tp ppDef d tp (Just body) = ppTypeConstraint d tp <+> equals <+> body -- | Pretty-print a datatype declaration of the form -- > data d (p1:tp1) .. (pN:tpN) : tp where { -- > c1 (x1_1:tp1_1) .. (x1_N:tp1_N) : tp1 -- > ... -- > } ppDataType :: Ident -> (SawDoc, ((SawDoc, SawDoc), [SawDoc])) -> SawDoc ppDataType d (params, ((d_ctx,d_tp), ctors)) = group $ vcat [ vsep [ (group . 
vsep) [ "data" <+> ppIdent d <+> params <+> ":" <+> (d_ctx <+> "->" <+> d_tp) , "where" <+> lbrace ] , vcat (map (<> semi) ctors) ] , rbrace ] -------------------------------------------------------------------------------- -- * Pretty-Printing Terms -------------------------------------------------------------------------------- -- | Pretty-print a built-in atomic construct ppFlatTermF :: Prec -> FlatTermF Term -> PPM SawDoc ppFlatTermF prec tf = case tf of Primitive ec -> annotate PrimitiveStyle <$> ppBestName (ModuleIdentifier (primName ec)) UnitValue -> return "(-empty-)" UnitType -> return "#(-empty-)" PairValue x y -> ppPair prec <$> ppTerm' PrecTerm x <*> ppTerm' PrecCommas y PairType x y -> ppPairType prec <$> ppTerm' PrecApp x <*> ppTerm' PrecProd y PairLeft t -> ppProj "1" <$> ppTerm' PrecArg t PairRight t -> ppProj "2" <$> ppTerm' PrecArg t RecursorType d params motive _motiveTy -> do params_pp <- mapM (ppTerm' PrecArg) params motive_pp <- ppTerm' PrecArg motive nm <- ppBestName (ModuleIdentifier (primName d)) return $ ppAppList prec (annotate RecursorStyle (nm <> "#recType")) (params_pp ++ [motive_pp]) Recursor (CompiledRecursor d params motive _motiveTy cs_fs ctorOrder) -> do params_pp <- mapM (ppTerm' PrecArg) params motive_pp <- ppTerm' PrecArg motive fs_pp <- traverse (ppTerm' PrecTerm . 
fst) cs_fs nm <- ppBestName (ModuleIdentifier (primName d)) f_pps <- forM ctorOrder $ \ec -> do cnm <- ppBestName (ModuleIdentifier (primName ec)) case Map.lookup (primVarIndex ec) fs_pp of Just f_pp -> pure $ vsep [cnm, "=>", f_pp] Nothing -> panic "ppFlatTerm" ["missing constructor", show cnm] return $ ppAppList prec (annotate RecursorStyle (nm <> "#rec")) (params_pp ++ [motive_pp, tupled f_pps]) RecursorApp r ixs arg -> do rec_pp <- ppTerm' PrecApp r ixs_pp <- mapM (ppTerm' PrecArg) ixs arg_pp <- ppTerm' PrecArg arg return $ ppAppList prec rec_pp (ixs_pp ++ [arg_pp]) CtorApp c params args -> do cnm <- ppBestName (ModuleIdentifier (primName c)) ppAppList prec (annotate CtorAppStyle cnm) <$> mapM (ppTerm' PrecArg) (params ++ args) DataTypeApp dt params args -> do dnm <- ppBestName (ModuleIdentifier (primName dt)) ppAppList prec (annotate DataTypeStyle dnm) <$> mapM (ppTerm' PrecArg) (params ++ args) RecordType alist -> ppRecord True <$> mapM (\(fld,t) -> (fld,) <$> ppTerm' PrecTerm t) alist RecordValue alist -> ppRecord False <$> mapM (\(fld,t) -> (fld,) <$> ppTerm' PrecTerm t) alist RecordProj e fld -> ppProj fld <$> ppTerm' PrecArg e Sort s h -> return ((if h then pretty ("i"::String) else mempty) <> viaShow s) NatLit i -> ppNat <$> (ppOpts <$> ask) <*> return (toInteger i) ArrayValue (asBoolType -> Just _) args | Just bits <- mapM asBool $ V.toList args -> if length bits `mod` 4 == 0 then return $ pretty ("0x" ++ ppBitsToHex bits) else return $ pretty ("0b" ++ map (\b -> if b then '1' else '0') bits) ArrayValue _ args -> ppArrayValue <$> mapM (ppTerm' PrecTerm) (V.toList args) StringLit s -> return $ viaShow s ExtCns cns -> annotate ExtCnsStyle <$> ppBestName (ecName cns) -- | Pretty-print a big endian list of bit values as a hexadecimal number ppBitsToHex :: [Bool] -> String ppBitsToHex (b8:b4:b2:b1:bits') = intToDigit (8 * toInt b8 + 4 * toInt b4 + 2 * toInt b2 + toInt b1) : ppBitsToHex bits' where toInt True = 1 toInt False = 0 ppBitsToHex [] = "" 
ppBitsToHex bits = panic "ppBitsToHex" ["length of bit list is not a multiple of 4", show bits] -- | Pretty-print a name, using the best unambiguous alias from the -- naming environment. ppBestName :: NameInfo -> PPM SawDoc ppBestName ni = do ne <- asks ppNamingEnv case bestAlias ne ni of Left _ -> pure $ ppName ni Right alias -> pure $ pretty alias ppName :: NameInfo -> SawDoc ppName (ModuleIdentifier i) = ppIdent i ppName (ImportedName absName _) = pretty (render absName) -- | Pretty-print a non-shared term ppTermF :: Prec -> TermF Term -> PPM SawDoc ppTermF prec (FTermF ftf) = ppFlatTermF prec ftf ppTermF prec (App e1 e2) = ppAppList prec <$> ppTerm' PrecApp e1 <*> mapM (ppTerm' PrecArg) [e2] ppTermF prec (Lambda x tp body) = ppParensPrec prec PrecLambda <$> (ppLambda <$> ppTerm' PrecApp tp <*> ppTermInBinder PrecLambda x body) ppTermF prec (Pi x tp body) = ppParensPrec prec PrecLambda <$> (ppPi <$> ppTerm' PrecApp tp <*> ppTermInBinder PrecLambda x body) ppTermF _ (LocalVar x) = annotate LocalVarStyle <$> pretty <$> varLookupM x ppTermF _ (Constant ec _) = annotate ConstantStyle <$> ppBestName (ecName ec) -- | Internal function to recursively pretty-print a term ppTerm' :: Prec -> Term -> PPM SawDoc ppTerm' prec = atNextDepthM "..." . ppTerm'' where ppTerm'' (Unshared tf) = ppTermF prec tf ppTerm'' (STApp {stAppIndex = idx, stAppTermF = tf}) = do maybe_memo_var <- memoLookupM idx case maybe_memo_var of Just memo_var -> return $ ppMemoVar memo_var Nothing -> ppTermF prec tf -------------------------------------------------------------------------------- -- * Memoization Tables and Dealing with Binders in Terms -------------------------------------------------------------------------------- -- | An occurrence map maps each shared term index to its term and how many -- times that term occurred type OccurrenceMap = IntMap (Term, Int) -- | Returns map that associates each term index appearing in the term to the -- number of occurrences in the shared term. 
Subterms that are on the left-hand -- side of an application are excluded. (FIXME: why?) The boolean flag indicates -- whether to descend under lambdas and other binders. scTermCount :: Bool -> Term -> OccurrenceMap scTermCount doBinders t0 = execState (go [t0]) IntMap.empty where go :: [Term] -> State OccurrenceMap () go [] = return () go (t:r) = case t of Unshared _ -> recurse STApp{ stAppIndex = i } -> do m <- get case IntMap.lookup i m of Just (_, n) -> do put $ n `seq` IntMap.insert i (t, n+1) m go r Nothing -> do put (IntMap.insert i (t, 1) m) recurse where recurse = go (r ++ argsAndSubterms t) argsAndSubterms (unwrapTermF -> App f arg) = arg : argsAndSubterms f argsAndSubterms h = case unwrapTermF h of Lambda _ t1 _ | not doBinders -> [t1] Pi _ t1 _ | not doBinders -> [t1] Constant{} -> [] FTermF (Primitive _) -> [] FTermF (DataTypeApp _ ps xs) -> ps ++ xs FTermF (CtorApp _ ps xs) -> ps ++ xs FTermF (RecursorType _ ps m _) -> ps ++ [m] FTermF (Recursor crec) -> recursorParams crec ++ [recursorMotive crec] ++ map fst (Map.elems (recursorElims crec)) tf -> Fold.toList tf -- | Return true if the printing of the given term should be memoized; we do not -- want to memoize the printing of terms that are "too small" shouldMemoizeTerm :: Term -> Bool shouldMemoizeTerm t = case unwrapTermF t of FTermF Primitive{} -> False FTermF UnitValue -> False FTermF UnitType -> False FTermF (CtorApp _ [] []) -> False FTermF (DataTypeApp _ [] []) -> False FTermF Sort{} -> False FTermF NatLit{} -> False FTermF (ArrayValue _ v) | V.length v == 0 -> False FTermF StringLit{} -> False FTermF ExtCns{} -> False LocalVar{} -> False _ -> True -- | Compute a memoization table for a term, and pretty-print the term using the -- table to memoize the printing. Also print the table itself as a sequence of -- let-bindings for the entries in the memoization table. If the flag is true, -- compute a global table, otherwise compute a local table. 
ppTermWithMemoTable :: Prec -> Bool -> Term -> PPM SawDoc ppTermWithMemoTable prec global_p trm = do min_occs <- ppMinSharing <$> ppOpts <$> ask ppLets (occ_map_elems min_occs) [] where -- Generate an occurrence map for trm, filtering out terms that only occur -- once, that are "too small" to memoize, and, for the global table, terms -- that are not closed occ_map_elems min_occs = IntMap.assocs $ IntMap.filter (\(t,cnt) -> cnt >= min_occs && shouldMemoizeTerm t && (if global_p then looseVars t == emptyBitSet else True)) $ scTermCount global_p trm -- For each (TermIndex, Term) pair in the occurrence map, pretty-print the -- Term and then add it to the memoization table of subsequent printing. The -- pretty-printing of these terms is reverse-accumulated in the second -- list. Finally, print trm with a let-binding for the bound terms. ppLets :: [(TermIndex, (Term, Int))] -> [(MemoVar, SawDoc)] -> PPM SawDoc -- Special case: don't print let-binding if there are no bound vars ppLets [] [] = ppTerm' prec trm -- When we have run out of (idx,term) pairs, pretty-print a let binding for -- all the accumulated bindings around the term ppLets [] bindings = ppLetBlock (reverse bindings) <$> ppTerm' prec trm -- To add an (idx,term) pair, first check if idx is already bound, and, if -- not, add a new MemoVar bind it to idx ppLets ((idx, (t_rhs,_)):idxs) bindings = do isBound <- isJust <$> memoLookupM idx if isBound then ppLets idxs bindings else do doc_rhs <- ppTerm' prec t_rhs withMemoVar global_p idx $ \memo_var -> ppLets idxs ((memo_var, doc_rhs):bindings) -- | Pretty-print a term inside a binder for a variable of the given name, -- returning both the result of pretty-printing and the fresh name actually used -- for the newly bound variable. If the variable occurs in the term, then do not -- use an underscore for it, and use "_x" instead. -- -- Also, pretty-print let-bindings around the term for all subterms that occur -- more than once at the same binding level. 
ppTermInBinder :: Prec -> LocalName -> Term -> PPM (LocalName, SawDoc) ppTermInBinder prec basename trm = let nm = if basename == "_" && inBitSet 0 (looseVars trm) then "_x" else basename in withBoundVarM nm $ ppTermWithMemoTable prec False trm -- | Run a pretty-printing computation inside a context that binds zero or more -- variables, returning the result of the computation and also the -- pretty-printing of the context. Note: we do not use any local memoization -- tables for the inner computation; the justification is that this function is -- only used for printing datatypes, which we assume are not very big. ppWithBoundCtx :: [(LocalName, Term)] -> PPM a -> PPM (SawDoc, a) ppWithBoundCtx [] m = (mempty ,) <$> m ppWithBoundCtx ((x,tp):ctx) m = (\tp_d (x', (ctx_d, ret)) -> (parens (ppTypeConstraint (pretty x') tp_d) <+> ctx_d, ret)) <$> ppTerm' PrecTerm tp <*> withBoundVarM x (ppWithBoundCtx ctx m) -- | Pretty-print a term, also adding let-bindings for all subterms that occur -- more than once at the same binding level ppTerm :: PPOpts -> Term -> SawDoc ppTerm opts = ppTermWithNames opts emptySAWNamingEnv -- | Pretty-print a term, but only to a maximum depth ppTermDepth :: Int -> Term -> SawDoc ppTermDepth depth t = ppTerm (depthPPOpts depth) t -- | Like 'ppTerm', but also supply a context of bound names, where the most -- recently-bound variable is listed first in the context ppTermInCtx :: PPOpts -> [LocalName] -> Term -> SawDoc ppTermInCtx opts ctx trm = runPPM opts emptySAWNamingEnv $ flip (Fold.foldl' (\m x -> snd <$> withBoundVarM x m)) ctx $ ppTermWithMemoTable PrecTerm True trm renderSawDoc :: PPOpts -> SawDoc -> String renderSawDoc ppOpts doc = Text.Lazy.unpack (renderLazy (style (layoutPretty layoutOpts doc))) where layoutOpts = LayoutOptions (AvailablePerLine 80 0.8) style = if ppColor ppOpts then reAnnotateS colorStyle else unAnnotateS -- | Pretty-print a term and render it to a string, using the given options scPrettyTerm :: PPOpts -> Term -> String 
scPrettyTerm opts t = renderSawDoc opts $ ppTerm opts t -- | Like 'scPrettyTerm', but also supply a context of bound names, where the -- most recently-bound variable is listed first in the context scPrettyTermInCtx :: PPOpts -> [LocalName] -> Term -> String scPrettyTermInCtx opts ctx trm = renderSawDoc opts $ runPPM opts emptySAWNamingEnv $ flip (Fold.foldl' (\m x -> snd <$> withBoundVarM x m)) ctx $ ppTermWithMemoTable PrecTerm False trm -- | Pretty-print a term and render it to a string showTerm :: Term -> String showTerm t = scPrettyTerm defaultPPOpts t -------------------------------------------------------------------------------- -- * Pretty-printers with naming environments -------------------------------------------------------------------------------- -- | Pretty-print a term, also adding let-bindings for all subterms that occur -- more than once at the same binding level ppTermWithNames :: PPOpts -> SAWNamingEnv -> Term -> SawDoc ppTermWithNames opts ne trm = runPPM opts ne $ ppTermWithMemoTable PrecTerm True trm showTermWithNames :: PPOpts -> SAWNamingEnv -> Term -> String showTermWithNames opts ne trm = renderSawDoc opts $ ppTermWithNames opts ne trm -------------------------------------------------------------------------------- -- * Pretty-printers for Modules and Top-level Constructs -------------------------------------------------------------------------------- -- | Datatype for representing modules in pretty-printer land. We do not want to -- make the pretty-printer dependent on @Verifier.SAW.Module@, so we instead -- have that module translate to this representation. 
-- | A module in pretty-printer land: its imports and its declarations.
data PPModule = PPModule [ModuleName] [PPDecl]

-- | Top-level declarations in pretty-printer land.
data PPDecl
  = PPTypeDecl Ident [(LocalName, Term)] [(LocalName, Term)] Sort [(Ident, Term)]
  | PPDefDecl Ident Term (Maybe Term)
  | PPInjectCode Text.Text Text.Text

-- | Pretty-print a 'PPModule'
ppPPModule :: PPOpts -> PPModule -> SawDoc
ppPPModule opts (PPModule importNames decls) =
  vcat (map (<> line) (importDocs ++ declDocs))
  where
    -- One doc per import line.
    importDocs = map ppImport importNames
    -- One doc per declaration, each rendered in a fresh printing context.
    declDocs = map (runPPM opts emptySAWNamingEnv . ppDecl) decls

    ppImport nm = pretty $ "import " ++ show nm

    ppDecl (PPTypeDecl dtName dtParams dtIndices dtSort dtCtors) =
      ppDataType dtName <$> ppWithBoundCtx dtParams
        ((,) <$> ppWithBoundCtx dtIndices (return $ viaShow dtSort)
             <*> mapM ppCtor dtCtors)
      where
        ppCtor (ctorName, ctorType) =
          ppTypeConstraint (ppIdent ctorName) <$> ppTerm' PrecTerm ctorType
    ppDecl (PPDefDecl defIdent defType defBody) =
      ppDef (ppIdent defIdent)
        <$> ppTerm' PrecTerm defType
        -- 'traverse' prints the body when present and passes 'Nothing' through.
        <*> traverse (ppTerm' PrecTerm) defBody
    ppDecl (PPInjectCode ns text) =
      pure (pretty ("injectCode" :: Text.Text) <+> viaShow ns <+> viaShow text)
GaloisInc/saw-script
saw-core/src/Verifier/SAW/Term/Pretty.hs
Haskell
bsd-3-clause
29,774
{-# LANGUAGE OverloadedStrings #-} module Main where import Imo.App import Network.Wai.Middleware.Gzip (gzip, def) import Network.Wai.Handler.Warp (run, Port) import System.Environment (getEnvironment) import Control.Monad (liftM) main :: IO () main = do port <- getPort run port $ gzip def imoApp getPort :: IO Port getPort = liftM getPort' getEnvironment where getPort' = maybe defaultPort read . lookup "PORT" defaultPort :: Port defaultPort = 3000
IMOKURI/wai-example-using-buildpack-stack
src/Main.hs
Haskell
bsd-3-clause
468
{-| Module : Data.Number.MPFR.Assignment Description : wrappers for assignment functions Copyright : (c) Aleš Bizjak License : BSD3 Maintainer : ales.bizjak0@gmail.com Stability : experimental Portability : non-portable Conversion from basic Haskell types to MPFR. See <http://www.mpfr.org/mpfr-current/mpfr.html#Assignment-Functions> for documentation on particular functions. -} {-# INCLUDE <mpfr.h> #-} {-# INCLUDE <chsmpfr.h> #-} module Data.Number.MPFR.Assignment where import Data.Number.MPFR.Internal import Data.Number.MPFR.Arithmetic set :: RoundMode -> Precision -> MPFR -> MPFR set r p = fst . set_ r p set_ :: RoundMode -> Precision -> MPFR -> (MPFR, Int) set_ r p mp1 = unsafePerformIO go where go = withDummy p $ \p1 -> with mp1 $ \p2 -> mpfr_set p1 p2 ((fromIntegral . fromEnum) r) fromWord :: RoundMode -> Precision -> Word -> MPFR fromWord r p = fst . fromWord_ r p fromInt :: RoundMode -> Precision -> Int -> MPFR fromInt r p = fst . fromInt_ r p fromDouble :: RoundMode -> Precision -> Double -> MPFR fromDouble r p = fst . fromDouble_ r p fromWord_ :: RoundMode -> Precision -> Word -> (MPFR, Int) fromWord_ r p d = unsafePerformIO go where go = withDummy p $ \p1 -> mpfr_set_ui p1 (fromIntegral d) ((fromIntegral . fromEnum) r) fromInt_ :: RoundMode -> Precision -> Int -> (MPFR, Int) fromInt_ r p d = unsafePerformIO go where go = withDummy p $ \p1 -> mpfr_set_si p1 (fromIntegral d) ((fromIntegral . fromEnum) r) fromDouble_ :: RoundMode -> Precision -> Double -> (MPFR, Int) fromDouble_ r p d = unsafePerformIO go where go = withDummy p $ \p1 -> mpfr_set_d p1 (realToFrac d) ((fromIntegral . fromEnum) r) -- | x * 2 ^ y int2w :: RoundMode -> Precision -> Word -> Int -> MPFR int2w r p i = fst . int2w_ r p i -- | x * 2 ^ y int2i :: RoundMode -> Precision -> Int -> Int -> MPFR int2i r p i = fst . 
int2i_ r p i int2w_ :: RoundMode -> Precision -> Word -> Int -> (MPFR, Int) int2w_ r p i e = unsafePerformIO go where go = withDummy p $ \p1 -> mpfr_set_ui_2exp p1 (fromIntegral i) (fromIntegral e) ((fromIntegral . fromEnum) r) int2i_ :: RoundMode -> Precision -> Int -> Int -> (MPFR, Int) int2i_ r p i e = unsafePerformIO go where go = withDummy p $ \p1 -> mpfr_set_si_2exp p1 (fromIntegral i) (fromIntegral e) ((fromIntegral . fromEnum) r) stringToMPFR :: RoundMode -> Precision -> Word -- ^ Base -> String -> MPFR stringToMPFR r p b = fst . stringToMPFR_ r p b stringToMPFR_ :: RoundMode -> Precision -> Word -- ^ Base -> String -> (MPFR, Int) stringToMPFR_ r p b d = unsafePerformIO go where go = withDummy p $ \p1 -> withCString d $ \p2 -> mpfr_set_str p1 p2 (fromIntegral b) ((fromIntegral . fromEnum) r) strtofr :: RoundMode -> Precision -> Word -- ^ base -> String -> (MPFR, String) strtofr r p b d = case strtofr_ r p b d of (a, b', _) -> (a,b') strtofr_ :: RoundMode -> Precision -> Word -- ^ base -> String -> (MPFR, String, Int) strtofr_ r p b d = unsafePerformIO go where go = do ls <- mpfr_custom_get_size (fromIntegral p) fp <- mallocForeignPtrBytes (fromIntegral ls) alloca $ \p1 -> do pokeDummy p1 fp p withCString d $ \p2 -> alloca $ \p3 -> do r3 <- mpfr_strtofr p1 p2 p3 (fromIntegral b) ((fromIntegral . 
fromEnum) r) p3' <- peek p3 r2 <- peekCString p3' r1 <- peekP p1 fp return (r1, r2, fromIntegral r3) setInf :: Precision -> Int -> MPFR setInf p i = unsafePerformIO go where go = do ls <- mpfr_custom_get_size (fromIntegral p) fp <- mallocForeignPtrBytes (fromIntegral ls) alloca $ \p1 -> do pokeDummy p1 fp p mpfr_set_inf p1 (fromIntegral i) peekP p1 fp setNaN :: Precision -> MPFR setNaN p = unsafePerformIO go where go = do ls <- mpfr_custom_get_size (fromIntegral p) fp <- mallocForeignPtrBytes (fromIntegral ls) alloca $ \p1 -> do pokeDummy p1 fp p mpfr_set_nan p1 peekP p1 fp fromIntegerA :: RoundMode -> Precision -> Integer -> MPFR fromIntegerA r p = stringToMPFR r p 10 . show compose :: RoundMode -> Precision -> (Integer, Int) -> MPFR compose r p (i, ii) = div2i r p (fromIntegerA r p i) ii -- | 'stringToMPFR' with default rounding to Near. fromString :: String -> Precision -> Word -> MPFR fromString s p b = stringToMPFR Near p b s
ekmett/hmpfr
src/Data/Number/MPFR/Assignment.hs
Haskell
bsd-3-clause
5,290
-- | RGBA module Graphics.FreetypeGL.RGBA ( RGBA(..), noColor, toVec4, withVec ) where import Bindings.FreetypeGL.Vec234 (C'vec4(..)) import Foreign.Marshal.Alloc (alloca) import Foreign.Ptr (Ptr) import Foreign.Storable (Storable(..)) data RGBA = RGBA !Float !Float !Float !Float deriving (Eq, Ord, Read, Show) noColor :: RGBA noColor = RGBA 0.0 0.0 0.0 0.0 toVec4 :: RGBA -> C'vec4 toVec4 (RGBA r g b a) = C'vec4 (realToFrac r) (realToFrac g) (realToFrac b) (realToFrac a) withVec :: RGBA -> (Ptr C'vec4 -> IO a) -> IO a withVec rgba act = alloca $ \vec4 -> do poke vec4 (toVec4 rgba) act vec4
Peaker/FreeTypeGL
src/Graphics/FreetypeGL/RGBA.hs
Haskell
bsd-3-clause
658
{-# LANGUAGE Haskell2010 #-} {-# LINE 1 "dist/dist-sandbox-261cd265/build/System/Posix/Process.hs" #-} {-# LINE 1 "System/Posix/Process.hsc" #-} {-# LINE 2 "System/Posix/Process.hsc" #-} {-# LANGUAGE Safe #-} {-# LINE 6 "System/Posix/Process.hsc" #-} ----------------------------------------------------------------------------- -- | -- Module : System.Posix.Process -- Copyright : (c) The University of Glasgow 2002 -- License : BSD-style (see the file libraries/base/LICENSE) -- -- Maintainer : libraries@haskell.org -- Stability : provisional -- Portability : non-portable (requires POSIX) -- -- POSIX process support. See also the System.Cmd and System.Process -- modules in the process package. -- ----------------------------------------------------------------------------- module System.Posix.Process ( -- * Processes -- ** Forking and executing forkProcess, forkProcessWithUnmask, executeFile, -- ** Exiting exitImmediately, -- ** Process environment getProcessID, getParentProcessID, -- ** Process groups getProcessGroupID, getProcessGroupIDOf, createProcessGroupFor, joinProcessGroup, setProcessGroupIDOf, -- ** Sessions createSession, -- ** Process times ProcessTimes(..), getProcessTimes, -- ** Scheduling priority nice, getProcessPriority, getProcessGroupPriority, getUserPriority, setProcessPriority, setProcessGroupPriority, setUserPriority, -- ** Process status ProcessStatus(..), getProcessStatus, getAnyProcessStatus, getGroupProcessStatus, -- ** Deprecated createProcessGroup, setProcessGroupID, ) where {-# LINE 72 "System/Posix/Process.hsc" #-} import Foreign import Foreign.C import System.Posix.Process.Internals import System.Posix.Process.Common import System.Posix.Internals ( withFilePath ) -- | @'executeFile' cmd args env@ calls one of the -- @execv*@ family, depending on whether or not the current -- PATH is to be searched for the command, and whether or not an -- environment is provided to supersede the process's current -- environment. 
The basename (leading directory names suppressed) of -- the command is passed to @execv*@ as @arg[0]@; -- the argument list passed to 'executeFile' therefore -- begins with @arg[1]@. executeFile :: FilePath -- ^ Command -> Bool -- ^ Search PATH? -> [String] -- ^ Arguments -> Maybe [(String, String)] -- ^ Environment -> IO a executeFile path search args Nothing = do withFilePath path $ \s -> withMany withFilePath (path:args) $ \cstrs -> withArray0 nullPtr cstrs $ \arr -> do pPrPr_disableITimers if search then throwErrnoPathIfMinus1_ "executeFile" path (c_execvp s arr) else throwErrnoPathIfMinus1_ "executeFile" path (c_execv s arr) return undefined -- never reached executeFile path search args (Just env) = do withFilePath path $ \s -> withMany withFilePath (path:args) $ \cstrs -> withArray0 nullPtr cstrs $ \arg_arr -> let env' = map (\ (name, val) -> name ++ ('=' : val)) env in withMany withFilePath env' $ \cenv -> withArray0 nullPtr cenv $ \env_arr -> do pPrPr_disableITimers if search then throwErrnoPathIfMinus1_ "executeFile" path (c_execvpe s arg_arr env_arr) else throwErrnoPathIfMinus1_ "executeFile" path (c_execve s arg_arr env_arr) return undefined -- never reached foreign import ccall unsafe "execvp" c_execvp :: CString -> Ptr CString -> IO CInt foreign import ccall unsafe "execv" c_execv :: CString -> Ptr CString -> IO CInt foreign import ccall unsafe "execve" c_execve :: CString -> Ptr CString -> Ptr CString -> IO CInt
phischu/fragnix
tests/packages/scotty/System.Posix.Process.hs
Haskell
bsd-3-clause
3,920
{-# LANGUAGE Haskell2010 #-} {-# LINE 1 "Control/DeepSeq.hs" #-} {-# LANGUAGE BangPatterns #-} {-# LANGUAGE CPP #-} {-# LANGUAGE DefaultSignatures #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE TypeOperators #-} {-# LANGUAGE Safe #-} {-# LANGUAGE PolyKinds #-} ----------------------------------------------------------------------------- -- | -- Module : Control.DeepSeq -- Copyright : (c) The University of Glasgow 2001-2009 -- License : BSD-style (see the file LICENSE) -- -- Maintainer : libraries@haskell.org -- Stability : stable -- Portability : portable -- -- This module provides an overloaded function, 'deepseq', for fully -- evaluating data structures (that is, evaluating to \"Normal Form\"). -- -- A typical use is to prevent resource leaks in lazy IO programs, by -- forcing all characters from a file to be read. For example: -- -- > import System.IO -- > import Control.DeepSeq -- > -- > main = do -- > h <- openFile "f" ReadMode -- > s <- hGetContents h -- > s `deepseq` hClose h -- > return s -- -- 'deepseq' differs from 'seq' as it traverses data structures deeply, -- for example, 'seq' will evaluate only to the first constructor in -- the list: -- -- > > [1,2,undefined] `seq` 3 -- > 3 -- -- While 'deepseq' will force evaluation of all the list elements: -- -- > > [1,2,undefined] `deepseq` 3 -- > *** Exception: Prelude.undefined -- -- Another common use is to ensure any exceptions hidden within lazy -- fields of a data structure do not leak outside the scope of the -- exception handler, or to force evaluation of a data structure in one -- thread, before passing to another thread (preventing work moving to -- the wrong threads). 
-- -- @since 1.1.0.0 module Control.DeepSeq ( deepseq, ($!!), force, NFData(..), ) where import Control.Applicative import Control.Concurrent ( ThreadId, MVar ) import Data.IORef import Data.STRef import Data.Int import Data.Word import Data.Ratio import Data.Complex import Data.Array import Data.Fixed import Data.Version import Data.Monoid as Mon import Data.Unique ( Unique ) import Foreign.Ptr import Foreign.C.Types import System.Exit ( ExitCode(..) ) import System.Mem.StableName ( StableName ) import Data.Ord ( Down(Down) ) import Data.Proxy ( Proxy(Proxy) ) import Data.Functor.Identity ( Identity(..) ) import Data.Typeable ( TypeRep, TyCon, rnfTypeRep, rnfTyCon ) import Data.Void ( Void, absurd ) import Numeric.Natural ( Natural ) import Data.List.NonEmpty ( NonEmpty (..) ) import Data.Semigroup as Semi import GHC.Stack.Types ( CallStack(..), SrcLoc(..) ) import GHC.Fingerprint.Type ( Fingerprint(..) ) import GHC.Generics -- | Hidden internal type-class class GNFData f where grnf :: f a -> () instance GNFData V1 where grnf = error "Control.DeepSeq.rnf: uninhabited type" instance GNFData U1 where grnf U1 = () instance NFData a => GNFData (K1 i a) where grnf = rnf . unK1 {-# INLINEABLE grnf #-} instance GNFData a => GNFData (M1 i c a) where grnf = grnf . unM1 {-# INLINEABLE grnf #-} instance (GNFData a, GNFData b) => GNFData (a :*: b) where grnf (x :*: y) = grnf x `seq` grnf y {-# INLINEABLE grnf #-} instance (GNFData a, GNFData b) => GNFData (a :+: b) where grnf (L1 x) = grnf x grnf (R1 x) = grnf x {-# INLINEABLE grnf #-} infixr 0 $!! -- | 'deepseq': fully evaluates the first argument, before returning the -- second. -- -- The name 'deepseq' is used to illustrate the relationship to 'seq': -- where 'seq' is shallow in the sense that it only evaluates the top -- level of its argument, 'deepseq' traverses the entire data structure -- evaluating it completely. 
-- -- 'deepseq' can be useful for forcing pending exceptions, -- eradicating space leaks, or forcing lazy I/O to happen. It is -- also useful in conjunction with parallel Strategies (see the -- @parallel@ package). -- -- There is no guarantee about the ordering of evaluation. The -- implementation may evaluate the components of the structure in -- any order or in parallel. To impose an actual order on -- evaluation, use 'pseq' from "Control.Parallel" in the -- @parallel@ package. -- -- @since 1.1.0.0 deepseq :: NFData a => a -> b -> b deepseq a b = rnf a `seq` b -- | the deep analogue of '$!'. In the expression @f $!! x@, @x@ is -- fully evaluated before the function @f@ is applied to it. -- -- @since 1.2.0.0 ($!!) :: (NFData a) => (a -> b) -> a -> b f $!! x = x `deepseq` f x -- | a variant of 'deepseq' that is useful in some circumstances: -- -- > force x = x `deepseq` x -- -- @force x@ fully evaluates @x@, and then returns it. Note that -- @force x@ only performs evaluation when the value of @force x@ -- itself is demanded, so essentially it turns shallow evaluation into -- deep evaluation. -- -- 'force' can be conveniently used in combination with @ViewPatterns@: -- -- > {-# LANGUAGE BangPatterns, ViewPatterns #-} -- > import Control.DeepSeq -- > -- > someFun :: ComplexData -> SomeResult -- > someFun (force -> !arg) = {- 'arg' will be fully evaluated -} -- -- Another useful application is to combine 'force' with -- 'Control.Exception.evaluate' in order to force deep evaluation -- relative to other 'IO' operations: -- -- > import Control.Exception (evaluate) -- > import Control.DeepSeq -- > -- > main = do -- > result <- evaluate $ force $ pureComputation -- > {- 'result' will be fully evaluated at this point -} -- > return () -- -- @since 1.2.0.0 force :: (NFData a) => a -> a force x = x `deepseq` x -- | A class of types that can be fully evaluated. 
-- -- @since 1.1.0.0 class NFData a where -- | 'rnf' should reduce its argument to normal form (that is, fully -- evaluate all sub-components), and then return '()'. -- -- === 'Generic' 'NFData' deriving -- -- Starting with GHC 7.2, you can automatically derive instances -- for types possessing a 'Generic' instance. -- -- > {-# LANGUAGE DeriveGeneric #-} -- > -- > import GHC.Generics (Generic) -- > import Control.DeepSeq -- > -- > data Foo a = Foo a String -- > deriving (Eq, Generic) -- > -- > instance NFData a => NFData (Foo a) -- > -- > data Colour = Red | Green | Blue -- > deriving Generic -- > -- > instance NFData Colour -- -- Starting with GHC 7.10, the example above can be written more -- concisely by enabling the new @DeriveAnyClass@ extension: -- -- > {-# LANGUAGE DeriveGeneric, DeriveAnyClass #-} -- > -- > import GHC.Generics (Generic) -- > import Control.DeepSeq -- > -- > data Foo a = Foo a String -- > deriving (Eq, Generic, NFData) -- > -- > data Colour = Red | Green | Blue -- > deriving (Generic, NFData) -- > -- -- === Compatibility with previous @deepseq@ versions -- -- Prior to version 1.4.0.0, the default implementation of the 'rnf' -- method was defined as -- -- @'rnf' a = 'seq' a ()@ -- -- However, starting with @deepseq-1.4.0.0@, the default -- implementation is based on @DefaultSignatures@ allowing for -- more accurate auto-derived 'NFData' instances. If you need the -- previously used exact default 'rnf' method implementation -- semantics, use -- -- > instance NFData Colour where rnf x = seq x () -- -- or alternatively -- -- > {-# LANGUAGE BangPatterns #-} -- > instance NFData Colour where rnf !_ = () -- rnf :: a -> () default rnf :: (Generic a, GNFData (Rep a)) => a -> () rnf = grnf . 
from instance NFData Int where rnf !_ = () instance NFData Word where rnf !_ = () instance NFData Integer where rnf !_ = () instance NFData Float where rnf !_ = () instance NFData Double where rnf !_ = () instance NFData Char where rnf !_ = () instance NFData Bool where rnf !_ = () instance NFData () where rnf !_ = () instance NFData Int8 where rnf !_ = () instance NFData Int16 where rnf !_ = () instance NFData Int32 where rnf !_ = () instance NFData Int64 where rnf !_ = () instance NFData Word8 where rnf !_ = () instance NFData Word16 where rnf !_ = () instance NFData Word32 where rnf !_ = () instance NFData Word64 where rnf !_ = () -- |@since 1.4.0.0 instance NFData (Proxy a) where rnf Proxy = () -- |@since 1.4.0.0 instance NFData a => NFData (Identity a) where rnf = rnf . runIdentity -- | Defined as @'rnf' = 'absurd'@. -- -- @since 1.4.0.0 instance NFData Void where rnf = absurd -- |@since 1.4.0.0 instance NFData Natural where rnf !_ = () -- |@since 1.3.0.0 instance NFData (Fixed a) where rnf !_ = () -- |This instance is for convenience and consistency with 'seq'. -- This assumes that WHNF is equivalent to NF for functions. -- -- @since 1.3.0.0 instance NFData (a -> b) where rnf !_ = () --Rational and complex numbers. instance NFData a => NFData (Ratio a) where rnf x = rnf (numerator x, denominator x) instance (NFData a) => NFData (Complex a) where rnf (x:+y) = rnf x `seq` rnf y `seq` () instance NFData a => NFData (Maybe a) where rnf Nothing = () rnf (Just x) = rnf x instance (NFData a, NFData b) => NFData (Either a b) where rnf (Left x) = rnf x rnf (Right y) = rnf y -- |@since 1.3.0.0 instance NFData Data.Version.Version where rnf (Data.Version.Version branch tags) = rnf branch `seq` rnf tags instance NFData a => NFData [a] where rnf [] = () rnf (x:xs) = rnf x `seq` rnf xs -- |@since 1.4.0.0 instance NFData a => NFData (ZipList a) where rnf = rnf . getZipList -- |@since 1.4.0.0 instance NFData a => NFData (Const a b) where rnf = rnf . 
getConst instance (NFData a, NFData b) => NFData (Array a b) where rnf x = rnf (bounds x, Data.Array.elems x) -- |@since 1.4.0.0 instance NFData a => NFData (Down a) where rnf (Down x) = rnf x -- |@since 1.4.0.0 instance NFData a => NFData (Dual a) where rnf = rnf . getDual -- |@since 1.4.0.0 instance NFData a => NFData (Mon.First a) where rnf = rnf . Mon.getFirst -- |@since 1.4.0.0 instance NFData a => NFData (Mon.Last a) where rnf = rnf . Mon.getLast -- |@since 1.4.0.0 instance NFData Any where rnf = rnf . getAny -- |@since 1.4.0.0 instance NFData All where rnf = rnf . getAll -- |@since 1.4.0.0 instance NFData a => NFData (Sum a) where rnf = rnf . getSum -- |@since 1.4.0.0 instance NFData a => NFData (Product a) where rnf = rnf . getProduct -- |@since 1.4.0.0 instance NFData (StableName a) where rnf !_ = () -- assumes `data StableName a = StableName (StableName# a)` -- |@since 1.4.0.0 instance NFData ThreadId where rnf !_ = () -- assumes `data ThreadId = ThreadId ThreadId#` -- |@since 1.4.0.0 instance NFData Unique where rnf !_ = () -- assumes `newtype Unique = Unique Integer` -- | __NOTE__: Only defined for @base-4.8.0.0@ and later -- -- @since 1.4.0.0 instance NFData TypeRep where rnf tyrep = rnfTypeRep tyrep -- | __NOTE__: Only defined for @base-4.8.0.0@ and later -- -- @since 1.4.0.0 instance NFData TyCon where rnf tycon = rnfTyCon tycon -- | __NOTE__: Only strict in the reference and not the referenced value. -- -- @since 1.4.2.0 instance NFData (IORef a) where rnf !_ = () -- | __NOTE__: Only strict in the reference and not the referenced value. -- -- @since 1.4.2.0 instance NFData (STRef s a) where rnf !_ = () -- | __NOTE__: Only strict in the reference and not the referenced value. 
-- -- @since 1.4.2.0 instance NFData (MVar a) where rnf !_ = () ---------------------------------------------------------------------------- -- GHC Specifics -- |@since 1.4.0.0 instance NFData Fingerprint where rnf (Fingerprint _ _) = () ---------------------------------------------------------------------------- -- Foreign.Ptr -- |@since 1.4.2.0 instance NFData (Ptr a) where rnf !_ = () -- |@since 1.4.2.0 instance NFData (FunPtr a) where rnf !_ = () ---------------------------------------------------------------------------- -- Foreign.C.Types -- |@since 1.4.0.0 instance NFData CChar where rnf !_ = () -- |@since 1.4.0.0 instance NFData CSChar where rnf !_ = () -- |@since 1.4.0.0 instance NFData CUChar where rnf !_ = () -- |@since 1.4.0.0 instance NFData CShort where rnf !_ = () -- |@since 1.4.0.0 instance NFData CUShort where rnf !_ = () -- |@since 1.4.0.0 instance NFData CInt where rnf !_ = () -- |@since 1.4.0.0 instance NFData CUInt where rnf !_ = () -- |@since 1.4.0.0 instance NFData CLong where rnf !_ = () -- |@since 1.4.0.0 instance NFData CULong where rnf !_ = () -- |@since 1.4.0.0 instance NFData CPtrdiff where rnf !_ = () -- |@since 1.4.0.0 instance NFData CSize where rnf !_ = () -- |@since 1.4.0.0 instance NFData CWchar where rnf !_ = () -- |@since 1.4.0.0 instance NFData CSigAtomic where rnf !_ = () -- |@since 1.4.0.0 instance NFData CLLong where rnf !_ = () -- |@since 1.4.0.0 instance NFData CULLong where rnf !_ = () -- |@since 1.4.0.0 instance NFData CIntPtr where rnf !_ = () -- |@since 1.4.0.0 instance NFData CUIntPtr where rnf !_ = () -- |@since 1.4.0.0 instance NFData CIntMax where rnf !_ = () -- |@since 1.4.0.0 instance NFData CUIntMax where rnf !_ = () -- |@since 1.4.0.0 instance NFData CClock where rnf !_ = () -- |@since 1.4.0.0 instance NFData CTime where rnf !_ = () -- |@since 1.4.0.0 instance NFData CUSeconds where rnf !_ = () -- |@since 1.4.0.0 instance NFData CSUSeconds where rnf !_ = () -- |@since 1.4.0.0 instance NFData CFloat where rnf !_ 
= () -- |@since 1.4.0.0 instance NFData CDouble where rnf !_ = () -- NOTE: The types `CFile`, `CFPos`, and `CJmpBuf` below are not -- newtype wrappers rather defined as field-less single-constructor -- types. -- |@since 1.4.0.0 instance NFData CFile where rnf !_ = () -- |@since 1.4.0.0 instance NFData CFpos where rnf !_ = () -- |@since 1.4.0.0 instance NFData CJmpBuf where rnf !_ = () ---------------------------------------------------------------------------- -- System.Exit -- |@since 1.4.2.0 instance NFData ExitCode where rnf (ExitFailure n) = rnf n rnf ExitSuccess = () ---------------------------------------------------------------------------- -- instances previously provided by semigroups package -- |@since 1.4.2.0 instance NFData a => NFData (NonEmpty a) where rnf (x :| xs) = rnf x `seq` rnf xs -- |@since 1.4.2.0 instance NFData a => NFData (Min a) where rnf (Min a) = rnf a -- |@since 1.4.2.0 instance NFData a => NFData (Max a) where rnf (Max a) = rnf a -- |@since 1.4.2.0 instance (NFData a, NFData b) => NFData (Arg a b) where rnf (Arg a b) = rnf a `seq` rnf b `seq` () -- |@since 1.4.2.0 instance NFData a => NFData (Semi.First a) where rnf (Semi.First a) = rnf a -- |@since 1.4.2.0 instance NFData a => NFData (Semi.Last a) where rnf (Semi.Last a) = rnf a -- |@since 1.4.2.0 instance NFData m => NFData (WrappedMonoid m) where rnf (WrapMonoid a) = rnf a -- |@since 1.4.2.0 instance NFData a => NFData (Option a) where rnf (Option a) = rnf a ---------------------------------------------------------------------------- -- GHC.Stack -- |@since 1.4.2.0 instance NFData SrcLoc where rnf (SrcLoc a b c d e f g) = rnf a `seq` rnf b `seq` rnf c `seq` rnf d `seq` rnf e `seq` rnf f `seq` rnf g -- |@since 1.4.2.0 instance NFData CallStack where rnf EmptyCallStack = () rnf (PushCallStack a b c) = rnf a `seq` rnf b `seq` rnf c rnf (FreezeCallStack a) = rnf a ---------------------------------------------------------------------------- -- Tuples instance (NFData a, NFData b) => 
NFData (a,b) where rnf (x,y) = rnf x `seq` rnf y instance (NFData a, NFData b, NFData c) => NFData (a,b,c) where rnf (x,y,z) = rnf x `seq` rnf y `seq` rnf z instance (NFData a, NFData b, NFData c, NFData d) => NFData (a,b,c,d) where rnf (x1,x2,x3,x4) = rnf x1 `seq` rnf x2 `seq` rnf x3 `seq` rnf x4 instance (NFData a1, NFData a2, NFData a3, NFData a4, NFData a5) => NFData (a1, a2, a3, a4, a5) where rnf (x1, x2, x3, x4, x5) = rnf x1 `seq` rnf x2 `seq` rnf x3 `seq` rnf x4 `seq` rnf x5 instance (NFData a1, NFData a2, NFData a3, NFData a4, NFData a5, NFData a6) => NFData (a1, a2, a3, a4, a5, a6) where rnf (x1, x2, x3, x4, x5, x6) = rnf x1 `seq` rnf x2 `seq` rnf x3 `seq` rnf x4 `seq` rnf x5 `seq` rnf x6 instance (NFData a1, NFData a2, NFData a3, NFData a4, NFData a5, NFData a6, NFData a7) => NFData (a1, a2, a3, a4, a5, a6, a7) where rnf (x1, x2, x3, x4, x5, x6, x7) = rnf x1 `seq` rnf x2 `seq` rnf x3 `seq` rnf x4 `seq` rnf x5 `seq` rnf x6 `seq` rnf x7 instance (NFData a1, NFData a2, NFData a3, NFData a4, NFData a5, NFData a6, NFData a7, NFData a8) => NFData (a1, a2, a3, a4, a5, a6, a7, a8) where rnf (x1, x2, x3, x4, x5, x6, x7, x8) = rnf x1 `seq` rnf x2 `seq` rnf x3 `seq` rnf x4 `seq` rnf x5 `seq` rnf x6 `seq` rnf x7 `seq` rnf x8 instance (NFData a1, NFData a2, NFData a3, NFData a4, NFData a5, NFData a6, NFData a7, NFData a8, NFData a9) => NFData (a1, a2, a3, a4, a5, a6, a7, a8, a9) where rnf (x1, x2, x3, x4, x5, x6, x7, x8, x9) = rnf x1 `seq` rnf x2 `seq` rnf x3 `seq` rnf x4 `seq` rnf x5 `seq` rnf x6 `seq` rnf x7 `seq` rnf x8 `seq` rnf x9
phischu/fragnix
tests/packages/scotty/Control.DeepSeq.hs
Haskell
bsd-3-clause
17,982
{-# LANGUAGE Haskell98 #-} {-# LINE 1 "Network/Wai/Handler/Warp/Request.hs" #-} {-# LANGUAGE BangPatterns #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE CPP #-} {-# LANGUAGE DeriveDataTypeable #-} {-# OPTIONS_GHC -fno-warn-deprecations #-} module Network.Wai.Handler.Warp.Request ( recvRequest , headerLines , pauseTimeoutKey , getFileInfoKey , NoKeepAliveRequest (..) ) where import qualified Control.Concurrent as Conc (yield) import Control.Exception (throwIO, Exception) import Data.Array ((!)) import Data.ByteString (ByteString) import Data.Typeable (Typeable) import qualified Data.ByteString as S import qualified Data.ByteString.Unsafe as SU import qualified Data.CaseInsensitive as CI import qualified Data.IORef as I import qualified Network.HTTP.Types as H import Network.Socket (SockAddr) import Network.Wai import Network.Wai.Handler.Warp.Conduit import Network.Wai.Handler.Warp.FileInfoCache import Network.Wai.Handler.Warp.HashMap (hashByteString) import Network.Wai.Handler.Warp.Header import Network.Wai.Handler.Warp.ReadInt import Network.Wai.Handler.Warp.RequestHeader import Network.Wai.Handler.Warp.Settings (Settings, settingsNoParsePath) import qualified Network.Wai.Handler.Warp.Timeout as Timeout import Network.Wai.Handler.Warp.Types import Network.Wai.Internal import Prelude hiding (lines) import Control.Monad (when) import qualified Data.Vault.Lazy as Vault import System.IO.Unsafe (unsafePerformIO) ---------------------------------------------------------------- -- FIXME come up with good values here maxTotalHeaderLength :: Int maxTotalHeaderLength = 50 * 1024 ---------------------------------------------------------------- -- | Receiving a HTTP request from 'Connection' and parsing its header -- to create 'Request'. recvRequest :: Bool -- ^ first request on this connection? -> Settings -> Connection -> InternalInfo1 -> SockAddr -- ^ Peer's address. -> Source -- ^ Where HTTP request comes from. 
-> IO (Request ,Maybe (I.IORef Int) ,IndexedHeader ,IO ByteString ,InternalInfo) -- ^ -- 'Request' passed to 'Application', -- how many bytes remain to be consumed, if known -- 'IndexedHeader' of HTTP request for internal use, -- Body producing action used for flushing the request body recvRequest firstRequest settings conn ii1 addr src = do hdrlines <- headerLines firstRequest src (method, unparsedPath, path, query, httpversion, hdr) <- parseHeaderLines hdrlines let idxhdr = indexRequestHeader hdr expect = idxhdr ! fromEnum ReqExpect cl = idxhdr ! fromEnum ReqContentLength te = idxhdr ! fromEnum ReqTransferEncoding handle100Continue = handleExpect conn httpversion expect rawPath = if settingsNoParsePath settings then unparsedPath else path h = hashByteString rawPath ii = toInternalInfo ii1 h th = threadHandle ii vaultValue = Vault.insert pauseTimeoutKey (Timeout.pause th) $ Vault.insert getFileInfoKey (getFileInfo ii) Vault.empty (rbody, remainingRef, bodyLength) <- bodyAndSource src cl te -- body producing function which will produce '100-continue', if needed rbody' <- timeoutBody remainingRef th rbody handle100Continue -- body producing function which will never produce 100-continue rbodyFlush <- timeoutBody remainingRef th rbody (return ()) let req = Request { requestMethod = method , httpVersion = httpversion , pathInfo = H.decodePathSegments path , rawPathInfo = rawPath , rawQueryString = query , queryString = H.parseQuery query , requestHeaders = hdr , isSecure = False , remoteHost = addr , requestBody = rbody' , vault = vaultValue , requestBodyLength = bodyLength , requestHeaderHost = idxhdr ! fromEnum ReqHost , requestHeaderRange = idxhdr ! fromEnum ReqRange , requestHeaderReferer = idxhdr ! fromEnum ReqReferer , requestHeaderUserAgent = idxhdr ! 
fromEnum ReqUserAgent } return (req, remainingRef, idxhdr, rbodyFlush, ii) ---------------------------------------------------------------- headerLines :: Bool -> Source -> IO [ByteString] headerLines firstRequest src = do bs <- readSource src if S.null bs -- When we're working on a keep-alive connection and trying to -- get the second or later request, we don't want to treat the -- lack of data as a real exception. See the http1 function in -- the Run module for more details. then if firstRequest then throwIO ConnectionClosedByPeer else throwIO NoKeepAliveRequest else push src (THStatus 0 id id) bs data NoKeepAliveRequest = NoKeepAliveRequest deriving (Show, Typeable) instance Exception NoKeepAliveRequest ---------------------------------------------------------------- handleExpect :: Connection -> H.HttpVersion -> Maybe HeaderValue -> IO () handleExpect conn ver (Just "100-continue") = do connSendAll conn continue Conc.yield where continue | ver == H.http11 = "HTTP/1.1 100 Continue\r\n\r\n" | otherwise = "HTTP/1.0 100 Continue\r\n\r\n" handleExpect _ _ _ = return () ---------------------------------------------------------------- bodyAndSource :: Source -> Maybe HeaderValue -- ^ content length -> Maybe HeaderValue -- ^ transfer-encoding -> IO (IO ByteString ,Maybe (I.IORef Int) ,RequestBodyLength ) bodyAndSource src cl te | chunked = do csrc <- mkCSource src return (readCSource csrc, Nothing, ChunkedBody) | otherwise = do isrc@(ISource _ remaining) <- mkISource src len return (readISource isrc, Just remaining, bodyLen) where len = toLength cl bodyLen = KnownLength $ fromIntegral len chunked = isChunked te toLength :: Maybe HeaderValue -> Int toLength Nothing = 0 toLength (Just bs) = readInt bs isChunked :: Maybe HeaderValue -> Bool isChunked (Just bs) = CI.foldCase bs == "chunked" isChunked _ = False ---------------------------------------------------------------- timeoutBody :: Maybe (I.IORef Int) -- ^ remaining -> Timeout.Handle -> IO ByteString -> IO () -> IO 
(IO ByteString) timeoutBody remainingRef timeoutHandle rbody handle100Continue = do isFirstRef <- I.newIORef True let checkEmpty = case remainingRef of Nothing -> return . S.null Just ref -> \bs -> if S.null bs then return True else do x <- I.readIORef ref return $! x <= 0 return $ do isFirst <- I.readIORef isFirstRef when isFirst $ do -- Only check if we need to produce the 100 Continue status -- when asking for the first chunk of the body handle100Continue -- Timeout handling was paused after receiving the full request -- headers. Now we need to resume it to avoid a slowloris -- attack during request body sending. Timeout.resume timeoutHandle I.writeIORef isFirstRef False bs <- rbody -- As soon as we finish receiving the request body, whether -- because the application is not interested in more bytes, or -- because there is no more data available, pause the timeout -- handler again. isEmpty <- checkEmpty bs when isEmpty (Timeout.pause timeoutHandle) return bs ---------------------------------------------------------------- type BSEndo = ByteString -> ByteString type BSEndoList = [ByteString] -> [ByteString] data THStatus = THStatus {-# UNPACK #-} !Int -- running total byte count BSEndoList -- previously parsed lines BSEndo -- bytestrings to be prepended ---------------------------------------------------------------- {- FIXME close :: Sink ByteString IO a close = throwIO IncompleteHeaders -} push :: Source -> THStatus -> ByteString -> IO [ByteString] push src (THStatus len lines prepend) bs' -- Too many bytes | len > maxTotalHeaderLength = throwIO OverLargeHeader | otherwise = push' mnl where bs = prepend bs' bsLen = S.length bs mnl = do nl <- S.elemIndex 10 bs -- check if there are two more bytes in the bs -- if so, see if the second of those is a horizontal space if bsLen > nl + 1 then let c = S.index bs (nl + 1) b = case nl of 0 -> True 1 -> S.index bs 0 == 13 _ -> False in Just (nl, not b && (c == 32 || c == 9)) else Just (nl, False) {-# INLINE push' #-} 
push' :: Maybe (Int, Bool) -> IO [ByteString] -- No newline find in this chunk. Add it to the prepend, -- update the length, and continue processing. push' Nothing = do bst <- readSource' src when (S.null bst) $ throwIO IncompleteHeaders push src status bst where len' = len + bsLen prepend' = S.append bs status = THStatus len' lines prepend' -- Found a newline, but next line continues as a multiline header push' (Just (end, True)) = push src status rest where rest = S.drop (end + 1) bs prepend' = S.append (SU.unsafeTake (checkCR bs end) bs) len' = len + end status = THStatus len' lines prepend' -- Found a newline at position end. push' (Just (end, False)) -- leftover | S.null line = do when (start < bsLen) $ leftoverSource src (SU.unsafeDrop start bs) return (lines []) -- more headers | otherwise = let len' = len + start lines' = lines . (line:) status = THStatus len' lines' id in if start < bsLen then -- more bytes in this chunk, push again let bs'' = SU.unsafeDrop start bs in push src status bs'' else do -- no more bytes in this chunk, ask for more bst <- readSource' src when (S.null bs) $ throwIO IncompleteHeaders push src status bst where start = end + 1 -- start of next chunk line = SU.unsafeTake (checkCR bs end) bs {-# INLINE checkCR #-} checkCR :: ByteString -> Int -> Int checkCR bs pos = if pos > 0 && 13 == S.index bs p then p else pos -- 13 is CR where !p = pos - 1 pauseTimeoutKey :: Vault.Key (IO ()) pauseTimeoutKey = unsafePerformIO Vault.newKey {-# NOINLINE pauseTimeoutKey #-} getFileInfoKey :: Vault.Key (FilePath -> IO FileInfo) getFileInfoKey = unsafePerformIO Vault.newKey {-# NOINLINE getFileInfoKey #-}
phischu/fragnix
tests/packages/scotty/Network.Wai.Handler.Warp.Request.hs
Haskell
bsd-3-clause
11,536
{-# LANGUAGE MagicHash, UnboxedTuples, TypeFamilies #-}

-- |
-- Module      : Control.Monad.Primitive
-- Copyright   : (c) Roman Leshchinskiy 2009
-- License     : BSD-style
--
-- Maintainer  : Roman Leshchinskiy <rl@cse.unsw.edu.au>
-- Portability : non-portable
--
-- Primitive state-transformer monads
--
module Control.Monad.Primitive (
  PrimMonad(..), RealWorld, primitive_,
  primToPrim, primToIO, primToST,
  unsafePrimToPrim, unsafePrimToIO, unsafePrimToST,
  unsafeInlinePrim, unsafeInlineIO, unsafeInlineST,
  touch
) where

import GHC.Prim ( State#, RealWorld, touch# )
import GHC.Base ( unsafeCoerce#, realWorld# )
#if MIN_VERSION_base(4,2,0)
import GHC.IO ( IO(..) )
#else
-- GHC.IOBase is the pre-base-4.2 home of the IO constructor.
import GHC.IOBase ( IO(..) )
#endif
import GHC.ST ( ST(..) )

-- | Class of primitive state-transformer monads: monads that are thin
-- wrappers around GHC's @State#@ token-passing representation, such as
-- 'IO' and 'ST' (the only two instances defined here).
class Monad m => PrimMonad m where
  -- | State token type
  type PrimState m
  -- | Execute a primitive operation
  primitive :: (State# (PrimState m) -> (# State# (PrimState m), a #)) -> m a
  -- | Expose the internal structure of the monad
  internal :: m a -> State# (PrimState m) -> (# State# (PrimState m), a #)

-- | Execute a primitive operation with no result
primitive_ :: PrimMonad m
              => (State# (PrimState m) -> State# (PrimState m)) -> m ()
{-# INLINE primitive_ #-}
primitive_ f = primitive (\s# -> (# f s#, () #))

instance PrimMonad IO where
  type PrimState IO = RealWorld
  primitive = IO
  internal (IO p) = p
  {-# INLINE primitive #-}
  {-# INLINE internal #-}

instance PrimMonad (ST s) where
  type PrimState (ST s) = s
  primitive = ST
  internal (ST p) = p
  {-# INLINE primitive #-}
  {-# INLINE internal #-}

-- | Convert a 'PrimMonad' to another monad with the same state token.
primToPrim :: (PrimMonad m1, PrimMonad m2, PrimState m1 ~ PrimState m2)
           => m1 a -> m2 a
{-# INLINE primToPrim #-}
primToPrim m = primitive (internal m)

-- | Convert a 'PrimMonad' with a 'RealWorld' state token to 'IO'
primToIO :: (PrimMonad m, PrimState m ~ RealWorld) => m a -> IO a
{-# INLINE primToIO #-}
primToIO = primToPrim

-- | Convert a 'PrimMonad' to 'ST'
primToST :: PrimMonad m => m a -> ST (PrimState m) a
{-# INLINE primToST #-}
primToST = primToPrim

-- | Convert a 'PrimMonad' to another monad with a possibly different state
-- token. This operation is highly unsafe!
unsafePrimToPrim :: (PrimMonad m1, PrimMonad m2) => m1 a -> m2 a
{-# INLINE unsafePrimToPrim #-}
unsafePrimToPrim m = primitive (unsafeCoerce# (internal m))

-- | Convert any 'PrimMonad' to 'ST' with an arbitrary state token. This
-- operation is highly unsafe!
unsafePrimToST :: PrimMonad m => m a -> ST s a
{-# INLINE unsafePrimToST #-}
unsafePrimToST = unsafePrimToPrim

-- | Convert any 'PrimMonad' to 'IO'. This operation is highly unsafe!
unsafePrimToIO :: PrimMonad m => m a -> IO a
{-# INLINE unsafePrimToIO #-}
unsafePrimToIO = unsafePrimToPrim

-- | Run a primitive computation and return its pure result, by routing
-- it through 'unsafeInlineIO'.  Highly unsafe: the effect may be
-- performed at an unpredictable time, several times, or not at all.
unsafeInlinePrim :: PrimMonad m => m a -> a
{-# INLINE unsafeInlinePrim #-}
unsafeInlinePrim m = unsafeInlineIO (unsafePrimToIO m)

-- | Extract the result of an 'IO' action by applying its underlying
-- state transformer directly to 'realWorld#'.  Same caveats as
-- 'unsafeInlinePrim'.
unsafeInlineIO :: IO a -> a
{-# INLINE unsafeInlineIO #-}
unsafeInlineIO m = case internal m realWorld# of (# _, r #) -> r

-- | 'unsafeInlinePrim' restricted to 'ST'.
unsafeInlineST :: ST s a -> a
{-# INLINE unsafeInlineST #-}
unsafeInlineST = unsafeInlinePrim

-- | Keep a value alive up to this point of the computation; a monadic
-- wrapper around the 'touch#' primitive.
touch :: PrimMonad m => a -> m ()
{-# INLINE touch #-}
touch x = unsafePrimToPrim
        $ (primitive (\s -> case touch# x s of { s' -> (# s', () #) }) :: IO ())
rleshchinskiy/primitive
Control/Monad/Primitive.hs
Haskell
bsd-3-clause
3,400
{-# LANGUAGE CPP, BangPatterns #-}

-- | A multimap from 'Int' keys to bags of values, represented as a
-- strict 'IntMap' whose elements are the non-empty collections provided
-- by "Network.Wai.Handler.Warp.Some".
module Network.Wai.Handler.Warp.MultiMap (
    MMap
  , isEmpty
  , empty
  , singleton
  , insert
  , search
  , searchWith
  , pruneWith
  , toList
  , merge
  ) where

#if __GLASGOW_HASKELL__ < 709
import Control.Applicative ((<$>))
#endif
import Data.IntMap.Strict (IntMap)
import qualified Data.IntMap.Strict as I
import qualified Network.Wai.Handler.Warp.Some as S

----------------------------------------------------------------

-- | Multimap: each key maps to a non-empty bag ('S.Some') of values.
type MMap v = IntMap (S.Some v)

----------------------------------------------------------------

-- | O(1)
isEmpty :: MMap v -> Bool
isEmpty = I.null

-- | O(1)
empty :: MMap v
empty = I.empty

----------------------------------------------------------------

-- | O(1)  A map with a single key holding a single value.
singleton :: Int -> v -> MMap v
singleton k v = I.singleton k (S.singleton v)

----------------------------------------------------------------

-- | O(log n)  Return one value stored under the key (the bag's top),
-- if any.
search :: Int -> MMap v -> Maybe v
search k m = case I.lookup k m of
    Nothing -> Nothing
    Just s  -> Just $! S.top s

-- | O(log n)  Return a value under the key satisfying the predicate,
-- if any.
searchWith :: Int -> (v -> Bool) -> MMap v -> Maybe v
searchWith k f m = case I.lookup k m of
    Nothing -> Nothing
    Just s  -> S.lookupWith f s

----------------------------------------------------------------

-- | O(log n)  Add a value to the bag stored under the key, creating
-- the bag if the key is absent.
insert :: Int -> v -> MMap v -> MMap v
insert k v m = I.insertWith S.union k (S.singleton v) m

----------------------------------------------------------------

-- | O(n)  All values, in ascending key order.
toList :: MMap v -> [v]
toList m = concatMap f $ I.toAscList m
  where
    f (_,s) = S.toList s

----------------------------------------------------------------

-- | O(n)  Keep only the entries that 'S.prune' retains; keys whose
-- bags become empty are removed.  Walks keys in descending order and
-- accumulates in ascending order so 'I.fromAscList' applies.
pruneWith :: MMap v
          -> (v -> IO Bool)
          -> IO (MMap v)
pruneWith m action = I.fromAscList <$> go (I.toDescList m) []
  where
    go []          acc = return acc
    go ((k,s):kss) acc = do
        mt <- S.prune action s
        case mt of
            Nothing -> go kss acc
            Just t  -> go kss ((k,t) : acc)

----------------------------------------------------------------

-- O(n + m)  Union of two multimaps; bags under shared keys are unioned.
merge :: MMap v -> MMap v -> MMap v
merge m1 m2 = I.unionWith S.union m1 m2
utdemir/wai
warp/Network/Wai/Handler/Warp/MultiMap.hs
Haskell
mit
2,151
-- | The integration tests start out with no GHC installed.  Stack must
-- not require a compiler merely to run the @clean@ command.
import StackTest

main :: IO ()
main =
  -- `stack clean` should succeed even though there is no ghc available;
  -- see the stack.yaml file for how this works.  Afterwards a full
  -- clean is exercised as well.
  stackIgnoreException ["clean"] >> stackCleanFull
juhp/stack
test/integration/tests/4181-clean-wo-dl-ghc/Main.hs
Haskell
bsd-3-clause
345
{-# LANGUAGE StandaloneDeriving #-}
-- | Test helpers for the tarball permission checks performed by
-- "Distribution.Server.Packages.Unpack".
module Distribution.Server.Packages.UnpackTest (
  testPermissions,
  ) where

import qualified Codec.Archive.Tar as Tar
import qualified Codec.Archive.Tar.Entry as Tar
import qualified Codec.Archive.Tar.Check as Tar
import qualified Codec.Compression.GZip as GZip
import qualified Data.ByteString.Lazy as BL

import Distribution.Server.Packages.Unpack

import Control.Monad (when)
import Test.HUnit

-- Orphan Eq instances so that tar entries and the various error types
-- can be compared in assertions below.
deriving instance Eq e => Eq (Tar.Entries e)
deriving instance Eq Tar.Entry
deriving instance Eq Tar.PortabilityError
deriving instance Eq Tar.FileNameError
deriving instance Eq Tar.FormatError
deriving instance Eq CombinedTarErrs

-- | Test that check permissions does the right thing: reads and
-- decompresses the given tarball, re-tags format errors as
-- 'CombinedTarErrs', and asserts that the permission check driven by
-- the supplied @mangler@ agrees with 'checkUselessPermissions'.
testPermissions :: FilePath ->                             -- ^ .tar.gz file to test
                   (Tar.Entry -> Maybe CombinedTarErrs) -> -- ^ Converter to create errors if necessary
                   Test
testPermissions tarPath mangler = TestCase $ do
  entries <- return . Tar.read . GZip.decompress =<< BL.readFile tarPath
  -- Rebuild the entry stream, wrapping low-level format errors.
  let mappedEntries = Tar.foldEntries Tar.Next Tar.Done (Tar.Fail . FormatError) entries
  when ((checkEntries mangler mappedEntries) /= checkUselessPermissions mappedEntries) $
    assertFailure ("Permissions check did not match expected for: " ++ tarPath)
ocharles/hackage-server
tests/Distribution/Server/Packages/UnpackTest.hs
Haskell
bsd-3-clause
1,271
{-# LANGUAGE GADTs, DisambiguateRecordFields #-} module CmmProcPoint ( ProcPointSet, Status(..) , callProcPoints, minimalProcPointSet , splitAtProcPoints, procPointAnalysis , attachContInfoTables ) where import Prelude hiding (last, unzip, succ, zip) import DynFlags import BlockId import CLabel import Cmm import PprCmm () import CmmUtils import CmmInfo import CmmLive (cmmGlobalLiveness) import Data.List (sortBy) import Maybes import Control.Monad import Outputable import Platform import UniqSupply import Hoopl -- Compute a minimal set of proc points for a control-flow graph. -- Determine a protocol for each proc point (which live variables will -- be passed as arguments and which will be on the stack). {- A proc point is a basic block that, after CPS transformation, will start a new function. The entry block of the original function is a proc point, as is the continuation of each function call. A third kind of proc point arises if we want to avoid copying code. Suppose we have code like the following: f() { if (...) { ..1..; call foo(); ..2..} else { ..3..; call bar(); ..4..} x = y + z; return x; } The statement 'x = y + z' can be reached from two different proc points: the continuations of foo() and bar(). We would prefer not to put a copy in each continuation; instead we would like 'x = y + z' to be the start of a new procedure to which the continuations can jump: f_cps () { if (...) { ..1..; push k_foo; jump foo_cps(); } else { ..3..; push k_bar; jump bar_cps(); } } k_foo() { ..2..; jump k_join(y, z); } k_bar() { ..4..; jump k_join(y, z); } k_join(y, z) { x = y + z; return x; } You might think then that a criterion to make a node a proc point is that it is directly reached by two distinct proc points. (Note [Direct reachability].) But this criterion is a bit too simple; for example, 'return x' is also reached by two proc points, yet there is no point in pulling it out of k_join. 
A good criterion would be to say that a node should be made a proc point if it is reached by a set of proc points that is different than its immediate dominator. NR believes this criterion can be shown to produce a minimum set of proc points, and given a dominator tree, the proc points can be chosen in time linear in the number of blocks. Lacking a dominator analysis, however, we turn instead to an iterative solution, starting with no proc points and adding them according to these rules: 1. The entry block is a proc point. 2. The continuation of a call is a proc point. 3. A node is a proc point if it is directly reached by more proc points than one of its predecessors. Because we don't understand the problem very well, we apply rule 3 at most once per iteration, then recompute the reachability information. (See Note [No simple dataflow].) The choice of the new proc point is arbitrary, and I don't know if the choice affects the final solution, so I don't know if the number of proc points chosen is the minimum---but the set will be minimal. Note [Proc-point analysis] ~~~~~~~~~~~~~~~~~~~~~~~~~~ Given a specified set of proc-points (a set of block-ids), "proc-point analysis" figures out, for every block, which proc-point it belongs to. All the blocks belonging to proc-point P will constitute a single top-level C procedure. A non-proc-point block B "belongs to" a proc-point P iff B is reachable from P without going through another proc-point. Invariant: a block B should belong to at most one proc-point; if it belongs to two, that's a bug. Note [Non-existing proc-points] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ On some architectures it might happen that the list of proc-points computed before stack layout pass will be invalidated by the stack layout. This will happen if stack layout removes from the graph blocks that were determined to be proc-points. 
Later on in the pipeline we use list of proc-points to perform [Proc-point analysis], but if a proc-point does not exist anymore then we will get compiler panic. See #8205. -} type ProcPointSet = BlockSet data Status = ReachedBy ProcPointSet -- set of proc points that directly reach the block | ProcPoint -- this block is itself a proc point instance Outputable Status where ppr (ReachedBy ps) | setNull ps = text "<not-reached>" | otherwise = text "reached by" <+> (hsep $ punctuate comma $ map ppr $ setElems ps) ppr ProcPoint = text "<procpt>" -------------------------------------------------- -- Proc point analysis procPointAnalysis :: ProcPointSet -> CmmGraph -> UniqSM (BlockEnv Status) -- Once you know what the proc-points are, figure out -- what proc-points each block is reachable from -- See Note [Proc-point analysis] procPointAnalysis procPoints g@(CmmGraph {g_graph = graph}) = -- pprTrace "procPointAnalysis" (ppr procPoints) $ dataflowAnalFwdBlocks g initProcPoints $ analFwd lattice forward where initProcPoints = [(id, ProcPoint) | id <- setElems procPoints, id `setMember` labelsInGraph ] -- See Note [Non-existing proc-points] labelsInGraph = labelsDefined graph -- transfer equations forward :: FwdTransfer CmmNode Status forward = mkFTransfer3 first middle last where first :: CmmNode C O -> Status -> Status first (CmmEntry id _) ProcPoint = ReachedBy $ setSingleton id first _ x = x middle _ x = x last :: CmmNode O C -> Status -> FactBase Status last l x = mkFactBase lattice $ map (\id -> (id, x)) (successors l) lattice :: DataflowLattice Status lattice = DataflowLattice "direct proc-point reachability" unreached add_to where unreached = ReachedBy setEmpty add_to _ (OldFact ProcPoint) _ = (NoChange, ProcPoint) add_to _ _ (NewFact ProcPoint) = (SomeChange, ProcPoint) -- because of previous case add_to _ (OldFact (ReachedBy p)) (NewFact (ReachedBy p')) | setSize union > setSize p = (SomeChange, ReachedBy union) | otherwise = (NoChange, ReachedBy p) where union = 
setUnion p' p ---------------------------------------------------------------------- -- It is worth distinguishing two sets of proc points: those that are -- induced by calls in the original graph and those that are -- introduced because they're reachable from multiple proc points. -- -- Extract the set of Continuation BlockIds, see Note [Continuation BlockIds]. callProcPoints :: CmmGraph -> ProcPointSet callProcPoints g = foldGraphBlocks add (setSingleton (g_entry g)) g where add :: CmmBlock -> BlockSet -> BlockSet add b set = case lastNode b of CmmCall {cml_cont = Just k} -> setInsert k set CmmForeignCall {succ=k} -> setInsert k set _ -> set minimalProcPointSet :: Platform -> ProcPointSet -> CmmGraph -> UniqSM ProcPointSet -- Given the set of successors of calls (which must be proc-points) -- figure out the minimal set of necessary proc-points minimalProcPointSet platform callProcPoints g = extendPPSet platform g (postorderDfs g) callProcPoints extendPPSet :: Platform -> CmmGraph -> [CmmBlock] -> ProcPointSet -> UniqSM ProcPointSet extendPPSet platform g blocks procPoints = do env <- procPointAnalysis procPoints g -- pprTrace "extensPPSet" (ppr env) $ return () let add block pps = let id = entryLabel block in case mapLookup id env of Just ProcPoint -> setInsert id pps _ -> pps procPoints' = foldGraphBlocks add setEmpty g newPoints = mapMaybe ppSuccessor blocks newPoint = listToMaybe newPoints ppSuccessor b = let nreached id = case mapLookup id env `orElse` pprPanic "no ppt" (ppr id <+> ppr b) of ProcPoint -> 1 ReachedBy ps -> setSize ps block_procpoints = nreached (entryLabel b) -- | Looking for a successor of b that is reached by -- more proc points than b and is not already a proc -- point. If found, it can become a proc point. 
newId succ_id = not (setMember succ_id procPoints') && nreached succ_id > block_procpoints in listToMaybe $ filter newId $ successors b {- case newPoints of [] -> return procPoints' pps -> extendPPSet g blocks (foldl extendBlockSet procPoints' pps) -} case newPoint of Just id -> if setMember id procPoints' then panic "added old proc pt" else extendPPSet platform g blocks (setInsert id procPoints') Nothing -> return procPoints' -- At this point, we have found a set of procpoints, each of which should be -- the entry point of a procedure. -- Now, we create the procedure for each proc point, -- which requires that we: -- 1. build a map from proc points to the blocks reachable from the proc point -- 2. turn each branch to a proc point into a jump -- 3. turn calls and returns into jumps -- 4. build info tables for the procedures -- and update the info table for -- the SRTs in the entry procedure as well. -- Input invariant: A block should only be reachable from a single ProcPoint. -- ToDo: use the _ret naming convention that the old code generator -- used. 
-- EZY splitAtProcPoints :: DynFlags -> CLabel -> ProcPointSet-> ProcPointSet -> BlockEnv Status -> CmmDecl -> UniqSM [CmmDecl] splitAtProcPoints dflags entry_label callPPs procPoints procMap (CmmProc (TopInfo {info_tbls = info_tbls}) top_l _ g@(CmmGraph {g_entry=entry})) = do -- Build a map from procpoints to the blocks they reach let addBlock b graphEnv = case mapLookup bid procMap of Just ProcPoint -> add graphEnv bid bid b Just (ReachedBy set) -> case setElems set of [] -> graphEnv [id] -> add graphEnv id bid b _ -> panic "Each block should be reachable from only one ProcPoint" Nothing -> graphEnv where bid = entryLabel b add graphEnv procId bid b = mapInsert procId graph' graphEnv where graph = mapLookup procId graphEnv `orElse` mapEmpty graph' = mapInsert bid b graph let liveness = cmmGlobalLiveness dflags g let ppLiveness pp = filter isArgReg $ regSetToList $ expectJust "ppLiveness" $ mapLookup pp liveness graphEnv <- return $ foldGraphBlocks addBlock emptyBlockMap g -- Build a map from proc point BlockId to pairs of: -- * Labels for their new procedures -- * Labels for the info tables of their new procedures (only if -- the proc point is a callPP) -- Due to common blockification, we may overestimate the set of procpoints. 
let add_label map pp = mapInsert pp lbls map where lbls | pp == entry = (entry_label, fmap cit_lbl (mapLookup entry info_tbls)) | otherwise = (block_lbl, guard (setMember pp callPPs) >> Just (toInfoLbl block_lbl)) where block_lbl = blockLbl pp procLabels :: LabelMap (CLabel, Maybe CLabel) procLabels = foldl add_label mapEmpty (filter (flip mapMember (toBlockMap g)) (setElems procPoints)) -- In each new graph, add blocks jumping off to the new procedures, -- and replace branches to procpoints with branches to the jump-off blocks let add_jump_block (env, bs) (pp, l) = do bid <- liftM mkBlockId getUniqueM let b = blockJoin (CmmEntry bid GlobalScope) emptyBlock jump live = ppLiveness pp jump = CmmCall (CmmLit (CmmLabel l)) Nothing live 0 0 0 return (mapInsert pp bid env, b : bs) add_jumps newGraphEnv (ppId, blockEnv) = do let needed_jumps = -- find which procpoints we currently branch to mapFold add_if_branch_to_pp [] blockEnv add_if_branch_to_pp :: CmmBlock -> [(BlockId, CLabel)] -> [(BlockId, CLabel)] add_if_branch_to_pp block rst = case lastNode block of CmmBranch id -> add_if_pp id rst CmmCondBranch _ ti fi -> add_if_pp ti (add_if_pp fi rst) CmmSwitch _ tbl -> foldr add_if_pp rst (catMaybes tbl) _ -> rst -- when jumping to a PP that has an info table, if -- tablesNextToCode is off we must jump to the entry -- label instead. 
jump_label (Just info_lbl) _ | tablesNextToCode dflags = info_lbl | otherwise = toEntryLbl info_lbl jump_label Nothing block_lbl = block_lbl add_if_pp id rst = case mapLookup id procLabels of Just (lbl, mb_info_lbl) -> (id, jump_label mb_info_lbl lbl) : rst Nothing -> rst (jumpEnv, jumpBlocks) <- foldM add_jump_block (mapEmpty, []) needed_jumps -- update the entry block let b = expectJust "block in env" $ mapLookup ppId blockEnv blockEnv' = mapInsert ppId b blockEnv -- replace branches to procpoints with branches to jumps blockEnv'' = toBlockMap $ replaceBranches jumpEnv $ ofBlockMap ppId blockEnv' -- add the jump blocks to the graph blockEnv''' = foldl (flip insertBlock) blockEnv'' jumpBlocks let g' = ofBlockMap ppId blockEnv''' -- pprTrace "g' pre jumps" (ppr g') $ do return (mapInsert ppId g' newGraphEnv) graphEnv <- foldM add_jumps emptyBlockMap $ mapToList graphEnv let to_proc (bid, g) | bid == entry = CmmProc (TopInfo {info_tbls = info_tbls, stack_info = stack_info}) top_l live g' | otherwise = case expectJust "pp label" $ mapLookup bid procLabels of (lbl, Just info_lbl) -> CmmProc (TopInfo { info_tbls = mapSingleton (g_entry g) (mkEmptyContInfoTable info_lbl) , stack_info=stack_info}) lbl live g' (lbl, Nothing) -> CmmProc (TopInfo {info_tbls = mapEmpty, stack_info=stack_info}) lbl live g' where g' = replacePPIds g live = ppLiveness (g_entry g') stack_info = StackInfo { arg_space = 0 , updfr_space = Nothing , do_layout = True } -- cannot use panic, this is printed by -ddump-cmm -- References to procpoint IDs can now be replaced with the -- infotable's label replacePPIds g = {-# SCC "replacePPIds" #-} mapGraphNodes (id, mapExp repl, mapExp repl) g where repl e@(CmmLit (CmmBlock bid)) = case mapLookup bid procLabels of Just (_, Just info_lbl) -> CmmLit (CmmLabel info_lbl) _ -> e repl e = e -- The C back end expects to see return continuations before the -- call sites. Here, we sort them in reverse order -- it gets -- reversed later. 
let (_, block_order) = foldl add_block_num (0::Int, emptyBlockMap) (postorderDfs g) add_block_num (i, map) block = (i+1, mapInsert (entryLabel block) i map) sort_fn (bid, _) (bid', _) = compare (expectJust "block_order" $ mapLookup bid block_order) (expectJust "block_order" $ mapLookup bid' block_order) procs <- return $ map to_proc $ sortBy sort_fn $ mapToList graphEnv return -- pprTrace "procLabels" (ppr procLabels) -- pprTrace "splitting graphs" (ppr procs) procs splitAtProcPoints _ _ _ _ _ t@(CmmData _ _) = return [t] -- Only called from CmmProcPoint.splitAtProcPoints. NB. does a -- recursive lookup, see comment below. replaceBranches :: BlockEnv BlockId -> CmmGraph -> CmmGraph replaceBranches env cmmg = {-# SCC "replaceBranches" #-} ofBlockMap (g_entry cmmg) $ mapMap f $ toBlockMap cmmg where f block = replaceLastNode block $ last (lastNode block) last :: CmmNode O C -> CmmNode O C last (CmmBranch id) = CmmBranch (lookup id) last (CmmCondBranch e ti fi) = CmmCondBranch e (lookup ti) (lookup fi) last (CmmSwitch e tbl) = CmmSwitch e (map (fmap lookup) tbl) last l@(CmmCall {}) = l { cml_cont = Nothing } -- NB. remove the continuation of a CmmCall, since this -- label will now be in a different CmmProc. Not only -- is this tidier, it stops CmmLint from complaining. 
last l@(CmmForeignCall {}) = l lookup id = fmap lookup (mapLookup id env) `orElse` id -- XXX: this is a recursive lookup, it follows chains -- until the lookup returns Nothing, at which point we -- return the last BlockId -- -------------------------------------------------------------- -- Not splitting proc points: add info tables for continuations attachContInfoTables :: ProcPointSet -> CmmDecl -> CmmDecl attachContInfoTables call_proc_points (CmmProc top_info top_l live g) = CmmProc top_info{info_tbls = info_tbls'} top_l live g where info_tbls' = mapUnion (info_tbls top_info) $ mapFromList [ (l, mkEmptyContInfoTable (infoTblLbl l)) | l <- setElems call_proc_points , l /= g_entry g ] attachContInfoTables _ other_decl = other_decl ---------------------------------------------------------------- {- Note [Direct reachability] Block B is directly reachable from proc point P iff control can flow from P to B without passing through an intervening proc point. -} ---------------------------------------------------------------- {- Note [No simple dataflow] Sadly, it seems impossible to compute the proc points using a single dataflow pass. One might attempt to use this simple lattice: data Location = Unknown | InProc BlockId -- node is in procedure headed by the named proc point | ProcPoint -- node is itself a proc point At a join, a node in two different blocks becomes a proc point. The difficulty is that the change of information during iterative computation may promote a node prematurely. Here's a program that illustrates the difficulty: f () { entry: .... L1: if (...) { ... } else { ... } L2: if (...) { g(); goto L1; } return x + y; } The only proc-point needed (besides the entry) is L1. But in an iterative analysis, consider what happens to L2. On the first pass through, it rises from Unknown to 'InProc entry', but when L1 is promoted to a proc point (because it's the successor of g()), L1's successors will be promoted to 'InProc L1'. 
The problem hits when the new fact 'InProc L1' flows into L2 which is already bound to 'InProc entry'. The join operation makes it a proc point when in fact it needn't be, because its immediate dominator L1 is already a proc point and there are no other proc points that directly reach L2. -} {- Note [Separate Adams optimization] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ It may be worthwhile to attempt the Adams optimization by rewriting the graph before the assignment of proc-point protocols. Here are a couple of rules: g() returns to k; g() returns to L; k: CopyIn c ress; goto L: ... ==> ... L: // no CopyIn node here L: CopyIn c ress; And when c == c' and ress == ress', this also: g() returns to k; g() returns to L; k: CopyIn c ress; goto L: ... ==> ... L: CopyIn c' ress' L: CopyIn c' ress' ; In both cases the goal is to eliminate k. -}
green-haskell/ghc
compiler/cmm/CmmProcPoint.hs
Haskell
bsd-3-clause
20,858
{-# OPTIONS_GHC -Wall #-}
-- | Canonicalization of raw source types: resolves names through the
-- environment and expands type aliases.
module Canonicalize.Type (tipe) where

import Control.Applicative ((<$>),(<*>))
import qualified Data.Traversable as Trav

import qualified AST.Type as T
import qualified AST.Variable as Var
import qualified Reporting.Annotation as A
import qualified Reporting.Error.Canonicalize as Error
import qualified Reporting.Region as R
import qualified Canonicalize.Environment as Env
import qualified Canonicalize.Result as Result
import qualified Canonicalize.Variable as Canonicalize

-- | Canonicalize a raw, region-annotated type.  Structural cases
-- recurse directly; named types and applications go through
-- 'canonicalizeApp' so aliases can be handled.
tipe :: Env.Environment -> T.Raw -> Result.ResultErr T.Canonical
tipe env typ@(A.A region typ') =
  let go = tipe env
      -- canonicalize the type in a (fieldName, type) pair
      goSnd (name,t) = (,) name <$> go t
  in
  case typ' of
    T.RVar x ->
        Result.ok (T.Var x)

    T.RType _ ->
        -- a bare named type is an application with no arguments
        canonicalizeApp region env typ []

    T.RApp t ts ->
        canonicalizeApp region env t ts

    T.RLambda a b ->
        T.Lambda <$> go a <*> go b

    T.RRecord fields ext ->
        T.Record <$> Trav.traverse goSnd fields <*> Trav.traverse go ext

-- | Canonicalize a type application.  If the head is a raw name it is
-- resolved via the environment: aliases are expanded, ordinary type
-- constructors become 'T.Type' (applied if there are arguments).  Any
-- other head is canonicalized and applied as-is.
canonicalizeApp
    :: R.Region
    -> Env.Environment
    -> T.Raw
    -> [T.Raw]
    -> Result.ResultErr T.Canonical
canonicalizeApp region env f args =
  case f of
    A.A _ (T.RType (Var.Raw rawName)) ->
        Canonicalize.tvar region env rawName
          `Result.andThen` canonicalizeWithTvar

    _ ->
        T.App <$> tipe env f <*> Trav.traverse (tipe env) args
  where
    canonicalizeWithTvar tvar =
        case tvar of
          Right alias ->
              canonicalizeAlias region env alias args

          Left name ->
              case args of
                [] ->
                    Result.ok (T.Type name)
                _ ->
                    T.App (T.Type name) <$> Trav.traverse (tipe env) args

-- | Expand a type alias applied to arguments.  Reports an arity error
-- when the number of arguments differs from the alias's type variables;
-- otherwise substitutes canonicalized arguments for the variables.
canonicalizeAlias
    :: R.Region
    -> Env.Environment
    -> (Var.Canonical, [String], T.Canonical)
    -> [T.Raw]
    -> Result.ResultErr T.Canonical
canonicalizeAlias region env (name, tvars, dealiasedTipe) types =
  if typesLen /= tvarsLen
    then Result.err (A.A region (Error.alias name tvarsLen typesLen))
    else toAlias <$> Trav.traverse (tipe env) types
  where
    typesLen = length types
    tvarsLen = length tvars

    toAlias canonicalTypes =
        T.Aliased name (zip tvars canonicalTypes) (T.Holey dealiasedTipe)
pairyo/elm-compiler
src/Canonicalize/Type.hs
Haskell
bsd-3-clause
2,329
module HAD.Y2014.M04.D11.Exercise where

{- | thirdOfFive
   return the third of five arguments
   No other interest than pointFree

   prop> \(x1, x2, x3, x4, x5) -> thirdOfFive x1 x2 x3 x4 x5 == (x3 :: Int)
-}
thirdOfFive :: a -> b -> c -> d -> e -> c
-- Keep the middle argument and ignore the surrounding four; the
-- `undefined` stub could never satisfy the doctest property above.
thirdOfFive _ _ x _ _ = x
weima/1HAD
exercises/HAD/Y2014/M04/D11/Exercise.hs
Haskell
mit
282
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE DataKinds, PolyKinds #-}

-- | Compile-only GHC testsuite module (presumably a regression test for
-- ticket #13398, per the module name — TODO confirm): an associated
-- type family whose result kind is the class parameter itself,
-- instantiated at kind 'Nat'.
module T13398b where

import GHC.TypeLits

class C a where
  -- The result kind of T is the class parameter @a@.
  type T a (b :: Bool) :: a

instance C Nat where
  type T Nat 'True = 1
  type T Nat 'False = 0
sdiehl/ghc
testsuite/tests/indexed-types/should_compile/T13398b.hs
Haskell
bsd-3-clause
225
{-# LANGUAGE CPP #-} {-# LANGUAGE ExistentialQuantification #-} {-# LANGUAGE PatternGuards #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE RankNTypes #-} -- | Some helpers for parsing data out of a raw WAI 'Request'. module Network.Wai.Parse ( parseHttpAccept , parseRequestBody , RequestBodyType (..) , getRequestBodyType , sinkRequestBody , BackEnd , lbsBackEnd , tempFileBackEnd , tempFileBackEndOpts , Param , File , FileInfo (..) , parseContentType #if TEST , Bound (..) , findBound , sinkTillBound , killCR , killCRLF , takeLine #endif ) where import qualified Data.ByteString.Search as Search import qualified Data.ByteString as S import qualified Data.ByteString.Lazy as L import qualified Data.ByteString.Char8 as S8 import Data.Word (Word8) import Data.Maybe (fromMaybe) import Data.List (sortBy) import Data.Function (on, fix) import System.Directory (removeFile, getTemporaryDirectory) import System.IO (hClose, openBinaryTempFile) import Network.Wai import qualified Network.HTTP.Types as H import Control.Monad (when, unless) import Control.Monad.Trans.Resource (allocate, release, register, InternalState, runInternalState) import Data.IORef import Network.HTTP.Types (hContentType) breakDiscard :: Word8 -> S.ByteString -> (S.ByteString, S.ByteString) breakDiscard w s = let (x, y) = S.break (== w) s in (x, S.drop 1 y) -- | Parse the HTTP accept string to determine supported content types. parseHttpAccept :: S.ByteString -> [S.ByteString] parseHttpAccept = map fst . sortBy (rcompare `on` snd) . map (addSpecificity . grabQ) . S.split 44 -- comma where rcompare :: (Double,Int) -> (Double,Int) -> Ordering rcompare = flip compare addSpecificity (s, q) = -- Prefer higher-specificity types let semicolons = S.count 0x3B s stars = S.count 0x2A s in (s, (q, semicolons - stars)) grabQ s = -- Stripping all spaces may be too harsh. -- Maybe just strip either side of semicolon? 
let (s', q) = S.breakSubstring ";q=" (S.filter (/=0x20) s) -- 0x20 is space q' = S.takeWhile (/=0x3B) (S.drop 3 q) -- 0x3B is semicolon in (s', readQ q') readQ s = case reads $ S8.unpack s of (x, _):_ -> x _ -> 1.0 -- | Store uploaded files in memory lbsBackEnd :: Monad m => ignored1 -> ignored2 -> m S.ByteString -> m L.ByteString lbsBackEnd _ _ popper = loop id where loop front = do bs <- popper if S.null bs then return $ L.fromChunks $ front [] else loop $ front . (bs:) -- | Save uploaded files on disk as temporary files -- -- Note: starting with version 2.0, removal of temp files is registered with -- the provided @InternalState@. It is the responsibility of the caller to -- ensure that this @InternalState@ gets cleaned up. tempFileBackEnd :: InternalState -> ignored1 -> ignored2 -> IO S.ByteString -> IO FilePath tempFileBackEnd = tempFileBackEndOpts getTemporaryDirectory "webenc.buf" -- | Same as 'tempFileSink', but use configurable temp folders and patterns. tempFileBackEndOpts :: IO FilePath -- ^ get temporary directory -> String -- ^ filename pattern -> InternalState -> ignored1 -> ignored2 -> IO S.ByteString -> IO FilePath tempFileBackEndOpts getTmpDir pattern internalState _ _ popper = do (key, (fp, h)) <- flip runInternalState internalState $ allocate (do tempDir <- getTmpDir openBinaryTempFile tempDir pattern) (\(_, h) -> hClose h) _ <- runInternalState (register $ removeFile fp) internalState fix $ \loop -> do bs <- popper unless (S.null bs) $ do S.hPut h bs loop release key return fp -- | Information on an uploaded file. data FileInfo c = FileInfo { fileName :: S.ByteString , fileContentType :: S.ByteString , fileContent :: c } deriving (Eq, Show) -- | Post parameter name and value. type Param = (S.ByteString, S.ByteString) -- | Post parameter name and associated file information. type File y = (S.ByteString, FileInfo y) -- | A file uploading backend. Takes the parameter name, file name, and a -- stream of data. 
type BackEnd a = S.ByteString -- ^ parameter name -> FileInfo () -> IO S.ByteString -> IO a data RequestBodyType = UrlEncoded | Multipart S.ByteString getRequestBodyType :: Request -> Maybe RequestBodyType getRequestBodyType req = do ctype' <- lookup hContentType $ requestHeaders req let (ctype, attrs) = parseContentType ctype' case ctype of "application/x-www-form-urlencoded" -> return UrlEncoded "multipart/form-data" | Just bound <- lookup "boundary" attrs -> return $ Multipart bound _ -> Nothing -- | Parse a content type value, turning a single @ByteString@ into the actual -- content type and a list of pairs of attributes. -- -- Since 1.3.2 parseContentType :: S.ByteString -> (S.ByteString, [(S.ByteString, S.ByteString)]) parseContentType a = do let (ctype, b) = S.break (== semicolon) a attrs = goAttrs id $ S.drop 1 b in (ctype, attrs) where semicolon = 59 equals = 61 space = 32 goAttrs front bs | S.null bs = front [] | otherwise = let (x, rest) = S.break (== semicolon) bs in goAttrs (front . (goAttr x:)) $ S.drop 1 rest goAttr bs = let (k, v') = S.break (== equals) bs v = S.drop 1 v' in (strip k, strip v) strip = S.dropWhile (== space) . fst . S.breakEnd (/= space) parseRequestBody :: BackEnd y -> Request -> IO ([Param], [File y]) parseRequestBody s r = case getRequestBodyType r of Nothing -> return ([], []) Just rbt -> sinkRequestBody s rbt (requestBody r) sinkRequestBody :: BackEnd y -> RequestBodyType -> IO S.ByteString -> IO ([Param], [File y]) sinkRequestBody s r body = do ref <- newIORef (id, id) let add x = atomicModifyIORef ref $ \(y, z) -> case x of Left y' -> ((y . (y':), z), ()) Right z' -> ((y, z . (z':)), ()) conduitRequestBody s r body add (x, y) <- readIORef ref return (x [], y []) conduitRequestBody :: BackEnd y -> RequestBodyType -> IO S.ByteString -> (Either Param (File y) -> IO ()) -> IO () conduitRequestBody _ UrlEncoded rbody add = do -- NOTE: in general, url-encoded data will be in a single chunk. 
-- Therefore, I'm optimizing for the usual case by sticking with -- strict byte strings here. let loop front = do bs <- rbody if S.null bs then return $ S.concat $ front [] else loop $ front . (bs:) bs <- loop id mapM_ (add . Left) $ H.parseSimpleQuery bs conduitRequestBody backend (Multipart bound) rbody add = parsePieces backend (S8.pack "--" `S.append` bound) rbody add takeLine :: Source -> IO (Maybe S.ByteString) takeLine src = go id where go front = do bs <- readSource src if S.null bs then close front else push front bs close front = leftover src (front S.empty) >> return Nothing push front bs = do let (x, y) = S.break (== 10) $ front bs -- LF in if S.null y then go $ S.append x else do when (S.length y > 1) $ leftover src $ S.drop 1 y return $ Just $ killCR x takeLines :: Source -> IO [S.ByteString] takeLines src = do res <- takeLine src case res of Nothing -> return [] Just l | S.null l -> return [] | otherwise -> do ls <- takeLines src return $ l : ls data Source = Source (IO S.ByteString) (IORef S.ByteString) mkSource :: IO S.ByteString -> IO Source mkSource f = do ref <- newIORef S.empty return $ Source f ref readSource :: Source -> IO S.ByteString readSource (Source f ref) = do bs <- atomicModifyIORef ref $ \bs -> (S.empty, bs) if S.null bs then f else return bs leftover :: Source -> S.ByteString -> IO () leftover (Source _ ref) bs = writeIORef ref bs parsePieces :: BackEnd y -> S.ByteString -> IO S.ByteString -> (Either Param (File y) -> IO ()) -> IO () parsePieces sink bound rbody add = mkSource rbody >>= loop where loop src = do _boundLine <- takeLine src res' <- takeLines src unless (null res') $ do let ls' = map parsePair res' let x = do cd <- lookup contDisp ls' let ct = lookup contType ls' let attrs = parseAttrs cd name <- lookup "name" attrs return (ct, name, lookup "filename" attrs) case x of Just (mct, name, Just filename) -> do let ct = fromMaybe "application/octet-stream" mct fi0 = FileInfo filename ct () (wasFound, y) <- sinkTillBound' 
bound name fi0 sink src add $ Right (name, fi0 { fileContent = y }) when wasFound (loop src) Just (_ct, name, Nothing) -> do let seed = id let iter front bs = return $ front . (:) bs (wasFound, front) <- sinkTillBound bound iter seed src let bs = S.concat $ front [] let x' = (name, bs) add $ Left x' when wasFound (loop src) _ -> do -- ignore this part let seed = () iter () _ = return () (wasFound, ()) <- sinkTillBound bound iter seed src when wasFound (loop src) where contDisp = S8.pack "Content-Disposition" contType = S8.pack "Content-Type" parsePair s = let (x, y) = breakDiscard 58 s -- colon in (x, S.dropWhile (== 32) y) -- space data Bound = FoundBound S.ByteString S.ByteString | NoBound | PartialBound deriving (Eq, Show) findBound :: S.ByteString -> S.ByteString -> Bound findBound b bs = handleBreak $ Search.breakOn b bs where handleBreak (h, t) | S.null t = go [lowBound..S.length bs - 1] | otherwise = FoundBound h $ S.drop (S.length b) t lowBound = max 0 $ S.length bs - S.length b go [] = NoBound go (i:is) | mismatch [0..S.length b - 1] [i..S.length bs - 1] = go is | otherwise = let endI = i + S.length b in if endI > S.length bs then PartialBound else FoundBound (S.take i bs) (S.drop endI bs) mismatch [] _ = False mismatch _ [] = False mismatch (x:xs) (y:ys) | S.index b x == S.index bs y = mismatch xs ys | otherwise = True sinkTillBound' :: S.ByteString -> S.ByteString -> FileInfo () -> BackEnd y -> Source -> IO (Bool, y) sinkTillBound' bound name fi sink src = do (next, final) <- wrapTillBound bound src y <- sink name fi next b <- final return (b, y) data WTB = WTBWorking (S.ByteString -> S.ByteString) | WTBDone Bool wrapTillBound :: S.ByteString -- ^ bound -> Source -> IO (IO S.ByteString, IO Bool) -- ^ Bool indicates if the bound was found wrapTillBound bound src = do ref <- newIORef $ WTBWorking id return (go ref, final ref) where final ref = do x <- readIORef ref case x of WTBWorking _ -> error "wrapTillBound did not finish" WTBDone y -> return y go ref 
= do state <- readIORef ref case state of WTBDone _ -> return S.empty WTBWorking front -> do bs <- readSource src if S.null bs then do writeIORef ref $ WTBDone False return $ front bs else push $ front bs where push bs = case findBound bound bs of FoundBound before after -> do let before' = killCRLF before leftover src after writeIORef ref $ WTBDone True return before' NoBound -> do -- don't emit newlines, in case it's part of a bound let (toEmit, front') = if not (S8.null bs) && S8.last bs `elem` ['\r','\n'] then let (x, y) = S.splitAt (S.length bs - 2) bs in (x, S.append y) else (bs, id) writeIORef ref $ WTBWorking front' if S.null toEmit then go ref else return toEmit PartialBound -> do writeIORef ref $ WTBWorking $ S.append bs go ref sinkTillBound :: S.ByteString -> (x -> S.ByteString -> IO x) -> x -> Source -> IO (Bool, x) sinkTillBound bound iter seed0 src = do (next, final) <- wrapTillBound bound src let loop seed = do bs <- next if S.null bs then return seed else iter seed bs >>= loop seed <- loop seed0 b <- final return (b, seed) parseAttrs :: S.ByteString -> [(S.ByteString, S.ByteString)] parseAttrs = map go . S.split 59 -- semicolon where tw = S.dropWhile (== 32) -- space dq s = if S.length s > 2 && S.head s == 34 && S.last s == 34 -- quote then S.tail $ S.init s else s go s = let (x, y) = breakDiscard 61 s -- equals sign in (tw x, dq $ tw y) killCRLF :: S.ByteString -> S.ByteString killCRLF bs | S.null bs || S.last bs /= 10 = bs -- line feed | otherwise = killCR $ S.init bs killCR :: S.ByteString -> S.ByteString killCR bs | S.null bs || S.last bs /= 13 = bs -- carriage return | otherwise = S.init bs
AndrewRademacher/wai
wai-extra/Network/Wai/Parse.hs
Haskell
mit
14,638
module Main (main) where

import B

-- | Print the value 'b' defined in module B.
-- (Tokens deliberately untouched: this file is part of a GHC
-- recompilation test, so its exact content matters.)
main = print b
urbanslug/ghc
testsuite/tests/driver/recomp007/b/Main.hs
Haskell
bsd-3-clause
51
module Estruturas.Complexidade where

-- | Edge complexity of a graph -- presumably 'Simples' = simple graph
-- (no parallel edges) and 'Multigrafo' = multigraph; confirm against
-- callers.
--
-- Fix: the previous hand-written 'Ord' instance reproduced exactly the
-- ordering that @deriving Ord@ yields from constructor order
-- (Simples < Multigrafo), so the derived instance replaces it.
data Complexidade = Simples
                  | Multigrafo
                  deriving (Show, Eq, Ord)
jean-lopes/grafos
src/Estruturas/Complexidade.hs
Haskell
mit
336
{-# htermination exponent :: Float -> Int #-}
ComputationWithBoundedResources/ara-inference
doc/tpdb_trs/Haskell/full_haskell/Prelude_exponent_1.hs
Haskell
mit
46
import qualified Data.Array.Unboxed as A
import qualified Data.HashMap.Strict as M

-- | Factorial via an accumulating loop.
factorial :: Int -> Int
factorial 0 = 1
factorial n = loop n 1
  where
    loop 1 acc = acc
    loop k acc = loop (k - 1) (k * acc)

-- | Project Euler #34: sum all numbers in [3..n] that equal the sum of
-- the factorials of their digits.
solve :: Int -> Int
solve n = sum (filter curious [3 .. n])
  where
    curious x = sum (digitFactorials x) == x
    -- Map each decimal digit of x to its factorial via the lookup tables.
    digitFactorials :: Int -> [Int]
    digitFactorials = map ((factorials A.!) . (digitMap M.!)) . show

-- Precomputed factorials of the digits 0..9.
factorials :: A.UArray Int Int
factorials = A.listArray (0, 9) (map factorial [0 .. 9])

-- Character-to-digit lookup table.
digitMap :: M.HashMap Char Int
digitMap = M.fromList [(c, read [c]) | c <- ['0' .. '9']]

main :: IO ()
main = print (solve 2540160)
jwtouron/haskell-play
ProjectEuler/Problem34.hs
Haskell
mit
641
{-# LANGUAGE TypeFamilies #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE KindSignatures #-} {-# LANGUAGE ConstraintKinds #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE UndecidableInstances #-} {-# LANGUAGE UndecidableSuperClasses #-} {-# LANGUAGE FlexibleInstances #-} {-| Type family and class definitions for dealing with tuples. See the "Control.IndexT" module description for an overview. -} module Control.IndexT.Tuple ( TupleN, TupleConstraint, HomoTupleConstraint, IsTuple, IsHomoTuple ) where import Control.IndexT (IndexT) import GHC.TypeLits (Nat) import GHC.Exts (Constraint) import Data.Functor.Identity (Identity) {-| 'TupleN' seems a bit weird, but it's an important part of defining constraints that allow one to say "@t@ is a pair" in 'TupleConstraint'. -} type family TupleN (n :: Nat) a type instance TupleN 0 a = () type instance TupleN 1 a = Identity a type instance TupleN 2 a = (IndexT 0 a, IndexT 1 a) type instance TupleN 3 a = (IndexT 0 a, IndexT 1 a, IndexT 2 a) type instance TupleN 4 a = (IndexT 0 a, IndexT 1 a, IndexT 2 a, IndexT 3 a) type instance TupleN 5 a = (IndexT 0 a, IndexT 1 a, IndexT 2 a, IndexT 3 a, IndexT 4 a) type instance TupleN 6 a = (IndexT 0 a, IndexT 1 a, IndexT 2 a, IndexT 3 a, IndexT 4 a, IndexT 5 a) type instance TupleN 7 a = (IndexT 0 a, IndexT 1 a, IndexT 2 a, IndexT 3 a, IndexT 4 a, IndexT 5 a, IndexT 6 a) type instance TupleN 8 a = (IndexT 0 a, IndexT 1 a, IndexT 2 a, IndexT 3 a, IndexT 4 a, IndexT 5 a, IndexT 6 a, IndexT 7 a) type instance TupleN 9 a = (IndexT 0 a, IndexT 1 a, IndexT 2 a, IndexT 3 a, IndexT 4 a, IndexT 5 a, IndexT 6 a, IndexT 7 a, IndexT 8 a) type instance TupleN 10 a = (IndexT 0 a, IndexT 1 a, IndexT 2 a, IndexT 3 a, IndexT 4 a, IndexT 5 a, IndexT 6 a, IndexT 7 a, IndexT 8 a, IndexT 9 a) type instance TupleN 11 a = (IndexT 0 a, IndexT 1 a, IndexT 2 a, IndexT 3 a, IndexT 4 a, IndexT 5 a, IndexT 6 a, IndexT 7 a, IndexT 8 a, IndexT 9 a, IndexT 10 a) type instance TupleN 12 a = (IndexT 0 a, IndexT 1 a, 
IndexT 2 a, IndexT 3 a, IndexT 4 a, IndexT 5 a, IndexT 6 a, IndexT 7 a, IndexT 8 a, IndexT 9 a, IndexT 10 a, IndexT 11 a) type instance TupleN 13 a = (IndexT 0 a, IndexT 1 a, IndexT 2 a, IndexT 3 a, IndexT 4 a, IndexT 5 a, IndexT 6 a, IndexT 7 a, IndexT 8 a, IndexT 9 a, IndexT 10 a, IndexT 11 a, IndexT 12 a) type instance TupleN 14 a = (IndexT 0 a, IndexT 1 a, IndexT 2 a, IndexT 3 a, IndexT 4 a, IndexT 5 a, IndexT 6 a, IndexT 7 a, IndexT 8 a, IndexT 9 a, IndexT 10 a, IndexT 11 a, IndexT 12 a, IndexT 13 a) type instance TupleN 15 a = (IndexT 0 a, IndexT 1 a, IndexT 2 a, IndexT 3 a, IndexT 4 a, IndexT 5 a, IndexT 6 a, IndexT 7 a, IndexT 8 a, IndexT 9 a, IndexT 10 a, IndexT 11 a, IndexT 12 a, IndexT 13 a, IndexT 14 a) {-| To best explain this, lets consider the particular example @TupleConstraint 2@. As @TupleN 2 t = (IndexT 0 t, IndexT 1 t)@ we get: > TupleConstraint 2 t = t ~ (IndexT 0 t, IndexT 1 t) What does this say? Well, firstly, as @t ~ (IndexT 0 t, IndexT 1 t)@, it must be a pair at least. What are the elements of the pair? Well, the first element of @t@ is @IndexT 0 t@. And what's @IndexT 0 t@ defined as? The first element of @t@. So we know that the first element of @t@ is well, the first element of @t@. Which tells us nothing at all. We can go through the same argument with the second element of @t@. So all we know after this is that @t@ is a pair. @TupleConstraint 2 t@ is the same as saying @t@ is a pair. So @TupleConstraint n t@ basically says @t@ is a n-tuple. -} type TupleConstraint (n :: Nat) a = a ~ TupleN n a {-| 'HomoTupleConstraint' simply further constrains 'TupleConstraint' so that all the elements are the same. So @HomoTupleConstraint 3 t@ basically says @t ~ (u,u,u)@ for some @u@, (\"Homo\" is short for \"Homogeneous\". As in, all the same. Or like milk.) 
-} type family HomoTupleConstraint (n :: Nat) a :: Constraint type instance HomoTupleConstraint 0 a = (TupleConstraint 0 a) type instance HomoTupleConstraint 1 a = (TupleConstraint 1 a) type instance HomoTupleConstraint 2 a = (TupleConstraint 2 a, IndexT 0 a ~ IndexT 1 a) type instance HomoTupleConstraint 3 a = (TupleConstraint 3 a, IndexT 0 a ~ IndexT 1 a, IndexT 1 a ~ IndexT 2 a) type instance HomoTupleConstraint 4 a = (TupleConstraint 4 a, IndexT 0 a ~ IndexT 1 a, IndexT 1 a ~ IndexT 2 a, IndexT 2 a ~ IndexT 3 a) type instance HomoTupleConstraint 5 a = (TupleConstraint 5 a, IndexT 0 a ~ IndexT 1 a, IndexT 1 a ~ IndexT 2 a, IndexT 2 a ~ IndexT 3 a, IndexT 3 a ~ IndexT 4 a) type instance HomoTupleConstraint 6 a = (TupleConstraint 6 a, IndexT 0 a ~ IndexT 1 a, IndexT 1 a ~ IndexT 2 a, IndexT 2 a ~ IndexT 3 a, IndexT 3 a ~ IndexT 4 a, IndexT 4 a ~ IndexT 5 a) type instance HomoTupleConstraint 7 a = (TupleConstraint 7 a, IndexT 0 a ~ IndexT 1 a, IndexT 1 a ~ IndexT 2 a, IndexT 2 a ~ IndexT 3 a, IndexT 3 a ~ IndexT 4 a, IndexT 4 a ~ IndexT 5 a, IndexT 5 a ~ IndexT 6 a) type instance HomoTupleConstraint 8 a = (TupleConstraint 8 a, IndexT 0 a ~ IndexT 1 a, IndexT 1 a ~ IndexT 2 a, IndexT 2 a ~ IndexT 3 a, IndexT 3 a ~ IndexT 4 a, IndexT 4 a ~ IndexT 5 a, IndexT 5 a ~ IndexT 6 a, IndexT 6 a ~ IndexT 7 a) type instance HomoTupleConstraint 9 a = (TupleConstraint 9 a, IndexT 0 a ~ IndexT 1 a, IndexT 1 a ~ IndexT 2 a, IndexT 2 a ~ IndexT 3 a, IndexT 3 a ~ IndexT 4 a, IndexT 4 a ~ IndexT 5 a, IndexT 5 a ~ IndexT 6 a, IndexT 6 a ~ IndexT 7 a, IndexT 7 a ~ IndexT 8 a) type instance HomoTupleConstraint 10 a = (TupleConstraint 10 a, IndexT 0 a ~ IndexT 1 a, IndexT 1 a ~ IndexT 2 a, IndexT 2 a ~ IndexT 3 a, IndexT 3 a ~ IndexT 4 a, IndexT 4 a ~ IndexT 5 a, IndexT 5 a ~ IndexT 6 a, IndexT 6 a ~ IndexT 7 a, IndexT 7 a ~ IndexT 8 a, IndexT 8 a ~ IndexT 9 a) type instance HomoTupleConstraint 11 a = (TupleConstraint 11 a, IndexT 0 a ~ IndexT 1 a, IndexT 1 a ~ IndexT 2 a, IndexT 2 a ~ 
IndexT 3 a, IndexT 3 a ~ IndexT 4 a, IndexT 4 a ~ IndexT 5 a, IndexT 5 a ~ IndexT 6 a, IndexT 6 a ~ IndexT 7 a, IndexT 7 a ~ IndexT 8 a, IndexT 8 a ~ IndexT 9 a, IndexT 9 a ~ IndexT 10 a) type instance HomoTupleConstraint 12 a = (TupleConstraint 12 a, IndexT 0 a ~ IndexT 1 a, IndexT 1 a ~ IndexT 2 a, IndexT 2 a ~ IndexT 3 a, IndexT 3 a ~ IndexT 4 a, IndexT 4 a ~ IndexT 5 a, IndexT 5 a ~ IndexT 6 a, IndexT 6 a ~ IndexT 7 a, IndexT 7 a ~ IndexT 8 a, IndexT 8 a ~ IndexT 9 a, IndexT 9 a ~ IndexT 10 a, IndexT 10 a ~ IndexT 11 a) type instance HomoTupleConstraint 13 a = (TupleConstraint 13 a, IndexT 0 a ~ IndexT 1 a, IndexT 1 a ~ IndexT 2 a, IndexT 2 a ~ IndexT 3 a, IndexT 3 a ~ IndexT 4 a, IndexT 4 a ~ IndexT 5 a, IndexT 5 a ~ IndexT 6 a, IndexT 6 a ~ IndexT 7 a, IndexT 7 a ~ IndexT 8 a, IndexT 8 a ~ IndexT 9 a, IndexT 9 a ~ IndexT 10 a, IndexT 10 a ~ IndexT 11 a, IndexT 11 a ~ IndexT 12 a) type instance HomoTupleConstraint 14 a = (TupleConstraint 14 a, IndexT 0 a ~ IndexT 1 a, IndexT 1 a ~ IndexT 2 a, IndexT 2 a ~ IndexT 3 a, IndexT 3 a ~ IndexT 4 a, IndexT 4 a ~ IndexT 5 a, IndexT 5 a ~ IndexT 6 a, IndexT 6 a ~ IndexT 7 a, IndexT 7 a ~ IndexT 8 a, IndexT 8 a ~ IndexT 9 a, IndexT 9 a ~ IndexT 10 a, IndexT 10 a ~ IndexT 11 a, IndexT 11 a ~ IndexT 12 a, IndexT 12 a ~ IndexT 13 a) type instance HomoTupleConstraint 15 a = (TupleConstraint 15 a, IndexT 0 a ~ IndexT 1 a, IndexT 1 a ~ IndexT 2 a, IndexT 2 a ~ IndexT 3 a, IndexT 3 a ~ IndexT 4 a, IndexT 4 a ~ IndexT 5 a, IndexT 5 a ~ IndexT 6 a, IndexT 6 a ~ IndexT 7 a, IndexT 7 a ~ IndexT 8 a, IndexT 8 a ~ IndexT 9 a, IndexT 9 a ~ IndexT 10 a, IndexT 10 a ~ IndexT 11 a, IndexT 11 a ~ IndexT 12 a, IndexT 12 a ~ IndexT 13 a, IndexT 13 a ~ IndexT 14 a) {-| GHC does not allow you to partially apply type families (or any type declaration for that matter). So if you have a type of @* -> Constraint@ you can't pass @TupleConstraint 2@, because 'TupleConstraint' is partially applied and this is not allowed. 
But you can partially apply classes. So 'IsTuple' is basically the same as 'TupleConstraint' except that it's a class, not a type family. -} class (TupleConstraint n a) => IsTuple n a instance (TupleConstraint n a) => IsTuple n a {-| The version of 'IsTuple' for homogenous tuples (i.e. all the same type). -} class (HomoTupleConstraint n a) => IsHomoTuple n a instance (HomoTupleConstraint n a) => IsHomoTuple n a
clintonmead/indextype
src/Control/IndexT/Tuple.hs
Haskell
mit
8,225
module MaybeUtils where

import Data.Maybe
import Control.Monad

--http://hackage.haskell.org/package/syb-0.5.1/docs/src/Data-Generics-Aliases.html#orElse
-- | Left-biased choice on maybes: keep @x@ if it is 'Just', else @y@.
orElse :: Maybe a -> Maybe a -> Maybe a
x `orElse` y =
  case x of
    Just _  -> x
    Nothing -> y

-- | Left-biased choice lifted to functions: try @f@ first and fall
-- back to @g@ only when @f@ yields 'Nothing'.
--
-- Fix: the original inner @case g x of Just z -> Just z; Nothing -> Nothing@
-- was an identity re-wrap of @g x@; it is now expressed directly via
-- 'orElse'.
(.|) :: (a -> Maybe a) -> (a -> Maybe a) -> a -> Maybe a
f .| g = \x -> f x `orElse` g x

-- | Sequential composition: run @f@, then @g@ on its result
-- ('Control.Monad.>=>' specialised to 'Maybe').
(.&) :: (a -> Maybe a) -> (a -> Maybe a) -> a -> Maybe a
(.&) = (>=>)

{-
tryDo :: (a -> Maybe a) -> a -> Maybe a
tryDo f x =
  case f x of
    Just y -> Just y
    Nothing -> Just x
-}
holdenlee/fluidity
MaybeUtils.hs
Haskell
mit
793
{-# LANGUAGE ConstraintKinds #-} module Test where import ClassyPrelude import Control.Monad.Classes import Control.Monad.Writer (runWriterT) import Text.Printf import N import GUI.Fake import Helpers import Notes import Command import ClassesLens type Test' m = (M' m, Fake' m, MonadWriter [Text] m) type Test = Test' m => m () runTest :: Test → IO [Text] runTest t = snd $/ runWriterT $ runFake $ runDb $ runAux emptyAuxState t expect :: Bool → String → Test expect b t = unless b $ tell [asText $ pack t] t'edit :: Test t'edit = do rt ← use root note ← newNote [] "blah" addSub note rt addInputLines ["other text"] execute "ed blah" t ← use $ atNid note.title expect (t == "other text") (printf "t'edit: expected \"other text\" after editing, got %s" (show t)) {- code I used to get back notes file :: Text file = unsafePerformIO $ readFile "/home/yom/db.txt" doIt t = do enableMessages addInputLines [snd (headEx tt)] go (tailDef tt) (headEx tt) execute "+/" where go [] _ = addInputLines [""] go ((i1, s1):next) (i0, _) = do if | i1 == i0 → addInputLines [s1] | i1 == i0 + 1 → addInputLines [">", s1] | i1 < i0 → addInputLines (replicate (i0-i1) "" ++ [s1]) go next (i1, s1) tt = map parseLine $ lines t parseLine :: Text → (Int, Text) parseLine s = let (sp, s2) = span isSpace s (n, s3) = span isDigit s2 in (length sp `div` 3, drop 2 s3) -}
aelve/Jane
Test.hs
Haskell
mit
1,526
{-# OPTIONS_GHC -fno-warn-missing-signatures #-}

-- Thin executable wrapper: all logic lives in Commands.Servers.Simple.
import qualified Commands.Servers.Simple

main = Commands.Servers.Simple.main
sboosali/commands
commands-server-simple/executables/Main.hs
Haskell
mit
126
-- vim: set ts=2 sw=2 sts=0 ff=unix foldmethod=indent: {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE PostfixOperators #-} module MixKenallGeocode.KenAllCsvSpec where import SpecHelper import qualified Text.Parsec as P import MixKenallGeocode.KenAllCsv spec :: Spec spec = do describe "parseAreaOption" $ do context "オプションがない場合" $ do it "処理されないこと" $ let area = AreaNameNoOpt "XX" row = KenAllCsvRow "0337777" "北海道" "札幌市" area Nothing Nothing in parseAreaOption row `shouldBe` row {- context "閉じているオプションがある場合" $ do it "処理されること" $ let area = AreaNameOpt "XX" "AA" parea = AreaNameParsedOpt "XX" [AreaOptAddr (AreaRangeNumOnly (AreaNumNm "AA" NoNum "" []))] row = KenAllCsvRow "0337777" "北海道" "札幌市" area expect = KenAllCsvRow "0337777" "北海道" "札幌市" parea Nothing Nothing in parseAreaOption row `shouldBe` expect describe "areaPrefix" $ do context "空文字列の場合" $ do it "パースできること" $ do (P.parse areaPrefix "" "") `shouldParse` "" context "数字を含まない文字列の場合" $ do it "パースできること" $ do (P.parse areaPrefix "" "テスト") `shouldParse` "テスト" context "数字を含む文字列の場合" $ do it "数字より前の文字列がPrefixとしてパースされること" $ do (P.parse areaPrefix "" "テスト37") `shouldParse` "テスト" context "ノートの区切り文字を含む文字列の場合" $ do it "区切り文字より前の文字列がPrefixとしてパースされること" $ do (P.parse areaPrefix "" "テスト「AA」") `shouldParse` "テスト" describe "areaUnit" $ do it "住所単位をパースできること" $ (P.parse areaUnit "" "丁目") `shouldParse` "丁目" context "住所単位以外の文字の場合" $ do it "空文字が返ってくること" $ do (P.parse areaUnit "" "AA") `shouldParse` "" describe "areaNum" $ do context "数字の場合" $ do it "住所の数値をパースできること" $ (P.parse areaNum "" "120") `shouldParse` AreaNum 120 describe "sepStrs" $ do it "パースできること" $ (P.parse sepStrs "" "「") `shouldParse` "「" it "パースできること" $ (P.parse sepStrs "" "」") `shouldParse` "」" describe "areaNote" $ do it "パースできること" $ (P.parse areaNote "" "「AA」") `shouldParse` AreaRawNote "AA" -}
eji/mix-kenall-geocode
test/MixKenallGeocode/KenAllCsvSpec.hs
Haskell
mit
2,684
{-# LANGUAGE PackageImports #-}
{-# OPTIONS_GHC -fno-warn-dodgy-exports -fno-warn-unused-imports #-}
-- | Reexports "Control.Monad.Fail.Compat"
-- from a globally unique namespace.
module Control.Monad.Fail.Compat.Repl.Batteries (
  module Control.Monad.Fail.Compat
) where

-- The "this" package qualifier (PackageImports) pins the import to the
-- current package, avoiding ambiguity with other packages exposing a
-- module of the same name.
import "this" Control.Monad.Fail.Compat
haskell-compat/base-compat
base-compat-batteries/src/Control/Monad/Fail/Compat/Repl/Batteries.hs
Haskell
mit
314
-- Tree.hs
module Tree where

import Data.Monoid

-- | A travel guide; the derived 'Ord' compares lexicographically:
-- title, then authors, then price.
data TravelGuide = TravelGuide { title :: String, authors :: [String], price :: Double }
  deriving (Show, Eq, Ord)

-- | 'TravelGuide' wrapper whose ordering puts price first.
newtype TravelGuidePrice = TravelGuidePrice TravelGuide deriving Eq

instance Ord TravelGuidePrice where
  -- (<=) alone is a minimal complete definition for Ord.
  -- Ordering: price, then title, then authors.
  (TravelGuidePrice (TravelGuide t1 a1 p1)) <= (TravelGuidePrice(TravelGuide t2 a2 p2)) =
    p1 < p2 || (p1 == p2 && (t1 < t2 || (t1 == t2 && a1 <= a2)))

-- | Plain (unbalanced) binary tree.
data BinaryTree a = Node a (BinaryTree a) (BinaryTree a)
                  | Leaf
                  deriving Show

-- Binary Trees with Monoidal Cache:
-- each node stores a cache value of type c alongside its payload.
data BinaryTree3 v c = Node3 v c (BinaryTree3 v c) (BinaryTree3 v c)
                     | Leaf3
                     deriving (Show, Eq, Ord)

-- | Insert a value with its cache contribution.
-- NOTE(review): on each step the visited node's stored cache @c2@ is
-- recombined with the caches of both children; the intended cache
-- invariant is not evident from this module alone -- confirm whether
-- @c2@ is meant to be the node's own contribution or the subtree total.
treeInsert4 :: (Ord v, Monoid c) => v -> c -> BinaryTree3 v c -> BinaryTree3 v c
treeInsert4 v c (Node3 v2 c2 l r) =
  case compare v v2 of
    -- Duplicate value: tree unchanged; the new cache c is dropped.
    EQ -> Node3 v2 c2 l r
    LT -> let newLeft = treeInsert4 v c l
              newCached = c2 <> cached newLeft <> cached r
          in Node3 v2 newCached newLeft r
    GT -> let newRight = treeInsert4 v c r
              newCached = c2 <> cached l <> cached newRight
          in Node3 v2 newCached l newRight
treeInsert4 v c Leaf3 = Node3 v c Leaf3 Leaf3

-- | Cache stored at the root; 'mempty' for an empty tree.
cached :: Monoid c => BinaryTree3 v c -> c
cached (Node3 _ c _ _) = c
cached Leaf3 = mempty

-- | Monoid under 'min'; the identity is positive infinity.
-- NOTE(review): GHC >= 8.4 requires a Semigroup superclass instance for
-- Monoid; this instance predates that change.
newtype Min = Min Double deriving Show

instance Monoid Min where
  mempty = Min infinity
    where infinity = 1/0
  mappend (Min x) (Min y) = Min $ min x y
hnfmr/beginning_haskell
Tree.hs
Haskell
mit
1,568
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE DeriveFoldable #-}
{-# LANGUAGE DeriveDataTypeable #-}
--------------------------------------------------------------------
-- |
-- Copyright : © Oleg Grenrus 2014
-- License : MIT
-- Maintainer: Oleg Grenrus <oleg.grenrus@iki.fi>
-- Stability : experimental
-- Portability: non-portable
--
--------------------------------------------------------------------
module Data.Algebra.Boolean.FreeBoolean (
  FreeBoolean(..),
  module Data.Algebra.Boolean.CoBoolean
  ) where

import Data.Typeable (Typeable)
import Data.Foldable (Foldable)
import Control.DeepSeq (NFData(rnf))

import Data.Algebra.Boolean.CoBoolean
import Data.Algebra.Boolean.Negable hiding (not)
import qualified Data.Algebra.Boolean.Negable as Negable

import Prelude hiding ((||),(&&),not,any,all)
import Data.Algebra.Boolean

-- | Free 'Boolean' type, does not perform any optimizations on the structure. Useful only in tests.
--
-- Consider using 'Data.Algebra.Boolean.NNF'.
data FreeBoolean a = FBValue a  -- ^ embedded leaf value
                   | FBTrue
                   | FBFalse
                   | FBNot (FreeBoolean a)
                   | FBAnd (FreeBoolean a) (FreeBoolean a)
                   | FBOr (FreeBoolean a) (FreeBoolean a)
  deriving (Eq, Ord, Show, Read, Functor, Foldable, Typeable)

-- Interpret the free structure in any target 'Boolean' algebra,
-- mapping leaves with the supplied function. Structure is translated
-- one-to-one; no simplification happens here.
instance CoBoolean1 FreeBoolean where
  toBooleanWith f (FBValue x) = f x
  toBooleanWith _ FBTrue = true
  toBooleanWith _ FBFalse = false
  toBooleanWith f (FBNot x) = not $ toBooleanWith f x
  toBooleanWith f (FBOr a b) = toBooleanWith f a || toBooleanWith f b
  toBooleanWith f (FBAnd a b) = toBooleanWith f a && toBooleanWith f b

instance CoBoolean a => CoBoolean (FreeBoolean a) where
  toBoolean = toBooleanWith toBoolean

-- Negation is purely structural: just wrap in FBNot.
instance Negable (FreeBoolean a) where
  not = FBNot

-- The constructors themselves form the Boolean algebra; nothing is
-- evaluated or simplified.
instance Boolean (FreeBoolean a) where
  true = FBTrue
  false = FBFalse
  (||) = FBOr
  (&&) = FBAnd
  not = FBNot

instance NFData a => NFData (FreeBoolean a) where
  rnf (FBValue a) = rnf a
  rnf (FBNot a) = rnf a
  rnf (FBAnd a b) = rnf a `seq` rnf b
  rnf (FBOr a b) = rnf a `seq` rnf b
  rnf FBTrue = ()
  rnf FBFalse = ()
phadej/boolean-normal-forms
src/Data/Algebra/Boolean/FreeBoolean.hs
Haskell
mit
2,158
{-# htermination plusFM_C :: (Ord a, Ord k) => (b -> b -> b) -> FiniteMap (a,k) b -> FiniteMap (a,k) b -> FiniteMap (a,k) b #-} import FiniteMap
ComputationWithBoundedResources/ara-inference
doc/tpdb_trs/Haskell/full_haskell/FiniteMap_plusFM_C_12.hs
Haskell
mit
145
-- Problems/Problem014.hs
module Problems.Problem014 (p14) where

import Data.List
import Helpers.Numbers

-- Unexported driver, handy when loading this module standalone.
main = print p14

-- | Project Euler #14: the starting number below one million with the
-- longest Collatz chain. @collatz@ comes from Helpers.Numbers --
-- presumably it returns the chain as a list (its length is taken);
-- TODO confirm. Only odd numbers above 500000 are scanned --
-- presumably because any smaller/even start is dominated by another
-- candidate's chain; NOTE(review): verify that pruning argument.
p14 :: Int
p14 = snd $ foldl1' max $ map (\x -> ((length $ collatz x),x)) [500001,500003..999999]

-- NOTE(review): 'collatzList' and 'nextCollatz' below are neither
-- exported nor used by 'p14' -- apparently dead code kept for reference.

-- Chain as (value, chain-length-from-value-to-1) pairs, newest first.
-- 'head colList' is safe here: colList is non-empty by construction.
collatzList :: Int -> [(Int,Int)]
collatzList num
  | num == 1 = [(1,1)]
  | otherwise = (num,(snd $ head $ colList) + 1) : colList
  where colList = if even num
                  then collatzList (num `div` 2)
                  else collatzList (3 * num + 1)

-- One Collatz step.
nextCollatz :: Int -> Int
nextCollatz num
  | odd num = 3 * num + 1
  | otherwise = num `div` 2
Sgoettschkes/learning
haskell/ProjectEuler/src/Problems/Problem014.hs
Haskell
mit
588
module Handler.HomeSpec (spec) where

import TestImport

spec :: Spec
spec = withApp $ do

    -- Full GET-then-POST round trip of the home page: load the form,
    -- submit a file plus a text field, and check the rendered result.
    it "loads the index and checks it looks right" $ do
        get HomeR
        statusIs 200
        htmlAllContain "h1" "Welcome to Yesod"

        request $ do
            setMethod "POST"
            setUrl HomeR
            addNonce  -- CSRF token the form requires
            fileByLabel "Choose a file" "test/Spec.hs" "text/plain" -- upload this test file itself
            byLabel "What's on the file?" "Some Content"

        statusIs 200
        -- dump the response body to aid debugging on failure
        printBody
        htmlCount ".message" 1
        htmlAllContain ".message" "Some Content"
        htmlAllContain ".message" "text/plain"
ruHaskell/ruhaskell-yesod
test/Handler/HomeSpec.hs
Haskell
mit
668
-- Copyright 2012-2014 Samplecount S.L.
--
-- Licensed under the Apache License, Version 2.0 (the "License");
-- you may not use this file except in compliance with the License.
-- You may obtain a copy of the License at
--
--     http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing, software
-- distributed under the License is distributed on an "AS IS" BASIS,
-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-- See the License for the specific language governing permissions and
-- limitations under the License.

{-| Description: Toolchain definitions and utilities for Linux

This module provides toolchain definitions and utilities for targeting Linux.
See "Development.Shake.Language.C.Rules" for examples of how to use a target
toolchain.

Linux is also a supported host operating system, see
"Development.Shake.Language.C.Host" for examples of how to target the host.
-}
module Development.Shake.Language.C.Target.Linux (
    target
  , toolChain
) where

import Data.Label (get, set)
import Development.Shake
import Development.Shake.Language.C.BuildFlags
import Development.Shake.Language.C.Target
import Development.Shake.Language.C.ToolChain

-- | Build target given an architecture.
target :: Arch -> Target
target = Target Linux (Platform "linux")

-- Archiver shared by all Linux toolchains: build the archive with
-- @ar cr@ (create/insert members), then run @ranlib@ on the result to
-- generate the archive symbol index.
platformArchiver :: Archiver
platformArchiver tc buildFlags inputs output = do
    need inputs
    command_ [] (tool tc archiverCommand)
        $  ["cr"]
        ++ get archiverFlags buildFlags
        ++ [output]
        ++ inputs
    command_ [] (toolFromString tc "ranlib") [output]

-- | Linux toolchain.
toolChain :: ToolChainVariant -> ToolChain
-- The Generic variant simply aliases the GCC toolchain.
toolChain Generic = toolChain GCC
toolChain GCC =
      set variant GCC
    $ set compilerCommand "gcc"
    $ set archiverCommand "ar"
    $ set archiver platformArchiver
    $ set linkerCommand "g++"
    $ defaultToolChain
toolChain LLVM =
      set variant LLVM
    $ set compilerCommand "clang"
    $ set archiverCommand "ar"
    $ set archiver platformArchiver
    $ set linkerCommand "clang++"
    $ defaultToolChain
samplecount/shake-language-c
src/Development/Shake/Language/C/Target/Linux.hs
Haskell
apache-2.0
2,090
module Listener (main) where

import Ros.Node
import qualified Ros.Std_msgs.String as S

-- | Print each received message to stdout, prefixed with "I heard ".
showMsg :: S.String -> IO ()
showMsg msg = putStrLn ("I heard " ++ S._data msg)

-- Subscribe to the "chatter" topic and attach 'showMsg' as the handler.
main = runNode "listener" (subscribe "chatter" >>= runHandler showMsg)
rgleichman/roshask
Examples/PubSub/src/Listener.hs
Haskell
bsd-3-clause
236
{-# LANGUAGE DeriveGeneric, FlexibleInstances, OverlappingInstances, UndecidableInstances #-}
--
-- 0k/pong message definitions. Incoming/outgoing datatype and the corresponding
-- JSON and on-the-wire protobuf representations.
--
-- Caveat: datatype definitions themself entail protobuf encoding. No .proto
-- files.
--
module Types (Ingress(..), Outgress(..), (><)) where

import GHC.Generics hiding (D1)
import Data.TypeLevel (D1, D2)
import Data.Aeson (FromJSON(..), ToJSON(..))
import Data.ProtocolBuffers
import Data.ProtocolBuffers.Internal

-- | Incoming message. D1/D2 are type-level numerals serving as the
-- protobuf field tags.
data Ingress = Ingress
  { email :: Required D1 (Value String)
  , kadaID :: Optional D2 (Value String)
  } deriving (Show, Generic)

instance Encode Ingress
instance Decode Ingress
instance FromJSON Ingress

-- | Outgoing message.
data Outgress = Outgress
  { success :: Required D1 (Value Bool)
  , message :: Required D2 (Value String)
  } deriving (Show, Generic)

instance Encode Outgress
instance Decode Outgress
instance ToJSON Outgress

-- Blanket JSON instances for protobuf field wrappers: (de)serialize the
-- payload and (un)wrap it with putField/getField.
-- NOTE(review): these match *every* HasField type and rely on
-- OverlappingInstances/UndecidableInstances -- fragile if more JSON
-- instances are added elsewhere.
instance (HasField a, FromJSON (FieldType a)) => FromJSON a where
  parseJSON = fmap putField . parseJSON

instance (HasField a, ToJSON (FieldType a)) => ToJSON a where
  toJSON = toJSON . getField

-- | Extract the plain value of a field: @sel >< record@ is
-- @getField (sel record)@.
(><) :: HasField a => (b -> a) -> b -> FieldType a
(><) = (getField .) . ($)
element-doo/ekade
code/haskell/src/Types.hs
Haskell
bsd-3-clause
1,294
-- BNF Converter: Error Monad
-- Copyright (C) 2004 Author: Aarne Ranta
-- This file comes with NO WARRANTY and may be used FOR ANY PURPOSE.
module ErrM where

-- | 'Err' is a Maybe-like result type whose failure case carries an
-- error message.

import Control.Monad (MonadPlus(..), liftM)
import Control.Applicative (Applicative(..), Alternative(..))

data Err a
  = Ok a        -- ^ success carrying a result
  | Bad String  -- ^ failure carrying an error message
  deriving (Read, Show, Eq, Ord)

-- Failure short-circuits; success feeds the continuation.
instance Monad Err where
  return = Ok
  fail   = Bad
  Ok a  >>= f = f a
  Bad s >>= _ = Bad s

instance Applicative Err where
  pure = Ok
  Bad s <*> _ = Bad s
  Ok f  <*> r = fmap f r

instance Functor Err where
  fmap _ (Bad s) = Bad s
  fmap f (Ok a)  = Ok (f a)

-- 'mplus' keeps the first success; a failed first computation yields
-- the second argument unchanged.
instance MonadPlus Err where
  mzero = Bad "Err.mzero"
  mplus (Bad _) y = y
  mplus x       _ = x

instance Alternative Err where
  empty = mzero
  (<|>) = mplus
MichaelMcCulloch/MPlusPlus
src/ErrM.hs
Haskell
bsd-3-clause
835
{-# OPTIONS_HADDOCK show-extensions #-} {-# LANGUAGE KindSignatures #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE RankNTypes #-} module Control.THEff.Reader ( -- * Overview -- | -- This version builds its output lazily; for a strict version with -- the same interface, see "Control.THEff.Reader.Strict". -- -- > {-# LANGUAGE KindSignatures #-} -- > {-# LANGUAGE FlexibleInstances #-} -- > {-# LANGUAGE MultiParamTypeClasses #-} -- > {-# LANGUAGE TemplateHaskell #-} -- > -- > import Control.THEff -- > import Control.THEff.Reader -- > -- > mkEff "CharReader" ''Reader ''Char ''NoEff -- > mkEff "StrReader" ''Reader ''String ''CharReader -- > -- > main:: IO () -- > main = putStrLn $ runCharReader 'T' $ runStrReader "est" $ do -- > c <- ask -- > s <- ask -- > return $ c:s -- -- __/Output :/__ \"Test\" -- * Types and functions used in mkEff Reader' , Reader(..) , ReaderArgT , ReaderResT , effReader , runEffReader -- * Functions that use this effect , ask , asks ) where import Control.THEff -- | Actually, the effect type -- - __/v/__ - Type - the parameter of the effect. -- - __/e/__ - mkEff generated type. newtype Reader' v e = Reader' (v -> e) -- | Type implements link in the chain of effects. -- Constructors must be named __/{EffectName}{Outer|WriterAction|WriterResult}/__ -- and have a specified types of fields. -- - __/m/__ - Or Monad (if use the 'Lift') or phantom type - stub (if used 'NoEff'). -- - __/o/__ - Type of outer effect. -- - __/a/__ - The result of mkEff generated runEEEE... function. data Reader (m:: * -> *) e o v a = ReaderOuter (o m e) | ReaderAction (Reader' v e) | ReaderResult a -- | Type of fourth argument of runEffReader and first argument of runEEEE. type ReaderArgT v = v -- | Result type of runEEEE. type ReaderResT r = r -- | This function is used in the 'mkEff' generated runEEEE functions and typically -- in effect action functions. Calling the effect action. 
effReader:: EffClass Reader' v e => Reader' v r -> Eff e r
-- CPS: compose the continuation k after the reader function g.
effReader (Reader' g) = effAction $ \k -> Reader' (k . g)

-- | The main function of the effect implementing.
-- This function is used in the 'mkEff' generated runEEEE functions.
runEffReader :: forall (t :: * -> *) (u :: (* -> *) -> * -> *) (m :: * -> *) a v
                (m1 :: * -> *) e (o :: (* -> *) -> * -> *) w a1 r. Monad m =>
       (u t r -> (r -> m (ReaderResT a)) -> m (ReaderResT a)) -- ^ The outer effect function
    -> (Reader m1 e o w a1 -> r) -- ^ The chain of effects link wrapper.
    -> (r -> Reader t r u v a) -- ^ The chain of effects link unwrapper.
    -> ReaderArgT v -- ^ The initial value of argument of effect.
    -> Eff r a1
    -> m (ReaderResT a)
runEffReader outer to un v m = loop $ runEff m (to . ReaderResult)
    where loop = select . un
          -- Request from an outer effect layer: delegate and resume.
          select (ReaderOuter f) = outer f loop
          -- An 'ask': feed the constant environment v to the continuation.
          select (ReaderAction (Reader' g)) = loop $ g v
          -- Computation finished; return the final result.
          select (ReaderResult r) = return r

-- | Get reader value
ask :: EffClass Reader' v e => Eff e v
ask = effReader $ Reader' id

-- | Get and convert the value of the reader
asks :: EffClass Reader' r e => (r -> v) -> Eff e v
asks f = effReader $ Reader' f
KolodeznyDiver/THEff
src/Control/THEff/Reader.hs
Haskell
bsd-3-clause
3,599
module Main where

import App

-- | Program entry point: starts the application with an in-memory
-- database connection string.
main :: IO ()
main =
    let connectionString = ":memory:"
    in run connectionString
CatzScience/sassy
server/src/Main.hs
Haskell
bsd-3-clause
67
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE ScopedTypeVariables #-}
-- | Note - this API is designed to support a narrow (but common!) set
-- of use cases. If you find that you need more customization than this
-- offers, then you will need to consider building your own layout and
-- event handling for input fields.
--
-- For a fuller introduction to this API, see the "Input Forms" section
-- of the Brick User Guide. Also see the demonstration programs for
-- examples of forms in action.
--
-- This module provides an input form API. This API allows you to
-- construct an input interface based on a data type of your choice.
-- Each input in the form corresponds to a field in your data type. This
-- API then automatically dispatches keyboard and mouse input events to
-- each form input field, manages rendering of the form, notifies the
-- user when a form field's value is invalid, and stores valid inputs in
-- your data type when possible.
--
-- A form has both a visual representation and a corresponding data
-- structure representing the latest valid values for that form
-- (referred to as the "state" of the form). A 'FormField' is a single
-- input component in the form and a 'FormFieldState' defines the
-- linkage between that visual input and the corresponding portion
-- of the state represented by that visual; there may be multiple
-- 'FormField's combined for a single 'FormFieldState' (e.g. a radio
-- button sequence).
--
-- To use a 'Form', you must include it within your application state
-- type. You can use 'formState' to access the underlying s whenever you
-- need it. See @programs/FormDemo.hs@ for a complete working example.
--
-- Also note that, by default, forms and their field inputs are
-- concatenated together in a 'vBox'. This can be customized on a
-- per-field basis and for the entire form by using the functions
-- 'setFieldConcat' and 'setFormConcat', respectively.
--
-- Bear in mind that for most uses, the 'FormField' and 'FormFieldState'
-- types will not be used directly. Instead, the constructors for
-- various field types (such as 'editTextField') will be used instead.
module Brick.Forms
  ( -- * Data types
    Form
  , FormFieldState(..)
  , FormField(..)

  -- * Creating and using forms
  , newForm
  , formFocus
  , formState
  , handleFormEvent
  , renderForm
  , renderFormFieldState
  , (@@=)
  , allFieldsValid
  , invalidFields
  , setFieldValid
  , setFormConcat
  , setFieldConcat
  , setFormFocus
  , updateFormState

  -- * Simple form field constructors
  , editTextField
  , editShowableField
  , editShowableFieldWithValidate
  , editPasswordField
  , radioField
  , checkboxField
  , listField

  -- * Advanced form field constructors
  , editField
  , radioCustomField
  , checkboxCustomField

  -- * Attributes
  , formAttr
  , invalidFormInputAttr
  , focusedFormInputAttr
  )
where

import Graphics.Vty hiding (showCursor)

#if !(MIN_VERSION_base(4,11,0))
import Data.Monoid
#endif

import Data.Maybe (isJust, isNothing)
import Data.List (elemIndex)
import Data.Vector (Vector)

import Brick
import Brick.Focus
import Brick.Widgets.Edit
import Brick.Widgets.List
import qualified Data.Text.Zipper as Z

import qualified Data.Text as T
import Text.Read (readMaybe)

import Lens.Micro

-- | A form field. This represents an interactive input field in the
-- form. Its user input is validated and thus converted into a type of
-- your choosing.
--
-- Type variables are as follows:
--
--  * @a@ - the type of the field in your form state that this field
--    manipulates
--  * @b@ - the form field's internal state type
--  * @e@ - your application's event type
--  * @n@ - your application's resource name type
data FormField a b e n =
    FormField { formFieldName :: n
              -- ^ The name identifying this form field.
              , formFieldValidate :: b -> Maybe a
              -- ^ A validation function converting this field's state
              -- into a value of your choosing. @Nothing@ indicates a
              -- validation failure. For example, this might validate
              -- an 'Editor' state value by parsing its text contents as
              -- an integer and return 'Maybe' 'Int'. This is for pure
              -- value validation; if additional validation is required
              -- (e.g. via 'IO'), use this field's state value in an
              -- external validation routine and use 'setFieldValid' to
              -- feed the result back into the form.
              , formFieldExternallyValid :: Bool
              -- ^ Whether the field is valid according to an external
              -- validation source. Defaults to always being 'True' and
              -- can be set with 'setFieldValid'. The value of this
              -- field also affects the behavior of 'allFieldsValid' and
              -- 'invalidFields'.
              , formFieldRender :: Bool -> b -> Widget n
              -- ^ A function to render this form field. Parameters are
              -- whether the field is currently focused, followed by the
              -- field state.
              , formFieldHandleEvent :: BrickEvent n e -> b -> EventM n b
              -- ^ An event handler for this field. This receives the
              -- event and the field state and returns a new field
              -- state.
              }

-- | A form field state accompanied by the fields that manipulate that
-- state. The idea is that some record field in your form state has
-- one or more form fields that manipulate that value. This data type
-- maps that state field (using a lens into your state) to the form
-- input fields responsible for managing that state field, along with
-- a current value for that state field and an optional function to
-- control how the form inputs are rendered.
--
-- Most form fields will just have one input, such as text editors, but
-- others, such as radio button collections, will have many, which is
-- why this type supports more than one input corresponding to a state
-- field.
--
-- Type variables are as follows:
--
--  * @s@ - the data type containing the value manipulated by these form
--    fields.
--  * @e@ - your application's event type
--  * @n@ - your application's resource name type
data FormFieldState s e n where
    FormFieldState :: { formFieldState :: b
                      -- ^ The current state value associated with
                      -- the field collection. Note that this type is
                      -- existential. All form fields in the collection
                      -- must validate to this type.
                      , formFieldLens :: Lens' s a
                      -- ^ A lens to extract and store a
                      -- successfully-validated form input back into
                      -- your form state.
                      , formFieldUpdate :: a -> b -> b
                      -- ^ Given a new form state value, update the form
                      -- field state in place.
                      , formFields :: [FormField a b e n]
                      -- ^ The form fields, in order, that the user will
                      -- interact with to manipulate this state value.
                      , formFieldRenderHelper :: Widget n -> Widget n
                      -- ^ A helper function to augment the rendered
                      -- representation of this collection of form
                      -- fields. It receives the default representation
                      -- and can augment it, for example, by adding a
                      -- label on the left.
                      , formFieldConcat :: [Widget n] -> Widget n
                      -- ^ Concatenation function for this field's input
                      -- renderings.
                      } -> FormFieldState s e n

-- | A form: a sequence of input fields that manipulate the fields of an
-- underlying state that you choose. This value must be stored in the
-- Brick application's state.
--
-- Type variables are as follows:
--
--  * @s@ - the data type of your choosing containing the values
--    manipulated by the fields in this form.
--  * @e@ - your application's event type
--  * @n@ - your application's resource name type
data Form s e n =
    Form { formFieldStates :: [FormFieldState s e n]
         , formFocus :: FocusRing n
         -- ^ The focus ring for the form, indicating which form field
         -- has input focus.
         , formState :: s
         -- ^ The current state of the form. Forms guarantee that only
         -- valid inputs ever get stored in the state, and that after
         -- each input event on a form field, if that field contains a
         -- valid state value then the value is immediately saved to its
         -- corresponding field in this state value using the form
         -- field's lens over @s@.
         , formConcatAll :: [Widget n] -> Widget n
         -- ^ Concatenation function for this form's field renderings.
         }

-- | Compose a new rendering augmentation function with the one in the
-- form field collection. For example, we might put a label on the left
-- side of a form field:
--
-- > (str "Please check: " <+>) @@= checkboxField alive AliveField "Alive?"
--
-- This can also be used to add multiple augmentations and associates
-- right:
--
-- > (withDefAttr someAttribute) @@=
-- > (str "Please check: " <+>) @@=
-- > checkboxField alive AliveField "Alive?"
infixr 5 @@=
(@@=) :: (Widget n -> Widget n) -> (s -> FormFieldState s e n) -> s -> FormFieldState s e n
(@@=) h mkFs s =
    let v = mkFs s
    in v { formFieldRenderHelper = h . (formFieldRenderHelper v) }

-- | Update the state contained in a form.
--
-- This updates all form fields to be consistent with the new form
-- state. Where possible, this attempts to maintain other input state,
-- such as text editor cursor position.
--
-- Note that since this updates the form fields, this means that any
-- field values will be completely overwritten! This may or may not
-- be what you want, since a user actively using the form could get
-- confused if their edits go away. Use carefully.
updateFormState :: s -> Form s e n -> Form s e n
updateFormState newState f =
    let updateField fs = case fs of
            FormFieldState st l upd s rh concatAll ->
                -- Push the new state's value (via the field's lens)
                -- through the field's update function.
                FormFieldState (upd (newState^.l) st) l upd s rh concatAll
    in f { formState = newState
         , formFieldStates = updateField <$> formFieldStates f
         }

-- | Set the focused field of a form.
setFormFocus :: (Eq n) => n -> Form s e n -> Form s e n
setFormFocus n f = f { formFocus = focusSetCurrent n $ formFocus f }

-- | Set a form field's concatenation function.
setFieldConcat :: ([Widget n] -> Widget n) -> FormFieldState s e n -> FormFieldState s e n
setFieldConcat f s = s { formFieldConcat = f }

-- | Set a form's concatenation function.
setFormConcat :: ([Widget n] -> Widget n) -> Form s e n -> Form s e n
setFormConcat func f = f { formConcatAll = func }

-- | Create a new form with the specified input fields and an initial
-- form state. The fields are initialized from the state using their
-- state lenses and the first form input is focused initially.
newForm :: [s -> FormFieldState s e n]
        -- ^ The form field constructors. This is intended to be
        -- populated using the various field constructors in this
        -- module.
        -> s
        -- ^ The initial form state used to populate the fields.
        -> Form s e n
newForm mkEs s =
    let es = mkEs <*> pure s
    in Form { formFieldStates = es
            , formFocus = focusRing $ concat $ formFieldNames <$> es
            , formState = s
            , formConcatAll = vBox
            }

-- | Internal: collect the resource names of every input in a field
-- collection, in order.
formFieldNames :: FormFieldState s e n -> [n]
formFieldNames (FormFieldState _ _ _ fields _ _) = formFieldName <$> fields

-- | A form field for manipulating a boolean value. This represents
-- 'True' as @[X] label@ and 'False' as @[ ] label@.
--
-- This field responds to `Space` keypresses to toggle the checkbox and
-- to mouse clicks.
checkboxField :: (Ord n, Show n)
              => Lens' s Bool
              -- ^ The state lens for this value.
              -> n
              -- ^ The resource name for the input field.
              -> T.Text
              -- ^ The label for the check box, to appear at its right.
              -> s
              -- ^ The initial form state.
              -> FormFieldState s e n
checkboxField = checkboxCustomField '[' 'X' ']'

-- | A form field for manipulating a boolean value. This represents
-- 'True' as @[X] label@ and 'False' as @[ ] label@. This function
-- permits the customization of the @[X]@ notation characters.
--
-- This field responds to `Space` keypresses to toggle the checkbox and
-- to mouse clicks.
checkboxCustomField :: (Ord n, Show n)
                    => Char
                    -- ^ Left bracket character.
                    -> Char
                    -- ^ Checkmark character.
                    -> Char
                    -- ^ Right bracket character.
                    -> Lens' s Bool
                    -- ^ The state lens for this value.
                    -> n
                    -- ^ The resource name for the input field.
                    -> T.Text
                    -- ^ The label for the check box, to appear at its right.
                    -> s
                    -- ^ The initial form state.
                    -> FormFieldState s e n
checkboxCustomField lb check rb stLens name label initialState =
    let initVal = initialState ^. stLens

        -- A click on this field's name or a Space keypress toggles the
        -- boolean state; everything else is ignored.
        handleEvent (MouseDown n _ _ _) s | n == name = return $ not s
        handleEvent (VtyEvent (EvKey (KChar ' ') [])) s = return $ not s
        handleEvent _ s = return s

    in FormFieldState { formFieldState = initVal
                      , formFields = [ FormField name Just True
                                                 (renderCheckbox lb check rb label name)
                                                 handleEvent
                                     ]
                      , formFieldLens = stLens
                      , formFieldUpdate = \val _ -> val
                      , formFieldRenderHelper = id
                      , formFieldConcat = vBox
                      }

-- | Internal: render a checkbox with the given bracket/checkmark
-- characters and label, applying the focus attribute and cursor
-- placement when the field has focus.
renderCheckbox :: Char -> Char -> Char -> T.Text -> n -> Bool -> Bool -> Widget n
renderCheckbox lb check rb label n foc val =
    let addAttr = if foc then withDefAttr focusedFormInputAttr else id
        csr = if foc then putCursor n (Location (1,0)) else id
    in clickable n $
       addAttr $ csr $
       (txt $ T.singleton lb <> (if val then T.singleton check else " ") <>
              T.singleton rb <> " ") <+> txt label

-- | A form field for selecting a single choice from a set of possible
-- choices in a scrollable list. This uses a 'List' internally.
--
-- This field responds to the same input events that a 'List' does.
listField :: forall s e n a . (Ord n, Show n, Eq a)
          => (s -> Vector a)
          -- ^ Possible choices.
          -> Lens' s (Maybe a)
          -- ^ The state lens for the initially/finally selected
          -- element.
          -> (Bool -> a -> Widget n)
          -- ^ List item rendering function.
          -> Int
          -- ^ List item height in rows.
          -> n
          -- ^ The resource name for the input field.
          -> s
          -- ^ The initial form state.
          -> FormFieldState s e n
listField options stLens renderItem itemHeight name initialState =
    let optionsVector = options initialState
        initVal = initialState ^. customStLens

        -- Adapt the caller's @Maybe a@ lens into a lens over the
        -- internal 'List' widget state.
        customStLens :: Lens' s (List n a)
        customStLens = lens getList setList
            where
                getList s =
                    let l = list name optionsVector itemHeight
                    in case s ^. stLens of
                        Nothing -> l
                        Just e -> listMoveToElement e l
                setList s l = s & stLens .~ (snd <$> listSelectedElement l)

        handleEvent (VtyEvent e) s = handleListEvent e s
        handleEvent _ s = return s

    in FormFieldState { formFieldState = initVal
                      , formFields = [ FormField name Just True
                                                 (renderList renderItem)
                                                 handleEvent
                                     ]
                      , formFieldLens = customStLens
                      , formFieldUpdate = \listState l ->
                            case listSelectedElement listState of
                                Nothing -> l
                                Just (_, e) -> listMoveToElement e l
                      , formFieldRenderHelper = id
                      , formFieldConcat = vBox
                      }

-- | A form field for selecting a single choice from a set of possible
-- choices. Each choice has an associated value and text label.
--
-- This field responds to `Space` keypresses to select a radio button
-- option and to mouse clicks.
radioField :: (Ord n, Show n, Eq a)
           => Lens' s a
           -- ^ The state lens for this value.
           -> [(a, n, T.Text)]
           -- ^ The available choices, in order. Each choice has a value
           -- of type @a@, a resource name, and a text label.
           -> s
           -- ^ The initial form state.
           -> FormFieldState s e n
radioField = radioCustomField '[' '*' ']'

-- | A form field for selecting a single choice from a set of possible
-- choices. Each choice has an associated value and text label. This
-- function permits the customization of the @[*]@ notation characters.
--
-- This field responds to `Space` keypresses to select a radio button
-- option and to mouse clicks.
radioCustomField :: (Ord n, Show n, Eq a)
                 => Char
                 -- ^ Left bracket character.
                 -> Char
                 -- ^ Checkmark character.
                 -> Char
                 -- ^ Right bracket character.
                 -> Lens' s a
                 -- ^ The state lens for this value.
                 -> [(a, n, T.Text)]
                 -- ^ The available choices, in order. Each choice has a value
                 -- of type @a@, a resource name, and a text label.
                 -> s
                 -- ^ The initial form state.
                 -> FormFieldState s e n
radioCustomField lb check rb stLens options initialState =
    let initVal = initialState ^. stLens

        -- Find the option value for a clicked resource name; Nothing if
        -- the name is absent or ambiguous.
        lookupOptionValue n =
            let results = filter (\(_, n', _) -> n' == n) options
            in case results of
                [(val, _, _)] -> Just val
                _ -> Nothing

        handleEvent _ (MouseDown n _ _ _) s =
            case lookupOptionValue n of
                Nothing -> return s
                Just v -> return v
        handleEvent new (VtyEvent (EvKey (KChar ' ') [])) _ = return new
        handleEvent _ _ s = return s

        optionFields = mkOptionField <$> options
        mkOptionField (val, name, label) =
            FormField name
                      Just
                      True
                      (renderRadio lb check rb val name label)
                      (handleEvent val)

    in FormFieldState { formFieldState = initVal
                      , formFields = optionFields
                      , formFieldLens = stLens
                      , formFieldUpdate = \val _ -> val
                      , formFieldRenderHelper = id
                      , formFieldConcat = vBox
                      }

-- | Internal: render one radio button entry; the mark is drawn when the
-- entry's value equals the currently-selected value.
renderRadio :: (Eq a) => Char -> Char -> Char -> a -> n -> T.Text -> Bool -> a -> Widget n
renderRadio lb check rb val name label foc cur =
    let addAttr = if foc
                  then withDefAttr focusedFormInputAttr
                  else id
        isSet = val == cur
        csr = if foc then putCursor name (Location (1,0)) else id
    in clickable name $
       addAttr $ csr $
       hBox [ txt $ T.singleton lb
            , txt $ if isSet then T.singleton check else " "
            , txt $ T.singleton rb <> " " <> label
            ]

-- | A form field for using an editor to edit the text representation of
-- a value. The other editing fields in this module are special cases of
-- this function.
--
-- This field responds to all events handled by 'editor', including
-- mouse events.
editField :: (Ord n, Show n)
          => Lens' s a
          -- ^ The state lens for this value.
          -> n
          -- ^ The resource name for the input field.
          -> Maybe Int
          -- ^ The optional line limit for the editor (see 'editor').
          -> (a -> T.Text)
          -- ^ The initialization function that turns your value into
          -- the editor's initial contents. The resulting text may
          -- contain newlines.
          -> ([T.Text] -> Maybe a)
          -- ^ The validation function that converts the editor's
          -- contents into a valid value of type @a@.
          -> ([T.Text] -> Widget n)
          -- ^ The rendering function for the editor's contents (see
          -- 'renderEditor').
          -> (Widget n -> Widget n)
          -- ^ A rendering augmentation function to adjust the
          -- representation of the rendered editor.
          -> s
          -- ^ The initial form state.
          -> FormFieldState s e n
editField stLens n limit ini val renderText wrapEditor initialState =
    let initVal = applyEdit gotoEnd $ editor n limit initialText
        -- Place the cursor at the end of the initial contents.
        gotoEnd = let ls = T.lines initialText
                      pos = (length ls - 1, T.length (last ls))
                  in if null ls
                     then id
                     else Z.moveCursor pos
        initialText = ini $ initialState ^. stLens
        handleEvent (VtyEvent e) ed = handleEditorEvent e ed
        handleEvent _ ed = return ed

    in FormFieldState { formFieldState = initVal
                      , formFields = [ FormField n
                                                 (val . getEditContents)
                                                 True
                                                 (\b e -> wrapEditor $ renderEditor renderText b e)
                                                 handleEvent
                                     ]
                      , formFieldLens = stLens
                      , formFieldUpdate = \newVal e ->
                            -- Only rebuild the zipper when the text
                            -- actually changed, preserving cursor state
                            -- otherwise.
                            let newTxt = ini newVal
                            in if newTxt == (T.unlines $ getEditContents e)
                               then e
                               else applyEdit (Z.insertMany newTxt . Z.clearZipper) e
                      , formFieldRenderHelper = id
                      , formFieldConcat = vBox
                      }

-- | A form field using a single-line editor to edit the 'Show'
-- representation of a state field value of type @a@. This automatically
-- uses its 'Read' instance to validate the input. This field is mostly
-- useful in cases where the user-facing representation of a value
-- matches the 'Show' representation exactly, such as with 'Int'.
--
-- This field responds to all events handled by 'editor', including
-- mouse events.
editShowableField :: (Ord n, Show n, Read a, Show a)
                  => Lens' s a
                  -- ^ The state lens for this value.
                  -> n
                  -- ^ The resource name for the input field.
                  -> s
                  -- ^ The initial form state.
                  -> FormFieldState s e n
editShowableField stLens n = editShowableFieldWithValidate stLens n (const True)

-- | A form field using a single-line editor to edit the 'Show' representation
-- of a state field value of type @a@. This automatically uses its 'Read'
-- instance to validate the input, and also accepts an additional user-defined
-- pass for validation. This field is mostly useful in cases where the
-- user-facing representation of a value matches the 'Show' representation
-- exactly, such as with 'Int', but you don't want to accept just /any/ 'Int'.
--
-- This field responds to all events handled by 'editor', including
-- mouse events.
editShowableFieldWithValidate :: (Ord n, Show n, Read a, Show a)
                              => Lens' s a
                              -- ^ The state lens for this value.
                              -> n
                              -- ^ The resource name for the input field.
                              -> (a -> Bool)
                              -- ^ Additional validation step for input.
                              -- 'True' indicates that the value is
                              -- valid.
                              -> s
                              -- ^ The initial form state.
                              -> FormFieldState s e n
editShowableFieldWithValidate stLens n isValid =
    let ini = T.pack . show
        val ls = do
            v <- readMaybe $ T.unpack $ T.intercalate "\n" ls
            if isValid v
                then return v
                else Nothing
        limit = Just 1
        renderText = txt . T.unlines
    in editField stLens n limit ini val renderText id

-- | A form field using an editor to edit a text value. Since the value
-- is free-form text, it is always valid.
--
-- This field responds to all events handled by 'editor', including
-- mouse events.
editTextField :: (Ord n, Show n)
              => Lens' s T.Text
              -- ^ The state lens for this value.
              -> n
              -- ^ The resource name for the input field.
              -> Maybe Int
              -- ^ The optional line limit for the editor (see 'editor').
              -> s
              -- ^ The initial form state.
              -> FormFieldState s e n
editTextField stLens n limit =
    let ini = id
        val = Just . T.intercalate "\n"
        renderText = txt . T.intercalate "\n"
    in editField stLens n limit ini val renderText id

-- | A form field using a single-line editor to edit a free-form text
-- value represented as a password. The value is always considered valid
-- and is always represented with one asterisk per password character.
--
-- This field responds to all events handled by 'editor', including
-- mouse events.
editPasswordField :: (Ord n, Show n)
                  => Lens' s T.Text
                  -- ^ The state lens for this value.
                  -> n
                  -- ^ The resource name for the input field.
                  -> s
                  -- ^ The initial form state.
                  -> FormFieldState s e n
editPasswordField stLens n =
    let ini = id
        val = Just . T.concat
        limit = Just 1
        renderText = toPassword
    in editField stLens n limit ini val renderText id

-- | Internal: render editor contents as one asterisk per character.
toPassword :: [T.Text] -> Widget a
toPassword s = txt $ T.replicate (T.length $ T.concat s) "*"

-- | The namespace for the other form attributes.
formAttr :: AttrName
formAttr = "brickForm"

-- | The attribute for form input fields with invalid values.
invalidFormInputAttr :: AttrName
invalidFormInputAttr = formAttr <> "invalidInput"

-- | The attribute for form input fields that have the focus.
focusedFormInputAttr :: AttrName
focusedFormInputAttr = formAttr <> "focusedInput"

-- | Returns whether all form fields in the form currently have valid
-- values according to the fields' validation functions. This is useful
-- when we need to decide whether the form state is up to date with
-- respect to the form input fields.
allFieldsValid :: Form s e n -> Bool
allFieldsValid = null . invalidFields

-- | Returns the resource names associated with all form input fields
-- that currently have invalid inputs. This is useful when we need to
-- force the user to repair invalid inputs before moving on from a form
-- editing session.
invalidFields :: Form s e n -> [n]
invalidFields f = concat $ getInvalidFields <$> formFieldStates f

-- | Manually indicate that a field has invalid contents. This can be
-- useful in situations where validation beyond the form element's
-- validator needs to be performed and the result of that validation
-- needs to be fed back into the form state.
setFieldValid :: (Eq n)
              => Bool
              -- ^ Whether the field is considered valid.
              -> n
              -- ^ The name of the form field to set as (in)valid.
              -> Form s e n
              -- ^ The form to modify.
              -> Form s e n
setFieldValid v n form =
    let go1 [] = []
        go1 (s:ss) =
            let s' = case s of
                    FormFieldState st l upd fs rh concatAll ->
                        -- Note: go2 stops at the first field whose name
                        -- matches; fields after the match are untouched.
                        let go2 [] = []
                            go2 (f@(FormField fn val _ r h):ff)
                                | n == fn = FormField fn val v r h : ff
                                | otherwise = f : go2 ff
                        in FormFieldState st l upd (go2 fs) rh concatAll
            in s' : go1 ss
    in form { formFieldStates = go1 (formFieldStates form) }

-- | Internal: names of the fields in one collection that fail either
-- their validator or their external-validity flag.
getInvalidFields :: FormFieldState s e n -> [n]
getInvalidFields (FormFieldState st _ _ fs _ _) =
    let gather (FormField n validate extValid _ _) =
            if (not extValid || (isNothing $ validate st)) then [n] else []
    in concat $ gather <$> fs

-- | Render a form.
--
-- For each form field, each input for the field is rendered using
-- the implementation provided by its 'FormField'. The inputs are
-- then concatenated with the field's concatenation function (see
-- 'setFieldConcat') and are then augmented using the form field's
-- rendering augmentation function (see '@@='). Fields with invalid
-- inputs (either due to built-in validator failure or due to external
-- validation failure via 'setFieldValid') will be displayed using the
-- 'invalidFormInputAttr' attribute.
--
-- Finally, all of the resulting field renderings are concatenated with
-- the form's concatenation function (see 'setFormConcat').
renderForm :: (Eq n) => Form s e n -> Widget n
renderForm (Form es fr _ concatAll) =
    concatAll $ renderFormFieldState fr <$> es

-- | Render a single form field collection. This is called internally by
-- 'renderForm' but is exposed in cases where a form field state needs
-- to be rendered outside of a 'Form', so 'renderForm' is probably what
-- you want.
renderFormFieldState :: (Eq n)
                     => FocusRing n
                     -- ^ The focus ring indicating which input, if any,
                     -- is focused.
                     -> FormFieldState s e n
                     -- ^ The field collection to render.
                     -> Widget n
renderFormFieldState fr (FormFieldState st _ _ fields helper concatFields) =
    let renderFields [] = []
        renderFields ((FormField n validate extValid renderField _):fs) =
            -- A field render is forced to the invalid attribute unless
            -- both the validator and the external-validity flag pass.
            let maybeInvalid = if (isJust $ validate st) && extValid
                               then id
                               else forceAttr invalidFormInputAttr
                foc = Just n == focusGetCurrent fr
            in maybeInvalid (renderField foc st) : renderFields fs
    in helper $ concatFields $ renderFields fields

-- | Dispatch an event to the appropriate form field and return a new
-- form. This handles the following events in this order:
--
-- * On @Tab@ keypresses, this changes the focus to the next field in
--   the form.
-- * On @Shift-Tab@ keypresses, this changes the focus to the previous
--   field in the form.
-- * On mouse button presses (regardless of button or modifier), the
--   focus is changed to the clicked form field and the event is
--   forwarded to the event handler for the clicked form field.
-- * On @Left@ or @Up@, if the currently-focused field is part of a
--   collection (e.g. radio buttons), the previous entry in the
--   collection is focused.
-- * On @Right@ or @Down@, if the currently-focused field is part of a
--   collection (e.g. radio buttons), the next entry in the collection
--   is focused.
-- * All other events are forwarded to the currently focused form field.
--
-- In all cases where an event is forwarded to a form field, validation
-- of the field's input state is performed immediately after the
-- event has been handled. If the form field's input state succeeds
-- validation using the field's validator function, its value is
-- immediately stored in the form state using the form field's state
-- lens. The external validation flag is ignored during this step to
-- ensure that external validators have a chance to get the intermediate
-- validated value.
handleFormEvent :: (Eq n) => BrickEvent n e -> Form s e n -> EventM n (Form s e n)
handleFormEvent (VtyEvent (EvKey (KChar '\t') [])) f =
    return $ f { formFocus = focusNext $ formFocus f }
handleFormEvent (VtyEvent (EvKey KBackTab [])) f =
    return $ f { formFocus = focusPrev $ formFocus f }
handleFormEvent e@(MouseDown n _ _ _) f =
    handleFormFieldEvent n e $ f { formFocus = focusSetCurrent n (formFocus f) }
handleFormEvent e@(MouseUp n _ _) f =
    handleFormFieldEvent n e $ f { formFocus = focusSetCurrent n (formFocus f) }
handleFormEvent e@(VtyEvent (EvKey KUp [])) f =
    case focusGetCurrent (formFocus f) of
        Nothing -> return f
        Just n ->
            case getFocusGrouping f n of
                Nothing -> forwardToCurrent e f
                Just grp -> return $ f { formFocus = focusSetCurrent (entryBefore grp n) (formFocus f) }
handleFormEvent e@(VtyEvent (EvKey KDown [])) f =
    case focusGetCurrent (formFocus f) of
        Nothing -> return f
        Just n ->
            case getFocusGrouping f n of
                Nothing -> forwardToCurrent e f
                Just grp -> return $ f { formFocus = focusSetCurrent (entryAfter grp n) (formFocus f) }
handleFormEvent e@(VtyEvent (EvKey KLeft [])) f =
    case focusGetCurrent (formFocus f) of
        Nothing -> return f
        Just n ->
            case getFocusGrouping f n of
                Nothing -> forwardToCurrent e f
                Just grp -> return $ f { formFocus = focusSetCurrent (entryBefore grp n) (formFocus f) }
handleFormEvent e@(VtyEvent (EvKey KRight [])) f =
    case focusGetCurrent (formFocus f) of
        Nothing -> return f
        Just n ->
            case getFocusGrouping f n of
                Nothing -> forwardToCurrent e f
                Just grp -> return $ f { formFocus = focusSetCurrent (entryAfter grp n) (formFocus f) }
handleFormEvent e f = forwardToCurrent e f

-- | Internal: if the named input belongs to a multi-input field
-- collection (e.g. radio buttons), return all of that collection's
-- names; otherwise Nothing.
getFocusGrouping :: (Eq n) => Form s e n -> n -> Maybe [n]
getFocusGrouping f n = findGroup (formFieldStates f)
    where
        findGroup [] = Nothing
        findGroup (e:es) =
            let ns = formFieldNames e
            in if n `elem` ns && length ns > 1
               then Just ns
               else findGroup es

-- | Internal: the entry following @a@ in @as@, wrapping to the front.
-- NOTE(review): the irrefutable @Just i@ pattern is partial — it
-- crashes if @a@ is not in @as@. Callers only pass names obtained from
-- the group itself, but a total lookup would be safer.
entryAfter :: (Eq a) => [a] -> a -> a
entryAfter as a =
    let Just i = elemIndex a as
        i' = if i == length as - 1
             then 0
             else i + 1
    in as !! i'

-- | Internal: the entry preceding @a@ in @as@, wrapping to the back.
-- NOTE(review): same partial @Just i@ pattern as 'entryAfter'.
entryBefore :: (Eq a) => [a] -> a -> a
entryBefore as a =
    let Just i = elemIndex a as
        i' = if i == 0
             then length as - 1
             else i - 1
    in as !! i'

-- | Internal: forward an event to whichever field currently has focus,
-- or do nothing if no field is focused.
forwardToCurrent :: (Eq n) => BrickEvent n e -> Form s e n -> EventM n (Form s e n)
forwardToCurrent e f =
    case focusGetCurrent (formFocus f) of
        Nothing -> return f
        Just n -> handleFormFieldEvent n e f

-- | Internal: deliver an event to the field named @n@, then run that
-- field's validator on the resulting state; a successfully-validated
-- value is written back into the form state through the field's lens.
handleFormFieldEvent :: (Eq n) => n -> BrickEvent n e -> Form s e n -> EventM n (Form s e n)
handleFormFieldEvent n ev f = findFieldState [] (formFieldStates f)
    where
        findFieldState _ [] = return f
        findFieldState prev (e:es) =
            case e of
                FormFieldState st stLens upd fields helper concatAll -> do
                    let findField [] = return Nothing
                        findField (field:rest) =
                            case field of
                                FormField n' validate _ _ handleFunc | n == n' -> do
                                    nextSt <- handleFunc ev st
                                    -- If the new state validates, go ahead and update
                                    -- the form state with it.
                                    case validate nextSt of
                                        Nothing -> return $ Just (nextSt, Nothing)
                                        Just newSt -> return $ Just (nextSt, Just newSt)
                                _ -> findField rest

                    result <- findField fields

                    case result of
                        Nothing -> findFieldState (prev <> [e]) es
                        Just (newSt, maybeSt) ->
                            let newFieldState = FormFieldState newSt stLens upd fields helper concatAll
                            in return $ f { formFieldStates = prev <> [newFieldState] <> es
                                          , formState = case maybeSt of
                                              Nothing -> formState f
                                              Just s -> formState f & stLens .~ s
                                          }
sjakobi/brick
src/Brick/Forms.hs
Haskell
bsd-3-clause
36,986
module Wigner.Transformations( wignerTransformation, positivePTransformation, truncateDifferentials, showTexByDifferentials, wignerOfLossTerm, ) where import Wigner.Complex import Wigner.Expression import Wigner.Deltas import Wigner.ExpressionHelpers import Wigner.Texable import qualified Wigner.Symbols as S import qualified Wigner.DefineExpression as D import qualified Data.Map as M import qualified Data.List as L import qualified Control.Arrow as A data OperatorPosition = Before | After type PhaseSpaceCorrespondence = OperatorPosition -> Operator -> Expr type FunctionCorrespondence = S.SymbolCorrespondence -> PhaseSpaceCorrespondence type FunctionCorrespondence2 = S.SymbolCorrespondence2 -> PhaseSpaceCorrespondence -- In order to simplify resulting expressions we need to know which deltas are real-valued. -- This function conjugates only complex-valued deltas in the expression -- (which means those with different variables). -- WARNING: works only with products of delta-functions; -- all delta-functions with variables must be restricted delta functions conjugateDeltas = mapFuncFactors processFactor where processFactor (Factor (ConjFunc e)) = makeExpr (Func e) processFactor (Factor f@(Func (Element s i []))) = makeExpr f processFactor (Factor (Func (Element s i [v1, v2]))) = makeExpr (Func (Element s i [v2, v1])) funcDiffCommutator :: Function -> Differential -> Expr funcDiffCommutator (Func _) (Diff (ConjFunc _)) = D.zero funcDiffCommutator (ConjFunc _) (Diff (Func _)) = D.zero funcDiffCommutator f@(Func fe) d@(Diff (Func de)) = if sameSymbol fe de then makeDeltas fe de else D.zero funcDiffCommutator f@(ConjFunc _) d@(Diff (ConjFunc _)) = conjugateDeltas $ funcDiffCommutator (conjugate f) (conjugate d) wignerCorrespondence :: FunctionCorrespondence wignerCorrespondence s_corr Before (Op e) = makeExpr (Func ce) + makeExpr (Diff (ConjFunc ce)) / 2 where ce = S.mapElementWith s_corr e wignerCorrespondence s_corr Before (DaggerOp e) = makeExpr (ConjFunc ce) - 
makeExpr (Diff (Func ce)) / 2 where ce = S.mapElementWith s_corr e wignerCorrespondence s_corr After (Op e) = makeExpr (Func ce) - makeExpr (Diff (ConjFunc ce)) / 2 where ce = S.mapElementWith s_corr e wignerCorrespondence s_corr After (DaggerOp e) = makeExpr (ConjFunc ce) + makeExpr (Diff (Func ce)) / 2 where ce = S.mapElementWith s_corr e positivePCorrespondence :: FunctionCorrespondence2 positivePCorrespondence s_corr2 Before (Op e) = makeExpr (Func ce1) where (ce1, ce2) = S.mapElementPairWith s_corr2 e positivePCorrespondence s_corr2 Before (DaggerOp e) = makeExpr (Func ce2) - makeExpr (Diff (Func ce1)) where (ce1, ce2) = S.mapElementPairWith s_corr2 e positivePCorrespondence s_corr2 After (Op e) = makeExpr (Func ce1) - makeExpr (Diff (Func ce2)) where (ce1, ce2) = S.mapElementPairWith s_corr2 e positivePCorrespondence s_corr2 After (DaggerOp e) = makeExpr (Func ce2) where (ce1, ce2) = S.mapElementPairWith s_corr2 e phaseSpaceTransformation :: PhaseSpaceCorrespondence -> Symbol -> Expr -> Expr phaseSpaceTransformation corr kernel expr = derivativesToFront $ mapOpFactors processOpFactor expr where isKernel (Op (Element sym [] [])) = sym == kernel isKernel _ = False processOpFactor (SymmetricProduct _) = error "Not implemented: phase-space transformation of symmetric operator product" processOpFactor (NormalProduct ops) = operatorsToFunctions (factorsExpanded ops) operatorsToFunctions [op] | isKernel op = D.one | otherwise = error "Kernel is missing from the expression" operatorsToFunctions (op:ops) | isKernel op = corr After (last ops) * operatorsToFunctions (op : init ops) | otherwise = corr Before op * operatorsToFunctions ops derivativesToFront :: Expr -> Expr derivativesToFront = mapTerms processTerm where processTerm (Term opf gs) = makeExpr (Term opf []) * processGroups gs processGroups [] = D.one processGroups [g] = makeExpr g processGroups (g@(DiffProduct _):gs) = makeExpr g * processGroups gs processGroups (FuncProduct fs : DiffProduct ds : gs) = 
derivativesToFront (mixGroups (factorsExpanded fs) (factorsExpanded ds) * makeExpr gs) mixGroups fs [d] = makeExpr (init fs) * (d_expr * f_expr - comm f d) where f = last fs d_expr = makeExpr d f_expr = makeExpr f mixGroups fs (d:ds) = mixGroups fs [d] * makeExpr ds comm (Factor f) = funcDiffCommutator f wignerTransformation :: S.SymbolCorrespondence -> Symbol -> Expr -> Expr wignerTransformation s_corr = phaseSpaceTransformation (wignerCorrespondence s_corr) positivePTransformation :: S.SymbolCorrespondence2 -> Symbol -> Expr -> Expr positivePTransformation s_corr2 = phaseSpaceTransformation (positivePCorrespondence s_corr2) truncateDifferentials :: Int -> Expr -> Expr truncateDifferentials n = mapTerms processTerm where processTerm t@(Term Nothing [DiffProduct ds, FuncProduct fs]) | length (factorsExpanded ds) <= n = makeExpr t | otherwise = D.zero showTexByDifferentials :: Expr -> String showTexByDifferentials (Expr s) = unlines result_lines where processTerm (c, Term Nothing [DiffProduct ds, f@(FuncProduct fs)]) = (ds, makeExpr c * makeExpr f) diff_to_expr = M.fromListWith (+) (map processTerm (terms s)) diffOrder (ds1, fs1) (ds2, fs2) = if ds1 /= ds2 then compare (length (factorsExpanded ds1)) (length (factorsExpanded ds2)) else compare ds1 ds2 pairs = L.sortBy diffOrder (M.assocs diff_to_expr) shift s = unlines (map (" " ++) (lines s)) showPair (diffs, funcs) = diff_str ++ " \\left(\n" ++ shift func_str ++ "\\right) " where diff_str = showTex (makeExpr (DiffProduct diffs)) func_str = showTex funcs result_lines = showPair (head pairs) : map (("+" ++) . showPair) (tail pairs) -- Helper function which calculates a single term for the analytical loss term formula. 
analyticalLossTerm :: S.SymbolCorrespondence -> [Operator] -> [Int] -> ([Int], [Int]) -> Expr analyticalLossTerm corr ops ls (js, ks) = coeff * diff_product * func_product where -- Return function/differential with the same symbol as given operator funcForOp corr (Op e) = Func (S.mapElementWith corr e) diffForOp corr op = Diff (funcForOp corr op) -- Result of commutator [d/df, f], where function f corresponds to given operator delta (Op e) = makeDeltas e e -- Returns Integer, because the result of the expression -- using this result (in qTerm) can be quite big fact :: Int -> Integer fact n = product [1 .. (fromIntegral n :: Integer)] -- Creates product of differentials. constructDiffs :: (Operator, Int, Int) -> Expr constructDiffs (op, j, k) = (makeExpr . conjugate . diffForOp corr) op ^ j * (makeExpr . diffForOp corr) op ^ k -- Numerical coefficient for the product of functions. qTerm :: Int -> Int -> Int -> Int -> Expr qTerm l j k m = makeExpr ((-1) ^ m * fact l ^ 2) / makeExpr (fact j * fact k * fact m * fact (l - k - m) * fact (l - j - m) * 2 ^ (j + k + m)) -- Functional term for given order of delta-function. constructFuncTerm :: Operator -> Int -> Int -> Int -> Int -> Expr constructFuncTerm op l j k m = qTerm l j k m * delta op ^ m * ((makeExpr . funcForOp corr) op ^ (l - j - m)) * ((makeExpr . conjugate . funcForOp corr) op ^ (l - k - m)) -- Creates product of functions. 
constructFuncs :: (Operator, Int, Int, Int) -> Expr constructFuncs (op, l, j, k) = sum (map (constructFuncTerm op l j k) [0..l - max j k]) coeff = makeExpr (2 - (-1) ^ sum js - (-1) ^ sum ks :: Int) diff_product = product (map constructDiffs (zip3 ops js ks)) func_product = product (map constructFuncs (L.zip4 ops ls js ks)) -- Calculates Wigner transformation of loss term using analytical formula -- WARNING: the following assumptions are made: -- 1) operators in the expr commute with each other -- 2) [d/df, f] for every operator is the same (usually it is either 1 or delta(x, x)) wignerOfLossTerm :: S.SymbolCorrespondence -> Expr -> Expr wignerOfLossTerm corr expr = op_expr * func_expr where -- carthesian product for several lists carthesianProduct' :: [[a]] -> [[a]] carthesianProduct' [] = [] carthesianProduct' [l] = [[x] | x <- l] carthesianProduct' (l:ls) = [x:xs | x <- l, xs <- carthesianProduct' ls] -- carthesian product for two lists -- (we could use more generic version, but it is easier to pattern match a tuple) carthesianProduct :: [a] -> [b] -> [(a, b)] carthesianProduct l1 l2 = [(x, y) | x <- l1, y <- l2] -- Splits initial expression into functional part and the sequence of operators extractOpFactors :: Expr -> (Expr, [(Operator, Int)]) extractOpFactors (Expr s) = head (map processTerm (terms s)) where processTerm (c, Term (Just (NormalProduct ops)) fs) = (func_expr, op_list) where func_expr = makeExpr (Term Nothing fs) * makeExpr c op_list = factors ops (func_expr, op_list) = extractOpFactors expr (ops, ls) = unzip op_list js = carthesianProduct' (map (\x -> [0..x]) ls) ks = carthesianProduct' (map (\x -> [0..x]) ls) op_expr = sum (map (analyticalLossTerm corr ops ls) (carthesianProduct js ks))
fjarri/wigner
src/Wigner/Transformations.hs
Haskell
bsd-3-clause
9,470
{-# LANGUAGE CPP #-} {-# OPTIONS_GHC -fno-warn-missing-import-lists #-} {-# OPTIONS_GHC -fno-warn-implicit-prelude #-} module Paths_StartStopFRP ( version, getBinDir, getLibDir, getDataDir, getLibexecDir, getDataFileName, getSysconfDir ) where import qualified Control.Exception as Exception import Data.Version (Version(..)) import System.Environment (getEnv) import Prelude #if defined(VERSION_base) #if MIN_VERSION_base(4,0,0) catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a #else catchIO :: IO a -> (Exception.Exception -> IO a) -> IO a #endif #else catchIO :: IO a -> (Exception.IOException -> IO a) -> IO a #endif catchIO = Exception.catch version :: Version version = Version [0,1,0,0] [] bindir, libdir, datadir, libexecdir, sysconfdir :: FilePath bindir = "/Users/Tyler/Documents/Haskell/StartStopFRP/.cabal-sandbox/bin" libdir = "/Users/Tyler/Documents/Haskell/StartStopFRP/.cabal-sandbox/lib/x86_64-osx-ghc-7.10.3/StartStopFRP-0.1.0.0-6h95S0jFn0f9iVtSOpJ3Lv" datadir = "/Users/Tyler/Documents/Haskell/StartStopFRP/.cabal-sandbox/share/x86_64-osx-ghc-7.10.3/StartStopFRP-0.1.0.0" libexecdir = "/Users/Tyler/Documents/Haskell/StartStopFRP/.cabal-sandbox/libexec" sysconfdir = "/Users/Tyler/Documents/Haskell/StartStopFRP/.cabal-sandbox/etc" getBinDir, getLibDir, getDataDir, getLibexecDir, getSysconfDir :: IO FilePath getBinDir = catchIO (getEnv "StartStopFRP_bindir") (\_ -> return bindir) getLibDir = catchIO (getEnv "StartStopFRP_libdir") (\_ -> return libdir) getDataDir = catchIO (getEnv "StartStopFRP_datadir") (\_ -> return datadir) getLibexecDir = catchIO (getEnv "StartStopFRP_libexecdir") (\_ -> return libexecdir) getSysconfDir = catchIO (getEnv "StartStopFRP_sysconfdir") (\_ -> return sysconfdir) getDataFileName :: FilePath -> IO FilePath getDataFileName name = do dir <- getDataDir return (dir ++ "/" ++ name)
tylerwx51/StartStopFRP
dist/build/autogen/Paths_StartStopFRP.hs
Haskell
bsd-3-clause
1,883
module Network.DNS.Pocket ( Port , runServer , setDomain , getDomain , deleteDomain , listDomain ) where --import Network.DNS.Pocket.Type import Network.DNS.Pocket.Server
junjihashimoto/pocket-dns
Network/DNS/Pocket.hs
Haskell
bsd-3-clause
176
{- (c) The GRASP/AQUA Project, Glasgow University, 1992-1998 \section{SetLevels} *************************** Overview *************************** 1. We attach binding levels to Core bindings, in preparation for floating outwards (@FloatOut@). 2. We also let-ify many expressions (notably case scrutinees), so they will have a fighting chance of being floated sensible. 3. Note [Need for cloning during float-out] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We clone the binders of any floatable let-binding, so that when it is floated out it will be unique. Example (let x=2 in x) + (let x=3 in x) we must clone before floating so we get let x1=2 in let x2=3 in x1+x2 NOTE: this can't be done using the uniqAway idea, because the variable must be unique in the whole program, not just its current scope, because two variables in different scopes may float out to the same top level place NOTE: Very tiresomely, we must apply this substitution to the rules stored inside a variable too. We do *not* clone top-level bindings, because some of them must not change, but we *do* clone bindings that are heading for the top level 4. Note [Binder-swap during float-out] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ In the expression case x of wild { p -> ...wild... } we substitute x for wild in the RHS of the case alternatives: case x of wild { p -> ...x... } This means that a sub-expression involving x is not "trapped" inside the RHS. And it's not inconvenient because we already have a substitution. Note that this is EXACTLY BACKWARDS from the what the simplifier does. The simplifier tries to get rid of occurrences of x, in favour of wild, in the hope that there will only be one remaining occurrence of x, namely the scrutinee of the case, and we can inline it. 
-} {-# LANGUAGE CPP #-} module SetLevels ( setLevels, Level(..), tOP_LEVEL, LevelledBind, LevelledExpr, LevelledBndr, FloatSpec(..), floatSpecLevel, incMinorLvl, ltMajLvl, ltLvl, isTopLvl ) where #include "HsVersions.h" import CoreSyn import CoreMonad ( FloatOutSwitches(..) ) import CoreUtils ( exprType, exprOkForSpeculation, exprIsBottom ) import CoreArity ( exprBotStrictness_maybe ) import CoreFVs -- all of it import CoreSubst import MkCore ( sortQuantVars ) import Id import IdInfo import Var import VarSet import VarEnv import Literal ( litIsTrivial ) import Demand ( StrictSig ) import Name ( getOccName, mkSystemVarName ) import OccName ( occNameString ) import Type ( isUnliftedType, Type, mkPiTypes ) import BasicTypes ( Arity, RecFlag(..) ) import UniqSupply import Util import Outputable import FastString import UniqDFM (udfmToUfm) import FV {- ************************************************************************ * * \subsection{Level numbers} * * ************************************************************************ -} type LevelledExpr = TaggedExpr FloatSpec type LevelledBind = TaggedBind FloatSpec type LevelledBndr = TaggedBndr FloatSpec data Level = Level Int -- Major level: number of enclosing value lambdas Int -- Minor level: number of big-lambda and/or case -- expressions between here and the nearest -- enclosing value lambda data FloatSpec = FloatMe Level -- Float to just inside the binding -- tagged with this level | StayPut Level -- Stay where it is; binding is -- tagged with tihs level floatSpecLevel :: FloatSpec -> Level floatSpecLevel (FloatMe l) = l floatSpecLevel (StayPut l) = l {- The {\em level number} on a (type-)lambda-bound variable is the nesting depth of the (type-)lambda which binds it. The outermost lambda has level 1, so (Level 0 0) means that the variable is bound outside any lambda. On an expression, it's the maximum level number of its free (type-)variables. On a let(rec)-bound variable, it's the level of its RHS. 
On a case-bound variable, it's the number of enclosing lambdas. Top-level variables: level~0. Those bound on the RHS of a top-level definition but ``before'' a lambda; e.g., the \tr{x} in (levels shown as ``subscripts'')... \begin{verbatim} a_0 = let b_? = ... in x_1 = ... b ... in ... \end{verbatim} The main function @lvlExpr@ carries a ``context level'' (@ctxt_lvl@). That's meant to be the level number of the enclosing binder in the final (floated) program. If the level number of a sub-expression is less than that of the context, then it might be worth let-binding the sub-expression so that it will indeed float. If you can float to level @Level 0 0@ worth doing so because then your allocation becomes static instead of dynamic. We always start with context @Level 0 0@. Note [FloatOut inside INLINE] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @InlineCtxt@ very similar to @Level 0 0@, but is used for one purpose: to say "don't float anything out of here". That's exactly what we want for the body of an INLINE, where we don't want to float anything out at all. See notes with lvlMFE below. But, check this out: -- At one time I tried the effect of not float anything out of an InlineMe, -- but it sometimes works badly. For example, consider PrelArr.done. It -- has the form __inline (\d. e) -- where e doesn't mention d. If we float this to -- __inline (let x = e in \d. x) -- things are bad. The inliner doesn't even inline it because it doesn't look -- like a head-normal form. So it seems a lesser evil to let things float. -- In SetLevels we do set the context to (Level 0 0) when we get to an InlineMe -- which discourages floating out. So the conclusion is: don't do any floating at all inside an InlineMe. (In the above example, don't float the {x=e} out of the \d.) 
One particular case is that of workers: we don't want to float the call to the worker outside the wrapper, otherwise the worker might get inlined into the floated expression, and an importing module won't see the worker at all. -} instance Outputable FloatSpec where ppr (FloatMe l) = char 'F' <> ppr l ppr (StayPut l) = ppr l tOP_LEVEL :: Level tOP_LEVEL = Level 0 0 incMajorLvl :: Level -> Level incMajorLvl (Level major _) = Level (major + 1) 0 incMinorLvl :: Level -> Level incMinorLvl (Level major minor) = Level major (minor+1) maxLvl :: Level -> Level -> Level maxLvl l1@(Level maj1 min1) l2@(Level maj2 min2) | (maj1 > maj2) || (maj1 == maj2 && min1 > min2) = l1 | otherwise = l2 ltLvl :: Level -> Level -> Bool ltLvl (Level maj1 min1) (Level maj2 min2) = (maj1 < maj2) || (maj1 == maj2 && min1 < min2) ltMajLvl :: Level -> Level -> Bool -- Tells if one level belongs to a difft *lambda* level to another ltMajLvl (Level maj1 _) (Level maj2 _) = maj1 < maj2 isTopLvl :: Level -> Bool isTopLvl (Level 0 0) = True isTopLvl _ = False instance Outputable Level where ppr (Level maj min) = hcat [ char '<', int maj, char ',', int min, char '>' ] instance Eq Level where (Level maj1 min1) == (Level maj2 min2) = maj1 == maj2 && min1 == min2 {- ************************************************************************ * * \subsection{Main level-setting code} * * ************************************************************************ -} setLevels :: FloatOutSwitches -> CoreProgram -> UniqSupply -> [LevelledBind] setLevels float_lams binds us = initLvl us (do_them init_env binds) where init_env = initialEnv float_lams do_them :: LevelEnv -> [CoreBind] -> LvlM [LevelledBind] do_them _ [] = return [] do_them env (b:bs) = do { (lvld_bind, env') <- lvlTopBind env b ; lvld_binds <- do_them env' bs ; return (lvld_bind : lvld_binds) } lvlTopBind :: LevelEnv -> Bind Id -> LvlM (LevelledBind, LevelEnv) lvlTopBind env (NonRec bndr rhs) = do { rhs' <- lvlExpr env (freeVars rhs) ; let (env', 
[bndr']) = substAndLvlBndrs NonRecursive env tOP_LEVEL [bndr] ; return (NonRec bndr' rhs', env') } lvlTopBind env (Rec pairs) = do let (bndrs,rhss) = unzip pairs (env', bndrs') = substAndLvlBndrs Recursive env tOP_LEVEL bndrs rhss' <- mapM (lvlExpr env' . freeVars) rhss return (Rec (bndrs' `zip` rhss'), env') {- ************************************************************************ * * \subsection{Setting expression levels} * * ************************************************************************ Note [Floating over-saturated applications] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ If we see (f x y), and (f x) is a redex (ie f's arity is 1), we call (f x) an "over-saturated application" Should we float out an over-sat app, if can escape a value lambda? It is sometimes very beneficial (-7% runtime -4% alloc over nofib -O2). But we don't want to do it for class selectors, because the work saved is minimal, and the extra local thunks allocated cost money. Arguably we could float even class-op applications if they were going to top level -- but then they must be applied to a constant dictionary and will almost certainly be optimised away anyway. -} lvlExpr :: LevelEnv -- Context -> CoreExprWithFVs -- Input expression -> LvlM LevelledExpr -- Result expression {- The @ctxt_lvl@ is, roughly, the level of the innermost enclosing binder. Here's an example v = \x -> ...\y -> let r = case (..x..) of ..x.. in .. When looking at the rhs of @r@, @ctxt_lvl@ will be 1 because that's the level of @r@, even though it's inside a level-2 @\y@. It's important that @ctxt_lvl@ is 1 and not 2 in @r@'s rhs, because we don't want @lvlExpr@ to turn the scrutinee of the @case@ into an MFE --- because it isn't a *maximal* free expression. If there were another lambda in @r@'s rhs, it would get level-2 as well. 
-} lvlExpr env (_, AnnType ty) = return (Type (substTy (le_subst env) ty)) lvlExpr env (_, AnnCoercion co) = return (Coercion (substCo (le_subst env) co)) lvlExpr env (_, AnnVar v) = return (lookupVar env v) lvlExpr _ (_, AnnLit lit) = return (Lit lit) lvlExpr env (_, AnnCast expr (_, co)) = do expr' <- lvlExpr env expr return (Cast expr' (substCo (le_subst env) co)) lvlExpr env (_, AnnTick tickish expr) = do expr' <- lvlExpr env expr return (Tick tickish expr') lvlExpr env expr@(_, AnnApp _ _) = do let (fun, args) = collectAnnArgs expr -- case fun of (_, AnnVar f) | floatOverSat env -- See Note [Floating over-saturated applications] , arity > 0 , arity < n_val_args , Nothing <- isClassOpId_maybe f -> do let (lapp, rargs) = left (n_val_args - arity) expr [] rargs' <- mapM (lvlMFE False env) rargs lapp' <- lvlMFE False env lapp return (foldl App lapp' rargs') where n_val_args = count (isValArg . deAnnotate) args arity = idArity f -- separate out the PAP that we are floating from the extra -- arguments, by traversing the spine until we have collected -- (n_val_args - arity) value arguments. left 0 e rargs = (e, rargs) left n (_, AnnApp f a) rargs | isValArg (deAnnotate a) = left (n-1) f (a:rargs) | otherwise = left n f (a:rargs) left _ _ _ = panic "SetLevels.lvlExpr.left" -- No PAPs that we can float: just carry on with the -- arguments and the function. _otherwise -> do args' <- mapM (lvlMFE False env) args fun' <- lvlExpr env fun return (foldl App fun' args') -- We don't split adjacent lambdas. That is, given -- \x y -> (x+1,y) -- we don't float to give -- \x -> let v = x+1 in \y -> (v,y) -- Why not? Because partial applications are fairly rare, and splitting -- lambdas makes them more expensive. 
lvlExpr env expr@(_, AnnLam {}) = do { new_body <- lvlMFE True new_env body ; return (mkLams new_bndrs new_body) } where (bndrs, body) = collectAnnBndrs expr (env1, bndrs1) = substBndrsSL NonRecursive env bndrs (new_env, new_bndrs) = lvlLamBndrs env1 (le_ctxt_lvl env) bndrs1 -- At one time we called a special verion of collectBinders, -- which ignored coercions, because we don't want to split -- a lambda like this (\x -> coerce t (\s -> ...)) -- This used to happen quite a bit in state-transformer programs, -- but not nearly so much now non-recursive newtypes are transparent. -- [See SetLevels rev 1.50 for a version with this approach.] lvlExpr env (_, AnnLet bind body) = do { (bind', new_env) <- lvlBind env bind ; body' <- lvlExpr new_env body -- No point in going via lvlMFE here. If the binding is alive -- (mentioned in body), and the whole let-expression doesn't -- float, then neither will the body ; return (Let bind' body') } lvlExpr env (_, AnnCase scrut case_bndr ty alts) = do { scrut' <- lvlMFE True env scrut ; lvlCase env (freeVarsOf scrut) scrut' case_bndr ty alts } ------------------------------------------- lvlCase :: LevelEnv -- Level of in-scope names/tyvars -> DVarSet -- Free vars of input scrutinee -> LevelledExpr -- Processed scrutinee -> Id -> Type -- Case binder and result type -> [CoreAltWithFVs] -- Input alternatives -> LvlM LevelledExpr -- Result expression lvlCase env scrut_fvs scrut' case_bndr ty alts | [(con@(DataAlt {}), bs, body)] <- alts , exprOkForSpeculation scrut' -- See Note [Check the output scrutinee for okForSpec] , not (isTopLvl dest_lvl) -- Can't have top-level cases = -- See Note [Floating cases] -- Always float the case if possible -- Unlike lets we don't insist that it escapes a value lambda do { (env1, (case_bndr' : bs')) <- cloneCaseBndrs env dest_lvl (case_bndr : bs) ; let rhs_env = extendCaseBndrEnv env1 case_bndr scrut' ; body' <- lvlMFE True rhs_env body ; let alt' = (con, [TB b (StayPut dest_lvl) | b <- bs'], body') ; 
return (Case scrut' (TB case_bndr' (FloatMe dest_lvl)) ty [alt']) } | otherwise -- Stays put = do { let (alts_env1, [case_bndr']) = substAndLvlBndrs NonRecursive env incd_lvl [case_bndr] alts_env = extendCaseBndrEnv alts_env1 case_bndr scrut' ; alts' <- mapM (lvl_alt alts_env) alts ; return (Case scrut' case_bndr' ty alts') } where incd_lvl = incMinorLvl (le_ctxt_lvl env) dest_lvl = maxFvLevel (const True) env scrut_fvs -- Don't abstact over type variables, hence const True lvl_alt alts_env (con, bs, rhs) = do { rhs' <- lvlMFE True new_env rhs ; return (con, bs', rhs') } where (new_env, bs') = substAndLvlBndrs NonRecursive alts_env incd_lvl bs {- Note [Floating cases] ~~~~~~~~~~~~~~~~~~~~~ Consider this: data T a = MkT !a f :: T Int -> blah f x vs = case x of { MkT y -> let f vs = ...(case y of I# w -> e)...f.. in f vs Here we can float the (case y ...) out , because y is sure to be evaluated, to give f x vs = case x of { MkT y -> caes y of I# w -> let f vs = ...(e)...f.. in f vs That saves unboxing it every time round the loop. It's important in some DPH stuff where we really want to avoid that repeated unboxing in the inner loop. Things to note * We can't float a case to top level * It's worth doing this float even if we don't float the case outside a value lambda. Example case x of { MkT y -> (case y of I# w2 -> ..., case y of I# w2 -> ...) If we floated the cases out we could eliminate one of them. * We only do this with a single-alternative case Note [Check the output scrutinee for okForSpec] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Consider this: case x of y { A -> ....(case y of alts).... } Because of the binder-swap, the inner case will get substituted to (case x of ..). So when testing whether the scrutinee is okForSpecuation we must be careful to test the *result* scrutinee ('x' in this case), not the *input* one 'y'. 
The latter *is* ok for speculation here, but the former is not -- and indeed we can't float the inner case out, at least not unless x is also evaluated at its binding site. That's why we apply exprOkForSpeculation to scrut' and not to scrut. -} lvlMFE :: Bool -- True <=> strict context [body of case or let] -> LevelEnv -- Level of in-scope names/tyvars -> CoreExprWithFVs -- input expression -> LvlM LevelledExpr -- Result expression -- lvlMFE is just like lvlExpr, except that it might let-bind -- the expression, so that it can itself be floated. lvlMFE _ env (_, AnnType ty) = return (Type (substTy (le_subst env) ty)) -- No point in floating out an expression wrapped in a coercion or note -- If we do we'll transform lvl = e |> co -- to lvl' = e; lvl = lvl' |> co -- and then inline lvl. Better just to float out the payload. lvlMFE strict_ctxt env (_, AnnTick t e) = do { e' <- lvlMFE strict_ctxt env e ; return (Tick t e') } lvlMFE strict_ctxt env (_, AnnCast e (_, co)) = do { e' <- lvlMFE strict_ctxt env e ; return (Cast e' (substCo (le_subst env) co)) } -- Note [Case MFEs] lvlMFE True env e@(_, AnnCase {}) = lvlExpr env e -- Don't share cases lvlMFE strict_ctxt env ann_expr | isUnliftedType (exprType expr) -- Can't let-bind it; see Note [Unlifted MFEs] -- This includes coercions, which we don't want to float anyway -- NB: no need to substitute cos isUnliftedType doesn't change || notWorthFloating ann_expr abs_vars || not float_me = -- Don't float it out lvlExpr env ann_expr | otherwise -- Float it out! 
= do { expr' <- lvlFloatRhs abs_vars dest_lvl env ann_expr ; var <- newLvlVar expr' is_bot ; return (Let (NonRec (TB var (FloatMe dest_lvl)) expr') (mkVarApps (Var var) abs_vars)) } where expr = deAnnotate ann_expr fvs = freeVarsOf ann_expr is_bot = exprIsBottom expr -- Note [Bottoming floats] dest_lvl = destLevel env fvs (isFunction ann_expr) is_bot abs_vars = abstractVars dest_lvl env fvs -- A decision to float entails let-binding this thing, and we only do -- that if we'll escape a value lambda, or will go to the top level. float_me = dest_lvl `ltMajLvl` (le_ctxt_lvl env) -- Escapes a value lambda -- OLD CODE: not (exprIsCheap expr) || isTopLvl dest_lvl -- see Note [Escaping a value lambda] || (isTopLvl dest_lvl -- Only float if we are going to the top level && floatConsts env -- and the floatConsts flag is on && not strict_ctxt) -- Don't float from a strict context -- We are keen to float something to the top level, even if it does not -- escape a lambda, because then it needs no allocation. But it's controlled -- by a flag, because doing this too early loses opportunities for RULES -- which (needless to say) are important in some nofib programs -- (gcd is an example). -- -- Beware: -- concat = /\ a -> foldr ..a.. (++) [] -- was getting turned into -- lvl = /\ a -> foldr ..a.. (++) [] -- concat = /\ a -> lvl a -- which is pretty stupid. Hence the strict_ctxt test -- -- Also a strict contxt includes uboxed values, and they -- can't be bound at top level {- Note [Unlifted MFEs] ~~~~~~~~~~~~~~~~~~~~ We don't float unlifted MFEs, which potentially loses big opportunites. For example: \x -> f (h y) where h :: Int -> Int# is expensive. We'd like to float the (h y) outside the \x, but we don't because it's unboxed. Possible solution: box it. Note [Bottoming floats] ~~~~~~~~~~~~~~~~~~~~~~~ If we see f = \x. g (error "urk") we'd like to float the call to error, to get lvl = error "urk" f = \x. 
g lvl Furthermore, we want to float a bottoming expression even if it has free variables: f = \x. g (let v = h x in error ("urk" ++ v)) Then we'd like to abstact over 'x' can float the whole arg of g: lvl = \x. let v = h x in error ("urk" ++ v) f = \x. g (lvl x) See Maessen's paper 1999 "Bottom extraction: factoring error handling out of functional programs" (unpublished I think). When we do this, we set the strictness and arity of the new bottoming Id, *immediately*, for three reasons: * To prevent the abstracted thing being immediately inlined back in again via preInlineUnconditionally. The latter has a test for bottoming Ids to stop inlining them, so we'd better make sure it *is* a bottoming Id! * So that it's properly exposed as such in the interface file, even if this is all happening after strictness analysis. * In case we do CSE with the same expression that *is* marked bottom lvl = error "urk" x{str=bot) = error "urk" Here we don't want to replace 'x' with 'lvl', else we may get Lint errors, e.g. via a case with empty alternatives: (case x of {}) Lint complains unless the scrutinee of such a case is clearly bottom. This was reported in Trac #11290. But since the whole bottoming-float thing is based on the cheap-and-cheerful exprIsBottom, I'm not sure that it'll nail all such cases. Note [Bottoming floats: eta expansion] c.f Note [Bottoming floats] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Tiresomely, though, the simplifier has an invariant that the manifest arity of the RHS should be the same as the arity; but we can't call etaExpand during SetLevels because it works over a decorated form of CoreExpr. So we do the eta expansion later, in FloatOut. Note [Case MFEs] ~~~~~~~~~~~~~~~~ We don't float a case expression as an MFE from a strict context. Why not? Because in doing so we share a tiny bit of computation (the switch) but in exchange we build a thunk, which is bad. 
This case reduces allocation by 7% in spectral/puzzle (a rather strange benchmark) and 1.2% in real/fem. Doesn't change any other allocation at all. -} annotateBotStr :: Id -> Maybe (Arity, StrictSig) -> Id -- See Note [Bottoming floats] for why we want to add -- bottoming information right now annotateBotStr id Nothing = id annotateBotStr id (Just (arity, sig)) = id `setIdArity` arity `setIdStrictness` sig notWorthFloating :: CoreExprWithFVs -> [Var] -> Bool -- Returns True if the expression would be replaced by -- something bigger than it is now. For example: -- abs_vars = tvars only: return True if e is trivial, -- but False for anything bigger -- abs_vars = [x] (an Id): return True for trivial, or an application (f x) -- but False for (f x x) -- -- One big goal is that floating should be idempotent. Eg if -- we replace e with (lvl79 x y) and then run FloatOut again, don't want -- to replace (lvl79 x y) with (lvl83 x y)! notWorthFloating e abs_vars = go e (count isId abs_vars) where go (_, AnnVar {}) n = n >= 0 go (_, AnnLit lit) n = ASSERT( n==0 ) litIsTrivial lit -- Note [Floating literals] go (_, AnnTick t e) n = not (tickishIsCode t) && go e n go (_, AnnCast e _) n = go e n go (_, AnnApp e arg) n | (_, AnnType {}) <- arg = go e n | (_, AnnCoercion {}) <- arg = go e n | n==0 = False | is_triv arg = go e (n-1) | otherwise = False go _ _ = False is_triv (_, AnnLit {}) = True -- Treat all literals as trivial is_triv (_, AnnVar {}) = True -- (ie not worth floating) is_triv (_, AnnCast e _) = is_triv e is_triv (_, AnnApp e (_, AnnType {})) = is_triv e is_triv (_, AnnApp e (_, AnnCoercion {})) = is_triv e is_triv (_, AnnTick t e) = not (tickishIsCode t) && is_triv e is_triv _ = False {- Note [Floating literals] ~~~~~~~~~~~~~~~~~~~~~~~~ It's important to float Integer literals, so that they get shared, rather than being allocated every time round the loop. Hence the litIsTrivial. 
We'd *like* to share MachStr literal strings too, mainly so we could CSE them, but alas can't do so directly because they are unlifted. Note [Escaping a value lambda] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We want to float even cheap expressions out of value lambdas, because that saves allocation. Consider f = \x. .. (\y.e) ... Then we'd like to avoid allocating the (\y.e) every time we call f, (assuming e does not mention x). An example where this really makes a difference is simplrun009. Another reason it's good is because it makes SpecContr fire on functions. Consider f = \x. ....(f (\y.e)).... After floating we get lvl = \y.e f = \x. ....(f lvl)... and that is much easier for SpecConstr to generate a robust specialisation for. The OLD CODE (given where this Note is referred to) prevents floating of the example above, so I just don't understand the old code. I don't understand the old comment either (which appears below). I measured the effect on nofib of changing OLD CODE to 'True', and got zeros everywhere, but a 4% win for 'puzzle'. Very small 0.5% loss for 'cse'; turns out to be because our arity analysis isn't good enough yet (mentioned in Simon-nofib-notes). OLD comment was: Even if it escapes a value lambda, we only float if it's not cheap (unless it'll get all the way to the top). I've seen cases where we float dozens of tiny free expressions, which cost more to allocate than to evaluate. NB: exprIsCheap is also true of bottom expressions, which is good; we don't want to share them It's only Really Bad to float a cheap expression out of a strict context, because that builds a thunk that otherwise would never be built. So another alternative would be to add || (strict_ctxt && not (exprIsBottom expr)) to the condition above. We should really try this out. 
************************************************************************ * * \subsection{Bindings} * * ************************************************************************ The binding stuff works for top level too. -} lvlBind :: LevelEnv -> CoreBindWithFVs -> LvlM (LevelledBind, LevelEnv) lvlBind env (AnnNonRec bndr rhs) | isTyVar bndr -- Don't do anything for TyVar binders -- (simplifier gets rid of them pronto) || isCoVar bndr -- Difficult to fix up CoVar occurrences (see extendPolyLvlEnv) -- so we will ignore this case for now || not (profitableFloat env dest_lvl) || (isTopLvl dest_lvl && isUnliftedType (idType bndr)) -- We can't float an unlifted binding to top level, so we don't -- float it at all. It's a bit brutal, but unlifted bindings -- aren't expensive either = -- No float do { rhs' <- lvlExpr env rhs ; let bind_lvl = incMinorLvl (le_ctxt_lvl env) (env', [bndr']) = substAndLvlBndrs NonRecursive env bind_lvl [bndr] ; return (NonRec bndr' rhs', env') } -- Otherwise we are going to float | null abs_vars = do { -- No type abstraction; clone existing binder rhs' <- lvlExpr (setCtxtLvl env dest_lvl) rhs ; (env', [bndr']) <- cloneLetVars NonRecursive env dest_lvl [bndr] ; return (NonRec (TB bndr' (FloatMe dest_lvl)) rhs', env') } | otherwise = do { -- Yes, type abstraction; create a new binder, extend substitution, etc rhs' <- lvlFloatRhs abs_vars dest_lvl env rhs ; (env', [bndr']) <- newPolyBndrs dest_lvl env abs_vars [bndr] ; return (NonRec (TB bndr' (FloatMe dest_lvl)) rhs', env') } where rhs_fvs = freeVarsOf rhs bind_fvs = rhs_fvs `unionDVarSet` dIdFreeVars bndr abs_vars = abstractVars dest_lvl env bind_fvs dest_lvl = destLevel env bind_fvs (isFunction rhs) is_bot is_bot = exprIsBottom (deAnnotate rhs) lvlBind env (AnnRec pairs) | not (profitableFloat env dest_lvl) = do { let bind_lvl = incMinorLvl (le_ctxt_lvl env) (env', bndrs') = substAndLvlBndrs Recursive env bind_lvl bndrs ; rhss' <- mapM (lvlExpr env') rhss ; return (Rec (bndrs' `zip` rhss'), 
env') } | null abs_vars = do { (new_env, new_bndrs) <- cloneLetVars Recursive env dest_lvl bndrs ; new_rhss <- mapM (lvlExpr (setCtxtLvl new_env dest_lvl)) rhss ; return ( Rec ([TB b (FloatMe dest_lvl) | b <- new_bndrs] `zip` new_rhss) , new_env) } -- ToDo: when enabling the floatLambda stuff, -- I think we want to stop doing this | [(bndr,rhs)] <- pairs , count isId abs_vars > 1 = do -- Special case for self recursion where there are -- several variables carried around: build a local loop: -- poly_f = \abs_vars. \lam_vars . letrec f = \lam_vars. rhs in f lam_vars -- This just makes the closures a bit smaller. If we don't do -- this, allocation rises significantly on some programs -- -- We could elaborate it for the case where there are several -- mutually functions, but it's quite a bit more complicated -- -- This all seems a bit ad hoc -- sigh let (rhs_env, abs_vars_w_lvls) = lvlLamBndrs env dest_lvl abs_vars rhs_lvl = le_ctxt_lvl rhs_env (rhs_env', [new_bndr]) <- cloneLetVars Recursive rhs_env rhs_lvl [bndr] let (lam_bndrs, rhs_body) = collectAnnBndrs rhs (body_env1, lam_bndrs1) = substBndrsSL NonRecursive rhs_env' lam_bndrs (body_env2, lam_bndrs2) = lvlLamBndrs body_env1 rhs_lvl lam_bndrs1 new_rhs_body <- lvlExpr body_env2 rhs_body (poly_env, [poly_bndr]) <- newPolyBndrs dest_lvl env abs_vars [bndr] return (Rec [(TB poly_bndr (FloatMe dest_lvl) , mkLams abs_vars_w_lvls $ mkLams lam_bndrs2 $ Let (Rec [( TB new_bndr (StayPut rhs_lvl) , mkLams lam_bndrs2 new_rhs_body)]) (mkVarApps (Var new_bndr) lam_bndrs1))] , poly_env) | otherwise -- Non-null abs_vars = do { (new_env, new_bndrs) <- newPolyBndrs dest_lvl env abs_vars bndrs ; new_rhss <- mapM (lvlFloatRhs abs_vars dest_lvl new_env) rhss ; return ( Rec ([TB b (FloatMe dest_lvl) | b <- new_bndrs] `zip` new_rhss) , new_env) } where (bndrs,rhss) = unzip pairs -- Finding the free vars of the binding group is annoying bind_fvs = ((unionDVarSets [ freeVarsOf rhs | (_, rhs) <- pairs]) `unionDVarSet` (fvDVarSet $ unionsFV 
[ idFVs bndr | (bndr, (_,_)) <- pairs])) `delDVarSetList` bndrs dest_lvl = destLevel env bind_fvs (all isFunction rhss) False abs_vars = abstractVars dest_lvl env bind_fvs profitableFloat :: LevelEnv -> Level -> Bool profitableFloat env dest_lvl = (dest_lvl `ltMajLvl` le_ctxt_lvl env) -- Escapes a value lambda || isTopLvl dest_lvl -- Going all the way to top level ---------------------------------------------------- -- Three help functions for the type-abstraction case lvlFloatRhs :: [OutVar] -> Level -> LevelEnv -> CoreExprWithFVs -> UniqSM (Expr LevelledBndr) lvlFloatRhs abs_vars dest_lvl env rhs = do { rhs' <- lvlExpr rhs_env rhs ; return (mkLams abs_vars_w_lvls rhs') } where (rhs_env, abs_vars_w_lvls) = lvlLamBndrs env dest_lvl abs_vars {- ************************************************************************ * * \subsection{Deciding floatability} * * ************************************************************************ -} substAndLvlBndrs :: RecFlag -> LevelEnv -> Level -> [InVar] -> (LevelEnv, [LevelledBndr]) substAndLvlBndrs is_rec env lvl bndrs = lvlBndrs subst_env lvl subst_bndrs where (subst_env, subst_bndrs) = substBndrsSL is_rec env bndrs substBndrsSL :: RecFlag -> LevelEnv -> [InVar] -> (LevelEnv, [OutVar]) -- So named only to avoid the name clash with CoreSubst.substBndrs substBndrsSL is_rec env@(LE { le_subst = subst, le_env = id_env }) bndrs = ( env { le_subst = subst' , le_env = foldl add_id id_env (bndrs `zip` bndrs') } , bndrs') where (subst', bndrs') = case is_rec of NonRecursive -> substBndrs subst bndrs Recursive -> substRecBndrs subst bndrs lvlLamBndrs :: LevelEnv -> Level -> [OutVar] -> (LevelEnv, [LevelledBndr]) -- Compute the levels for the binders of a lambda group lvlLamBndrs env lvl bndrs = lvlBndrs env new_lvl bndrs where new_lvl | any is_major bndrs = incMajorLvl lvl | otherwise = incMinorLvl lvl is_major bndr = isId bndr && not (isProbablyOneShotLambda bndr) -- The "probably" part says "don't float things out of a -- probable 
one-shot lambda" -- See Note [Computing one-shot info] in Demand.hs lvlBndrs :: LevelEnv -> Level -> [CoreBndr] -> (LevelEnv, [LevelledBndr]) -- The binders returned are exactly the same as the ones passed, -- apart from applying the substitution, but they are now paired -- with a (StayPut level) -- -- The returned envt has ctxt_lvl updated to the new_lvl -- -- All the new binders get the same level, because -- any floating binding is either going to float past -- all or none. We never separate binders. lvlBndrs env@(LE { le_lvl_env = lvl_env }) new_lvl bndrs = ( env { le_ctxt_lvl = new_lvl , le_lvl_env = addLvls new_lvl lvl_env bndrs } , lvld_bndrs) where lvld_bndrs = [TB bndr (StayPut new_lvl) | bndr <- bndrs] -- Destination level is the max Id level of the expression -- (We'll abstract the type variables, if any.) destLevel :: LevelEnv -> DVarSet -> Bool -- True <=> is function -> Bool -- True <=> is bottom -> Level destLevel env fvs is_function is_bot | is_bot = tOP_LEVEL -- Send bottoming bindings to the top -- regardless; see Note [Bottoming floats] | Just n_args <- floatLams env , n_args > 0 -- n=0 case handled uniformly by the 'otherwise' case , is_function , countFreeIds fvs <= n_args = tOP_LEVEL -- Send functions to top level; see -- the comments with isFunction | otherwise = maxFvLevel isId env fvs -- Max over Ids only; the tyvars -- will be abstracted isFunction :: CoreExprWithFVs -> Bool -- The idea here is that we want to float *functions* to -- the top level. This saves no work, but -- (a) it can make the host function body a lot smaller, -- and hence inlinable. -- (b) it can also save allocation when the function is recursive: -- h = \x -> letrec f = \y -> ...f...y...x... -- in f x -- becomes -- f = \x y -> ...(f x)...y...x... -- h = \x -> f x x -- No allocation for f now. -- We may only want to do this if there are sufficiently few free -- variables. We certainly only want to do it for values, and not for -- constructors. 
So the simple thing is just to look for lambdas isFunction (_, AnnLam b e) | isId b = True | otherwise = isFunction e -- isFunction (_, AnnTick _ e) = isFunction e -- dubious isFunction _ = False countFreeIds :: DVarSet -> Int countFreeIds = foldVarSet add 0 . udfmToUfm where add :: Var -> Int -> Int add v n | isId v = n+1 | otherwise = n {- ************************************************************************ * * \subsection{Free-To-Level Monad} * * ************************************************************************ -} type InVar = Var -- Pre cloning type InId = Id -- Pre cloning type OutVar = Var -- Post cloning type OutId = Id -- Post cloning data LevelEnv = LE { le_switches :: FloatOutSwitches , le_ctxt_lvl :: Level -- The current level , le_lvl_env :: VarEnv Level -- Domain is *post-cloned* TyVars and Ids , le_subst :: Subst -- Domain is pre-cloned TyVars and Ids -- The Id -> CoreExpr in the Subst is ignored -- (since we want to substitute a LevelledExpr for -- an Id via le_env) but we do use the Co/TyVar substs , le_env :: IdEnv ([OutVar], LevelledExpr) -- Domain is pre-cloned Ids } -- We clone let- and case-bound variables so that they are still -- distinct when floated out; hence the le_subst/le_env. -- (see point 3 of the module overview comment). -- We also use these envs when making a variable polymorphic -- because we want to float it out past a big lambda. -- -- The le_subst and le_env always implement the same mapping, but the -- le_subst maps to CoreExpr and the le_env to LevelledExpr -- Since the range is always a variable or type application, -- there is never any difference between the two, but sadly -- the types differ. The le_subst is used when substituting in -- a variable's IdInfo; the le_env when we find a Var. -- -- In addition the le_env records a list of tyvars free in the -- type application, just so we don't have to call freeVars on -- the type application repeatedly. 
-- -- The domain of the both envs is *pre-cloned* Ids, though -- -- The domain of the le_lvl_env is the *post-cloned* Ids initialEnv :: FloatOutSwitches -> LevelEnv initialEnv float_lams = LE { le_switches = float_lams , le_ctxt_lvl = tOP_LEVEL , le_lvl_env = emptyVarEnv , le_subst = emptySubst , le_env = emptyVarEnv } addLvl :: Level -> VarEnv Level -> OutVar -> VarEnv Level addLvl dest_lvl env v' = extendVarEnv env v' dest_lvl addLvls :: Level -> VarEnv Level -> [OutVar] -> VarEnv Level addLvls dest_lvl env vs = foldl (addLvl dest_lvl) env vs floatLams :: LevelEnv -> Maybe Int floatLams le = floatOutLambdas (le_switches le) floatConsts :: LevelEnv -> Bool floatConsts le = floatOutConstants (le_switches le) floatOverSat :: LevelEnv -> Bool floatOverSat le = floatOutOverSatApps (le_switches le) setCtxtLvl :: LevelEnv -> Level -> LevelEnv setCtxtLvl env lvl = env { le_ctxt_lvl = lvl } -- extendCaseBndrEnv adds the mapping case-bndr->scrut-var if it can -- See Note [Binder-swap during float-out] extendCaseBndrEnv :: LevelEnv -> Id -- Pre-cloned case binder -> Expr LevelledBndr -- Post-cloned scrutinee -> LevelEnv extendCaseBndrEnv le@(LE { le_subst = subst, le_env = id_env }) case_bndr (Var scrut_var) = le { le_subst = extendSubstWithVar subst case_bndr scrut_var , le_env = add_id id_env (case_bndr, scrut_var) } extendCaseBndrEnv env _ _ = env maxFvLevel :: (Var -> Bool) -> LevelEnv -> DVarSet -> Level maxFvLevel max_me (LE { le_lvl_env = lvl_env, le_env = id_env }) var_set = foldDVarSet max_in tOP_LEVEL var_set where max_in in_var lvl = foldr max_out lvl (case lookupVarEnv id_env in_var of Just (abs_vars, _) -> abs_vars Nothing -> [in_var]) max_out out_var lvl | max_me out_var = case lookupVarEnv lvl_env out_var of Just lvl' -> maxLvl lvl' lvl Nothing -> lvl | otherwise = lvl -- Ignore some vars depending on max_me lookupVar :: LevelEnv -> Id -> LevelledExpr lookupVar le v = case lookupVarEnv (le_env le) v of Just (_, expr) -> expr _ -> Var v abstractVars :: Level 
-> LevelEnv -> DVarSet -> [OutVar] -- Find the variables in fvs, free vars of the target expresion, -- whose level is greater than the destination level -- These are the ones we are going to abstract out -- -- Note that to get reproducible builds, the variables need to be -- abstracted in deterministic order, not dependent on the values of -- Uniques. This is achieved by using DVarSets, deterministic free -- variable computation and deterministic sort. -- See Note [Unique Determinism] in Unique for explanation of why -- Uniques are not deterministic. abstractVars dest_lvl (LE { le_subst = subst, le_lvl_env = lvl_env }) in_fvs = -- NB: sortQuantVars might not put duplicates next to each other map zap $ sortQuantVars $ uniq [out_var | out_fv <- dVarSetElems (substDVarSet subst in_fvs) , out_var <- dVarSetElems (close out_fv) , abstract_me out_var ] -- NB: it's important to call abstract_me only on the OutIds the -- come from substDVarSet (not on fv, which is an InId) where uniq :: [Var] -> [Var] -- Remove duplicates, preserving order uniq = dVarSetElems . mkDVarSet abstract_me v = case lookupVarEnv lvl_env v of Just lvl -> dest_lvl `ltLvl` lvl Nothing -> False -- We are going to lambda-abstract, so nuke any IdInfo, -- and add the tyvars of the Id (if necessary) zap v | isId v = WARN( isStableUnfolding (idUnfolding v) || not (isEmptyRuleInfo (idSpecialisation v)), text "absVarsOf: discarding info on" <+> ppr v ) setIdInfo v vanillaIdInfo | otherwise = v close :: Var -> DVarSet -- Close over variables free in the type -- Result includes the input variable itself close v = foldDVarSet (unionDVarSet . 
close) (unitDVarSet v) (fvDVarSet $ varTypeTyCoFVs v) type LvlM result = UniqSM result initLvl :: UniqSupply -> UniqSM a -> a initLvl = initUs_ newPolyBndrs :: Level -> LevelEnv -> [OutVar] -> [InId] -> UniqSM (LevelEnv, [OutId]) -- The envt is extended to bind the new bndrs to dest_lvl, but -- the ctxt_lvl is unaffected newPolyBndrs dest_lvl env@(LE { le_lvl_env = lvl_env, le_subst = subst, le_env = id_env }) abs_vars bndrs = ASSERT( all (not . isCoVar) bndrs ) -- What would we add to the CoSubst in this case. No easy answer. do { uniqs <- getUniquesM ; let new_bndrs = zipWith mk_poly_bndr bndrs uniqs bndr_prs = bndrs `zip` new_bndrs env' = env { le_lvl_env = addLvls dest_lvl lvl_env new_bndrs , le_subst = foldl add_subst subst bndr_prs , le_env = foldl add_id id_env bndr_prs } ; return (env', new_bndrs) } where add_subst env (v, v') = extendIdSubst env v (mkVarApps (Var v') abs_vars) add_id env (v, v') = extendVarEnv env v ((v':abs_vars), mkVarApps (Var v') abs_vars) mk_poly_bndr bndr uniq = transferPolyIdInfo bndr abs_vars $ -- Note [transferPolyIdInfo] in Id.hs mkSysLocalOrCoVar (mkFastString str) uniq poly_ty where str = "poly_" ++ occNameString (getOccName bndr) poly_ty = mkPiTypes abs_vars (substTy subst (idType bndr)) newLvlVar :: LevelledExpr -- The RHS of the new binding -> Bool -- Whether it is bottom -> LvlM Id newLvlVar lvld_rhs is_bot = do { uniq <- getUniqueM ; return (add_bot_info (mkLocalIdOrCoVar (mk_name uniq) rhs_ty)) } where add_bot_info var -- We could call annotateBotStr always, but the is_bot -- flag just tells us when we don't need to do so | is_bot = annotateBotStr var (exprBotStrictness_maybe de_tagged_rhs) | otherwise = var de_tagged_rhs = deTagExpr lvld_rhs rhs_ty = exprType de_tagged_rhs mk_name uniq = mkSystemVarName uniq (mkFastString "lvl") cloneCaseBndrs :: LevelEnv -> Level -> [Var] -> LvlM (LevelEnv, [Var]) cloneCaseBndrs env@(LE { le_subst = subst, le_lvl_env = lvl_env, le_env = id_env }) new_lvl vs = do { us <- getUniqueSupplyM 
; let (subst', vs') = cloneBndrs subst us vs env' = env { le_ctxt_lvl = new_lvl , le_lvl_env = addLvls new_lvl lvl_env vs' , le_subst = subst' , le_env = foldl add_id id_env (vs `zip` vs') } ; return (env', vs') } cloneLetVars :: RecFlag -> LevelEnv -> Level -> [Var] -> LvlM (LevelEnv, [Var]) -- See Note [Need for cloning during float-out] -- Works for Ids bound by let(rec) -- The dest_lvl is attributed to the binders in the new env, -- but cloneVars doesn't affect the ctxt_lvl of the incoming env cloneLetVars is_rec env@(LE { le_subst = subst, le_lvl_env = lvl_env, le_env = id_env }) dest_lvl vs = do { us <- getUniqueSupplyM ; let (subst', vs1) = case is_rec of NonRecursive -> cloneBndrs subst us vs Recursive -> cloneRecIdBndrs subst us vs vs2 = map zap_demand_info vs1 -- See Note [Zapping the demand info] prs = vs `zip` vs2 env' = env { le_lvl_env = addLvls dest_lvl lvl_env vs2 , le_subst = subst' , le_env = foldl add_id id_env prs } ; return (env', vs2) } add_id :: IdEnv ([Var], LevelledExpr) -> (Var, Var) -> IdEnv ([Var], LevelledExpr) add_id id_env (v, v1) | isTyVar v = delVarEnv id_env v | otherwise = extendVarEnv id_env v ([v1], ASSERT(not (isCoVar v1)) Var v1) zap_demand_info :: Var -> Var zap_demand_info v | isId v = zapIdDemandInfo v | otherwise = v {- Note [Zapping the demand info] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ VERY IMPORTANT: we must zap the demand info if the thing is going to float out, becuause it may be less demanded than at its original binding site. Eg f :: Int -> Int f x = let v = 3*4 in v+x Here v is strict; but if we float v to top level, it isn't any more. -}
GaloisInc/halvm-ghc
compiler/simplCore/SetLevels.hs
Haskell
bsd-3-clause
48,135
-- | Example / smoke-test program for NySQL: generates a statement type from a
-- SQL file via Template Haskell and inserts two user rows over HDBC/PostgreSQL.
import System.Random (randomIO)
import Data.Time.Clock (getCurrentTime, UTCTime)
import Database.HDBC
import Database.HDBC.PostgreSQL
import Database.NySQL.TH
import Database.NySQL.Options
import Control.Monad
import Control.Monad.Trans
import Control.Exception
import Control.Monad.Catch

-- Splice in a 'CreateUser' record type (and its 'runStmt' support) generated
-- from the SQL file. 'paramModifier' prefixes every field with '_' so the
-- record fields below are '_key', '_username', etc.
$(mkSqlWith (defOpts { paramModifier = ('_':) }) "src/sql/createUser.sql")

-- | Connect to a local example database and insert two users.
--
-- NOTE(review): both inserts use the same username "jsmith" with different
-- random keys — presumably this exercises duplicate-username handling in the
-- generated statement; confirm against the SQL in src/sql/createUser.sql.
main :: IO ()
main = do
  c <- connectPostgreSQL "dbname=example host=localhost user=postgres password=postgres"
  n <- getCurrentTime
  -- Random keys so repeated runs do not collide on the key column.
  k0 :: Int <- randomIO
  k1 :: Int <- randomIO
  void $ runStmt c CreateUser
    { _key = show k0
    , _username = "jsmith"
    , _password = "Testing0123"
    , _first_name = "John"
    , _last_name = "Smith"
    , _created_on = n
    , _deleted_on = Nothing :: Maybe UTCTime
    }
  void $ runStmt c CreateUser
    { _key = show k1
    , _username = "jsmith"
    , _password = "Testing0123"
    , _first_name = "John"
    , _last_name = "Smith"
    , _created_on = n
    , _deleted_on = Nothing :: Maybe UTCTime
    }
broma0/nysql
src/test/Example.hs
Haskell
bsd-3-clause
1,052
-- | This module is meant to be imported qualified: -- -- @ -- import qualified Web.KISSmetrics as KISSmetrics -- @ module Web.KISSmetrics ( -- * Data types APIKey , SimpleText , Property , Timestamp(..) , generateTimestamp -- * Making calls , call , CallType(..) -- * Type classes , EventName(..) , Identity(..) ) where import Control.Applicative ((<$>)) import Control.Arrow (second) import Data.Text (Text) import Data.Time (UTCTime, formatTime, getCurrentTime) import Data.Typeable (Typeable) import qualified Data.ByteString.Char8 as B8 import qualified Data.Conduit as C import qualified Data.Conduit.List as CL import qualified Data.Text.Encoding as TE import qualified Network.HTTP.Conduit as H import qualified Network.HTTP.Types as H -- | Your KISSmetrics API key. type APIKey = B8.ByteString -- | KISSmetrics names and identities are limited to at most 255 -- characters and all commas (@,@) and colons (@:@) are changed -- to spaces (@ @). Nothing is checked by this Haskell library, -- so be careful =). type SimpleText = B8.ByteString -- | A KISSmetrics property. The property names needs to follow -- the rules outlined on 'SimpleText'@s@ documentation. The -- property value, on the other hand, are only limited to 8 KiB -- and don't have any other restrictions. type Property = (SimpleText, Text) -- | A timestamp used only to ignore duplicated events. data Timestamp = Automatic -- ^ Use KISSmetrics' servers time as the timestamp. | Manual UTCTime -- ^ Use given time as the timestamp. If possible, use -- 'Manual' since it allows you to safely resend events that -- appear to have failed without being afraid of duplicates. -- See also 'generateTimestamp'. deriving (Eq, Ord, Show, Read, Typeable) -- | Generate a 'Manual' timestamp with the current time. generateTimestamp :: IO Timestamp generateTimestamp = Manual <$> getCurrentTime -- | A type of call that may be made to KISSmetrics. It's -- defined as a GADT where event names and identities are -- existential values. 
-- -- Since Haddock currently has problems with GADTs, here's the -- documentation about each constructor: -- -- ['Record'] Record an event. 'eventName' is the name of the -- event being recorded, 'identity' is the identity of the -- person doing the event and 'properties' are any other -- optional properties you may want. -- -- ['SetProps'] Set user properties without recording an event. -- 'identity' is the identity of the person whose properties -- will be changed and 'properties' are the properties to be -- set. -- -- ['Alias'] Alias two identities ('identity' and 'identity'') -- as the same one. -- -- See also <http://support.kissmetrics.com/apis/specifications>. data CallType where Record :: (EventName event, Identity ident) => { eventName :: event , identity :: ident , timestamp :: Timestamp , properties :: [Property] } -> CallType SetProps :: Identity ident => { identity :: ident , timestamp :: Timestamp , properties :: [Property] } -> CallType Alias :: (Identity ident, Identity ident') => { identity :: ident , identity' :: ident' } -> CallType deriving (Typeable) -- Needs to use StandaloneDeriving since CallType is a GADT. deriving instance Show CallType -- | Type class of data types that are event names. -- -- You may just use 'SimpleText' (which is the only instance -- provided by default), but you may also create your own data -- type for event names and add an instance of this class. class Show event => EventName event where fromEventName :: event -> SimpleText -- | This is the same as 'SimpleText'. instance EventName B8.ByteString where fromEventName = id -- | Type class of data types that are user identities. -- -- You may just use 'SimpleText' (which is the only instance -- provided by default), but you may also create your own data -- type for event names and add an instance of this class. class Show ident => Identity ident where fromIdentity :: ident -> SimpleText -- | This is the same as 'SimpleText'. 
instance Identity B8.ByteString where fromIdentity = id -- | Call KISSmetrics' API. See 'CallType' for documentation -- about which calls you may make. -- -- KISSmetrics does not return errors even when an error occurs -- and there's absolutely no way of knowing if your request went -- through. However, this function /may/ throw an exception if -- we fail to make the request to KISSmetrics (e.g. if there's a -- problem with your server's Internet connection). -- -- Note that official KISSmetrics' APIs provide many functions -- (usually four) while we provide just this one and a sum data -- type. This function alone does the work of @record@, @set@, -- @identify@ and @alias@. -- -- TODO: Currently there's no support for automatically retrying -- failed request, you need to retry yourself. call :: H.Manager -- ^ HTTP connection manager (cf. 'H.newManager'). -> APIKey -- ^ Your KISSmetrics API key. -> CallType -- ^ Which call you would like to make. -> IO () call manager apikey callType = C.runResourceT $ do -- Create the request let (path, args) = callInfo callType request = H.def { H.method = "GET" , H.secure = True , H.host = "trk.kissmetrics.com" , H.port = 443 , H.path = path , H.queryString = H.renderSimpleQuery False $ ("_k", apikey) : args , H.redirectCount = 0 } -- Make the call r <- H.http request manager -- KISSmetrics always returns 200 Ok with an invisible 1x1 -- GIF. We need to consume the body in order to let the -- connection be reused via keep-alive. H.responseBody r C.$$+- CL.sinkNull -- | Internal function. Given a 'CallType', return the URL to be -- used and generate a list of arguments. 
callInfo :: CallType -> (B8.ByteString, H.SimpleQuery)
-- Returns the URL path for the call together with its query parameters.
-- The API key parameter ("_k") is *not* added here; 'call' prepends it.
-- NOTE(review): parameter order is preserved as built — assumed not to
-- matter to the KISSmetrics endpoint, but kept stable regardless.
callInfo Record {..} =
  ( "/e"  -- record-an-event endpoint
  , (:) ("_n", fromEventName eventName) $   -- event name
    (:) ("_p", fromIdentity identity) $     -- person performing the event
    timestampInfo timestamp $               -- optional manual timestamp args
    propsInfo properties                    -- remaining user properties
  )
callInfo SetProps {..} =
  ( "/s"  -- set-properties endpoint (no event recorded)
  , (:) ("_p", fromIdentity identity) $
    timestampInfo timestamp $
    propsInfo properties
  )
callInfo Alias {..} =
  ( "/a"  -- alias endpoint: merge two identities into one
  , [ ("_p", fromIdentity identity)
    , ("_n", fromIdentity identity')
    ]
  )

-- | Generate a difference list of arguments for a timestamp.
--
-- 'Automatic' contributes nothing (server time is used); 'Manual'
-- contributes @_d=1@ (meaning "use the supplied time") plus @_t@, the
-- time rendered as Unix seconds via the @%s@ format directive.
timestampInfo :: Timestamp
              -> (H.SimpleQuery -> H.SimpleQuery) -- ^ Difference list.
timestampInfo Automatic  = id
timestampInfo (Manual t) =
    (:) ("_d", "1") .
    (:) ("_t", B8.pack $ formatTime locale "%s" t)
  where
    -- The "%s" directive never consults the locale, so this bottom is
    -- (by design) never forced.
    locale = error "Web.KISSmetrics.timestampInfo: locale shouldn't be needed."

-- | Generate a list of arguments for a list of properties.
-- Property names are already bytestrings; values are UTF-8 encoded here.
propsInfo :: [Property] -> H.SimpleQuery
propsInfo = map (second TE.encodeUtf8)
prowdsponsor/hissmetrics
src/Web/KISSmetrics.hs
Haskell
bsd-3-clause
7,062
-- | Compile an FRP description to a string of JavaScript-like assignments,
-- one per reified graph node.
module Core.Compiler where

import Control.Monad.Identity
import Control.Monad.State
import Core.Raw
import Data.List (intercalate)
import Data.Reify
import qualified Core.Val as Ix

-- | Run the FRP state computation (starting from an empty node list) and
-- reify the resulting value graph.
frpValues :: Ix.FRP () -> IO (Graph Val)
frpValues = fromIxValues . runIdentity . flip execStateT []

-- | Full pipeline: FRP description -> reified graph -> newline-joined code.
compiler :: Ix.FRP () -> IO String
compiler = fmap (intercalate "\n" . worker) . frpValues

-- | Render every node of the graph as one assignment statement.
-- Each alternative handles one 'Val' constructor; the first lambda argument
-- (ignored here) is supplied by 'foldVal'.
worker :: Graph Val -> [String]
worker = foldVal
  -- n-ary application: _i = _f(_a0,_a1,...)
  (\_ i f a -> mkAssign i ++ mkId f ++ "(" ++ intercalate "," (map mkId a) ++ ")")
  -- unary application: _i = _a(_b)
  (\_ i a b -> mkAssign i ++ mkId a ++ "(" ++ mkId b ++ ")")
  -- list node: _i = $(listify)(_f0,_f1,...)
  (\_ i fs -> mkAssign i ++ "$(listify)(" ++ intercalate "," (map mkId fs) ++ ")")
  -- composition node: _i = C(_a,_b)
  (\_ i a b -> mkAssign i ++ "C(" ++ mkId a ++ "," ++ mkId b ++ ")")
  -- primitive/constant node; constants are wrapped in frp(...)
  (\_ i t _ s -> mkAssign i ++ if t == Ix.Con then "frp(" ++ s ++ ")" else s)

-- | Render a node index as an identifier: @mkId 3 == "_3"@.
mkId :: Int -> String
mkId i = '_':show i

-- | Left-hand side of an assignment for a node index:
-- @mkAssign 3 == "_3 = "@. Defined via 'mkId' so the identifier
-- rendering lives in exactly one place.
mkAssign :: Int -> String
mkAssign i = mkId i ++ " = "
sebastiaanvisser/frp-js
src/Core/Compiler.hs
Haskell
bsd-3-clause
915
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ParallelListComp #-}

-- | blank-canvas benchmark: draw many short random words at random positions.
module FillText (benchmark, summary) where

import Control.Monad.Compat
import qualified Data.Text as T
import Data.Text (Text)
import Graphics.Blank
import Prelude.Compat
import System.Random
import Utils

-- | Draw 'numWords' random four-letter words at random canvas coordinates,
-- batching all drawing commands into a single 'send''.
benchmark :: CanvasBenchmark
benchmark ctx = do
    xs <- replicateM numWords $ randomXCoord ctx
    ys <- replicateM numWords $ randomYCoord ctx
    -- 'cycle' makes the word list infinite; the parallel comprehension below
    -- zips it against the finite coordinate lists, so only the first
    -- 'numWords' words are ever used.
    ws <- cycle <$> replicateM numWords randomWord
    send' ctx $ sequence_ [ showText (x, y) word
                          | x <- xs
                          | y <- ys
                          | word <- ws
                          ]

summary :: String
summary = "FillText"

-- | Number of words drawn per benchmark run.
numWords :: Int
numWords = 1000

-- Randomly creates a four-letter, lowercase word
randomWord :: IO Text
randomWord = fmap T.pack . replicateM 4 $ randomRIO ('a', 'z')

-- | Draw one word in black 10pt Calibri at the given point.
showText :: Point -> Text -> Canvas ()
showText (x, y) txt = do
    fillStyle("black");
    font("10pt Calibri");
    fillText(txt, x, y);
ku-fpg/blank-canvas-mark
hs/FillText.hs
Haskell
bsd-3-clause
1,122
-- | A set for interning values: inserting a value that is already present
-- returns the stored (canonical) copy, so equal values can share one
-- in-memory representative.
module Data.Intern
    ( Interned
    , InternSet
    , empty
    , singleton
    , insert
    , fromList
    , toList
    , delete
    , member
    , notMember
    ) where

import Data.List (mapAccumL)
import qualified Data.Map as M

-- The map stores each key as its own value, so a lookup hands back the
-- canonical stored copy rather than the probe value.
type Cache a = M.Map a a

-- | Marker alias: a value that came out of an 'InternSet'.
type Interned a = a

newtype InternSet a = InternSet { toCache :: Cache a }

-- | The empty intern set.
empty :: InternSet a
empty = InternSet M.empty

-- | An intern set containing exactly one value.
singleton :: a -> InternSet a
singleton x = InternSet $ M.singleton x x

instance (Show a) => Show (InternSet a) where
  show = ("InternSet " ++) . show . M.keys . toCache

-- | Intern a value: if an equal value is already stored, return the stored
-- copy and the set unchanged; otherwise store this value and return it.
insert :: (Ord a) => a -> InternSet a -> (Interned a, InternSet a)
insert x iset = case M.lookup x $ toCache iset of
  Just y  -> (y, iset)
  Nothing -> (x, InternSet $ M.insert x x $ toCache iset)

-- | Intern every element of a list, left to right, returning the canonical
-- representatives (in order) and the final set.
--
-- Implemented with 'mapAccumL' rather than manual recursion: the set is the
-- accumulator threaded through the list.
fromList :: (Ord a) => [a] -> InternSet a -> ([a], InternSet a)
fromList xs iset = (ys, iset')
  where
    (iset', ys) = mapAccumL step iset xs
    -- 'insert' returns (value, set); mapAccumL wants (set, value).
    step s x = let (y, s') = insert x s in (s', y)

-- | All interned values, in ascending order.
toList :: InternSet a -> [a]
toList = M.keys . toCache

-- | Remove a value from the set (no-op if absent).
delete :: (Ord a) => a -> InternSet a -> InternSet a
delete x = InternSet . M.delete x . toCache

-- | Is the value present?
member :: (Ord a) => a -> InternSet a -> Bool
member x = M.member x . toCache

-- | Is the value absent?
notMember :: (Ord a) => a -> InternSet a -> Bool
notMember x = not . member x
thomaseding/intern
src/Data/Intern.hs
Haskell
bsd-3-clause
1,293
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE FlexibleContexts #-}

-- | Sampling from the standard exponential distribution using only uniform
-- variates and the precomputed table 'q'.
--
-- NOTE(review): the structure (partial sums of (log 2)^k / k!, doubling the
-- uniform, taking minima of uniforms) matches the classic
-- Ahrens–Dieter-style exponential sampler — confirm against the original
-- reference before citing it.
module Statistics.Distribution.Random.Exponential
  ( exponential
  ) where

import Random.CRI
import Statistics.Distribution.Random.Uniform

-- q[k-1] = sum(log(2)^k / k!) k=1,..,n,
-- i.e. partial sums of the Taylor series of 2^x - 1 at log 2; the list is
-- monotone increasing towards 1. Derivation kept below for reference:
q :: [Double]
-- q = let factorial = foldr (*) 1 . enumFromTo 1
--         qk :: Integer -> Double
--         qk k = (log 2)^k / (fromIntegral (factorial k))
--         qs :: [Double]
--         qs = map qk [1..]
--     in map (\ n -> sum (take n qs)) (take 16 [1..])
q = [0.6931471805599453,
     0.9333736875190459,
     0.9888777961838675,
     0.9984959252914960,
     0.9998292811061389,
     0.9999833164100727,
     0.9999985691438767,
     0.9999998906925558,
     0.9999999924734159,
     0.9999999995283275,
     0.9999999999728814,
     0.9999999999985598,
     0.9999999999999289,
     0.9999999999999968,
     0.9999999999999999,
     1.0000000000000000]
{-# INLINE q #-}

-- | Draw one sample from the exponential distribution with rate 1,
-- consuming uniforms from the given source.
exponential :: Source m g Double => g m -> m Double
exponential rng =
  let !q0 = q !! 0   -- log 2, the first table entry
      -- Repeatedly double u, adding log 2 to the accumulator each time,
      -- until u exceeds 1; returns the accumulated offset and u's fraction.
      mk_au :: Double -> Double -> (Double, Double)
      mk_au !a !u | u > 1.0   = (a, u - 1.0)
                  | otherwise = mk_au (a + q0) (u + u)
      -- Walk the rest of the table, tracking the minimum of fresh uniforms
      -- (umin), until u no longer exceeds the table entry (or the table is
      -- exhausted); the result is offset + umin * log 2.
      go a _ umin ![]       = return (a + umin * q0)
      go a u umin !(qi:qs)  = do
        !ustar <- uniform rng
        let umin' = min ustar umin
        if u > qi
          then go a u umin' qs
          else return (a + umin' * q0)
  in do
    !u' <- uniform rng
    let !(a, u) = mk_au 0.0 (u' + u')
    if u <= q0
      -- Common fast path: no extra uniforms needed.
      then return (a + u)
      else do
        !us <- uniform rng
        go a u us (tail q)
finlay/random-dist
Statistics/Distribution/Random/Exponential.hs
Haskell
bsd-3-clause
1,676
module System.Nemesis.DSL where import Control.Arrow ((>>>)) import Control.Lens import Control.Monad (when) import Data.List (sort, nub) import System.Directory (doesFileExist, doesDirectoryExist, removeFile, removeDirectoryRecursive) import System.Exit (ExitCode( ExitSuccess, ExitFailure), exitWith) import System.FilePath.Glob (globDir, compile) import System.Process (system) import Text.Printf (printf) import Prelude hiding ((-)) import System.Nemesis.Driver import System.Nemesis.Type import System.Nemesis.Utils desc :: String -> Unit desc = (currentDesc .=) . Just task :: String -> IO () -> Unit task s aAction = if ':' `elem` s then let h = s & takeWhile (/= ':') t = s & dropWhile (/= ':') & tail in task' (strip h ) (words t) else task' s [] where task' _name _deps = insertTask - emptyTask & name .~ _name & deps .~ _deps & action .~ ShowIO aAction strip = dropWhile (== ' ') >>> reverse >>> dropWhile (== ' ') >>> reverse namespace :: String -> Unit -> Unit namespace aName aUnit = do push aName aUnit pop where push :: String -> Unit push = (currentNamespace %=) . (:) pop :: Unit pop = (currentNamespace %= tail) sh :: String -> IO () sh s = do status <- system s case status of ExitSuccess -> return () ExitFailure code -> do putStrLn - printf "%s failed with status code: %s" s (show code) exitWith status clean :: [String] -> Unit clean xs = do desc "Remove any temporary products." task "clean" - do paths <- globDir (xs & map compile) "." <&> concat <&> nub <&> sort <&> reverse mapM_ rmAny paths where rmAny s = do _fileExist <- doesFileExist s when _fileExist - removeFile s _dirExist <- doesDirectoryExist s when _dirExist - removeDirectoryRecursive s
nfjinjing/nemesis
src/System/Nemesis/DSL.hs
Haskell
bsd-3-clause
1,939
-- Copyright (c) 2016-present, Facebook, Inc. -- All rights reserved. -- -- This source code is licensed under the BSD-style license found in the -- LICENSE file in the root directory of this source tree. module Duckling.AmountOfMoney.Tests ( tests ) where import Data.String import Prelude import Test.Tasty import qualified Duckling.AmountOfMoney.AR.Tests as AR import qualified Duckling.AmountOfMoney.CA.Tests as CA import qualified Duckling.AmountOfMoney.EN.Tests as EN import qualified Duckling.AmountOfMoney.BG.Tests as BG import qualified Duckling.AmountOfMoney.ES.Tests as ES import qualified Duckling.AmountOfMoney.FR.Tests as FR import qualified Duckling.AmountOfMoney.GA.Tests as GA import qualified Duckling.AmountOfMoney.HE.Tests as HE import qualified Duckling.AmountOfMoney.HR.Tests as HR import qualified Duckling.AmountOfMoney.ID.Tests as ID import qualified Duckling.AmountOfMoney.IT.Tests as IT import qualified Duckling.AmountOfMoney.KA.Tests as KA import qualified Duckling.AmountOfMoney.KO.Tests as KO import qualified Duckling.AmountOfMoney.MN.Tests as MN import qualified Duckling.AmountOfMoney.NB.Tests as NB import qualified Duckling.AmountOfMoney.NL.Tests as NL import qualified Duckling.AmountOfMoney.PT.Tests as PT import qualified Duckling.AmountOfMoney.RO.Tests as RO import qualified Duckling.AmountOfMoney.RU.Tests as RU import qualified Duckling.AmountOfMoney.SV.Tests as SV import qualified Duckling.AmountOfMoney.TR.Tests as TR import qualified Duckling.AmountOfMoney.VI.Tests as VI import qualified Duckling.AmountOfMoney.ZH.Tests as ZH tests :: TestTree tests = testGroup "AmountOfMoney Tests" [ AR.tests , BG.tests , CA.tests , EN.tests , ES.tests , FR.tests , GA.tests , HE.tests , HR.tests , ID.tests , IT.tests , KA.tests , KO.tests , MN.tests , NB.tests , NL.tests , PT.tests , RO.tests , RU.tests , SV.tests , TR.tests , VI.tests , ZH.tests ]
facebookincubator/duckling
tests/Duckling/AmountOfMoney/Tests.hs
Haskell
bsd-3-clause
1,944
module Unification.TyContext where import qualified Data.Map as Map import Data.Map (Map) import Unification.Substitutable import Unification.Scheme newtype TyContext = TyContext (Map String Scheme) deriving (Monoid) instance Substitutable TyContext where apply s (TyContext env) = TyContext $ Map.map (apply s) env freeTyVars (TyContext env) = foldMap freeTyVars (Map.elems env) infixl 0 +> (+>) :: TyContext -> (String, Scheme) -> TyContext TyContext cxt +> (v, scheme) = TyContext (Map.insert v scheme cxt) lookupCxt :: String -> TyContext -> Maybe Scheme lookupCxt v (TyContext c) = Map.lookup v c addContext :: (String, Scheme) -> TyContext -> TyContext addContext (x, t) (TyContext env) = TyContext $ Map.insert x t env fromList :: [(String, Scheme)] -> TyContext fromList = TyContext . Map.fromList
letsbreelhere/egg
src/Unification/TyContext.hs
Haskell
bsd-3-clause
850
{-# LANGUAGE DefaultSignatures , EmptyDataDecls , FunctionalDependencies , ScopedTypeVariables #-} module Rad.QL.Types ( OBJECT , INTERFACE , UNION , SCALAR , ENUM , GraphQLScalar(..) , GraphQLType(..) , GraphQLValue(..) -- utilities , resolveScalar ) where import qualified Data.Aeson as JSON import qualified Data.Aeson.Encode as JSON import qualified Data.ByteString as B import qualified Data.ByteString.Char8 as BC8 import qualified Data.Text as T import qualified Data.Text.Encoding as TE import qualified Data.Trie as Trie import GHC.Generics import Rad.QL.Internal.Builders import Rad.QL.Internal.GEnum import Rad.QL.Internal.Types import Rad.QL.AST import Rad.QL.Query -- | GraphQL Kinds -- Internal GraphQL kinds, note that we assume non-null by default data OBJECT data SCALAR data ENUM data INTERFACE data UNION data INPUT_OBJECT data LIST_OF data NULLABLE -- | GraphQLType -- A GraphQLType says that a type can be resolved against a given monad class (Monad m) => GraphQLType kind m a | a -> kind where def :: GraphQLTypeDef kind m a -- | GraphQL Scalar -- A GraphQLScalar can be read off of an input value, and serialized directly to a result class GraphQLScalar a where serialize :: a -> Builder deserialize :: QValue -> Maybe a default serialize :: (IsEnum a) => a -> Builder serialize = enumSerialize default deserialize :: (IsEnum a) => QValue -> Maybe a deserialize = enumDeserialize instance (GraphQLScalar a) => GraphQLScalar (Maybe a) where serialize Nothing = buildNull serialize (Just x) = serialize x deserialize = Just . deserialize instance (GraphQLScalar a) => GraphQLScalar [a] where serialize vs = joinList [ serialize v | v <- vs ] deserialize (QList vs) = traverse deserialize vs deserialize _ = Nothing -- Built-in Scalar instances resolveScalar :: (GraphQLScalar a, Monad m) => QSelectionSet -> a -> Result m resolveScalar [] = pure . 
serialize resolveScalar _ = \_ -> errorMsg "Scalar cannot take a subselection" defineScalar :: (Monad m, GraphQLScalar a) => Name -> Description -> GraphQLTypeDef SCALAR m a defineScalar n d = emptyDef { gqlTypeDef = td, gqlResolve = resolveScalar } where td = TypeDefScalar $ ScalarTypeDef n d instance GraphQLScalar Int where serialize = intDec deserialize (QInt v) = Just v deserialize _ = Nothing instance (Monad m) => GraphQLValue m Int instance (Monad m) => GraphQLType SCALAR m Int where def = defineScalar "Int" $ "TODO: copy and paste description" -- TODO: other Integral types, Int16, Int32, etc. instance GraphQLScalar Double where serialize = doubleDec -- NOTE: currently slow, hopefully this will be fixed upstream deserialize (QInt v) = Just $ fromIntegral v deserialize (QFloat v) = Just v deserialize _ = Nothing instance (Monad m) => GraphQLValue m Double instance (Monad m) => GraphQLType SCALAR m Double where def = defineScalar "Double" $ "TODO: copy and paste description" -- TODO: more fractionals, e.g. Float, Real... instance GraphQLScalar B.ByteString where serialize = buildString deserialize (QInt v) = Just $ BC8.pack $ show v deserialize (QFloat v) = Just $ BC8.pack $ show v deserialize (QBool v) = Just $ BC8.pack $ show v deserialize (QString v) = Just v deserialize _ = Nothing instance (Monad m) => GraphQLValue m B.ByteString instance (Monad m) => GraphQLType SCALAR m B.ByteString where def = defineScalar "String" $ "TODO: copy and paste description" instance GraphQLScalar T.Text where serialize = buildString . 
TE.encodeUtf8 deserialize (QInt v) = Just $ T.pack $ show v deserialize (QFloat v) = Just $ T.pack $ show v deserialize (QBool v) = Just $ T.pack $ show v deserialize (QString v) = Just $ TE.decodeUtf8 v deserialize _ = Nothing instance (Monad m) => GraphQLValue m T.Text instance (Monad m) => GraphQLType SCALAR m T.Text where def = defineScalar "String" $ "TODO: copy and paste description" -- convenience instance for schema query instance GraphQLScalar Builder where serialize = id deserialize _ = Nothing instance (Monad m) => GraphQLValue m Builder instance (Monad m) => GraphQLType SCALAR m Builder where def = defineScalar "String" $ "TODO: copy and paste description" instance GraphQLScalar Bool where serialize True = byteString "true" serialize False = byteString "false" deserialize (QBool v) = Just v deserialize _ = Nothing instance (Monad m) => GraphQLValue m Bool instance (Monad m) => GraphQLType SCALAR m Bool where def = defineScalar "Boolean" $ "TODO: copy and paste description" instance GraphQLScalar JSON.Value where serialize = JSON.encodeToBuilder deserialize (QString v) = JSON.decodeStrict v deserialize _ = Nothing instance (Monad m) => GraphQLValue m JSON.Value instance (Monad m) => GraphQLType SCALAR m JSON.Value where def = defineScalar "JSON" $ "TODO: write a description" -- A GraphQLValue denotes some value which can be resolve, -- i.e. some type instance, its nullable (Maybe a), or list ([a]) class (Monad m) => GraphQLValue m a where graphQLValueTypeDef :: m a -> TypeDef graphQLValueTypeRef :: m a -> Type graphQLValueResolve :: QSelectionSet -> a -> Result m default graphQLValueTypeDef :: (GraphQLType kind m a) => m a -> TypeDef graphQLValueTypeDef = graphQLValueTypeDef' default graphQLValueTypeRef :: (GraphQLType kind m a) => m a -> Type graphQLValueTypeRef = graphQLValueTypeRef' default graphQLValueResolve :: (GraphQLType kind m a) => QSelectionSet -> a -> Result m graphQLValueResolve = graphQLValueResolve' graphQLValueTypeDef' :: forall m a kind. 
(GraphQLType kind m a) => m a -> TypeDef graphQLValueTypeDef' _ = gqlTypeDef (def :: GraphQLTypeDef kind m a) graphQLValueTypeRef' :: forall m a kind. (GraphQLType kind m a) => m a -> Type graphQLValueTypeRef' _ = TypeNonNull $ NonNullTypeNamed $ NamedType $ typeDefName $ gqlTypeDef (def :: GraphQLTypeDef kind m a) graphQLValueResolve' :: forall m a kind. (GraphQLType kind m a) => QSelectionSet -> a -> Result m graphQLValueResolve' = gqlResolve (def :: GraphQLTypeDef kind m a) instance (GraphQLValue m a) => GraphQLValue m [a] where graphQLValueTypeDef _ = graphQLValueTypeDef (undefined :: m a) graphQLValueTypeRef _ = TypeList $ ListType $ graphQLValueTypeRef (undefined :: m a) graphQLValueResolve s = collectResults . map (graphQLValueResolve s) instance (GraphQLValue m a) => GraphQLValue m (Maybe a) where graphQLValueTypeDef _ = graphQLValueTypeDef (undefined :: m a) graphQLValueTypeRef _ = unwrapNonNull $ graphQLValueTypeRef (undefined :: m a) where unwrapNonNull (TypeNonNull (NonNullTypeList l)) = TypeList l unwrapNonNull (TypeNonNull (NonNullTypeNamed n)) = TypeNamed n unwrapNonNull t = t graphQLValueResolve _ Nothing = nullResult graphQLValueResolve ss (Just x) = graphQLValueResolve ss x -- GENERIC ENUM DERIVING MAGIC
jqyu/bustle-chi
src/Rad/QL/Types.hs
Haskell
bsd-3-clause
7,424
{-# LANGUAGE CPP #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE BangPatterns #-} ----------------------------------------------------------------------------- -- | -- Module : Distribution.Client.IndexUtils -- Copyright : (c) Duncan Coutts 2008 -- License : BSD-like -- -- Maintainer : duncan@community.haskell.org -- Stability : provisional -- Portability : portable -- -- Extra utils related to the package indexes. ----------------------------------------------------------------------------- module Distribution.Client.IndexUtils ( getIndexFileAge, getInstalledPackages, getSourcePackages, getSourcePackagesStrict, Index(..), parsePackageIndex, readRepoIndex, updateRepoIndexCache, updatePackageIndexCacheFile, BuildTreeRefType(..), refTypeFromTypeCode, typeCodeFromRefType ) where import qualified Distribution.Client.Tar as Tar import Distribution.Client.Types import Distribution.Package ( PackageId, PackageIdentifier(..), PackageName(..) , Package(..), packageVersion, packageName , Dependency(Dependency) ) import Distribution.Client.PackageIndex (PackageIndex) import qualified Distribution.Client.PackageIndex as PackageIndex import Distribution.Simple.PackageIndex (InstalledPackageIndex) import qualified Distribution.PackageDescription.Parse as PackageDesc.Parse import Distribution.PackageDescription ( GenericPackageDescription ) import Distribution.PackageDescription.Parse ( parsePackageDescription ) import Distribution.Simple.Compiler ( Compiler, PackageDBStack ) import Distribution.Simple.Program ( ProgramConfiguration ) import qualified Distribution.Simple.Configure as Configure ( getInstalledPackages ) import Distribution.ParseUtils ( ParseResult(..) 
) import Distribution.Version ( Version(Version), intersectVersionRanges ) import Distribution.Text ( display, simpleParse ) import Distribution.Verbosity ( Verbosity, normal, lessVerbose ) import Distribution.Simple.Utils ( die, warn, info, fromUTF8, ignoreBOM ) import Data.Char (isAlphaNum) import Data.Maybe (mapMaybe, catMaybes, maybeToList) import Data.List (isPrefixOf) #if !MIN_VERSION_base(4,8,0) import Data.Monoid (Monoid(..)) #endif import qualified Data.Map as Map import Control.Monad (when, liftM) import Control.Exception (evaluate) import qualified Data.ByteString.Lazy as BS import qualified Data.ByteString.Lazy.Char8 as BS.Char8 import qualified Data.ByteString.Char8 as BSS import Data.ByteString.Lazy (ByteString) import Distribution.Client.GZipUtils (maybeDecompress) import Distribution.Client.Utils ( byteStringToFilePath , tryFindAddSourcePackageDesc ) import Distribution.Compat.Exception (catchIO) import Distribution.Client.Compat.Time (getFileAge, getModTime) import System.Directory (doesFileExist, doesDirectoryExist) import System.FilePath ( (</>), takeExtension, replaceExtension, splitDirectories, normalise ) import System.FilePath.Posix as FilePath.Posix ( takeFileName ) import System.IO import System.IO.Unsafe (unsafeInterleaveIO) import System.IO.Error (isDoesNotExistError) getInstalledPackages :: Verbosity -> Compiler -> PackageDBStack -> ProgramConfiguration -> IO InstalledPackageIndex getInstalledPackages verbosity comp packageDbs conf = Configure.getInstalledPackages verbosity' comp packageDbs conf where --FIXME: make getInstalledPackages use sensible verbosity in the first place verbosity' = lessVerbose verbosity ------------------------------------------------------------------------ -- Reading the source package index -- -- | Read a repository index from disk, from the local files specified by -- a list of 'Repo's. -- -- All the 'SourcePackage's are marked as having come from the appropriate -- 'Repo'. 
-- -- This is a higher level wrapper used internally in cabal-install. -- getSourcePackages :: Verbosity -> [Repo] -> IO SourcePackageDb getSourcePackages verbosity repos = getSourcePackages' verbosity repos ReadPackageIndexLazyIO -- | Like 'getSourcePackages', but reads the package index strictly. Useful if -- you want to write to the package index after having read it. getSourcePackagesStrict :: Verbosity -> [Repo] -> IO SourcePackageDb getSourcePackagesStrict verbosity repos = getSourcePackages' verbosity repos ReadPackageIndexStrict -- | Common implementation used by getSourcePackages and -- getSourcePackagesStrict. getSourcePackages' :: Verbosity -> [Repo] -> ReadPackageIndexMode -> IO SourcePackageDb getSourcePackages' verbosity [] _mode = do warn verbosity $ "No remote package servers have been specified. Usually " ++ "you would have one specified in the config file." return SourcePackageDb { packageIndex = mempty, packagePreferences = mempty } getSourcePackages' verbosity repos mode = do info verbosity "Reading available packages..." pkgss <- mapM (\r -> readRepoIndex verbosity r mode) repos let (pkgs, prefs) = mconcat pkgss prefs' = Map.fromListWith intersectVersionRanges [ (name, range) | Dependency name range <- prefs ] _ <- evaluate pkgs _ <- evaluate prefs' return SourcePackageDb { packageIndex = pkgs, packagePreferences = prefs' } -- | Read a repository index from disk, from the local file specified by -- the 'Repo'. -- -- All the 'SourcePackage's are marked as having come from the given 'Repo'. -- -- This is a higher level wrapper used internally in cabal-install. 
-- readRepoIndex :: Verbosity -> Repo -> ReadPackageIndexMode -> IO (PackageIndex SourcePackage, [Dependency]) readRepoIndex verbosity repo mode = handleNotFound $ do warnIfIndexIsOld =<< getIndexFileAge repo updateRepoIndexCache verbosity (RepoIndex repo) readPackageIndexCacheFile mkAvailablePackage (RepoIndex repo) mode where mkAvailablePackage pkgEntry = SourcePackage { packageInfoId = pkgid, packageDescription = packageDesc pkgEntry, packageSource = case pkgEntry of NormalPackage _ _ _ _ -> RepoTarballPackage repo pkgid Nothing BuildTreeRef _ _ _ path _ -> LocalUnpackedPackage path, packageDescrOverride = case pkgEntry of NormalPackage _ _ pkgtxt _ -> Just pkgtxt _ -> Nothing } where pkgid = packageId pkgEntry handleNotFound action = catchIO action $ \e -> if isDoesNotExistError e then do case repo of RepoRemote{..} -> warn verbosity $ "The package list for '" ++ remoteRepoName repoRemote ++ "' does not exist. Run 'cabal update' to download it." RepoLocal{..} -> warn verbosity $ "The package list for the local repo '" ++ repoLocalDir ++ "' is missing. The repo is invalid." return mempty else ioError e isOldThreshold = 15 --days warnIfIndexIsOld dt = do when (dt >= isOldThreshold) $ case repo of RepoRemote{..} -> warn verbosity $ "The package list for '" ++ remoteRepoName repoRemote ++ "' is " ++ shows (floor dt :: Int) " days old.\nRun " ++ "'cabal update' to get the latest list of available packages." RepoLocal{..} -> return () -- | Return the age of the index file in days (as a Double). getIndexFileAge :: Repo -> IO Double getIndexFileAge repo = getFileAge $ repoLocalDir repo </> "00-index.tar" -- | It is not necessary to call this, as the cache will be updated when the -- index is read normally. However you can do the work earlier if you like. 
-- updateRepoIndexCache :: Verbosity -> Index -> IO () updateRepoIndexCache verbosity index = whenCacheOutOfDate index $ do updatePackageIndexCacheFile verbosity index whenCacheOutOfDate :: Index -> IO () -> IO () whenCacheOutOfDate index action = do exists <- doesFileExist $ cacheFile index if not exists then action else do indexTime <- getModTime $ indexFile index cacheTime <- getModTime $ cacheFile index when (indexTime > cacheTime) action ------------------------------------------------------------------------ -- Reading the index file -- -- | An index entry is either a normal package, or a local build tree reference. data PackageEntry = NormalPackage PackageId GenericPackageDescription ByteString BlockNo | BuildTreeRef BuildTreeRefType PackageId GenericPackageDescription FilePath BlockNo -- | A build tree reference is either a link or a snapshot. data BuildTreeRefType = SnapshotRef | LinkRef deriving Eq refTypeFromTypeCode :: Tar.TypeCode -> BuildTreeRefType refTypeFromTypeCode t | t == Tar.buildTreeRefTypeCode = LinkRef | t == Tar.buildTreeSnapshotTypeCode = SnapshotRef | otherwise = error "Distribution.Client.IndexUtils.refTypeFromTypeCode: unknown type code" typeCodeFromRefType :: BuildTreeRefType -> Tar.TypeCode typeCodeFromRefType LinkRef = Tar.buildTreeRefTypeCode typeCodeFromRefType SnapshotRef = Tar.buildTreeSnapshotTypeCode instance Package PackageEntry where packageId (NormalPackage pkgid _ _ _) = pkgid packageId (BuildTreeRef _ pkgid _ _ _) = pkgid packageDesc :: PackageEntry -> GenericPackageDescription packageDesc (NormalPackage _ descr _ _) = descr packageDesc (BuildTreeRef _ _ descr _ _) = descr -- | Parse an uncompressed \"00-index.tar\" repository index file represented -- as a 'ByteString'. -- data PackageOrDep = Pkg PackageEntry | Dep Dependency -- | Read @00-index.tar.gz@ and extract @.cabal@ and @preferred-versions@ files -- -- We read the index using 'Tar.read', which gives us a lazily constructed -- 'TarEntries'. 
We translate it to a list of entries using 'tarEntriesList', -- which preserves the lazy nature of 'TarEntries', and finally 'concatMap' a -- function over this to translate it to a list of IO actions returning -- 'PackageOrDep's. We can use 'lazySequence' to turn this into a list of -- 'PackageOrDep's, still maintaining the lazy nature of the original tar read. parsePackageIndex :: ByteString -> [IO (Maybe PackageOrDep)] parsePackageIndex = concatMap (uncurry extract) . tarEntriesList . Tar.read where extract :: BlockNo -> Tar.Entry -> [IO (Maybe PackageOrDep)] extract blockNo entry = tryExtractPkg ++ tryExtractPrefs where tryExtractPkg = do mkPkgEntry <- maybeToList $ extractPkg entry blockNo return $ fmap (fmap Pkg) mkPkgEntry tryExtractPrefs = do prefs' <- maybeToList $ extractPrefs entry fmap (return . Just . Dep) prefs' -- | Turn the 'Entries' data structure from the @tar@ package into a list, -- and pair each entry with its block number. -- -- NOTE: This preserves the lazy nature of 'Entries': the tar file is only read -- as far as the list is evaluated. tarEntriesList :: Tar.Entries -> [(BlockNo, Tar.Entry)] tarEntriesList = go 0 where go !_ Tar.Done = [] go !_ (Tar.Fail e) = error ("tarEntriesList: " ++ e) go !n (Tar.Next e es') = (n, e) : go (n + Tar.entrySizeInBlocks e) es' extractPkg :: Tar.Entry -> BlockNo -> Maybe (IO (Maybe PackageEntry)) extractPkg entry blockNo = case Tar.entryContent entry of Tar.NormalFile content _ | takeExtension fileName == ".cabal" -> case splitDirectories (normalise fileName) of [pkgname,vers,_] -> case simpleParse vers of Just ver -> Just . return $ Just (NormalPackage pkgid descr content blockNo) where pkgid = PackageIdentifier (PackageName pkgname) ver parsed = parsePackageDescription . ignoreBOM . fromUTF8 . 
BS.Char8.unpack $ content descr = case parsed of ParseOk _ d -> d _ -> error $ "Couldn't read cabal file " ++ show fileName _ -> Nothing _ -> Nothing Tar.OtherEntryType typeCode content _ | Tar.isBuildTreeRefTypeCode typeCode -> Just $ do let path = byteStringToFilePath content dirExists <- doesDirectoryExist path result <- if not dirExists then return Nothing else do cabalFile <- tryFindAddSourcePackageDesc path "Error reading package index." descr <- PackageDesc.Parse.readPackageDescription normal cabalFile return . Just $ BuildTreeRef (refTypeFromTypeCode typeCode) (packageId descr) descr path blockNo return result _ -> Nothing where fileName = Tar.entryPath entry extractPrefs :: Tar.Entry -> Maybe [Dependency] extractPrefs entry = case Tar.entryContent entry of Tar.NormalFile content _ | takeFileName entrypath == "preferred-versions" -> Just prefs where entrypath = Tar.entryPath entry prefs = parsePreferredVersions content _ -> Nothing parsePreferredVersions :: ByteString -> [Dependency] parsePreferredVersions = mapMaybe simpleParse . filter (not . isPrefixOf "--") . lines . BS.Char8.unpack -- TODO: Are we sure no unicode? ------------------------------------------------------------------------ -- Reading and updating the index cache -- -- | Variation on 'sequence' which evaluates the actions lazily -- -- Pattern matching on the result list will execute just the first action; -- more generally pattern matching on the first @n@ '(:)' nodes will execute -- the first @n@ actions. lazySequence :: [IO a] -> IO [a] lazySequence = unsafeInterleaveIO . go where go [] = return [] go (x:xs) = do x' <- x xs' <- lazySequence xs return (x' : xs') -- | Which index do we mean? 
data Index = -- | The main index for the specified repository RepoIndex Repo -- | A sandbox-local repository -- Argument is the location of the index file | SandboxIndex FilePath indexFile :: Index -> FilePath indexFile (RepoIndex repo) = repoLocalDir repo </> "00-index.tar" indexFile (SandboxIndex index) = index cacheFile :: Index -> FilePath cacheFile (RepoIndex repo) = repoLocalDir repo </> "00-index.cache" cacheFile (SandboxIndex index) = index `replaceExtension` "cache" updatePackageIndexCacheFile :: Verbosity -> Index -> IO () updatePackageIndexCacheFile verbosity index = do info verbosity ("Updating index cache file " ++ cacheFile index) withIndexEntries index $ \entries -> do let cache = Cache { cacheEntries = entries } writeFile (cacheFile index) (showIndexCache cache) -- | Read the index (for the purpose of building a cache) -- -- The callback is provided with list of cache entries, which is guaranteed to -- be lazily constructed. This list must ONLY be used in the scope of the -- callback; when the callback is terminated the file handle to the index will -- be closed and further attempts to read from the list will result in (pure) -- I/O exceptions. 
withIndexEntries :: Index -> ([IndexCacheEntry] -> IO a) -> IO a withIndexEntries index callback = do withFile (indexFile index) ReadMode $ \h -> do bs <- maybeDecompress `fmap` BS.hGetContents h pkgsOrPrefs <- lazySequence $ parsePackageIndex bs callback $ map toCache (catMaybes pkgsOrPrefs) where toCache :: PackageOrDep -> IndexCacheEntry toCache (Pkg (NormalPackage pkgid _ _ blockNo)) = CachePackageId pkgid blockNo toCache (Pkg (BuildTreeRef refType _ _ _ blockNo)) = CacheBuildTreeRef refType blockNo toCache (Dep d) = CachePreference d data ReadPackageIndexMode = ReadPackageIndexStrict | ReadPackageIndexLazyIO readPackageIndexCacheFile :: Package pkg => (PackageEntry -> pkg) -> Index -> ReadPackageIndexMode -> IO (PackageIndex pkg, [Dependency]) readPackageIndexCacheFile mkPkg index mode = do cache <- liftM readIndexCache $ BSS.readFile (cacheFile index) myWithFile (indexFile index) ReadMode $ \indexHnd -> packageIndexFromCache mkPkg indexHnd cache mode where myWithFile f m act = case mode of ReadPackageIndexStrict -> withFile f m act ReadPackageIndexLazyIO -> do indexHnd <- openFile f m act indexHnd packageIndexFromCache :: Package pkg => (PackageEntry -> pkg) -> Handle -> Cache -> ReadPackageIndexMode -> IO (PackageIndex pkg, [Dependency]) packageIndexFromCache mkPkg hnd Cache{..} mode = accum mempty [] cacheEntries where accum srcpkgs prefs [] = do -- Have to reverse entries, since in a tar file, later entries mask -- earlier ones, and PackageIndex.fromList does the same, but we -- accumulate the list of entries in reverse order, so need to reverse. pkgIndex <- evaluate $ PackageIndex.fromList (reverse srcpkgs) return (pkgIndex, prefs) accum srcpkgs prefs (CachePackageId pkgid blockno : entries) = do -- Given the cache entry, make a package index entry. -- The magic here is that we use lazy IO to read the .cabal file -- from the index tarball if it turns out that we need it. -- Most of the time we only need the package id. 
~(pkg, pkgtxt) <- unsafeInterleaveIO $ do pkgtxt <- getEntryContent blockno pkg <- readPackageDescription pkgtxt return (pkg, pkgtxt) let srcpkg = case mode of ReadPackageIndexLazyIO -> mkPkg (NormalPackage pkgid pkg pkgtxt blockno) ReadPackageIndexStrict -> pkg `seq` pkgtxt `seq` mkPkg (NormalPackage pkgid pkg pkgtxt blockno) accum (srcpkg:srcpkgs) prefs entries accum srcpkgs prefs (CacheBuildTreeRef refType blockno : entries) = do -- We have to read the .cabal file eagerly here because we can't cache the -- package id for build tree references - the user might edit the .cabal -- file after the reference was added to the index. path <- liftM byteStringToFilePath . getEntryContent $ blockno pkg <- do let err = "Error reading package index from cache." file <- tryFindAddSourcePackageDesc path err PackageDesc.Parse.readPackageDescription normal file let srcpkg = mkPkg (BuildTreeRef refType (packageId pkg) pkg path blockno) accum (srcpkg:srcpkgs) prefs entries accum srcpkgs prefs (CachePreference pref : entries) = accum srcpkgs (pref:prefs) entries getEntryContent :: BlockNo -> IO ByteString getEntryContent blockno = do hSeek hnd AbsoluteSeek (fromIntegral (blockno * 512)) header <- BS.hGet hnd 512 size <- getEntrySize header BS.hGet hnd (fromIntegral size) getEntrySize :: ByteString -> IO Tar.FileSize getEntrySize header = case Tar.read header of Tar.Next e _ -> case Tar.entryContent e of Tar.NormalFile _ size -> return size Tar.OtherEntryType typecode _ size | Tar.isBuildTreeRefTypeCode typecode -> return size _ -> interror "unexpected tar entry type" _ -> interror "could not read tar file entry" readPackageDescription :: ByteString -> IO GenericPackageDescription readPackageDescription content = case parsePackageDescription . ignoreBOM . fromUTF8 . 
BS.Char8.unpack $ content of ParseOk _ d -> return d _ -> interror "failed to parse .cabal file" interror msg = die $ "internal error when reading package index: " ++ msg ++ "The package index or index cache is probably " ++ "corrupt. Running cabal update might fix it." ------------------------------------------------------------------------ -- Index cache data structure -- -- | Tar files are block structured with 512 byte blocks. Every header and file -- content starts on a block boundary. -- type BlockNo = Int data IndexCacheEntry = CachePackageId PackageId BlockNo | CacheBuildTreeRef BuildTreeRefType BlockNo | CachePreference Dependency deriving (Eq) installedComponentId, blocknoKey, buildTreeRefKey, preferredVersionKey :: String installedComponentId = "pkg:" blocknoKey = "b#" buildTreeRefKey = "build-tree-ref:" preferredVersionKey = "pref-ver:" readIndexCacheEntry :: BSS.ByteString -> Maybe IndexCacheEntry readIndexCacheEntry = \line -> case BSS.words line of [key, pkgnamestr, pkgverstr, sep, blocknostr] | key == BSS.pack installedComponentId && sep == BSS.pack blocknoKey -> case (parseName pkgnamestr, parseVer pkgverstr [], parseBlockNo blocknostr) of (Just pkgname, Just pkgver, Just blockno) -> Just (CachePackageId (PackageIdentifier pkgname pkgver) blockno) _ -> Nothing [key, typecodestr, blocknostr] | key == BSS.pack buildTreeRefKey -> case (parseRefType typecodestr, parseBlockNo blocknostr) of (Just refType, Just blockno) -> Just (CacheBuildTreeRef refType blockno) _ -> Nothing (key: remainder) | key == BSS.pack preferredVersionKey -> fmap CachePreference (simpleParse (BSS.unpack (BSS.unwords remainder))) _ -> Nothing where parseName str | BSS.all (\c -> isAlphaNum c || c == '-') str = Just (PackageName (BSS.unpack str)) | otherwise = Nothing parseVer str vs = case BSS.readInt str of Nothing -> Nothing Just (v, str') -> case BSS.uncons str' of Just ('.', str'') -> parseVer str'' (v:vs) Just _ -> Nothing Nothing -> Just (Version (reverse (v:vs)) []) 
parseBlockNo str = case BSS.readInt str of Just (blockno, remainder) | BSS.null remainder -> Just blockno _ -> Nothing parseRefType str = case BSS.uncons str of Just (typeCode, remainder) | BSS.null remainder && Tar.isBuildTreeRefTypeCode typeCode -> Just (refTypeFromTypeCode typeCode) _ -> Nothing showIndexCacheEntry :: IndexCacheEntry -> String showIndexCacheEntry entry = unwords $ case entry of CachePackageId pkgid b -> [ installedComponentId , display (packageName pkgid) , display (packageVersion pkgid) , blocknoKey , show b ] CacheBuildTreeRef t b -> [ buildTreeRefKey , [typeCodeFromRefType t] , show b ] CachePreference dep -> [ preferredVersionKey , display dep ] -- | Cabal caches various information about the Hackage index data Cache = Cache { cacheEntries :: [IndexCacheEntry] } readIndexCache :: BSS.ByteString -> Cache readIndexCache bs = Cache { cacheEntries = mapMaybe readIndexCacheEntry $ BSS.lines bs } showIndexCache :: Cache -> String showIndexCache Cache{..} = unlines $ map showIndexCacheEntry cacheEntries
martinvlk/cabal
cabal-install/Distribution/Client/IndexUtils.hs
Haskell
bsd-3-clause
23,788
-------------------------------------------------------------------------- -- -- -- NfaMisc.hs -- -- -- -- Misecllaneous definitions for the NFA system. Includes -- -- examples of machines, and functions to print an NFA and the -- -- equivalence classes produces by minimisation. -- -- -- -- Regular expressions are defined in RegExp, and the type of -- -- NFAs in NfaTypes. The implementation of sets used is in -- -- Sets. -- -- -- -- (c) Simon Thompson, 1995, 2000 -- -- -- -------------------------------------------------------------------------- module Language.Mira.NfaMisc where import qualified Data.Set as Set import Data.Set ( Set, union, singleton ) import Language.Mira.RegExp import Language.Mira.NfaTypes -------------------------------------------------------------------------- -- -- -- Examples -- -- -- -------------------------------------------------------------------------- machM, machN :: Nfa Int machM = NFA (Set.fromList [0..3]) (Set.fromList [ Move 0 'a' 0 , Move 0 'a' 1, Move 0 'b' 0, Move 1 'b' 2, Move 2 'b' 3 ] ) 0 (singleton 3) machN = NFA (Set.fromList [0..5]) (Set.fromList [ Move 0 'a' 1 , Move 1 'b' 2, Move 0 'a' 3, Move 3 'b' 4, Emove 3 4, Move 4 'b' 5 ] ) 0 (Set.fromList [2,5]) -------------------------------------------------------------------------- -- -- -- Printing an NFA. -- -- -- -------------------------------------------------------------------------- print_nfa :: Nfa Int -> [Char] print_nfa (NFA states moves start finish) = "digraph Mira {\n" ++ show_states (Set.toList states) ++ (concat (map print_move (Set.toList moves))) ++ show_final (Set.toList finish) ++ show start ++ "[shape=box]\n" ++ "}\n" show_states :: [Int] -> [Char] show_states = concat . (map ((++"\n") . show)) show_final :: [Int] -> [Char] show_final = concat . (map ((++"[shape=doublecircle]\n") . 
show)) print_move :: Move Int -> [Char] print_move (Move s1 c s2) = "\t" ++ show s1 ++ "\t->\t" ++ show s2 ++ "\t[label=" ++ [c] ++ "]\n" print_move (Emove s1 s2) = "\t" ++ show s1 ++ "\t->\t" ++ show s2 ++ "[label=\"@\"]\n" -------------------------------------------------------------------------- -- -- -- Printing a set of equivalence classes. -- -- -- -------------------------------------------------------------------------- print_classes :: Set (Set Int) -> [Char] print_classes ss = pcs (map Set.toList (Set.toList ss)) where pcs = concat . map pc pc = (++"\n") . concat . (map ((++"\t").show_int)) show_int :: Int -> [Char] show_int = show
AidanDelaney/Mira
src/Language/Mira/NfaMisc.hs
Haskell
bsd-3-clause
2,791
{-# LANGUAGE DeriveAnyClass #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE DerivingStrategies #-} -- | -- Module: $HEADER$ -- Description: Example of using @DerivingStrategies@ along with default -- HasVerbosity implementation based on generic-lens package. -- Copyright: (c) 2015-2019, Peter Trško -- License: BSD3 -- -- Maintainer: peter.trsko@gmail.com -- -- Example of using @DerivingStrategies@ along with default 'HasVerbosity' -- implementation based on @generic-lens@ package. module Example.Config ( Config , module Data.Verbosity.Class ) where import GHC.Generics (Generic) import Data.Verbosity.Class data Config = Config { _appVerbosity :: Verbosity -- , ... } deriving stock (Generic, Show) deriving anyclass (HasVerbosity)
trskop/verbosity
example/Example/ConfigGenericLens.hs
Haskell
bsd-3-clause
801
{-# LANGUAGE TupleSections #-} module Main where import AdventOfCode import qualified Data.Set as S data TurnDirection = L | R data Instruction = Instruction { iDir :: TurnDirection , iBlocks :: Blocks } data Direction = N | E | S | W deriving (Show) type Blocks = Int data Position = Position { pNetN :: Blocks , pNetE :: Blocks } deriving (Eq, Ord, Show) shortestDistance :: Position -> Blocks shortestDistance (Position n e) = abs n + abs e move :: Position -> Direction -> Position move p@(Position n e) N = p { pNetN = n + 1 } move p@(Position n e) S = p { pNetN = n - 1 } move p@(Position n e) E = p { pNetE = e + 1 } move p@(Position n e) W = p { pNetE = e - 1 } rotate :: Direction -> TurnDirection -> Direction rotate N L = W rotate E L = N rotate S L = E rotate W L = S rotate d R = rotate (rotate (rotate d L) L) L parseInstruction :: Parser Instruction parseInstruction = Instruction <$> parseDirection <*> parseNumber where parseDirection = try (string "R" *> return R) <|> (string "L" *> return L) parseNumber = read <$> many1 digit parseFile :: String -> IO (Either ParseError [Instruction]) parseFile = parseFromFile (sepBy parseInstruction (string ", ") <* newline <* eof) runInstructions :: [Instruction] -> [Position] runInstructions instrs = scanl move (Position 0 0) moves where moves = concatMap expand (zip directions distances) expand (dir, dist) = replicate dist dir directions = tail $ scanl rotate N (iDir <$> instrs) distances = iBlocks <$> instrs firstDuplicate :: Ord a => [a] -> Maybe a firstDuplicate = go S.empty where go _ [] = Nothing go seen (x:xs) = if x `S.member` seen then Just x else go (S.insert x seen) xs main :: IO () main = runDay $ Day 1 (sepBy parseInstruction (string ", ") <* newline) (return . show . shortestDistance . last . runInstructions) (return . show . fmap shortestDistance . firstDuplicate . runInstructions)
purcell/adventofcode2016
src/Day1.hs
Haskell
bsd-3-clause
2,021
{-# LANGUAGE MultiParamTypeClasses, RankNTypes, FunctionalDependencies #-} {-# LANGUAGE ConstraintKinds, UndecidableInstances #-} module FunDeps where ------------------------------------------------------------------------------ {- TEST 1: MultiParamTypeClass but NO functional dependencies -} class TCa a b where -- GOOD: tca1 :: a -> b -> Int -- BAD: it compiles but I can't find any way to actually use tc3, all -- tries result in 'b' being ambiguous. tca3 :: a -> Int -- BAD: -- tca2 :: Int -- BAD: -- tca3yes :: forall a b. TCa a b => a -> b -> Int -- tca3yes a b = tca1 a b + tca3 a instance TCa Int Char where tca1 _ _ = 1 tca3 _ = 2 instance TCa Int Int where tca1 _ _ = 3 tca3 _ = 4 tcaTest :: Int tcaTest = x + y + z where x = tca1 (1 :: Int) 'c' y = tca1 (1 :: Int) (2 :: Int) -- BAD: -- z = tca3 (1 :: Int) -- z = tca3yes (1 :: Int) 'c' z = 0 ------------------------------------------------------------------------------ {- TEST 2: MultiParamTypeClass WITH functional dependencies -} class TCb a b | a -> b where -- GOOD: tcb1 :: a -> b -> Int -- GOOD: this now works due to fun deps tcb2 :: a -> Int -- BAD: won't compile, now way to determine dictionary -- tcb3 :: Int -- BAD: compiles, but no way to use it tcb4 :: b -> Int instance TCb Int Char where tcb1 _ _ = 1 tcb2 _ = 2 tcb4 _ = 3 -- BAD: 'a uniquely determines b', so we can't have Int map to both Char and -- Int, OR in other words, a must be unique as b is now ignored. -- instance TCb Int Int where -- tcb1 _ _ = 3 -- tcb2 _ = 4 instance TCb Char Char where tcb1 _ _ = 4 tcb2 _ = 5 tcb4 _ = 6 tcbTest :: Int tcbTest = w + x + y + z where w = tcb1 (1 :: Int) 'c' -- BAD: anything with a instantiated to Int must have b instantiated to -- Char as that is the only valid instance for a as Int. 
-- y = tcb1 (1 :: Int) (2 :: Int) x = tcb1 'c' 'c' -- GOOD: These now work due to fun deps y = tcb2 (1 :: Int) z = tcb2 'c' -- BAD: No way to resolve amibuity of 'a' type variable -- k = tcb4 'c' ------------------------------------------------------------------------------ {- TEST 3: MultiParamTypeClass WITH functional dependencies AND a bidirectional relation-} class TCc a b | a -> b, b -> a where -- GOOD: tcc1 :: a -> b -> Int -- GOOD: this now works due to fun deps tcc2 :: a -> Int -- BAD: won't compile, now way to determine dictionary -- tcb3 :: Int -- GOOD: this now works due to fun deps tcc4 :: b -> Int instance TCc Int Char where tcc1 _ _ = 1 tcc2 _ = 2 tcc4 _ = 3 -- BAD: 'a uniquely determines b', so we can't have Int map to both Char and -- Int, OR in other words, a must be unique as b is now ignored. -- instance TCb Int Int where -- tcb1 _ _ = 3 -- tcb2 _ = 4 -- BAD: As for above as now 'a uniquely determins b' AND 'b uniquely determines -- a'. So thinking of b as ignored isn't really true, it is like said above. OR -- perhaps think this, if we have 'x -> y' then x is now given the constraint -- that all instances must have unique instantiations of x. e.g without we have -- UNQIUE (x,y), with fun deps with have UNIQUE (x), with 'x -> y, y -> x' we -- have UNIQUE (x) && UNIQUE (y). -- -- instance TCc Char Char where -- tcc1 _ _ = 4 -- tcc2 _ = 5 -- tcc4 _ = 6 instance TCc Bool Int where tcc1 _ _ = 4 tcc2 _ = 5 tcc4 _ = 6 tccTest :: Int tccTest = w + x + y + z + k + l where w = tcc1 (1 :: Int) 'c' -- BAD: anything with a instantiated to Int must have b instantiated to -- Char as that is the only valid instance for a as Int. -- y = tcb1 (1 :: Int) (2 :: Int) x = tcc1 True (1 :: Int) -- GOOD: These now work due to fun deps y = tcc2 (1 :: Int) z = tcc2 True -- GOOD: These now work due to fun deps k = tcc4 'c' l = tcc4 (1 :: Int) ------------------------------------------------------------------------------ {- TEST 4: Subclass of MPTC ... 
-} class TCa a b => TCd1 a b where tcd12 :: a -> b -> Int class TCa a b => TCd2 a b | a -> b where tcd21 :: a -> b -> Int tcd22 :: a -> Int instance TCd2 Int Char where tcd21 a b = tca1 a b -- BAD: Even this won't work... -- tcd22 a = tca3 a tcd22 _ = 0 class TCb a b => TCd3 a b where -- do the fun deps carry through from TCb? tcd31 :: a -> b -> Int tcd32 :: a -> Int instance TCd3 Int Char where tcd31 = tcb1 -- yes they seem to... tcd32 = tcb2 -- BAD: due to fun dep constraint of superclass TCb... -- instance TCd3 Int Int where -- tcd31 = 0 -- tcd32 = 1 -- GOOD! needed ConstraintKinds + UndecidableInstances for this to work. class (forall b. TCb a b) => TCd4 a where tcd41 :: TCb a b => a -> Int -- BAD: Complains about not being able to deduce '(forall b. TCb Int b)' -- instance TCd4 Int where -- tcd41 = tcb2 -- BAD: Compiles but seems to be unusuable instance (forall b. TCb Int b) => TCd4 Int where tcd41 = tcb2 -- BAD: Won't compiles, complians of not being able to deduce '(forall b. TCb -- Int b)'... -- tcd4Test :: Int -- tcd4Test = tcd41 (1 :: Int) ------------------------------------------------------------------------------ {- TEST 6: From GHC Userguide -} class HasConverter a b | a -> b where convert :: a -> b data Foo a = MkFoo a instance (HasConverter a b, Show b) => Show (Foo a) where show (MkFoo value) = show (convert value) instance HasConverter Char Int where convert _ = 0 converterTest :: String converterTest = show $ MkFoo 'c'
dterei/Scraps
haskell/FunDeps.hs
Haskell
bsd-3-clause
5,788
{-# LANGUAGE DeriveGeneric , OverloadedStrings , RecordWildCards #-} module Docker.JSON.Types.Container where import Data.Aeson import Data.Default import qualified Data.Map as Map import Data.Maybe import qualified Data.Text as T import GHC.Generics ------------------------------------------------------------------------------ -- * Containers -- ** Request -- *** ContainerSpec -- | Specification for a container. -- -- FIXME : This needs to be better typed, and cleaned up.... Emphasis on the -- cleaning part... -- -- See <https://docs.docker.com/reference/api/docker_remote_api_v1.18/#create-a-container create a container> -- for more information. data ContainerSpec = ContainerSpec { containerHostname :: T.Text , containerDomainName :: T.Text , containerUser :: T.Text , attachedStdin :: Bool , attachedStdout :: Bool , attachedStderr :: Bool , containerTty :: Bool , openStdin :: Bool , stdinOnce :: Bool , containerEnv :: Value -- possibly null , containerCmd :: [T.Text] , entryPoint :: T.Text , containerFromImage :: T.Text , containerLabels :: Map.Map T.Text T.Text , containerVolumes :: Map.Map T.Text T.Text , containerWorkingDir :: T.Text -- should be a path , networkDisabled :: Bool , macAddress :: T.Text -- likely already MAC addr type , exposedPorts :: Map.Map T.Text Object -- according to docs, Object here is always empty... , securityOpts :: [T.Text] , hostConfig :: HostConfig } deriving (Eq, Show) -- | The Default instance for a ContainerSpec. 
instance Default ContainerSpec where def = ContainerSpec { containerHostname = "" , containerDomainName= "" , containerUser = "" , attachedStdin = False , attachedStdout = True , attachedStderr = True , containerTty = False , openStdin = False , stdinOnce = False , containerEnv = Null , containerCmd = ["echo", "test"] , entryPoint = "" , containerFromImage = "docker.io/fedora" , containerLabels = Map.empty , containerVolumes = Map.empty , containerWorkingDir= "" , networkDisabled = False , macAddress = "00:00:00:00:00:00"-- maybe this is auto if empty??? , exposedPorts = Map.empty , securityOpts = [""] , hostConfig = def } -- | Convert JSON to a ContainerSpec. instance FromJSON ContainerSpec where parseJSON (Object x) = ContainerSpec <$> x .: "HostName" <*> x .: "DomainName" <*> x .: "User" <*> x .: "AttachedStdin" <*> x .: "AttachedStdout" <*> x .: "AttachedStderr" <*> x .: "Tty" <*> x .: "OpenStdin" <*> x .: "StdinOnce" <*> x .: "Env" <*> x .: "Cmd" <*> x .: "EntryPoint" <*> x .: "Image" <*> x .: "Labels" <*> x .: "Volumes" <*> x .: "WorkingDir" <*> x .: "NetworkDisabled" <*> x .: "MacAddress" <*> x .: "ExposedPorts" <*> x .: "SecurityOpts" <*> x .: "HostConfig" parseJSON _ = fail "Expecting an Object!" -- | Convert a ContainerSpec into JSON. 
instance ToJSON ContainerSpec where toJSON (ContainerSpec {..}) = object [ "HostName" .= containerHostname , "DomainName" .= containerDomainName , "User" .= containerUser , "AttachedStdin" .= attachedStdin , "AttachedStdout" .= attachedStdout , "AttachedStderr" .= attachedStderr , "Tty" .= containerTty , "OpenStdin" .= openStdin , "StdinOnce" .= stdinOnce , "Env" .= containerEnv , "Cmd" .= containerCmd , "EntryPoint" .= entryPoint , "Image" .= containerFromImage , "Labels" .= containerLabels , "Volumes" .= containerVolumes , "WorkingDir" .= containerWorkingDir , "NetworkDisabled".= networkDisabled , "MacAddress" .= macAddress , "ExposedPorts" .= exposedPorts , "SecurityOpts" .= securityOpts , "HostConfig" .= hostConfig ] -- ** HostConfig -- | A containers HostConfig. -- See example request in -- <https://docs.docker.com/reference/api/docker_remote_api_v1.18/#create-a-container create a container.> data HostConfig = HostConfig { binds :: Maybe [T.Text] -- see binds, and create a type , links :: Maybe [T.Text] , lxcConf :: Maybe (Map.Map T.Text T.Text) , memory :: Int , memorySwap :: Int , cpuShares :: Int , cpusetCpus :: T.Text -- create a type , portBindings :: Map.Map T.Text [Map.Map T.Text T.Text] -- Why is it defined like this!!! , publishAllPorts :: Bool , privileged :: Bool , readOnlyRootfs :: Bool , dns :: [T.Text] , dnsSearch :: [T.Text] -- Search domains , extraHosts :: Value -- ["hostname:ip"], can be null??? 
, volumesFrom :: [T.Text] , capAdd :: Maybe [T.Text] , capDrop :: [T.Text] , restartPolicy :: RestartPolicy , networkMode :: T.Text , devices :: [HCDevice] , ulimits :: [Map.Map T.Text T.Text] , logConfig :: HCLogConfig , cgroupParent :: T.Text -- path , securityOpt :: Maybe Value , mountRun :: Bool } deriving (Eq, Generic, Show) -- | Default instance for HostConfig instance Default HostConfig where def = HostConfig { binds = Nothing , links = Nothing , lxcConf = Nothing , memory = 0 , memorySwap = 0 , cpuShares = 512 , cpusetCpus = "0,1" , portBindings = Map.empty , publishAllPorts = False , privileged = False , readOnlyRootfs = False , dns = ["8.8.8.8"] , dnsSearch = [] , extraHosts = Null , volumesFrom = [] , capAdd = Just [] , capDrop = [] , restartPolicy = RestartPolicy { rpName = "" , rpMaxRetryCount = 0 } , networkMode = "bridge" , devices = [] , ulimits = [] , logConfig = def , cgroupParent = "" , securityOpt = Nothing , mountRun = False } -- | Convert JSON to a HostConfig. instance FromJSON HostConfig where parseJSON (Object x) = HostConfig <$> x .:? "Binds" <*> x .:? "Links" <*> x .:? "LxcConf" <*> x .: "Memory" <*> x .: "MemorySwap" <*> x .: "CpuShares" <*> x .: "CpusetCpus" <*> x .: "PortBindings" <*> x .: "PublishAllPorts" <*> x .: "Privileged" <*> x .: "ReadonlyRootfs" <*> x .: "Dns" <*> x .: "DnsSearch" <*> x .: "ExtraHosts" <*> x .: "VolumesFrom" <*> x .:? "CappAdd" <*> x .: "CapDrop" <*> x .: "RestartPolicy" <*> x .: "NetworkMode" <*> x .: "Devices" <*> x .: "Ulimits" <*> x .: "LogConfig" <*> x .: "CgroupParent" <*> x .:? "SecurityOpt" <*> x .: "MountRun" -- FIXME : Return something logical... parseJSON _ = error "Failure!" -- | Convert a HostConfig to JSON. 
instance ToJSON HostConfig where toJSON (HostConfig {..}) = object $ catMaybes [ ("Binds" .=) <$> binds , ("Links" .=) <$> links , ("LxcConf" .=) <$> lxcConf , ("Memory" .=) <$> pure memory , ("MemorySwap" .=) <$> pure memorySwap , ("CpuShares" .=) <$> pure cpuShares , ("CpusetCpus" .=) <$> pure cpusetCpus , ("PortBindings" .=) <$> pure portBindings , ("PublishAllPorts" .=) <$> pure publishAllPorts , ("Privileged" .=) <$> pure privileged , ("ReadonlyRootfs" .=) <$> pure readOnlyRootfs , ("Dns" .=) <$> pure dns , ("DnsSearch" .=) <$> pure dnsSearch , ("ExtraHosts" .=) <$> pure extraHosts , ("VolumesFrom" .=) <$> pure volumesFrom , ("CappAdd" .=) <$> capAdd , ("CapDrop" .=) <$> pure capDrop , ("RestartPolicy" .=) <$> pure restartPolicy , ("NetworkMode" .=) <$> pure networkMode , ("Devices" .=) <$> pure devices , ("Ulimits" .=) <$> pure ulimits , ("LogConfig" .=) <$> pure logConfig , ("CgroupParent" .=) <$> pure cgroupParent , ("SecurityOpt" .=) <$> securityOpt , ("MountRun" .=) <$> pure mountRun ] -- | RestartPolicy data RestartPolicy = RestartPolicy { rpName :: T.Text , rpMaxRetryCount :: Int } deriving (Eq, Show) instance FromJSON RestartPolicy where parseJSON (Object x) = RestartPolicy <$> x .: "Name" <*> x .: "MaximumRetryCount" -- FIXME : Return something logical... parseJSON _ = error "Failure!" instance ToJSON RestartPolicy where toJSON (RestartPolicy {..}) = object [ "Name" .= rpName , "MaximumRetryCount" .= rpMaxRetryCount ] -- | A device in the "Devices" field of a HostConfig object. data HCDevice = HCDevice { pathOnHost :: T.Text , pathInContainer :: T.Text , cgroupPermissions :: T.Text } deriving (Eq, Generic, Show) -- | Convert JSON to a HCDevice. instance FromJSON HCDevice -- | Convert a HCDevice to JSON. instance ToJSON HCDevice where toJSON (HCDevice poh pic cgp) = object [ "PathOnHost" .= poh , "PathInContainer" .= pic , "CgroupPermissions" .= cgp ] -- | Log config setting in the "LogConfig" field of a -- HostConfig object. 
data HCLogConfig = HCLogConfig { driverType :: T.Text , config :: Map.Map T.Text T.Text } deriving (Eq, Generic, Show) -- | Default instance for HCLogConfig. instance Default HCLogConfig where def = HCLogConfig { driverType = "json-file" , config = Map.empty } -- | Convert JSON to a HCLogConfig. instance FromJSON HCLogConfig where parseJSON (Object x) = HCLogConfig <$> x .: "Type" <*> x .: "Config" parseJSON _ = fail "Expecting and object of type HCLogConfig!" -- | Convert a HCLogConfig to JSON. instance ToJSON HCLogConfig where toJSON (HCLogConfig driver config) = object [ "Type" .= driver , "Config" .= config ] -- ** Response -- | Response from a POST to \/containers\/create -- These should really just be lenses into an Object, I think.. -- Seems like it would be way more flexible. data ContainerCreateResponse = ContainerCreateResponse { containerId :: T.Text , containerWarnings :: Maybe [T.Text] } deriving (Eq, Show) instance FromJSON ContainerCreateResponse where parseJSON (Object x) = ContainerCreateResponse <$> x .: "Id" <*> x .:? "Warnings" -- FIXME : Return something logical... parseJSON _ = error "Failure!" 
instance ToJSON ContainerCreateResponse where toJSON (ContainerCreateResponse {..}) = object $ catMaybes [ ("Id" .=) <$> pure containerId , ("Warnings" .=) <$> containerWarnings ] -- | Reponse from a GET to \/containers\/\(id\)\/json data ContainerInfo = ContainerInfo { ciAppArmorProfile :: T.Text , ciArgs :: [T.Text] , ciConfig :: ContainerInfoConfig , ciCreated :: T.Text , ciDriver :: T.Text , ciExecDriver :: T.Text , ciExecIds :: Maybe [T.Text] , ciHostConfig :: HostConfig , ciHostnamePath :: T.Text , ciHostsPath :: T.Text , ciLogPath :: T.Text , ciId :: T.Text , ciImage :: T.Text , ciMountLabel :: T.Text , ciName :: T.Text , ciNetworkSettings :: ContainerInfoNetworkSettings , ciPath :: T.Text , ciProcessLabel :: T.Text , ciResolvConfPath :: T.Text , ciRestartCount :: Int , ciState :: ContainerInfoState , ciVolumes :: Value , ciVolumesRW :: Value } deriving (Eq, Show) instance FromJSON ContainerInfo where parseJSON (Object x) = ContainerInfo <$> x .: "AppArmorProfile" <*> x .: "Args" <*> x .: "Config" <*> x .: "Created" <*> x .: "Driver" <*> x .: "ExecDriver" <*> x .:? "ExecIDs" <*> x .: "HostConfig" <*> x .: "HostnamePath" <*> x .: "HostsPath" <*> x .: "LogPath" <*> x .: "Id" <*> x .: "Image" <*> x .: "MountLabel" <*> x .: "Name" <*> x .: "NetworkSettings" <*> x .: "Path" <*> x .: "ProcessLabel" <*> x .: "ResolvConfPath" <*> x .: "RestartCount" <*> x .: "State" <*> x .: "Volumes" <*> x .: "VolumesRW" -- FIXME : Return something logical... parseJSON _ = error "Failure!" 
instance ToJSON ContainerInfo where toJSON (ContainerInfo {..}) = object $ catMaybes [ ("AppArmorProfile" .=) <$> pure ciAppArmorProfile , ("Args" .=) <$> pure ciArgs , ("Config" .=) <$> pure ciConfig , ("Created" .=) <$> pure ciCreated , ("Driver" .=) <$> pure ciDriver , ("ExecDriver" .=) <$> pure ciExecDriver , ("ExecIDs" .=) <$> ciExecIds , ("HostConfig" .=) <$> pure ciHostConfig , ("HostnamePath" .=) <$> pure ciHostnamePath , ("HostsPath" .=) <$> pure ciHostsPath , ("LogPath" .=) <$> pure ciLogPath , ("Id" .=) <$> pure ciId , ("Image" .=) <$> pure ciImage , ("MountLabel" .=) <$> pure ciMountLabel , ("Name" .=) <$> pure ciName , ("NetworkSettings" .=) <$> pure ciNetworkSettings , ("Path" .=) <$> pure ciPath , ("ProcessLabel" .=) <$> pure ciProcessLabel , ("ResolvConfPath" .=) <$> pure ciResolvConfPath , ("RestartCount" .=) <$> pure ciRestartCount , ("State" .=) <$> pure ciState , ("Volumes" .=) <$> pure ciVolumes , ("VolumesRW" .=) <$> pure ciVolumesRW ] data ContainerInfoConfig = ContainerInfoConfig { cicAttachStderr :: Bool , cicAttachStdin :: Bool , cicAttachStdout :: Bool , cicCmd :: [T.Text] , cicDomainName :: T.Text , cicEntryPoint :: Maybe T.Text , cicEnv :: Maybe [T.Text] , cicExposedPorts :: Maybe Value , cicHostname :: T.Text , cicImage :: T.Text , cicLabels :: Value , cicMacAddress :: T.Text , cicNetworkDisabled :: Bool , cicOnBuild :: Maybe Value , cicOpenStdin :: Bool , cicPortSpecs :: Maybe Value , cicStdinOnce :: Bool , cicTty :: Bool , cicUser :: T.Text , cicVolumes :: Maybe Value , cicWorkingDir :: T.Text } deriving (Eq, Show) instance FromJSON ContainerInfoConfig where parseJSON (Object x) = ContainerInfoConfig <$> x .: "AttachStderr" <*> x .: "AttachStdin" <*> x .: "AttachStdout" <*> x .: "Cmd" <*> x .: "Domainname" <*> x .:? "Entrypoint" <*> x .:? "Env" <*> x .:? "ExposedPorts" <*> x .: "Hostname" <*> x .: "Image" <*> x .: "Labels" <*> x .: "MacAddress" <*> x .: "NetworkDisabled" <*> x .:? "OnBuild" <*> x .: "OpenStdin" <*> x .:? 
"PortSpecs" <*> x .: "StdinOnce" <*> x .: "Tty" <*> x .: "User" <*> x .:? "Volumes" <*> x .: "WorkingDir" -- FIXME : Return something logical... parseJSON _ = error "Failure!" instance ToJSON ContainerInfoConfig where toJSON (ContainerInfoConfig {..}) = object $ catMaybes [ ("AttachStderr" .=) <$> pure cicAttachStderr , ("AttachStdin" .=) <$> pure cicAttachStdin , ("AttachStdout" .=) <$> pure cicAttachStdout , ("Cmd" .=) <$> pure cicCmd , ("Domainname" .=) <$> pure cicDomainName , ("EntryPoint" .=) <$> cicEntryPoint , ("Env" .=) <$> cicEnv , ("ExposedPorts" .=) <$> cicExposedPorts , ("Hostname" .=) <$> pure cicHostname , ("Image" .=) <$> pure cicImage , ("Labels" .=) <$> pure cicLabels , ("MacAddress" .=) <$> pure cicMacAddress , ("NetworkDisabled" .=) <$> pure cicNetworkDisabled , ("OnBuild" .=) <$> cicOnBuild , ("OpenStdin" .=) <$> pure cicOpenStdin , ("PortSpecs" .=) <$> cicPortSpecs , ("StdinOnce" .=) <$> pure cicStdinOnce , ("Tty" .=) <$> pure cicTty , ("User" .=) <$> pure cicUser , ("Volumes" .=) <$> cicVolumes , ("WorkingDir" .=) <$> pure cicWorkingDir ] data ContainerInfoNetworkSettings = ContainerInfoNetworkSettings { cinBridge :: T.Text , cinGateway :: T.Text , cinIpAddress :: T.Text , cinIpPrefixLen:: Int , cinMacAddress :: T.Text , cinPortMapping:: Maybe Value , cinPorts :: Maybe Value } deriving (Eq, Show) instance FromJSON ContainerInfoNetworkSettings where parseJSON (Object x) = ContainerInfoNetworkSettings <$> x .: "Bridge" <*> x .: "Gateway" <*> x .: "IPAddress" <*> x .: "IPPrefixLen" <*> x .: "MacAddress" <*> x .:? "PortMapping" <*> x .:? "Ports" -- FIXME : Return something logical... parseJSON _ = error "Failure!" 
instance ToJSON ContainerInfoNetworkSettings where toJSON (ContainerInfoNetworkSettings {..}) = object $ catMaybes [ ("Bridge" .=) <$> pure cinBridge , ("Gateway" .=) <$> pure cinGateway , ("IPAddress" .=) <$> pure cinIpAddress , ("IPPrefixLen" .=) <$> pure cinIpPrefixLen , ("MacAddress" .=) <$> pure cinMacAddress , ("PortMapping" .=) <$> cinPortMapping , ("Ports" .=) <$> cinPorts ] data ContainerInfoState = ContainerInfoState { cisError :: T.Text , cisExitCode :: Int , cisFinishedAt :: T.Text , cisOOMKilled :: Bool , cisPaused :: Bool , cisPid :: Int , cisRestarting :: Bool , cisRunning :: Bool , cisStartedAt :: T.Text } deriving (Eq, Show) instance FromJSON ContainerInfoState where parseJSON (Object x) = ContainerInfoState <$> x .: "Error" <*> x .: "ExitCode" <*> x .: "FinishedAt" <*> x .: "OOMKilled" <*> x .: "Paused" <*> x .: "Pid" <*> x .: "Restarting" <*> x .: "Running" <*> x .: "StartedAt" -- FIXME : Return something logical... parseJSON _ = error "Failure!" instance ToJSON ContainerInfoState where toJSON (ContainerInfoState {..}) = object [ "Error" .= cisError , "Exitode" .= cisExitCode , "FinishedAt" .= cisFinishedAt , "OOMKilled" .= cisOOMKilled , "Paused" .= cisPaused , "Pid" .= cisPid , "Restarting" .= cisRestarting , "Running" .= cisRunning , "StartedAt" .= cisStartedAt ] -- * Exec data ContainerExecInit = ContainerExecInit { execInitAttachStdin :: Bool , execInitAttachStdout:: Bool , execInitAttachStderr:: Bool , execInitTty :: Bool , execInitCmd :: [T.Text] } deriving (Eq, Show) instance FromJSON ContainerExecInit where parseJSON (Object x) = ContainerExecInit <$> x .: "AttachStdin" <*> x .: "AttachStdout" <*> x .: "AttachStderr" <*> x .: "Tty" <*> x .: "Cmd" -- FIXME : Return something logical... parseJSON _ = error "Failure!" 
instance ToJSON ContainerExecInit where toJSON (ContainerExecInit {..}) = object [ "AttachStdin" .= execInitAttachStdin , "AttachStdout".= execInitAttachStdout , "AttachStderr".= execInitAttachStderr , "Tty" .= execInitTty , "Cmd" .= execInitCmd ] data ExecStart = ExecStart { execStartDetach :: Bool , execStartTty :: Bool } deriving (Eq, Show) instance FromJSON ExecStart where parseJSON (Object x) = ExecStart <$> x .: "Detach" <*> x .: "Tty" -- FIXME : Return something logical... parseJSON _ = error "Failure!" instance ToJSON ExecStart where toJSON (ExecStart {..}) = object [ "Detach" .= execStartDetach , "Tty" .= execStartTty ]
wayofthepie/docker-client
src/Docker/JSON/Types/Container.hs
Haskell
bsd-3-clause
21,168
{-# LANGUAGE PatternSynonyms #-} {-# LANGUAGE NoMonomorphismRestriction #-} module T16976 where import Language.Haskell.TH (reifyType, runIO) import Language.Haskell.TH.Ppr (ppr_sig) import Data.Foldable (for_) import System.IO (hPrint, stderr) data T s = MkT1 | MkT2 aNumber = 5 aString = "hi" pattern P = MkT1 do let names = [ 'aNumber, 'aString -- local value declarations , 'MkT1, 'MkT2 -- local data constructor declarations , ''T -- local type constructor declarations , 'P -- local pattern synonyms , 'not, 'id -- library value declarations , 'Nothing -- library data constructor declarations , ''Maybe, ''Functor -- library type constructor declarations ] for_ names $ \name -> do t <- reifyType name -- Why 'hPrint stderr' instead of 'print'? This is a workaround for the -- testsuite driver quirk, otherwise the test fails in 'ext-interp' way. runIO . hPrint stderr $ ppr_sig name t return []
sdiehl/ghc
testsuite/tests/th/T16976.hs
Haskell
bsd-3-clause
1,131
import Control.Exception import Control.Monad (when) import qualified Data.ByteString.Char8 as B import Data.Loc import System.Environment (getArgs) import Text.PrettyPrint.Mainland import qualified Language.C.Parser as P import qualified Language.C.Parser.Tokens as T import qualified Language.C.Syntax as C import Language.C.Properties import Opts extsMap :: [(Flag, C.Extensions)] extsMap = [(C99, C.C99) ,(C11, C.C11) ,(Gcc, C.Gcc) ,(Blocks, C.Blocks) ,(ObjC, C.ObjC) ,(CUDA, C.CUDA) ] main :: IO () main = do args <- getArgs (flags, files) <- compilerOpts args let exts = [ext | (f, ext) <- extsMap, f `elem` flags] let doTokens = Tokens `elem` flags case length files of 0 -> return () _ -> do when doTokens $ mapM_ (lexFile exts) files mapM_ (parseFile flags exts) files lexFile :: [C.Extensions] -> String -> IO () lexFile exts filename = do buf <- B.readFile filename case tokens buf of Left err -> fail $ show err Right ts -> mapM_ print ts where tokens :: B.ByteString -> Either SomeException [L T.Token] tokens buf = P.evalP tokensP (P.emptyPState exts [] buf start) start :: Pos start = startPos filename tokensP :: P.P [L T.Token] tokensP = do t <- P.lexToken case t of L _ T.Teof -> return [] _ -> tokensP >>= \ts -> return (t : ts) parseFile :: [Flag] -> [C.Extensions] -> String -> IO () parseFile flags exts filename = do s <- B.readFile filename case P.parse exts [] P.parseUnit s start of Left err -> fail $ show err Right defs -> if doPrint then if doPrama then putStr $ prettyPragma 80 (ppr defs) else putStr $ pretty 80 (ppr defs) else return () when (not (prop_ParsePrintUnitId exts s)) $ putStrLn $ "Bad pretty-printing: " ++ filename where doPrint :: Bool doPrint = Print `elem` flags doPrama :: Bool doPrama = Pragma `elem` flags start :: Pos start = startPos filename
flowbox-public/language-c-quote
examples/parse/Main.hs
Haskell
bsd-3-clause
2,170
{-# LANGUAGE TypeFamilies #-} module T6018failclosed12 where -- This exposed a subtle bug in the implementation during development. After -- unifying the RHS of (1) and (2) the LHS substitution was done only in (2) -- which made it look like an overlapped equation. This is not the case and this -- definition should be rejected. The first two equations are here to make sure -- that the internal implementation does list indexing corrcectly (this is a bit -- tricky because the list is kept in reverse order). type family F a b = r | r -> a b where F Float IO = Float F Bool IO = Bool F a IO = IO a -- (1) F Char b = b Int -- (2)
acowley/ghc
testsuite/tests/typecheck/should_fail/T6018failclosed11.hs
Haskell
bsd-3-clause
676
{-# LANGUAGE TemplateHaskell, GADTs, FlexibleInstances, ViewPatterns, CPP #-} -- | -- Module : Language.C.Inline.TH -- Copyright : 2014 Manuel M T Chakravarty -- License : BSD3 -- -- Maintainer : Manuel M T Chakravarty <chak@justtesting.org> -- Stability : experimental -- Portability : non-portable (GHC extensions) -- -- This module provides Template Haskell convenience functions. module Language.C.Inline.TH ( -- * Decompose type expressions headTyConName, headTyConNameOrError, -- * Decompose idiomatic declarations foreignWrapperDatacon, ptrOfForeignPtrWrapper, unwrapForeignPtrWrapper ) where -- standard libraries import Control.Applicative import Foreign.Ptr import Foreign.ForeignPtr import Language.Haskell.TH as TH -- quasi-quotation libraries import Language.C.Quote as QC -- friends import Language.C.Inline.Error -- |Project the name of the head of a type term if it is a type constructor. -- headTyConName :: TH.Type -> Maybe TH.Name headTyConName ty = case splitAppTy ty of (ConT name, _) -> Just name _ -> Nothing -- |Like 'headTyConName', but fail if the head is not a type constructor. -- headTyConNameOrError :: QC.Extensions -> TH.Type -> Q TH.Name headTyConNameOrError lang ty = case headTyConName ty of Just name -> return name Nothing -> reportErrorAndFail lang $ "expected the head of '" ++ show ty ++ "' to be a type constructor" -- |Decompose an n-ary type application into its head and arguments. 
-- splitAppTy :: TH.Type -> (TH.Type, [TH.Type]) splitAppTy = split [] where split args (ty `AppT` arg) = split (arg:args) ty split args (SigT ty _) = split args ty split args ty = (ty, args) -- |Obtain the data constructor of the newtype in an idiomatic 'ForeignPtr' wrapper of the form -- -- > newtype Wrapper <tvs> = Wrapper (ForeignPtr (Wrapper <tvs>)) -- foreignWrapperDatacon :: TH.Type -> Q TH.Exp foreignWrapperDatacon ty = do { (datacon, _) <- decomposeForeignPtrWrapper ty ; return $ ConE datacon } -- |Unwraps a newtype wrapper around a foreign pointer and turns the 'ForeignPtr' into a 'Ptr'. -- ptrOfForeignPtrWrapper :: TH.Type -> Q TH.Type ptrOfForeignPtrWrapper ty = [t| Ptr $(snd <$> decomposeForeignPtrWrapper ty) |] -- |Generate code that unwraps the foreign pointer inside the given foreign pointer wrapper type. -- unwrapForeignPtrWrapper :: TH.Type -> Q TH.Exp unwrapForeignPtrWrapper ty = do { (datacon, _) <- decomposeForeignPtrWrapper ty ; v <- newName "v" ; [| \e -> $(caseE [| e |] [match (conP datacon [varP v]) (normalB $ varE v) []]) |] } -- |Given a type whose head is a newtype wrapper around a foreign pointer of the form -- -- > newtype Wrapper <tvs> = Wrapper (ForeignPtr (Wrapper <tvs>)) -- -- return the name of the wrapper data constructor and type argument of the 'ForeignPtr', where all '<tvs>' have been -- substituted by the arguments in the type application constituting the input type (might be nullary). 
--
decomposeForeignPtrWrapper :: TH.Type -> Q (TH.Name, TH.Type)
decomposeForeignPtrWrapper ty
  = do
    { let (tycon, args) = splitAppTy ty
    ; name <- case tycon of
                ConT name -> return name
                _         ->
                  do
                  { reportErrorAndFail QC.ObjC $
                      "expected '" ++ show tycon ++ "' to be a type constructor of a 'ForeignPtr' wrapper"
                  }
    ; reifyUntilFixedPoint args name
    }
  where
    -- Reify 'name', looking through type synonyms (applying their type
    -- arguments as we go) until we reach the newtype declaration itself.
    reifyUntilFixedPoint args name
      = do
        { info <- reify name
        ; case info of
            TyConI (NewtypeD [] _name tvs (NormalC dataconName [(_strict, ConT fptr `AppT` ptrArg)]) _deriv)
              | fptr == ''ForeignPtr
              -> return (dataconName, substitute (zip args tvs) ptrArg)
            TyConI (TySynD _name tvs (headTyConName -> Just name'))
              -> do
                 { (dcname, type0) <- reifyUntilFixedPoint (drop (length tvs) args) name'
                 ; return (dcname, substitute (zip args tvs) type0)
                 }
            nonForeign
              -> do
                 { reportErrorAndFail QC.ObjC $
                     "expected '" ++ show name ++ "' to refer to a 'ForeignPtr' wrapped into a newtype, but it is "
                     ++ show (TH.ppr nonForeign)
                 }
        }

    -- Capture-avoiding substitution of type variables by the corresponding
    -- argument types.  Substitutions shadowed by a 'forall' are dropped.
    substitute :: [(TH.Type, TH.TyVarBndr)] -> TH.Type -> TH.Type
    substitute subst (ForallT boundTvs cxt' body)
      = ForallT boundTvs (substituteCxt subst' cxt') (substitute subst' body)
      where
        subst' = filter (`notShadowedBy` map theTV boundTvs) subst
        --
        (_, tv) `notShadowedBy` boundTvs' = theTV tv `notElem` boundTvs'
        --
        theTV (PlainTV tv)    = tv
        theTV (KindedTV tv _) = tv
    substitute subst (t1 `AppT` t2) = (substitute subst t1) `AppT` (substitute subst t2)
    substitute subst (SigT ty' ki)  = SigT (substitute subst ty') ki
    substitute subst (VarT tv)      = substituteName subst tv
    substitute _subst ty'           = ty'

    substituteCxt subst cxt' = map (substitutePred subst) cxt'

#if __GLASGOW_HASKELL__ < 709
    substitutePred subst (ClassP name tys) = ClassP name (map (substitute subst) tys)
    substitutePred subst (EqualP ty1 ty2)  = EqualP (substitute subst ty1) (substitute subst ty2)
#else
    -- Constraints are just types now.
    substitutePred = substitute
#endif

    -- BUG FIX: the non-matching case previously returned 'VarT thisTv'
    -- outright instead of recursing into the remaining pairs, so only the
    -- *first* type variable of a multi-parameter wrapper was ever
    -- substituted.  We now walk the whole substitution list.
    substituteName []               tv     = VarT tv
    substituteName ((arg, tv):rest) thisTv
      | tv `matches` thisTv = arg
      | otherwise           = substituteName rest thisTv

    PlainTV name      `matches` thisTv = name == thisTv
    KindedTV name _ki `matches` thisTv = name == thisTv
beni55/language-c-inline
Language/C/Inline/TH.hs
Haskell
bsd-3-clause
5,745
{-# LANGUAGE QuasiQuotes, BangPatterns, ScopedTypeVariables, TemplateHaskell, OverloadedStrings #-} {- The compactor does link-time optimization. It is much simpler than the Optimizer, no fancy dataflow analysis here. Optimizations: - rewrite all variables starting with h$$ to shorter names, these are internal names - write all function metadata compactly -} module Gen2.Compactor where import DynFlags import Control.Applicative import Control.Lens import Control.Monad.State.Strict import qualified Data.Binary.Get as DB import qualified Data.Binary.Put as DB import Data.Bits import qualified Data.ByteString.Lazy as BL import qualified Data.ByteString as BS import Data.Char (chr, ord) import Data.Function (on) import Data.HashMap.Strict (HashMap) import qualified Data.HashMap.Strict as HM import Data.Int import Data.List import Data.Maybe import Data.Monoid import Data.Text (Text) import qualified Data.Text as T import Compiler.JMacro import Compiler.Settings import Gen2.Base import Gen2.ClosureInfo import Gen2.Utils (buildingProf, buildingDebug) -- | collect global objects (data / CAFs). 
rename them and add them to the table collectGlobals :: [StaticInfo] -> State CompactorState () collectGlobals = mapM_ (\(StaticInfo i _ _) -> renameObj i) debugShowStat :: (JStat, [ClosureInfo], [StaticInfo]) -> String debugShowStat (_s, cis, sis) = "closures:\n" ++ unlines (map show cis) ++ "\nstatics:" ++ unlines (map show sis) ++ "\n\n" renameInternals :: GhcjsSettings -> DynFlags -> CompactorState -> [(JStat, [ClosureInfo], [StaticInfo])] -> (CompactorState, [JStat], JStat) renameInternals _settings dflags cs0 stats0 = (cs, stats, meta) where ((stats, meta), cs) = runState renamed cs0 renamed :: State CompactorState ([JStat], JStat) renamed | buildingDebug dflags || buildingProf dflags = do cs <- get let renamedStats = map (\(s,_,_) -> s & identsS %~ lookupRenamed cs) stats0 statics = map (renameStaticInfo cs) $ concatMap (\(_,_,x) -> x) stats0 infos = map (renameClosureInfo cs) $ concatMap (\(_,x,_) -> x) stats0 -- render metadata as individual statements meta = mconcat (map staticDeclStat statics) <> mconcat (map (staticInitStat $ buildingProf dflags) statics) <> mconcat (map (closureInfoStat True) infos) return (renamedStats, meta) | otherwise = do -- collect all global objects and entries, add them to the renaming table mapM_ (\(_, cis, sis) -> do mapM_ (renameEntry . TxtI . ciVar) cis mapM_ (renameObj . siVar) sis mapM_ collectLabels sis) stats0 -- sort our entries, store the results -- propagate all renamings throughtout the code cs <- get let renamedStats = map (\(s,_,_) -> s & identsS %~ lookupRenamed cs) stats0 sortedInfo = concatMap (\(_,xs,_) -> map (renameClosureInfo cs) xs) stats0 entryArr = map (TxtI . fst) . sortBy (compare `on` snd) . HM.toList $ cs ^. entries lblArr = map (TxtI . fst) . sortBy (compare `on` snd) . HM.toList $ cs ^. 
labels ss = concatMap (\(_,_,xs) -> map (renameStaticInfo cs) xs) stats0 infoBlock = encodeStr (concatMap (encodeInfo cs) sortedInfo) staticBlock = encodeStr (concatMap (encodeStatic cs) ss) staticDecls = mconcat (map staticDeclStat ss) meta = staticDecls <> [j| h$scheduleInit(`entryArr`, h$staticDelayed, `lblArr`, `infoBlock`, `staticBlock`); h$staticDelayed = []; |] return (renamedStats, meta) -- | rename a heap object, which means adding it to the -- static init table in addition to the renamer renameObj :: Text -> State CompactorState Text renameObj xs = do (TxtI xs') <- renameVar (TxtI xs) addItem statics statics numStatics numStatics parentStatics xs' return xs' renameEntry :: Ident -> State CompactorState Ident renameEntry i = do i'@(TxtI i'') <- renameVar i addItem entries entries numEntries numEntries parentEntries i'' return i' addItem :: Getting (HashMap Text Int) CompactorState (HashMap Text Int) -> Setting (->) CompactorState CompactorState (HashMap Text Int) (HashMap Text Int) -> Getting Int CompactorState Int -> ASetter' CompactorState Int -> Getting (HashMap Text Int) CompactorState (HashMap Text Int) -> Text -> State CompactorState () addItem items items' numItems numItems' parentItems i = do s <- use items case HM.lookup i s of Just _ -> return () Nothing -> do sp <- use parentItems case HM.lookup i sp of Just _ -> return () Nothing -> do ni <- use numItems items' %= HM.insert i ni numItems' += 1 collectLabels :: StaticInfo -> State CompactorState () collectLabels si = mapM_ (addItem labels labels numLabels numLabels parentLabels) (labelsV . siVal $ si) where labelsV (StaticData _ args) = concatMap labelsA args labelsV (StaticList args _) = concatMap labelsA args labelsV _ = [] labelsA (StaticLitArg l) = labelsL l labelsA _ = [] labelsL (LabelLit _ lbl) = [lbl] labelsL _ = [] lookupRenamed :: CompactorState -> Ident -> Ident lookupRenamed cs i@(TxtI t) = case HM.lookup t (cs ^. 
nameMap) of Nothing -> i Just i' -> i' renameVar :: Ident -- ^ text identifier to rename -> State CompactorState Ident -- ^ the updated renamer state and the new ident renameVar i@(TxtI t) | "h$$" `T.isPrefixOf` t = do m <- use nameMap case HM.lookup t m of Just r -> return r Nothing -> do y <- newIdent nameMap %= HM.insert t y return y | otherwise = return i newIdent :: State CompactorState Ident newIdent = do (y:ys) <- use identSupply identSupply .= ys return y -- | rename a compactor info entry according to the compactor state (no new renamings are added) renameClosureInfo :: CompactorState -> ClosureInfo -> ClosureInfo renameClosureInfo cs (ClosureInfo v rs n l t s) = (ClosureInfo (renameV v) rs n l t (f s)) where renameV t = maybe t (\(TxtI t') -> t') (HM.lookup t m) m = cs ^. nameMap f (CIStaticRefs rs) = CIStaticRefs (map renameV rs) -- | rename a static info entry according to the compactor state (no new renamings are added) renameStaticInfo :: CompactorState -> StaticInfo -> StaticInfo renameStaticInfo cs si = si & staticIdents %~ renameIdent where renameIdent t = maybe t (\(TxtI t') -> t') (HM.lookup t $ cs ^. nameMap) staticIdents :: Traversal' StaticInfo Text staticIdents f (StaticInfo i v cc) = StaticInfo <$> f i <*> staticIdentsV f v <*> pure cc staticIdentsV :: Traversal' StaticVal Text staticIdentsV f (StaticFun i) = StaticFun <$> f i staticIdentsV f (StaticThunk (Just i)) = StaticThunk . 
Just <$> f i staticIdentsV f (StaticData con args) = StaticData <$> f con <*> traverse (staticIdentsA f) args staticIdentsV f (StaticList xs t) = StaticList <$> traverse (staticIdentsA f) xs <*> traverse f t staticIdentsV _ x = pure x staticIdentsA :: Traversal' StaticArg Text staticIdentsA f (StaticObjArg t) = StaticObjArg <$> f t staticIdentsA _ x = pure x {- simple encoding of naturals using only printable low char points, rely on gzip to compress repeating sequences, most significant bits first 1 byte: ascii code 32-123 (0-89), \ and " unused 2 byte: 124 a b (90-8189) 3 byte: 125 a b c (8190-737189) -} encodeStr :: [Int] -> String encodeStr = concatMap encodeChr where c :: Int -> Char c i | i > 90 || i < 0 = error ("encodeStr: c " ++ show i) | i >= 59 = chr (34+i) | i >= 2 = chr (33+i) | otherwise = chr (32+i) encodeChr i | i < 0 = error "encodeStr: negative" | i <= 89 = [c i] | i <= 8189 = let (c1, c2) = (i - 90) `divMod` 90 in [chr 124, c c1, c c2] | i <= 737189 = let (c2a, c3) = (i - 8190) `divMod` 90 (c1, c2) = c2a `divMod` 90 in [chr 125, c c1, c c2, c c3] | otherwise = error "encodeStr: overflow" entryIdx :: String -> CompactorState -> Text -> Int entryIdx msg cs i = fromMaybe lookupParent (HM.lookup i' (cs ^. entries)) where (TxtI i') = lookupRenamed cs (TxtI i) lookupParent = maybe err (+ cs ^. numEntries) (HM.lookup i' (cs ^. parentEntries)) err = error (msg ++ ": invalid entry: " ++ T.unpack i') objectIdx :: String -> CompactorState -> Text -> Int objectIdx msg cs i = fromMaybe lookupParent (HM.lookup i' (cs ^. statics)) where (TxtI i') = lookupRenamed cs (TxtI i) lookupParent = maybe err (+ cs ^. numStatics) (HM.lookup i' (cs ^. parentStatics)) err = error (msg ++ ": invalid static: " ++ T.unpack i') labelIdx :: String -> CompactorState -> Text -> Int labelIdx msg cs l = fromMaybe lookupParent (HM.lookup l (cs ^. labels)) where lookupParent = maybe err (+ cs ^. numLabels) (HM.lookup l (cs ^. 
parentLabels)) err = error (msg ++ ": invalid label: " ++ T.unpack l) encodeInfo :: CompactorState -> ClosureInfo -- ^ information to encode -> [Int] encodeInfo cs (ClosureInfo _var regs name layout typ static) | CIThunk <- typ = [0] ++ ls | (CIFun _arity regs0) <- typ, regs0 /= argSize regs = error ("encodeInfo: inconsistent register metadata for " ++ T.unpack name) | (CIFun arity _regs0) <- typ = [1, arity, encodeRegs regs] ++ ls | (CICon tag) <- typ = [2, tag] ++ ls | CIStackFrame <- typ = [3, encodeRegs regs] ++ ls -- (CIPap ar) <- typ = [4, ar] ++ ls -- these should only appear during runtime | otherwise = error ("encodeInfo, unexpected closure type: " ++ show typ) where ls = encodeLayout layout ++ encodeSrt static encodeLayout CILayoutVariable = [0] encodeLayout (CILayoutUnknown s) = [s+1] encodeLayout (CILayoutFixed s _vs) = [s+1] encodeSrt (CIStaticRefs rs) = length rs : map (objectIdx "encodeInfo" cs) rs encodeRegs CIRegsUnknown = 0 encodeRegs (CIRegs skip regTypes) = let nregs = sum (map varSize regTypes) in encodeRegsTag skip nregs encodeRegsTag skip nregs | skip < 0 || skip > 1 = error "encodeRegsTag: unexpected skip" | otherwise = 1 + (nregs `shiftL` 1) + skip argSize (CIRegs skip regTypes) = sum (map varSize regTypes) - 1 + skip argSize _ = 0 encodeStatic :: CompactorState -> StaticInfo -> [Int] encodeStatic cs (StaticInfo _to sv _) | StaticFun f <- sv = [1, entry f] | StaticThunk (Just t) <- sv = [2, entry t] | StaticThunk Nothing <- sv = [0] | StaticUnboxed (StaticUnboxedBool b) <- sv = [3 + fromEnum b] | StaticUnboxed (StaticUnboxedInt i) <- sv = [5] -- ++ encodeInt i | StaticUnboxed (StaticUnboxedDouble d) <- sv = [6] -- ++ encodeDouble d -- | StaticString t <- sv = [7, T.length t] ++ map encodeChar (T.unpack t) -- | StaticBin bs <- sv = [8, BS.length bs] ++ map fromIntegral (BS.unpack bs) | StaticList [] Nothing <- sv = [8] | StaticList args t <- sv = [9, length args] ++ maybe [0] (\t' -> [1, obj t']) t ++ concatMap encodeArg (reverse args) | 
StaticData con args <- sv = (if length args <= 6 then [11+length args] else [10,length args]) ++ [entry con] ++ concatMap encodeArg args where obj = objectIdx "encodeStatic" cs entry = entryIdx "encodeStatic" cs lbl = labelIdx "encodeStatic" cs -- | an argument is either a reference to a heap object or a primitive value encodeArg (StaticLitArg (BoolLit b)) = [0 + fromEnum b] encodeArg (StaticLitArg (IntLit 0)) = [2] encodeArg (StaticLitArg (IntLit 1)) = [3] encodeArg (StaticLitArg (IntLit i)) = [4] ++ encodeInt i encodeArg (StaticLitArg NullLit) = [5] encodeArg (StaticLitArg (DoubleLit d)) = [6] ++ encodeDouble d encodeArg (StaticLitArg (StringLit s)) = [7] ++ encodeString s encodeArg (StaticLitArg (BinLit b)) = [8] ++ encodeBinary b encodeArg (StaticLitArg (LabelLit b l)) = [9, fromEnum b, lbl l] encodeArg (StaticConArg con args) = [10, entry con, length args] ++ concatMap encodeArg args encodeArg (StaticObjArg t) = [11 + obj t] -- encodeArg x = error ("encodeArg: unexpected: " ++ show x) encodeChar = ord -- fixme make characters more readable encodeString :: Text -> [Int] encodeString xs = T.length xs : map ord (T.unpack xs) -- ByteString is prefixed with length, then blocks of 4 numbers encoding 3 bytes encodeBinary :: BS.ByteString -> [Int] encodeBinary bs = BS.length bs : go bs where go b | BS.null b = [] | l == 1 = let b0 = b `BS.index` 0 in map fromIntegral [ b0 `shiftR` 2, (b0 .&. 3) `shiftL` 4 ] | l == 2 = let b0 = b `BS.index` 0 b1 = b `BS.index` 1 in map fromIntegral [ b0 `shiftR` 2 , ((b0 .&. 3) `shiftL` 4) .|. (b1 `shiftR` 4) , (b1 .&. 15) `shiftL` 2 ] | otherwise = let b0 = b `BS.index` 0 b1 = b `BS.index` 1 b2 = b `BS.index` 2 in map fromIntegral [ b0 `shiftR` 2 , ((b0 .&. 3) `shiftL` 4) .|. (b1 `shiftR` 4) , ((b1 .&. 15) `shiftL` 2) .|. (b2 `shiftR` 6) , b2 .&. 
63 ] ++ go (BS.drop 3 b) where l = BS.length b encodeInt :: Integer -> [Int] encodeInt i | i >= -10 && i < encodeMax - 11 = [fromIntegral i + 12] | i > 2^(31::Int)-1 || i < -2^(31::Int) = error "encodeInt: integer outside 32 bit range" | otherwise = let i' :: Int32 = fromIntegral i in [0, fromIntegral ((i' `shiftR` 16) .&. 0xffff), fromIntegral (i' .&. 0xffff)] -- encode a possibly 53 bit int encodeSignificand :: Integer -> [Int] encodeSignificand i | i >= -10 && i < encodeMax - 11 = [fromIntegral i + 12] | i > 2^(53::Int) || i < -2^(53::Int) = error ("encodeInt: integer outside 53 bit range: " ++ show i) | otherwise = let i' = abs i in [if i < 0 then 0 else 1] ++ map (\r -> fromIntegral ((i' `shiftR` r) .&. 0xffff)) [48,32,16,0] encodeDouble :: SaneDouble -> [Int] encodeDouble (SaneDouble d) | isNegativeZero d = [0] | d == 0 = [1] | isInfinite d && d > 0 = [2] | isInfinite d = [3] | isNaN d = [4] | abs exponent <= 30 = [6 + fromIntegral exponent + 30] ++ encodeSignificand significand | otherwise = [5] ++ encodeInt (fromIntegral exponent) ++ encodeSignificand significand where (significand, exponent) = decodeFloat d encodeMax :: Integer encodeMax = 737189 {- | The Base data structure contains the information we need to do incremental linking against a base bundle. base file format: GHCJSBASE [renamer state] [linkedPackages] [packages] [modules] [symbols] -} renderBase :: Base -- ^ base metadata -> BL.ByteString -- ^ rendered result renderBase = DB.runPut . 
putBase loadBase :: FilePath -> IO Base loadBase file = DB.runGet (getBase file) <$> BL.readFile file ---------------------------- {-# INLINE identsS #-} identsS :: Traversal' JStat Ident identsS f (DeclStat i) = DeclStat <$> f i identsS f (ReturnStat e) = ReturnStat <$> identsE f e identsS f (IfStat e s1 s2) = IfStat <$> identsE f e <*> identsS f s1 <*> identsS f s2 identsS f (WhileStat b e s) = WhileStat b <$> identsE f e <*> identsS f s identsS f (ForInStat b i e s) = ForInStat b <$> f i <*> identsE f e <*> identsS f s identsS f (SwitchStat e xs s) = SwitchStat <$> identsE f e <*> (traverse . traverseCase) f xs <*> identsS f s where traverseCase g (e,s) = (,) <$> identsE g e <*> identsS g s identsS f (TryStat s1 i s2 s3) = TryStat <$> identsS f s1 <*> f i <*> identsS f s2 <*> identsS f s3 identsS f (BlockStat xs) = BlockStat <$> (traverse . identsS) f xs identsS f (ApplStat e es) = ApplStat <$> identsE f e <*> (traverse . identsE) f es identsS f (UOpStat op e) = UOpStat op <$> identsE f e identsS f (AssignStat e1 e2) = AssignStat <$> identsE f e1 <*> identsE f e2 identsS _ (UnsatBlock{}) = error "identsS: UnsatBlock" identsS _ (AntiStat{}) = error "identsS: AntiStat" identsS f (LabelStat l s) = LabelStat l <$> identsS f s identsS _ b@(BreakStat{}) = pure b identsS _ c@(ContinueStat{}) = pure c {-# INLINE identsE #-} identsE :: Traversal' JExpr Ident identsE f (ValExpr v) = ValExpr <$> identsV f v identsE f (SelExpr e i) = SelExpr <$> identsE f e <*> pure i -- do not rename properties identsE f (IdxExpr e1 e2) = IdxExpr <$> identsE f e1 <*> identsE f e2 identsE f (InfixExpr s e1 e2) = InfixExpr s <$> identsE f e1 <*> identsE f e2 identsE f (UOpExpr o e) = UOpExpr o <$> identsE f e identsE f (IfExpr e1 e2 e3) = IfExpr <$> identsE f e1 <*> identsE f e2 <*> identsE f e3 identsE f (ApplExpr e es) = ApplExpr <$> identsE f e <*> (traverse . 
identsE) f es identsE _ (UnsatExpr{}) = error "identsE: UnsatExpr" identsE _ (AntiExpr{}) = error "identsE: AntiExpr" {-# INLINE identsV #-} identsV :: Traversal' JVal Ident identsV f (JVar i) = JVar <$> f i identsV f (JList xs) = JList <$> (traverse . identsE) f xs identsV _ d@(JDouble{}) = pure d identsV _ i@(JInt{}) = pure i identsV _ s@(JStr{}) = pure s identsV _ r@(JRegEx{}) = pure r identsV f (JHash m) = JHash <$> (traverse . identsE) f m identsV f (JFunc args s) = JFunc <$> traverse f args <*> identsS f s identsV _ (UnsatVal{}) = error "identsV: UnsatVal" compact :: GhcjsSettings -> DynFlags -> CompactorState -> [(JStat, [ClosureInfo], [StaticInfo])] -> (CompactorState, [JStat], JStat) -- ^ renamer state, statements for each unit, metadata compact settings dflags rs input = renameInternals settings dflags rs input
manyoo/ghcjs
src/Gen2/Compactor.hs
Haskell
mit
19,376
{-# LANGUAGE CPP, MagicHash #-} ----------------------------------------------------------------------------- -- | -- Module : Haddock.Interface.AttachInstances -- Copyright : (c) Simon Marlow 2006, -- David Waern 2006-2009, -- Isaac Dupree 2009 -- License : BSD-like -- -- Maintainer : haddock@projects.haskell.org -- Stability : experimental -- Portability : portable ----------------------------------------------------------------------------- module Haddock.Interface.AttachInstances (attachInstances) where import Haddock.Types import Haddock.Convert import Haddock.GhcUtils import Control.Arrow hiding ((<+>)) import Data.List import Data.Ord (comparing) import Data.Function (on) import qualified Data.Map as Map import qualified Data.Set as Set import Class import DynFlags import ErrUtils import FamInstEnv import FastString import GHC import GhcMonad (withSession) import Id import InstEnv import MonadUtils (liftIO) import Name import Outputable (text, sep, (<+>)) import PrelNames import SrcLoc import TcRnDriver (tcRnGetInfo) import TcType (tcSplitSigmaTy) import TyCon import TypeRep import TysPrim( funTyCon ) import Var hiding (varName) #define FSLIT(x) (mkFastString# (x#)) type ExportedNames = Set.Set Name type Modules = Set.Set Module type ExportInfo = (ExportedNames, Modules) -- Also attaches fixities attachInstances :: ExportInfo -> [Interface] -> InstIfaceMap -> Ghc [Interface] attachInstances expInfo ifaces instIfaceMap = mapM attach ifaces where -- TODO: take an IfaceMap as input ifaceMap = Map.fromList [ (ifaceMod i, i) | i <- ifaces ] attach iface = do newItems <- mapM (attachToExportItem expInfo iface ifaceMap instIfaceMap) (ifaceExportItems iface) return $ iface { ifaceExportItems = newItems } attachToExportItem :: ExportInfo -> Interface -> IfaceMap -> InstIfaceMap -> ExportItem Name -> Ghc (ExportItem Name) attachToExportItem expInfo iface ifaceMap instIfaceMap export = case attachFixities export of e@ExportDecl { expItemDecl = L eSpan (TyClD d) } -> do 
mb_info <- getAllInfo (tcdName d) insts <- case mb_info of Just (_, _, cls_instances, fam_instances) -> let fam_insts = [ (synifyFamInst i opaque, doc,spanNameE n (synifyFamInst i opaque) (L eSpan (tcdName d)) ) | i <- sortBy (comparing instFam) fam_instances , let n = getName i , let doc = instLookup instDocMap n iface ifaceMap instIfaceMap , not $ isNameHidden expInfo (fi_fam i) , not $ any (isTypeHidden expInfo) (fi_tys i) , let opaque = isTypeHidden expInfo (fi_rhs i) ] cls_insts = [ (synifyInstHead i, instLookup instDocMap n iface ifaceMap instIfaceMap, spanName n (synifyInstHead i) (L eSpan (tcdName d))) | let is = [ (instanceHead' i, getName i) | i <- cls_instances ] , (i@(_,_,cls,tys), n) <- sortBy (comparing $ first instHead) is , not $ isInstanceHidden expInfo cls tys ] -- fam_insts but with failing type fams filtered out cleanFamInsts = [ (fi, n, L l r) | (Right fi, n, L l (Right r)) <- fam_insts ] famInstErrs = [ errm | (Left errm, _, _) <- fam_insts ] in do dfs <- getDynFlags let mkBug = (text "haddock-bug:" <+>) . 
text liftIO $ putMsg dfs (sep $ map mkBug famInstErrs) return $ cls_insts ++ cleanFamInsts Nothing -> return [] return $ e { expItemInstances = insts } e -> return e where attachFixities e@ExportDecl{ expItemDecl = L _ d } = e { expItemFixities = nubBy ((==) `on` fst) $ expItemFixities e ++ [ (n',f) | n <- getMainDeclBinder d , Just subs <- [instLookup instSubMap n iface ifaceMap instIfaceMap] , n' <- n : subs , Just f <- [instLookup instFixMap n' iface ifaceMap instIfaceMap] ] } attachFixities e = e -- spanName: attach the location to the name that is the same file as the instance location spanName s (clsn,_,_,_) (L instL instn) = let s1 = getSrcSpan s sn = if srcSpanFileName_maybe s1 == srcSpanFileName_maybe instL then instn else clsn in L (getSrcSpan s) sn -- spanName on Either spanNameE s (Left e) _ = L (getSrcSpan s) (Left e) spanNameE s (Right ok) linst = let L l r = spanName s ok linst in L l (Right r) instLookup :: (InstalledInterface -> Map.Map Name a) -> Name -> Interface -> IfaceMap -> InstIfaceMap -> Maybe a instLookup f name iface ifaceMap instIfaceMap = case Map.lookup name (f $ toInstalledIface iface) of res@(Just _) -> res Nothing -> do let ifaceMaps = Map.union (fmap toInstalledIface ifaceMap) instIfaceMap iface' <- Map.lookup (nameModule name) ifaceMaps Map.lookup name (f iface') -- | Like GHC's 'instanceHead' but drops "silent" arguments. instanceHead' :: ClsInst -> ([TyVar], ThetaType, Class, [Type]) instanceHead' ispec = (tvs, dropSilentArgs dfun theta, cls, tys) where dfun = is_dfun ispec (tvs, cls, tys) = instanceHead ispec (_, theta, _) = tcSplitSigmaTy (idType dfun) -- | Drop "silent" arguments. See GHC Note [Silent superclass -- arguments]. dropSilentArgs :: DFunId -> ThetaType -> ThetaType dropSilentArgs dfun theta = drop (dfunNSilent dfun) theta -- | Like GHC's getInfo but doesn't cut things out depending on the -- interative context, which we don't set sufficiently anyway. 
getAllInfo :: GhcMonad m => Name -> m (Maybe (TyThing,Fixity,[ClsInst],[FamInst])) getAllInfo name = withSession $ \hsc_env -> do (_msgs, r) <- liftIO $ tcRnGetInfo hsc_env name return r -------------------------------------------------------------------------------- -- Collecting and sorting instances -------------------------------------------------------------------------------- -- | Simplified type for sorting types, ignoring qualification (not visible -- in Haddock output) and unifying special tycons with normal ones. -- For the benefit of the user (looks nice and predictable) and the -- tests (which prefer output to be deterministic). data SimpleType = SimpleType Name [SimpleType] | SimpleTyLit TyLit deriving (Eq,Ord) instHead :: ([TyVar], [PredType], Class, [Type]) -> ([Int], Name, [SimpleType]) instHead (_, _, cls, args) = (map argCount args, className cls, map simplify args) argCount :: Type -> Int argCount (AppTy t _) = argCount t + 1 argCount (TyConApp _ ts) = length ts argCount (FunTy _ _ ) = 2 argCount (ForAllTy _ t) = argCount t argCount _ = 0 simplify :: Type -> SimpleType simplify (ForAllTy _ t) = simplify t simplify (FunTy t1 t2) = SimpleType funTyConName [simplify t1, simplify t2] simplify (AppTy t1 t2) = SimpleType s (ts ++ [simplify t2]) where (SimpleType s ts) = simplify t1 simplify (TyVarTy v) = SimpleType (tyVarName v) [] simplify (TyConApp tc ts) = SimpleType (tyConName tc) (map simplify ts) simplify (LitTy l) = SimpleTyLit l -- Used for sorting instFam :: FamInst -> ([Int], Name, [SimpleType], Int, SimpleType) instFam FamInst { fi_fam = n, fi_tys = ts, fi_rhs = t } = (map argCount ts, n, map simplify ts, argCount t, simplify t) funTyConName :: Name funTyConName = mkWiredInName gHC_PRIM (mkOccNameFS tcName FSLIT("(->)")) funTyConKey (ATyCon funTyCon) -- Relevant TyCon BuiltInSyntax -------------------------------------------------------------------------------- -- Filtering hidden instances 
--------------------------------------------------------------------------------

-- | A class or data type is hidden iff
--
--  * it is defined in one of the modules that are being processed
--
--  * and it is not exported by any non-hidden module
isNameHidden :: ExportInfo -> Name -> Bool
isNameHidden (exportedNames, processedMods) name =
    definedHere && not exported
  where
    definedHere = nameModule name `Set.member` processedMods
    exported    = name `Set.member` exportedNames

-- | We say that an instance is «hidden» iff its class or any (part)
-- of its type(s) is hidden.
isInstanceHidden :: ExportInfo -> Class -> [Type] -> Bool
isInstanceHidden expInfo cls tys =
    isNameHidden expInfo (getName cls) || any (isTypeHidden expInfo) tys

-- | A type is hidden iff any type-constructor name occurring in it is
-- hidden; type variables and type-level literals never hide a type.
isTypeHidden :: ExportInfo -> Type -> Bool
isTypeHidden expInfo = go
  where
    go :: Type -> Bool
    go (TyVarTy {})         = False
    go (AppTy t1 t2)        = go t1 || go t2
    go (TyConApp tcon args) = isNameHidden expInfo (getName tcon) || any go args
    go (FunTy t1 t2)        = go t1 || go t2
    go (ForAllTy _ ty)      = go ty
    go (LitTy _)            = False
adamse/haddock
haddock-api/src/Haddock/Interface/AttachInstances.hs
Haskell
bsd-2-clause
9,235
{-# LANGUAGE GADTs, AllowAmbiguousTypes #-}
module T8392a where

-- Should complain even with AllowAmbiguousTypes
--
-- But (#12466) we now don't complain about
-- contradictory signatures
-- Instead we get a redundant pattern-match warning,
-- in the post-typechecking pattern-match checks

-- NOTE(review): the (Int ~ Bool) context is deliberately insoluble; this file
-- is a compiler-testsuite probe, so the code below must stay exactly as is.
foo :: (Int ~ Bool) => a -> a
foo x = x
sdiehl/ghc
testsuite/tests/typecheck/should_fail/T8392a.hs
Haskell
bsd-3-clause
331
-- | Instances that extend the name-accumulation ('AccNames'), renaming
-- ('MapNames') and declaration-mapping ('MapDeclM') machinery to the
-- type-checker-decorated property syntax defined in "TiPropDecorate".
module NameMapsPropDecorate where
import NameMaps
import TiPropDecorate
--import PropSyntax(AssertionI,PredicateI)
import NameMapsDecorate(mts)
import TiTypes
--import TiKinds
import NameMapsProp()
--import NameMapsPropStruct(bothtype,bothval)
import HsIdent(mapHsIdent2)
import MapDeclM
--import MapDeclMBaseStruct()
import MapDeclMPropStruct()
import MUtils
import AccList

-- Accumulate all names occurring in decorated declarations.
instance AccNames i (TiDecls i) where
  accNames f (Decs ds (ks,ts)) = accNames f ds . accNames f ts -- hmm, ks?

instance AccNames i (TiDecl i) where
  accNames f (Dec d) = accNames f d

instance AccNames i (TiExp i) where
  accNames f (Exp e) = accNames f e
  accNames f (TiSpec i _ ts) = accNames f i . accNames f ts
  accNames f (TiTyped e t) = a e . a t
    where a x = accNames f x

instance AccNames i (TiAssertion i) where accNames = accNamesRec
instance AccNames i (TiPredicate i) where accNames = accNamesRec

instance AccNames i (OTiAssertion i) where
  accNames f (OA is ds pa) = accList f is . accNames f ds . accNames f pa

--------------------------------------------------------------------------------

-- Rename identifiers (parameterised over value/type contexts) in
-- decorated declarations, including the kind/type environment.
instance MapNames i1 (TiDecls i1) i2 (TiDecls i2) where
  mapNames2 c f (Decs ds (ks,ts)) = Decs (m ds) (mks ks,mts c f ts)
    where
      m x = mapNames2 c f x
      mks = map mk
      mk (i:>:(k,ti)) = bothtype mapHsIdent2 f i:>:(k,m ti)

instance MapNames i1 (TiDecl i1) i2 (TiDecl i2) where
  mapNames2 c f (Dec d) = Dec (mapNames2 c f d)

instance MapNames i1 (TiExp i1) i2 (TiExp i2) where
  mapNames2 c f (Exp e) = Exp (mapNames2 c f e)
  mapNames2 c f (TiSpec i sc ts)= TiSpec (bothval mapHsIdent2 f i) (m sc) (m ts)
    where m x = mapNames2 c f x
  mapNames2 c f (TiTyped e t) = TiTyped (m e) (m t)
    where m x = mapNames2 c f x

instance MapNames i1 (TiAssertion i1) i2 (TiAssertion i2) where mapNames2 = mapNames2Rec
instance MapNames i1 (TiPredicate i1) i2 (TiPredicate i2) where mapNames2 = mapNames2Rec

instance MapNames i1 (OTiAssertion i1) i2 (OTiAssertion i2) where
  mapNames2 c f@(vf,cf) (OA is ds pa) = OA is' ds' (m pa)
    where
      -- bound identifiers are renamed in Pattern position, local
      -- definitions in Local position
      is' = map (vf (defval Pattern)) is
      ds' = [(vf (defval Local) i:>:m' t,m e)|(i:>:t,e)<-ds]
      m x = mapNames2 c f x
      m' x = mapNames2 Local f x

--------------------------------------------------------------------------------

-- Apply a monadic transformation to every declaration group.
instance MapDeclM (TiDecls i) (TiDecls i) where
  mapDeclM f (Decs ds dt) = flip Decs dt # mapDeclM f ds

instance MapDeclM (TiDecl i) (TiDecls i) where mapDeclM = std_mapDeclM

instance MapDeclM (TiExp i) (TiDecls i) where
  mapDeclM f (Exp e) = Exp # mapDeclM f e
  mapDeclM f (TiTyped e t) = flip TiTyped t # mapDeclM f e
  mapDeclM f e@(TiSpec{}) = return e

instance MapDeclM (TiAssertion i) (TiDecls i) where mapDeclM = std_mapDeclM
instance MapDeclM (TiPredicate i) (TiDecls i) where mapDeclM = std_mapDeclM

instance MapDeclM (OTiAssertion i) (TiDecls i) where
  mapDeclM f (OA is ds pa) = OA is ds # mapDeclM f pa
forste/haReFork
tools/property/TI/NameMapsPropDecorate.hs
Haskell
bsd-3-clause
2,930
-- | Extra Maybe utilities.
module Data.Maybe.Extra where

import Control.Monad
import Data.Maybe

-- | Monadic 'mapMaybe': apply an effectful, possibly-failing function to
-- every element of the list (left to right), keeping only the 'Just'
-- results in their original order.
--
-- Uses 'fmap' rather than the legacy 'liftM' (they are equivalent, and
-- 'fmap' is the modern idiom now that 'Functor' is a superclass of
-- 'Monad').
mapMaybeM :: Monad f => (a -> f (Maybe b)) -> [a] -> f [b]
mapMaybeM f = fmap catMaybes . mapM f
mathhun/stack
src/Data/Maybe/Extra.hs
Haskell
bsd-3-clause
223