Column schema:
  code        stringlengths   (2 to 1.05M)
  repo_name   stringlengths   (5 to 101)
  path        stringlengths   (4 to 991)
  language    stringclasses   (3 values)
  license     stringclasses   (5 values)
  size        int64           (2 to 1.05M)
module GeneratorSpec where

import TestImport
import qualified Data.Map as M
import Data.List.NonEmpty (NonEmpty(..))

type UserId = Int
type NonEmptyUserId = NonEmpty UserId

resources :: [ResourceTree String]
resources = [parseRoutes|
/api ApiP:
  /users/#UserId UsersR GET
  /users/#Int/foo UsersFooR GET
  /users/#NonEmptyUserId/bar UsersBarR GET
  /users/#Other/baz UsersBazR GET
|]

spec :: Spec
spec = do
  describe "genFlowClasses" $ do
    it "should generate classes" $ do
      let classes = genFlowClasses M.empty [] [] resources
      map className classes `shouldBe`
        [ "PATHS_TYPE_paths"
        , "PATHS_TYPE_paths_static_pages"
        , "PATHS_TYPE_paths_api"
        ]

    it "should identify path variable types" $ do
      let classes = genFlowClasses M.empty [] [] resources
          apiClass = find ((== "PATHS_TYPE_paths_api") . className) classes
      map classMembers apiClass `shouldBe` Just
        [ Method "users"     [ Path "api", Path "users", Dyn NumberT ]
        , Method "users_bar" [ Path "api", Path "users", Dyn (NonEmptyT NumberT), Path "bar" ]
        , Method "users_baz" [ Path "api", Path "users", Dyn StringT, Path "baz" ]
        , Method "users_foo" [ Path "api", Path "users", Dyn NumberT, Path "foo" ]
        ]

    it "should respect overrides " $ do
      let classes = genFlowClasses (M.fromList [("UserId", StringT)]) [] [] resources
          apiClass = find ((== "PATHS_TYPE_paths_api") . className) classes
      map classMembers apiClass `shouldBe` Just
        [ Method "users"     [ Path "api", Path "users", Dyn StringT ]
        , Method "users_bar" [ Path "api", Path "users", Dyn (NonEmptyT StringT), Path "bar" ]
        , Method "users_baz" [ Path "api", Path "users", Dyn StringT, Path "baz" ]
        , Method "users_foo" [ Path "api", Path "users", Dyn NumberT, Path "foo" ]
        ]

  describe "classesToFlow" $ do
    it "should generate formal parameters for each path variable" $ do
      let cls =
            [ Class "x"
                [ Method "y"
                    [ Path "x", Dyn StringT, Path "y", Dyn NumberT, Path "z", Dyn StringT ]
                ]
            ]
      normalizeText (classesToFlow cls) `shouldBe` normalizeText [st|
        class x {
          y(a: string, aa: number, aaa: string): string {
            return this.root + '/x/' + a + '/y/' + aa.toString() + '/z/' + aaa + '';
          }
          root: string;
          constructor(root: string) { this.root = root; }
        }
      |]

    it "should avoid name shadowing in nested binders" $ do
      let cls =
            [ Class "x"
                [ Method "y" [ Path "x", Dyn (NonEmptyT (NonEmptyT NumberT)), Path "y" ] ]
            ]
      normalizeText (classesToFlow cls) `shouldBe` normalizeText [st|
        class x {
          y(a: Array<Array<number>>): string {
            return this.root + '/x/' + a.map(function(a1) { return a1.map(function(a2) { return a2.toString() }).join(',') }).join(',') + '/y';
          }
          root: string;
          constructor(root: string) { this.root = root; }
        }
      |]

  describe "genFlowSource" $
    it "should work end-to-end" $ do
      let source = genFlowSource M.empty [] [] "'test'" resources
      normalizeText source `shouldBe` normalizeText [st|
        /* @flow */
        class PATHS_TYPE_paths {
          api : PATHS_TYPE_paths_api;
          static_pages : PATHS_TYPE_paths_static_pages;
          root: string;
          constructor(root: string) {
            this.root = root;
            this.api = new PATHS_TYPE_paths_api(root);
            this.static_pages = new PATHS_TYPE_paths_static_pages(root);
          }
        }
        class PATHS_TYPE_paths_static_pages {
          root: string;
          constructor(root: string) { this.root = root; }
        }
        class PATHS_TYPE_paths_api {
          users(a: number): string {
            return this.root + '/api/users/' + a.toString() + '';
          }
          users_bar(a: Array<number>): string {
            return this.root + '/api/users/' + a.map(function(a1) { return a1.toString() }).join(',') + '/bar';
          }
          users_baz(a: string): string {
            return this.root + '/api/users/' + a + '/baz';
          }
          users_foo(a: number): string {
            return this.root + '/api/users/' + a.toString() + '/foo';
          }
          root: string;
          constructor(root: string) { this.root = root; }
        }
        var PATHS: PATHS_TYPE_paths = new PATHS_TYPE_paths('test');
      |]
frontrowed/yesod-routes-flow
test/GeneratorSpec.hs
Haskell
mit
5,708
module System.Logging.Facade.Types where

data LogLevel = TRACE | DEBUG | INFO | WARN | ERROR
  deriving (Eq, Show, Ord, Bounded, Enum)

data Location = Location {
  locationPackage :: String
, locationModule :: String
, locationFile :: String
, locationLine :: Int
, locationColumn :: Int
} deriving (Eq, Show)

data LogRecord = LogRecord {
  logRecordLevel :: LogLevel
, logRecordLocation :: Maybe Location
, logRecordMessage :: String
} deriving (Eq, Show)
beni55/logging-facade
src/System/Logging/Facade/Types.hs
Haskell
mit
460
{-|
Module      : TestPrint.Problem.BSP.CNF
Description : The CNF type Printer tests
Copyright   : (c) Andrew Burnett 2014-2015
Maintainer  : [email protected]
Stability   : experimental
Portability : Unknown

The Test Tree Node for the CNF type's Printer Tests
-}

module TestPrint.Problem.BSP.CNF (
  printer, -- TestTree
  cnfList  -- :: [CNF]
  ) where

import HSat.Problem.BSP.CNF
import HSat.Problem.BSP.Common
import TestPrint
import qualified TestPrint.Problem.BSP.CNF.Builder as CNFBuilder

name :: String
name = "CNF"

printer :: TestTree
printer =
  testGroup name [
    printList "CNF" cnfList,
    CNFBuilder.printer
    ]

cnfList :: [CNF]
cnfList = map (mkCNFFromClauses . mkClausesFromIntegers) [
  [],
  [[-1,-2,-3],[],[1,2,3]],
  [ [-53,-345,234,237],
    [-24,24,675,1346],
    [2467,860,-1,2]
  ]
  ]
aburnett88/HSat
tests-src/TestPrint/Problem/BSP/CNF.hs
Haskell
mit
867
{-# LANGUAGE OverloadedStrings #-}
module Web.Twitter.PleaseCaption.Status
  ( getExtendedEntities
  , hasPhotoEntities
  , hasAltText
  , asText
  ) where

import Data.Maybe (fromMaybe, isJust)
import qualified Data.Text as Text
import Web.Twitter.Types (Status(..), ExtendedEntities(..), ExtendedEntity(..), Entity(..), User(..))

getExtendedEntities :: Status -> [ExtendedEntity]
getExtendedEntities status =
  fromMaybe [] $ map entityBody . exeMedia <$> statusExtendedEntities status

hasPhotoEntities :: Status -> Bool
hasPhotoEntities status =
  let entities = getExtendedEntities status
  in (not $ null entities) && all (=="photo") (map exeType entities)

hasAltText :: Status -> Bool
hasAltText status =
  let entities = getExtendedEntities status
  in all (isJust . exeExtAltText) entities

asText :: Status -> Text.Text
asText Status { statusText = text
              , statusUser = User { userScreenName = screenName }} =
  Text.concat [ "@", screenName, ":", text ]
stillinbeta/pleasecaption
src/Web/Twitter/PleaseCaption/Status.hs
Haskell
mit
1,169
module TruthTable.Entry where

import TruthTable.Logic
import TruthTable.Parser
import TruthTable.Printing
import System.Environment
import Data.List

entry :: IO ()
entry = getArgs >>=
        return . intercalate " " >>=
        putStrLn . printResultOrErrorWithDefaultConfig . genTruthTable . parseGrammar . lexer
tjakway/truth-table-generator
src/TruthTable/Entry.hs
Haskell
mit
323
{- Alec Snyder - hw 10
   Globber Test Suite
   github repo: https://github.com/allonsy/globber
-}

module Main (main) where

import Test.Hspec

import Globber

main :: IO ()
main = hspec $ describe "Testing Globber" $ do

    describe "empty pattern" $ do
      it "matches empty string" $
        matchGlob "" "" `shouldBe` True
      it "shouldn't match non-empty string" $
        matchGlob "" "string" `shouldBe` False

    describe "String literals" $ do
      it "matches literal string" $
        matchGlob "hello" "hello" `shouldBe` True
      it "shouldn't match literal string" $
        matchGlob "hello" "string" `shouldBe` False

    describe "Question Marks" $ do
      it "should match question mark" $
        matchGlob "h?llo" "hallo" `shouldBe` True
      it "Shouldn't match the empty string" $
        matchGlob "?" "" `shouldBe` False
      it "Shouldn't match more empty strings" $
        matchGlob "hello?" "hello" `shouldBe` False
      it "shouldn't match question mark" $
        matchGlob "hel?o" "hallo" `shouldBe` False

    describe "Asterisks" $ do
      it "Matches everything" $
        matchGlob "*" "" `shouldBe` True
      it "matches trailing asterisks" $
        matchGlob "he*" "hello" `shouldBe` True
      it "matches leading asterisk" $
        matchGlob "*.txt" "file.txt" `shouldBe` True
      it "matches double asterisk" $
        matchGlob "*l*" "hello" `shouldBe` True
      it "should fail on bad non asterisk match" $
        matchGlob "*.txt" "file.tar" `shouldBe` False

    describe "Escape Sequences" $ do
      -- Note that when we encode strings here, the backslash is escaped so that the backslash is part of the string
      it "matches escaped regular character" $
        matchGlob "\\h\\e\\l\\l\\o" "hello" `shouldBe` True
      it "matches escaped questions" $
        matchGlob "hello\\?" "hello?" `shouldBe` True
      it "matches escaped asterisk" $
        matchGlob "\\*hello\\*" "*hello*" `shouldBe` True
      it "matches escaped backslash" $
        matchGlob "\\\\hello" "\\hello" `shouldBe` True
      it "shouldn't glob on escaped asterisk" $
        matchGlob "\\*.txt" "file.txt" `shouldBe` False
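The suite above assumes a Globber module exporting matchGlob, applied to a pattern string and a subject string and returning Bool. A minimal matcher consistent with these cases might look like the following sketch; it is an illustration only, not the repository's actual implementation, and the real module may use a dedicated pattern type or a different algorithm.

-- Sketch only (assumed interface): a naive recursive matcher for '?', '*', and '\' escapes.
matchGlob :: String -> String -> Bool
matchGlob []          s      = null s
matchGlob ('\\':c:ps) (x:xs) = c == x && matchGlob ps xs          -- escaped literal character
matchGlob ('\\':_)    _      = False                              -- escape with nothing left to match
matchGlob ('?':ps)    (_:xs) = matchGlob ps xs                    -- '?' consumes exactly one character
matchGlob ('*':ps)    s      = matchGlob ps s
                               || (not (null s) && matchGlob ('*':ps) (tail s))  -- '*' consumes zero or more
matchGlob (p:ps)      (x:xs) = p == x && matchGlob ps xs          -- plain literal character
matchGlob _           []     = False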
allonsy/globber
TestGlobber.hs
Haskell
mit
2,210
module Tyle.Parsers where

import Text.Parsec
import Text.Parsec.Char
import Text.Parsec.String
import Text.Parsec.Combinator
import Control.Monad (liftM)

import Tyle.Grammar

lambda :: Parser String
lambda = choice $ map string [ "lambda"
                             , "\\"
                             , "\955"
                             ]

term :: Parser Term
term = do
  x <- lower
  xs <- many (alphaNum <|> oneOf "_-><")
  return (x:xs)

variable :: Parser Expr
variable = liftM Var term

application :: Parser Expr
application = try $ do
  t0 <- term
  spaces
  t1 <- term
  return (App t0 t1)

typeTerm :: Parser Term
typeTerm = try $ do
  t <- upper
  ts <- many alphaNum
  return (t:ts)

tyleType :: Parser Type
tyleType = char ':' >> spaces >> liftM Type typeTerm

function :: Parser Expr
function = try $ do
  lambda >> spaces
  t0 <- term
  t1 <- tyleType
  char '.' >> spaces
  exp <- expression
  return (Fun t0 t1 exp)

expression :: Parser Expr
expression = choice [ function
                    , application
                    , variable
                    ]

program :: Parser [Expr]
program = many1 $ expression <* (spaces >> char '#')
gambogi/tyle
Tyle/Parsers.hs
Haskell
mit
1,188
{-# htermination enumFromThen :: () -> () -> [()] #-}
ComputationWithBoundedResources/ara-inference
doc/tpdb_trs/Haskell/full_haskell/Prelude_enumFromThen_2.hs
Haskell
mit
54
-- Getting the last element from a list of n numbers using recursion
my_last n = if length n == 0
              then (error "Empty list has no end defined")
              else if length n == 1
                     then head n
                     else my_last (tail n)

main = print (my_last [1,2,3,4])
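A pattern-matching variant of the same recursion avoids the repeated length traversals; this is an illustrative sketch only, and the name my_last' is not part of the original file.

-- Illustrative alternative (hypothetical my_last'): pattern matching makes the
-- empty and single-element cases explicit and avoids calling length on every step.
my_last' :: [a] -> a
my_last' []     = error "Empty list has no end defined"
my_last' [x]    = x
my_last' (_:xs) = my_last' xs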
creativcoder/recurse
99Problems/last_element.hs
Haskell
mit
285
----------------------------------------------------------------------------- -- -- Module : Drool.UI.GLWindow -- Copyright : Tobias Fuchs -- License : AllRightsReserved -- -- Maintainer : [email protected] -- Stability : experimental -- Portability : POSIX -- -- | -- ----------------------------------------------------------------------------- {-# OPTIONS -O2 -Wall #-} module Drool.UI.GLWindow ( initComponent ) where import Debug.Trace import Data.IORef(IORef, readIORef, newIORef, modifyIORef, writeIORef ) import Data.Array.IO import Control.Monad.Trans ( liftIO ) import qualified Graphics.UI.Gtk.Builder as GtkBuilder import Graphics.Rendering.OpenGL as GL import qualified Graphics.Rendering.FTGL as FTGL import Graphics.UI.Gtk as Gtk import qualified Graphics.UI.GLUT as GLUT ( initialize ) import qualified Graphics.UI.Gtk.OpenGL as GtkGL import qualified Drool.Utils.SigGen as SigGen ( SignalGenerator(..) ) import qualified Drool.Utils.Conversions as Conv import qualified Drool.Utils.RenderHelpers as RH import qualified Drool.Utils.FeatureExtraction as FE ( SignalFeaturesList(..), FeatureTarget(..) ) import qualified Drool.Types as DT import qualified Drool.ApplicationContext as AC import qualified Drool.ContextObjects as AC import qualified Control.Monad as M ( forever, forM ) import qualified Control.Concurrent as C import qualified Control.Concurrent.MVar as MV ( MVar, swapMVar, takeMVar, putMVar ) import qualified Control.Concurrent.Chan as CC ( readChan ) import qualified Drool.UI.Visuals as Visuals import Graphics.UI.GLUT ( Object(Sphere'), renderObject, Flavour(..) ) display :: IORef AC.ContextSettings -> IORef RH.RenderSettings -> [ IORef (Visuals.Visual) ] -> IO () display contextSettingsIORef renderSettingsIORef visualIORefs = do contextSettings <- readIORef contextSettingsIORef let blendModeFMSource = Conv.blendModeSourceFromIndex $ AC.blendModeFMSourceIdx contextSettings blendModeFMFrameBuffer = Conv.blendModeFrameBufferFromIndex $ AC.blendModeFMFrameBufferIdx contextSettings blendModeMBSource = Conv.blendModeSourceFromIndex $ AC.blendModeMBSourceIdx contextSettings blendModeMBFrameBuffer = Conv.blendModeFrameBufferFromIndex $ AC.blendModeMBFrameBufferIdx contextSettings clear [ColorBuffer, DepthBuffer] preservingMatrix $ do -- Render foreground model: displayModel contextSettingsIORef renderSettingsIORef (visualIORefs !! 0) blendFunc $= (blendModeMBSource, blendModeMBFrameBuffer) preservingMatrix $ do -- Render middleground model: displayModel contextSettingsIORef renderSettingsIORef (visualIORefs !! 1) blendFunc $= (blendModeFMSource, blendModeFMFrameBuffer) preservingMatrix $ do -- Render middleground model: displayModel contextSettingsIORef renderSettingsIORef (visualIORefs !! 
2) return () displayModel :: IORef AC.ContextSettings -> IORef RH.RenderSettings -> IORef (Visuals.Visual) -> IO () displayModel contextSettingsIORef renderSettingsIORef visualIORef = do -- {{{ -- renderSettingsPrev <- readIORef renderSettingsIORef contextSettings <- readIORef contextSettingsIORef renderSettingsPrev <- readIORef renderSettingsIORef visual <- readIORef visualIORef let timeoutMs = (Conv.freqToMs $ AC.renderingFrequency contextSettings) let tick = RH.tick renderSettingsPrev let tickMs = tick * timeoutMs let renderSettingsCurr = renderSettingsPrev { RH.tick = (tick+1) `mod` 1000 } let samplingSem = RH.samplingSem renderSettingsCurr signalBuf <- readIORef (RH.signalBuf renderSettingsCurr) featuresBuf <- readIORef (RH.featuresBuf renderSettingsCurr) let featuresCurr = head (FE.signalFeaturesList featuresBuf) -- Wait until there is at least one signal ready for rendering. let nNewSignals = RH.numNewSignals renderSettingsCurr -- Load most recent signal from buffer (last signal in list): let recentSignal = DT.getRecentSignal signalBuf let lastSignal = DT.getLastSignal signalBuf -- Get length of most recent signal (= number of samples per signal): numSamplesCurr <- case recentSignal of Just s -> do signalBounds <- getBounds $ DT.signalArray s return $ rangeSize signalBounds Nothing -> return 0 numSamplesLast <- case lastSignal of Just s -> do signalBounds <- getBounds $ DT.signalArray s return $ rangeSize signalBounds Nothing -> return 0 let nSamples = min numSamplesCurr numSamplesLast let nSignals = length $ DT.signalList signalBuf modifyIORef renderSettingsIORef ( \_ -> renderSettingsCurr { RH.numSignals = nSignals, RH.numNewSignals = nNewSignals, RH.numSamples = nSamples } ) renderSettings <- readIORef renderSettingsIORef let accIncRotation = (AC.incRotationAccum contextSettings) let incRotationStep = (AC.incRotation contextSettings) let nextIncRotation = DT.CRotationVector { DT.rotY = (DT.rotY accIncRotation + DT.rotY incRotationStep), DT.rotX = (DT.rotX accIncRotation + DT.rotX incRotationStep), DT.rotZ = (DT.rotZ accIncRotation + DT.rotZ incRotationStep) } modifyIORef contextSettingsIORef (\settings -> settings { AC.incRotationAccum = nextIncRotation } ) -- Push new signal(s) to visual: (Visuals.update visual) renderSettings tick visualUpdated <- readIORef visualIORef matrixMode $= Projection loadIdentity perspective (realToFrac (AC.viewAngle contextSettings)) (fromIntegral canvasInitWidth / fromIntegral canvasInitHeight) 0.1 100 matrixMode $= Modelview 0 loadIdentity let hScale = (AC.scaling contextSettings) / (100.0::Float) surfOpacity = (AC.surfaceOpacity contextSettings) / (100.0::GLfloat) fixedRotation = AC.fixedRotation contextSettings accIncRotation = AC.incRotationAccum contextSettings viewDistance = AC.viewDistance contextSettings maxNumSignals = AC.signalBufferSize contextSettings lightPos0 = RH.lightPos0 renderSettingsCurr lightPos1 = RH.lightPos1 renderSettingsCurr vPerspective = AC.renderPerspective contextSettings let updatePerspective p = if AC.autoPerspectiveSwitch contextSettings && tickMs >= AC.autoPerspectiveSwitchInterval contextSettings then ( do let nextPerspective = RH.nextPerspective p modifyIORef contextSettingsIORef ( \_ -> contextSettings { AC.renderPerspective = nextPerspective } ) modifyIORef renderSettingsIORef ( \_ -> renderSettingsCurr { RH.tick = 0 } ) return nextPerspective ) else return p curPerspective <- updatePerspective vPerspective let blendModeSource = Conv.blendModeSourceFromIndex $ AC.blendModeSourceIdx contextSettings let 
blendModeFrameBuffer = Conv.blendModeFrameBufferFromIndex $ AC.blendModeFrameBufferIdx contextSettings -- blendFunc $= (blendModeSource, blendModeFrameBuffer) clear [DepthBuffer] preservingMatrix $ do let lightIntensity = lCoeff + bCoeff where (lCoeff,bCoeff) = RH.featuresToIntensity featuresCurr FE.GlobalTarget contextSettings RH.useLight $ (AC.light0 contextSettings) { AC.lightIntensity = lightIntensity } RH.useLight $ (AC.light1 contextSettings) { AC.lightIntensity = lightIntensity } GL.position (Light 0) $= lightPos0 GL.position (Light 1) $= lightPos1 --------------------------------------------------------------------------------------------------- -- Perspective transformations --------------------------------------------------------------------------------------------------- GL.translate $ Vector3 0 0 viewDistance RH.applyPerspective curPerspective RH.applyGlobalRotation fixedRotation accIncRotation --------------------------------------------------------------------------------------------------- -- End of perspective transformations --------------------------------------------------------------------------------------------------- -- (visWidth,visHeight,visDepth) <- (Visuals.dimensions visualUpdated) -- fogMode $= Linear 0.0 (visDepth * 20.0) -- fogColor $= (Color4 0.0 0.0 0.0 1.0) -- GL.translate $ Vector3 (-0.5 * visWidth) 0 0 -- GL.translate $ Vector3 0 0 (-0.5 * visDepth) Visuals.render visualUpdated -- }}} reshape :: IORef AC.ContextSettings -> Gtk.Rectangle -> IO () reshape settingsIORef allocation = do let rectangleWidthHeight (Gtk.Rectangle _ _ w' h') = (w',h') let (w,h) = rectangleWidthHeight allocation settings <- readIORef settingsIORef let viewAngle = AC.viewAngle settings matrixMode $= Projection viewport $= (Position 0 0, Size (fromIntegral w) (fromIntegral h)) perspective (realToFrac viewAngle) (fromIntegral w / fromIntegral h) 0.1 100 matrixMode $= Modelview 0 return () -- Component init interface for main UI. 
initComponent :: GtkBuilder.Builder -> IORef AC.ContextSettings -> IORef AC.ContextObjects -> IO () initComponent _ contextSettingsIORef contextObjectsIORef = do -- {{{ window <- Gtk.windowNew _ <- GLUT.initialize "drool visualizer" [] Gtk.set window [ Gtk.containerBorderWidth := 0, Gtk.windowTitle := "drool visualizer" ] putStrLn "Initializing OpenGL viewport" glConfig <- GtkGL.glConfigNew [GtkGL.GLModeRGBA, GtkGL.GLModeMultiSample, GtkGL.GLModeStencil, GtkGL.GLModeDouble, GtkGL.GLModeDepth, GtkGL.GLModeAlpha] _ <- GtkGL.initGL canvas <- GtkGL.glDrawingAreaNew glConfig cObjects <- readIORef contextObjectsIORef cSettings <- readIORef contextSettingsIORef let sigGen = AC.signalGenerator cObjects let renderSettings = RH.RenderSettings { RH.signalGenerator = sigGen, RH.samplingSem = AC.samplingSem cObjects, RH.numNewSignalsChan = AC.numNewSignalsChan cObjects, RH.signalBuf = AC.signalBuf cObjects, RH.featuresBuf = AC.featuresBuf cObjects, RH.lightPos0 = (Vertex4 (-1.0) 3.0 (-2.0) 0.0), RH.lightPos1 = (Vertex4 1.0 3.0 2.0 0.0), RH.numSignals = 0, RH.numNewSignals = 0, RH.numSamples = SigGen.numSamples sigGen, RH.tick = 0 } renderSettingsIORef <- newIORef renderSettings let visualForegroundIORef = AC.visualForeground cObjects visualMiddlegroundIORef = AC.visualMiddleground cObjects visualBackgroundIORef = AC.visualBackground cObjects -- Initialise some GL setting just before the canvas first gets shown -- (We can't initialise these things earlier since the GL resources that -- we are using wouldn't have been setup yet) _ <- Gtk.onRealize canvas $ GtkGL.withGLDrawingArea canvas $ \_ -> do -- {{{ -- depthMask $= Disabled -- dither $= Enabled normalize $= Enabled -- Automatically normaliye normal vectors to (-1.0,1.0) shadeModel $= Smooth depthFunc $= Just Less -- polygonSmooth $= Enabled -- lineSmooth $= Enabled lighting $= Enabled light (Light 0) $= Enabled light (Light 1) $= Enabled frontFace $= CCW blend $= Enabled multisample $= Enabled sampleAlphaToCoverage $= Enabled -- fog $= Enabled lineWidthRange <- GL.get smoothLineWidthRange lineWidth $= fst lineWidthRange -- use thinnest possible lines colorMaterial $= Just (FrontAndBack, AmbientAndDiffuse) let blendModeSource = Conv.blendModeSourceFromIndex $ AC.blendModeSourceIdx cSettings let blendModeFrameBuffer = Conv.blendModeFrameBufferFromIndex $ AC.blendModeFrameBufferIdx cSettings blendFunc $= (blendModeSource, blendModeFrameBuffer) hint PerspectiveCorrection $= Nicest -- hint PolygonSmooth $= Nicest -- hint LineSmooth $= Nicest matrixMode $= Projection loadIdentity viewport $= (Position 0 0, Size (fromIntegral canvasInitWidth) (fromIntegral canvasInitHeight)) perspective (realToFrac $ AC.viewAngle cSettings) (fromIntegral canvasInitWidth / fromIntegral canvasInitHeight) 0.1 10 matrixMode $= Modelview 0 loadIdentity return () -- }}} -- OnShow handler for GL canvas: _ <- Gtk.onExpose canvas $ \_ -> do GtkGL.withGLDrawingArea canvas $ \glwindow -> do let visualModels = [ visualBackgroundIORef, visualMiddlegroundIORef, visualForegroundIORef ] display contextSettingsIORef renderSettingsIORef visualModels GtkGL.glDrawableSwapBuffers glwindow return True -- Resize handler: _ <- Gtk.onSizeAllocate canvas (reshape contextSettingsIORef) -- Add canvas (OpenGL drawing area) to GUI: Gtk.widgetSetSizeRequest canvas canvasInitWidth canvasInitHeight Gtk.set window [ Gtk.containerChild := canvas ] -- Fullscreen mode: _ <- Gtk.on window Gtk.keyPressEvent $ Gtk.tryEvent $ do [Gtk.Control] <- Gtk.eventModifier "f" <- Gtk.eventKeyName liftIO $ 
Gtk.windowSetKeepAbove window True liftIO $ Gtk.windowFullscreen window _ <- Gtk.on window Gtk.keyPressEvent $ Gtk.tryEvent $ do "Escape" <- Gtk.eventKeyName liftIO $ Gtk.windowUnfullscreen window liftIO $ Gtk.windowSetKeepAbove window False let timeoutMs = (Conv.freqToMs $ AC.renderingFrequency cSettings) {- -- Redraw canvas according to rendering frequency: updateCanvasTimer <- Gtk.timeoutAddFull (do Gtk.widgetQueueDraw canvas return True) Gtk.priorityDefaultIdle timeoutMs -- Remove timer for redrawing canvas when closing window: _ <- Gtk.onDestroy window (Gtk.timeoutRemove updateCanvasTimer) -} -- Try to redraw canvas on every new signal: sampleThread <- C.forkOS . M.forever $ do nNewSignals <- MV.takeMVar $ AC.samplingSem cObjects modifyIORef renderSettingsIORef ( \rs -> rs { RH.numNewSignals = nNewSignals } ) Gtk.postGUISync $ Gtk.widgetQueueDraw canvas Gtk.widgetShowAll window -- }}} canvasInitWidth :: Int canvasInitWidth = 800 canvasInitHeight :: Int canvasInitHeight = 600 mulColor4Value :: Color4 GLfloat -> GLfloat -> Color4 GLfloat mulColor4Value (Color4 r g b a) value = Color4 r' g' b' a' where r' = r * value g' = g * value b' = b * value a' = a * value
fuchsto/drool
src/Drool/UI/GLWindow.hs
Haskell
mit
15,073
import System.IO

-- For now, only takes one line

-- Curried function, predicate (Char -> Bool) on String results in List of Strings
splitBy :: (Char -> Bool) -> String -> [String]
-- p is predicate, s is string
-- dropWhile is the remains of taking while (takeWhile) p is satisfied
splitBy p s = case dropWhile p s of
                -- Empty string, return empty List
                "" -> []
                -- String? then do
                -- concatenate w and result of (splitBy p s'')
                -- where w is the longest prefix that continuously fails p
                -- and s'' is the remainder
                s' -> w : splitBy p s''
                      where (w, s'') = break p s'

main = do
  name <- getLine
  -- Dollar sign is syntactic sugar for avoiding parenthesis
  -- in this case, equivalent to (splitBy (==' ') name)
  print $ splitBy (==' ') name
michaelx11/multilingo
StringOperations/src/splitBySpace.hs
Haskell
mit
929
module Examples.Reader where

import Control.Effects.Eff
import Control.Effects.Reader
import Control.Effects.Exception

prg1 = ask

prg1run x = runPure . handle (readerHandler x)

prg1res :: Int -> Int
prg1res x = prg1run x prg1
edofic/effect-handlers
test/Examples/Reader.hs
Haskell
mit
231
{-# LANGUAGE Trustworthy #-}
{-# LANGUAGE ScopedTypeVariables #-}

{- |

This module exports a class 'Groups' that policy modules must define
an instance of to define groups, or mappings between a group
'Principal' and the principals in the group.

An app may then relabel a labeled value by using 'labelRewrite'.

-}

module Hails.PolicyModule.Groups (
   Groups(..)
 , labelRewrite
 ) where

import Data.Monoid
import qualified Data.Set as Set
import qualified Data.Map as Map

import Control.Monad

import LIO
import LIO.DCLabel

import Hails.Database
import Hails.Database.TCB (dbActionPriv, getActionStateTCB)
import Hails.PolicyModule

class PolicyModule pm => Groups pm where
  -- | Typically, the action should expand a principal such as @#group@ to
  -- list of group members @[alice, bob]@.
  groups :: pm                    -- ^ Unused type-enforcing param
         -> DCPriv                -- ^ Policy module privs
         -> Principal             -- ^ Group
         -> DBAction [Principal]  -- ^ Group members
  -- | Endorse the implementation of this instance. Note that this is
  -- reduced to WHNF to catch invalid instances that use 'undefined'.
  --
  -- Example implementation:
  --
  -- > groupsInstanceEndorse _ = MyPolicyModuleTCB {- Leave other values undefined -}
  groupsInstanceEndorse :: pm

-- | Given the policy module (which is used to invoke the right
-- 'groups' function) and labeled value, relabel the value according
-- to the 'Groups' of the policy module. Note that the first argument
-- may be bottom since it is solely used for typing purposes.
labelRewrite :: forall unused_pm a. Groups unused_pm
             => unused_pm
             -- ^ Policy module
             -> DCLabeled a
             -- ^ Label
             -> DBAction (DCLabeled a)
labelRewrite pm lx = withDBContext "labelRewrite" $ do
  -- Make sure that 'groupsInstanceEndorse' is not bottom
  _ <- liftLIO $ evaluate (groupsInstanceEndorse :: unused_pm)
  pmPriv <- getPMPriv
  -- Build map from principals to list of principals
  pMap <- Set.fold (\p act -> act >>= \m -> do
            ps <- groups pm pmPriv p
            return (Map.insert p ps m)) (return Map.empty) principals
  -- Apply map to all principals in the label
  let lnew = (expandPrincipals pMap s) %% (expandPrincipals pMap i)
  -- Relabel labeled value
  liftLIO $ withPMClearanceP pmPriv $ relabelLabeledP pmPriv lnew lx
  where getPMPriv = do
          pmPriv <- dbActionPriv `liftM` getActionStateTCB
          -- Make sure that the underlying policy module
          -- and one named in the first parameter are the same
          case Map.lookup (policyModuleTypeName pm) availablePolicyModules of
            Nothing -> return mempty
            Just (p,_,_) -> return $ if toCNF p == privDesc pmPriv
                                       then pmPriv
                                       else mempty
        -- Modify label by expanding principals according to the map
        expandPrincipals pMap origPrincipals =
          -- Function to fold over disjunctions in a CNF, expanding each
          -- principal with the groups map
          let cFoldF :: Disjunction -> CNF -> CNF
              cFoldF disj accm = (Set.foldr expandOne cFalse $ dToSet disj) /\ accm
              -- Inner fold function, expands a single principal and adds
              -- to a CNF (that represents a Disjunction)
              expandOne :: Principal -> CNF -> CNF
              expandOne princ accm = (dFromList $ pMap Map.! princ) \/ accm
          in Set.foldr cFoldF cTrue $ cToSet origPrincipals
        -- Label components
        s = dcSecrecy   $ labelOf lx
        i = dcIntegrity $ labelOf lx
        -- All unique principals in the label
        principals = getPrincipals s <> getPrincipals i
        -- Get principals from component
        getPrincipals = mconcat . (map dToSet) . Set.elems . cToSet
scslab/hails
Hails/PolicyModule/Groups.hs
Haskell
mit
4,087
{-# LANGUAGE OverloadedStrings #-}
module Main where

-- import Lib

-- main :: IO ()
-- main = someFunc

import Control.Applicative

import App

main :: IO ()
main = run
DominikDitoIvosevic/yafebe
back/app/Main.hs
Haskell
apache-2.0
170
module Main where

import MonteCarloGui

main :: IO ()
main = monteCarloGui
alexandersgreen/alex-haskell
MonteCarloPi/Main.hs
Haskell
apache-2.0
76
-- | The fully-qualified HaskellExpr representation of some functions from base.
module Data.HaskellExpr.Base where

import Data.HaskellExpr

eId :: HaskellExpr (a -> a)
eId = qualified "Prelude" "id"

eConst :: HaskellExpr (a -> b -> a)
eConst = qualified "Prelude" "const"

eFlip :: HaskellExpr ((a -> b -> c) -> b -> a -> c)
eFlip = qualified "Prelude" "flip"

eMap :: HaskellExpr ((a -> b) -> [a] -> [b])
eMap = qualified "Prelude" "map"

eHead :: HaskellExpr ([a] -> a)
eHead = qualified "Prelude" "head"

eComp :: HaskellExpr (b -> c) -> HaskellExpr (a -> b) -> HaskellExpr (a -> c)
eComp = qualifiedInfix "Prelude" "."
gelisam/hawk
src/Data/HaskellExpr/Base.hs
Haskell
apache-2.0
627
{-# OPTIONS -fglasgow-exts #-} ----------------------------------------------------------------------------- {-| Module : QStyleHintReturn.hs Copyright : (c) David Harley 2010 Project : qtHaskell Version : 1.1.4 Modified : 2010-09-02 17:02:35 Warning : this file is machine generated - do not modify. --} ----------------------------------------------------------------------------- module Qtc.Enums.Gui.QStyleHintReturn ( HintReturnType, eSH_Default, eSH_Mask, eSH_Variant , QStyleHintReturnStyleOptionType , QStyleHintReturnStyleOptionVersion ) where import Qtc.Classes.Base import Qtc.ClassTypes.Core (QObject, TQObject, qObjectFromPtr) import Qtc.Core.Base (Qcs, connectSlot, qtc_connectSlot_int, wrapSlotHandler_int) import Qtc.Enums.Base import Qtc.Enums.Classes.Core data CHintReturnType a = CHintReturnType a type HintReturnType = QEnum(CHintReturnType Int) ieHintReturnType :: Int -> HintReturnType ieHintReturnType x = QEnum (CHintReturnType x) instance QEnumC (CHintReturnType Int) where qEnum_toInt (QEnum (CHintReturnType x)) = x qEnum_fromInt x = QEnum (CHintReturnType x) withQEnumResult x = do ti <- x return $ qEnum_fromInt $ fromIntegral ti withQEnumListResult x = do til <- x return $ map qEnum_fromInt til instance Qcs (QObject c -> HintReturnType -> IO ()) where connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler = do funptr <- wrapSlotHandler_int slotHandlerWrapper_int stptr <- newStablePtr (Wrap _handler) withObjectPtr _qsig_obj $ \cobj_sig -> withCWString _qsig_nam $ \cstr_sig -> withObjectPtr _qslt_obj $ \cobj_slt -> withCWString _qslt_nam $ \cstr_slt -> qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr) return () where slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO () slotHandlerWrapper_int funptr stptr qobjptr cint = do qobj <- qObjectFromPtr qobjptr let hint = fromCInt cint if (objectIsNull qobj) then do when (stptr/=ptrNull) (freeStablePtr (castPtrToStablePtr stptr)) when (funptr/=ptrNull) (freeHaskellFunPtr (castPtrToFunPtr funptr)) else _handler qobj (qEnum_fromInt hint) return () eSH_Default :: HintReturnType eSH_Default = ieHintReturnType $ 61440 eSH_Mask :: HintReturnType eSH_Mask = ieHintReturnType $ 61441 eSH_Variant :: HintReturnType eSH_Variant = ieHintReturnType $ 61442 data CQStyleHintReturnStyleOptionType a = CQStyleHintReturnStyleOptionType a type QStyleHintReturnStyleOptionType = QEnum(CQStyleHintReturnStyleOptionType Int) ieQStyleHintReturnStyleOptionType :: Int -> QStyleHintReturnStyleOptionType ieQStyleHintReturnStyleOptionType x = QEnum (CQStyleHintReturnStyleOptionType x) instance QEnumC (CQStyleHintReturnStyleOptionType Int) where qEnum_toInt (QEnum (CQStyleHintReturnStyleOptionType x)) = x qEnum_fromInt x = QEnum (CQStyleHintReturnStyleOptionType x) withQEnumResult x = do ti <- x return $ qEnum_fromInt $ fromIntegral ti withQEnumListResult x = do til <- x return $ map qEnum_fromInt til instance Qcs (QObject c -> QStyleHintReturnStyleOptionType -> IO ()) where connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler = do funptr <- wrapSlotHandler_int slotHandlerWrapper_int stptr <- newStablePtr (Wrap _handler) withObjectPtr _qsig_obj $ \cobj_sig -> withCWString _qsig_nam $ \cstr_sig -> withObjectPtr _qslt_obj $ \cobj_slt -> withCWString _qslt_nam $ \cstr_slt -> qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr) return () where slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO () 
slotHandlerWrapper_int funptr stptr qobjptr cint = do qobj <- qObjectFromPtr qobjptr let hint = fromCInt cint if (objectIsNull qobj) then do when (stptr/=ptrNull) (freeStablePtr (castPtrToStablePtr stptr)) when (funptr/=ptrNull) (freeHaskellFunPtr (castPtrToFunPtr funptr)) else _handler qobj (qEnum_fromInt hint) return () instance QeType QStyleHintReturnStyleOptionType where eType = ieQStyleHintReturnStyleOptionType $ 61440 data CQStyleHintReturnStyleOptionVersion a = CQStyleHintReturnStyleOptionVersion a type QStyleHintReturnStyleOptionVersion = QEnum(CQStyleHintReturnStyleOptionVersion Int) ieQStyleHintReturnStyleOptionVersion :: Int -> QStyleHintReturnStyleOptionVersion ieQStyleHintReturnStyleOptionVersion x = QEnum (CQStyleHintReturnStyleOptionVersion x) instance QEnumC (CQStyleHintReturnStyleOptionVersion Int) where qEnum_toInt (QEnum (CQStyleHintReturnStyleOptionVersion x)) = x qEnum_fromInt x = QEnum (CQStyleHintReturnStyleOptionVersion x) withQEnumResult x = do ti <- x return $ qEnum_fromInt $ fromIntegral ti withQEnumListResult x = do til <- x return $ map qEnum_fromInt til instance Qcs (QObject c -> QStyleHintReturnStyleOptionVersion -> IO ()) where connectSlot _qsig_obj _qsig_nam _qslt_obj _qslt_nam _handler = do funptr <- wrapSlotHandler_int slotHandlerWrapper_int stptr <- newStablePtr (Wrap _handler) withObjectPtr _qsig_obj $ \cobj_sig -> withCWString _qsig_nam $ \cstr_sig -> withObjectPtr _qslt_obj $ \cobj_slt -> withCWString _qslt_nam $ \cstr_slt -> qtc_connectSlot_int cobj_sig cstr_sig cobj_slt cstr_slt (toCFunPtr funptr) (castStablePtrToPtr stptr) return () where slotHandlerWrapper_int :: Ptr fun -> Ptr () -> Ptr (TQObject c) -> CInt -> IO () slotHandlerWrapper_int funptr stptr qobjptr cint = do qobj <- qObjectFromPtr qobjptr let hint = fromCInt cint if (objectIsNull qobj) then do when (stptr/=ptrNull) (freeStablePtr (castPtrToStablePtr stptr)) when (funptr/=ptrNull) (freeHaskellFunPtr (castPtrToFunPtr funptr)) else _handler qobj (qEnum_fromInt hint) return () instance QeVersion QStyleHintReturnStyleOptionVersion where eVersion = ieQStyleHintReturnStyleOptionVersion $ 1
uduki/hsQt
Qtc/Enums/Gui/QStyleHintReturn.hs
Haskell
bsd-2-clause
6,376
----------------------------------------------------------------------------- -- | -- Module : Data.SBV.Compilers.C -- Copyright : (c) Levent Erkok -- License : BSD3 -- Maintainer : [email protected] -- Stability : experimental -- -- Compilation of symbolic programs to C ----------------------------------------------------------------------------- {-# LANGUAGE PatternGuards #-} module Data.SBV.Compilers.C(compileToC, compileToCLib, compileToC', compileToCLib') where import Control.DeepSeq (rnf) import Data.Char (isSpace) import Data.List (nub, intercalate) import Data.Maybe (isJust, isNothing, fromJust) import qualified Data.Foldable as F (toList) import qualified Data.Set as Set (member, toList) import System.FilePath (takeBaseName, replaceExtension) import System.Random import Text.PrettyPrint.HughesPJ import Data.SBV.BitVectors.Data import Data.SBV.BitVectors.AlgReals import Data.SBV.BitVectors.PrettyNum (shex, showCFloat, showCDouble) import Data.SBV.Compilers.CodeGen --------------------------------------------------------------------------- -- * API --------------------------------------------------------------------------- -- | Given a symbolic computation, render it as an equivalent collection of files -- that make up a C program: -- -- * The first argument is the directory name under which the files will be saved. To save -- files in the current directory pass @'Just' \".\"@. Use 'Nothing' for printing to stdout. -- -- * The second argument is the name of the C function to generate. -- -- * The final argument is the function to be compiled. -- -- Compilation will also generate a @Makefile@, a header file, and a driver (test) program, etc. compileToC :: Maybe FilePath -> String -> SBVCodeGen () -> IO () compileToC mbDirName nm f = compileToC' nm f >>= renderCgPgmBundle mbDirName -- | Lower level version of 'compileToC', producing a 'CgPgmBundle' compileToC' :: String -> SBVCodeGen () -> IO CgPgmBundle compileToC' nm f = do rands <- randoms `fmap` newStdGen codeGen SBVToC (defaultCgConfig { cgDriverVals = rands }) nm f -- | Create code to generate a library archive (.a) from given symbolic functions. Useful when generating code -- from multiple functions that work together as a library. -- -- * The first argument is the directory name under which the files will be saved. To save -- files in the current directory pass @'Just' \".\"@. Use 'Nothing' for printing to stdout. -- -- * The second argument is the name of the archive to generate. -- -- * The third argument is the list of functions to include, in the form of function-name/code pairs, similar -- to the second and third arguments of 'compileToC', except in a list. 
compileToCLib :: Maybe FilePath -> String -> [(String, SBVCodeGen ())] -> IO () compileToCLib mbDirName libName comps = compileToCLib' libName comps >>= renderCgPgmBundle mbDirName -- | Lower level version of 'compileToCLib', producing a 'CgPgmBundle' compileToCLib' :: String -> [(String, SBVCodeGen ())] -> IO CgPgmBundle compileToCLib' libName comps = mergeToLib libName `fmap` mapM (uncurry compileToC') comps --------------------------------------------------------------------------- -- * Implementation --------------------------------------------------------------------------- -- token for the target language data SBVToC = SBVToC instance CgTarget SBVToC where targetName _ = "C" translate _ = cgen -- Unexpected input, or things we will probably never support die :: String -> a die msg = error $ "SBV->C: Unexpected: " ++ msg -- Unsupported features, or features TBD tbd :: String -> a tbd msg = error $ "SBV->C: Not yet supported: " ++ msg cgen :: CgConfig -> String -> CgState -> Result -> CgPgmBundle cgen cfg nm st sbvProg -- we rnf the main pg and the sig to make sure any exceptions in type conversion pop-out early enough -- this is purely cosmetic, of course.. = rnf (render sig) `seq` rnf (render (vcat body)) `seq` result where result = CgPgmBundle bundleKind $ filt [ ("Makefile", (CgMakefile flags, [genMake (cgGenDriver cfg) nm nmd flags])) , (nm ++ ".h", (CgHeader [sig], [genHeader bundleKind nm [sig] extProtos])) , (nmd ++ ".c", (CgDriver, genDriver cfg randVals nm ins outs mbRet)) , (nm ++ ".c", (CgSource, body)) ] body = genCProg cfg nm sig sbvProg ins outs mbRet extDecls bundleKind = (cgInteger cfg, cgReal cfg) randVals = cgDriverVals cfg filt xs = [c | c@(_, (k, _)) <- xs, need k] where need k | isCgDriver k = cgGenDriver cfg | isCgMakefile k = cgGenMakefile cfg | True = True nmd = nm ++ "_driver" sig = pprCFunHeader nm ins outs mbRet ins = cgInputs st outs = cgOutputs st mbRet = case cgReturns st of [] -> Nothing [CgAtomic o] -> Just o [CgArray _] -> tbd "Non-atomic return values" _ -> tbd "Multiple return values" extProtos = case cgPrototypes st of [] -> empty xs -> vcat $ text "/* User given prototypes: */" : map text xs extDecls = case cgDecls st of [] -> empty xs -> vcat $ text "/* User given declarations: */" : map text xs ++ [text ""] flags = cgLDFlags st -- | Pretty print a functions type. If there is only one output, we compile it -- as a function that returns that value. Otherwise, we compile it as a void function -- that takes return values as pointers to be updated. pprCFunHeader :: String -> [(String, CgVal)] -> [(String, CgVal)] -> Maybe SW -> Doc pprCFunHeader fn ins outs mbRet = retType <+> text fn <> parens (fsep (punctuate comma (map mkParam ins ++ map mkPParam outs))) where retType = case mbRet of Nothing -> text "void" Just sw -> pprCWord False sw mkParam, mkPParam :: (String, CgVal) -> Doc mkParam (n, CgAtomic sw) = pprCWord True sw <+> text n mkParam (_, CgArray []) = die "mkParam: CgArray with no elements!" mkParam (n, CgArray (sw:_)) = pprCWord True sw <+> text "*" <> text n mkPParam (n, CgAtomic sw) = pprCWord False sw <+> text "*" <> text n mkPParam (_, CgArray []) = die "mPkParam: CgArray with no elements!" mkPParam (n, CgArray (sw:_)) = pprCWord False sw <+> text "*" <> text n -- | Renders as "const SWord8 s0", etc. 
the first parameter is the width of the typefield declSW :: Int -> SW -> Doc declSW w sw = text "const" <+> pad (showCType sw) <+> text (show sw) where pad s = text $ s ++ replicate (w - length s) ' ' -- | Renders as "s0", etc, or the corresponding constant showSW :: CgConfig -> [(SW, CW)] -> SW -> Doc showSW cfg consts sw | sw == falseSW = text "false" | sw == trueSW = text "true" | Just cw <- sw `lookup` consts = mkConst cfg cw | True = text $ show sw -- | Words as it would map to a C word pprCWord :: HasKind a => Bool -> a -> Doc pprCWord cnst v = (if cnst then text "const" else empty) <+> text (showCType v) showCType :: HasKind a => a -> String showCType = show . kindOf -- | The printf specifier for the type specifier :: CgConfig -> SW -> Doc specifier cfg sw = case kindOf sw of KBounded b i -> spec (b, i) KUnbounded -> spec (True, fromJust (cgInteger cfg)) KReal -> specF (fromJust (cgReal cfg)) KFloat -> specF CgFloat KDouble -> specF CgDouble KUninterpreted s -> die $ "uninterpreted sort: " ++ s where spec :: (Bool, Int) -> Doc spec (False, 1) = text "%d" spec (False, 8) = text "%\"PRIu8\"" spec (True, 8) = text "%\"PRId8\"" spec (False, 16) = text "0x%04\"PRIx16\"U" spec (True, 16) = text "%\"PRId16\"" spec (False, 32) = text "0x%08\"PRIx32\"UL" spec (True, 32) = text "%\"PRId32\"L" spec (False, 64) = text "0x%016\"PRIx64\"ULL" spec (True, 64) = text "%\"PRId64\"LL" spec (s, sz) = die $ "Format specifier at type " ++ (if s then "SInt" else "SWord") ++ show sz specF :: CgSRealType -> Doc specF CgFloat = text "%f" specF CgDouble = text "%f" specF CgLongDouble = text "%Lf" -- | Make a constant value of the given type. We don't check for out of bounds here, as it should not be needed. -- There are many options here, using binary, decimal, etc. We simply -- 8-bit or less constants using decimal; otherwise we use hex. -- Note that this automatically takes care of the boolean (1-bit) value problem, since it -- shows the result as an integer, which is OK as far as C is concerned. mkConst :: CgConfig -> CW -> Doc mkConst cfg (CW KReal (CWAlgReal (AlgRational _ r))) = double (fromRational r :: Double) <> sRealSuffix (fromJust (cgReal cfg)) where sRealSuffix CgFloat = text "F" sRealSuffix CgDouble = empty sRealSuffix CgLongDouble = text "L" mkConst cfg (CW KUnbounded (CWInteger i)) = showSizedConst i (True, fromJust (cgInteger cfg)) mkConst _ (CW (KBounded sg sz) (CWInteger i)) = showSizedConst i (sg, sz) mkConst _ (CW KFloat (CWFloat f)) = text $ showCFloat f mkConst _ (CW KDouble (CWDouble d)) = text $ showCDouble d mkConst _ cw = die $ "mkConst: " ++ show cw showSizedConst :: Integer -> (Bool, Int) -> Doc showSizedConst i (False, 1) = text (if i == 0 then "false" else "true") showSizedConst i (False, 8) = integer i showSizedConst i (True, 8) = integer i showSizedConst i t@(False, 16) = text (shex False True t i) <> text "U" showSizedConst i t@(True, 16) = text (shex False True t i) showSizedConst i t@(False, 32) = text (shex False True t i) <> text "UL" showSizedConst i t@(True, 32) = text (shex False True t i) <> text "L" showSizedConst i t@(False, 64) = text (shex False True t i) <> text "ULL" showSizedConst i t@(True, 64) = text (shex False True t i) <> text "LL" showSizedConst i (True, 1) = die $ "Signed 1-bit value " ++ show i showSizedConst i (s, sz) = die $ "Constant " ++ show i ++ " at type " ++ (if s then "SInt" else "SWord") ++ show sz -- | Generate a makefile. The first argument is True if we have a driver. 
genMake :: Bool -> String -> String -> [String] -> Doc genMake ifdr fn dn ldFlags = foldr1 ($$) [l | (True, l) <- lns] where ifld = not (null ldFlags) ld | ifld = text "${LDFLAGS}" | True = empty lns = [ (True, text "# Makefile for" <+> nm <> text ". Automatically generated by SBV. Do not edit!") , (True, text "") , (True, text "# include any user-defined .mk file in the current directory.") , (True, text "-include *.mk") , (True, text "") , (True, text "CC=gcc") , (True, text "CCFLAGS?=-Wall -O3 -DNDEBUG -fomit-frame-pointer") , (ifld, text "LDFLAGS?=" <> text (unwords ldFlags)) , (True, text "") , (ifdr, text "all:" <+> nmd) , (ifdr, text "") , (True, nmo <> text (": " ++ ppSameLine (hsep [nmc, nmh]))) , (True, text "\t${CC} ${CCFLAGS}" <+> text "-c $< -o $@") , (True, text "") , (ifdr, nmdo <> text ":" <+> nmdc) , (ifdr, text "\t${CC} ${CCFLAGS}" <+> text "-c $< -o $@") , (ifdr, text "") , (ifdr, nmd <> text (": " ++ ppSameLine (hsep [nmo, nmdo]))) , (ifdr, text "\t${CC} ${CCFLAGS}" <+> text "$^ -o $@" <+> ld) , (ifdr, text "") , (True, text "clean:") , (True, text "\trm -f *.o") , (True, text "") , (ifdr, text "veryclean: clean") , (ifdr, text "\trm -f" <+> nmd) , (ifdr, text "") ] nm = text fn nmd = text dn nmh = nm <> text ".h" nmc = nm <> text ".c" nmo = nm <> text ".o" nmdc = nmd <> text ".c" nmdo = nmd <> text ".o" -- | Generate the header genHeader :: (Maybe Int, Maybe CgSRealType) -> String -> [Doc] -> Doc -> Doc genHeader (ik, rk) fn sigs protos = text "/* Header file for" <+> nm <> text ". Automatically generated by SBV. Do not edit! */" $$ text "" $$ text "#ifndef" <+> tag $$ text "#define" <+> tag $$ text "" $$ text "#include <inttypes.h>" $$ text "#include <stdint.h>" $$ text "#include <stdbool.h>" $$ text "#include <math.h>" $$ text "" $$ text "/* The boolean type */" $$ text "typedef bool SBool;" $$ text "" $$ text "/* The float type */" $$ text "typedef float SFloat;" $$ text "" $$ text "/* The double type */" $$ text "typedef double SDouble;" $$ text "" $$ text "/* Unsigned bit-vectors */" $$ text "typedef uint8_t SWord8 ;" $$ text "typedef uint16_t SWord16;" $$ text "typedef uint32_t SWord32;" $$ text "typedef uint64_t SWord64;" $$ text "" $$ text "/* Signed bit-vectors */" $$ text "typedef int8_t SInt8 ;" $$ text "typedef int16_t SInt16;" $$ text "typedef int32_t SInt32;" $$ text "typedef int64_t SInt64;" $$ text "" $$ imapping $$ rmapping $$ text ("/* Entry point prototype" ++ plu ++ ": */") $$ vcat (map (<> semi) sigs) $$ text "" $$ protos $$ text "#endif /*" <+> tag <+> text "*/" $$ text "" where nm = text fn tag = text "__" <> nm <> text "__HEADER_INCLUDED__" plu = if length sigs /= 1 then "s" else "" imapping = case ik of Nothing -> empty Just i -> text "/* User requested mapping for SInteger. */" $$ text "/* NB. Loss of precision: Target type is subject to modular arithmetic. */" $$ text ("typedef SInt" ++ show i ++ " SInteger;") $$ text "" rmapping = case rk of Nothing -> empty Just t -> text "/* User requested mapping for SReal. */" $$ text "/* NB. Loss of precision: Target type is subject to rounding. */" $$ text ("typedef " ++ show t ++ " SReal;") $$ text "" sepIf :: Bool -> Doc sepIf b = if b then text "" else empty -- | Generate an example driver program genDriver :: CgConfig -> [Integer] -> String -> [(String, CgVal)] -> [(String, CgVal)] -> Maybe SW -> [Doc] genDriver cfg randVals fn inps outs mbRet = [pre, header, body, post] where pre = text "/* Example driver program for" <+> nm <> text ". */" $$ text "/* Automatically generated by SBV. 
Edit as you see fit! */" $$ text "" $$ text "#include <inttypes.h>" $$ text "#include <stdint.h>" $$ text "#include <stdbool.h>" $$ text "#include <math.h>" $$ text "#include <stdio.h>" header = text "#include" <+> doubleQuotes (nm <> text ".h") $$ text "" $$ text "int main(void)" $$ text "{" body = text "" $$ nest 2 ( vcat (map mkInp pairedInputs) $$ vcat (map mkOut outs) $$ sepIf (not (null [() | (_, _, CgArray{}) <- pairedInputs]) || not (null outs)) $$ call $$ text "" $$ (case mbRet of Just sw -> text "printf" <> parens (printQuotes (fcall <+> text "=" <+> specifier cfg sw <> text "\\n") <> comma <+> resultVar) <> semi Nothing -> text "printf" <> parens (printQuotes (fcall <+> text "->\\n")) <> semi) $$ vcat (map display outs) ) post = text "" $+$ nest 2 (text "return 0" <> semi) $$ text "}" $$ text "" nm = text fn pairedInputs = matchRands (map abs randVals) inps matchRands _ [] = [] matchRands [] _ = die "Run out of driver values!" matchRands (r:rs) ((n, CgAtomic sw) : cs) = ([mkRVal sw r], n, CgAtomic sw) : matchRands rs cs matchRands _ ((n, CgArray []) : _ ) = die $ "Unsupported empty array input " ++ show n matchRands rs ((n, a@(CgArray sws@(sw:_))) : cs) | length frs /= l = die "Run out of driver values!" | True = (map (mkRVal sw) frs, n, a) : matchRands srs cs where l = length sws (frs, srs) = splitAt l rs mkRVal sw r = mkConst cfg $ mkConstCW (kindOf sw) r mkInp (_, _, CgAtomic{}) = empty -- constant, no need to declare mkInp (_, n, CgArray []) = die $ "Unsupported empty array value for " ++ show n mkInp (vs, n, CgArray sws@(sw:_)) = pprCWord True sw <+> text n <> brackets (int (length sws)) <+> text "= {" $$ nest 4 (fsep (punctuate comma (align vs))) $$ text "};" $$ text "" $$ text "printf" <> parens (printQuotes (text "Contents of input array" <+> text n <> text ":\\n")) <> semi $$ display (n, CgArray sws) $$ text "" mkOut (v, CgAtomic sw) = pprCWord False sw <+> text v <> semi mkOut (v, CgArray []) = die $ "Unsupported empty array value for " ++ show v mkOut (v, CgArray sws@(sw:_)) = pprCWord False sw <+> text v <> brackets (int (length sws)) <> semi resultVar = text "__result" call = case mbRet of Nothing -> fcall <> semi Just sw -> pprCWord True sw <+> resultVar <+> text "=" <+> fcall <> semi fcall = nm <> parens (fsep (punctuate comma (map mkCVal pairedInputs ++ map mkOVal outs))) mkCVal ([v], _, CgAtomic{}) = v mkCVal (vs, n, CgAtomic{}) = die $ "Unexpected driver value computed for " ++ show n ++ render (hcat vs) mkCVal (_, n, CgArray{}) = text n mkOVal (n, CgAtomic{}) = text "&" <> text n mkOVal (n, CgArray{}) = text n display (n, CgAtomic sw) = text "printf" <> parens (printQuotes (text " " <+> text n <+> text "=" <+> specifier cfg sw <> text "\\n") <> comma <+> text n) <> semi display (n, CgArray []) = die $ "Unsupported empty array value for " ++ show n display (n, CgArray sws@(sw:_)) = text "int" <+> nctr <> semi $$ text "for(" <> nctr <+> text "= 0;" <+> nctr <+> text "<" <+> int (length sws) <+> text "; ++" <> nctr <> text ")" $$ nest 2 (text "printf" <> parens (printQuotes (text " " <+> entrySpec <+> text "=" <+> spec <> text "\\n") <> comma <+> nctr <+> comma <> entry) <> semi) where nctr = text n <> text "_ctr" entry = text n <> text "[" <> nctr <> text "]" entrySpec = text n <> text "[%d]" spec = specifier cfg sw -- | Generate the C program genCProg :: CgConfig -> String -> Doc -> Result -> [(String, CgVal)] -> [(String, CgVal)] -> Maybe SW -> Doc -> [Doc] genCProg cfg fn proto (Result kindInfo _tvals cgs ins preConsts tbls arrs _ _ (SBVPgm asgns) cstrs _) 
inVars outVars mbRet extDecls | isNothing (cgInteger cfg) && KUnbounded `Set.member` kindInfo = error $ "SBV->C: Unbounded integers are not supported by the C compiler." ++ "\nUse 'cgIntegerSize' to specify a fixed size for SInteger representation." | isNothing (cgReal cfg) && KReal `Set.member` kindInfo = error $ "SBV->C: SReal values are not supported by the C compiler." ++ "\nUse 'cgSRealType' to specify a custom type for SReal representation." | not (null usorts) = error $ "SBV->C: Cannot compile functions with uninterpreted sorts: " ++ intercalate ", " usorts | not (null cstrs) = tbd "Explicit constraints" | not (null arrs) = tbd "User specified arrays" | needsExistentials (map fst ins) = error "SBV->C: Cannot compile functions with existentially quantified variables." | True = [pre, header, post] where usorts = [s | KUninterpreted s <- Set.toList kindInfo] pre = text "/* File:" <+> doubleQuotes (nm <> text ".c") <> text ". Automatically generated by SBV. Do not edit! */" $$ text "" $$ text "#include <inttypes.h>" $$ text "#include <stdint.h>" $$ text "#include <stdbool.h>" $$ text "#include <math.h>" header = text "#include" <+> doubleQuotes (nm <> text ".h") post = text "" $$ vcat (map codeSeg cgs) $$ extDecls $$ proto $$ text "{" $$ text "" $$ nest 2 ( vcat (concatMap (genIO True) inVars) $$ vcat (merge (map genTbl tbls) (map genAsgn assignments)) $$ sepIf (not (null assignments) || not (null tbls)) $$ vcat (concatMap (genIO False) outVars) $$ maybe empty mkRet mbRet ) $$ text "}" $$ text "" nm = text fn assignments = F.toList asgns codeSeg (fnm, ls) = text "/* User specified custom code for" <+> doubleQuotes (text fnm) <+> text "*/" $$ vcat (map text ls) $$ text "" typeWidth = getMax 0 [len (kindOf s) | (s, _) <- assignments] where len (KReal{}) = 5 len (KFloat{}) = 6 -- SFloat len (KDouble{}) = 7 -- SDouble len (KUnbounded{}) = 8 len (KBounded False 1) = 5 -- SBool len (KBounded False n) = 5 + length (show n) -- SWordN len (KBounded True n) = 4 + length (show n) -- SIntN len (KUninterpreted s) = die $ "Uninterpreted sort: " ++ s getMax 8 _ = 8 -- 8 is the max we can get with SInteger, so don't bother looking any further getMax m [] = m getMax m (x:xs) = getMax (m `max` x) xs consts = (falseSW, falseCW) : (trueSW, trueCW) : preConsts isConst s = isJust (lookup s consts) genIO :: Bool -> (String, CgVal) -> [Doc] genIO True (cNm, CgAtomic sw) = [declSW typeWidth sw <+> text "=" <+> text cNm <> semi] genIO False (cNm, CgAtomic sw) = [text "*" <> text cNm <+> text "=" <+> showSW cfg consts sw <> semi] genIO isInp (cNm, CgArray sws) = zipWith genElt sws [(0::Int)..] where genElt sw i | isInp = declSW typeWidth sw <+> text "=" <+> text entry <> semi | True = text entry <+> text "=" <+> showSW cfg consts sw <> semi where entry = cNm ++ "[" ++ show i ++ "]" mkRet sw = text "return" <+> showSW cfg consts sw <> semi genTbl :: ((Int, Kind, Kind), [SW]) -> (Int, Doc) genTbl ((i, _, k), elts) = (location, static <+> text "const" <+> text (show k) <+> text ("table" ++ show i) <> text "[] = {" $$ nest 4 (fsep (punctuate comma (align (map (showSW cfg consts) elts)))) $$ text "};") where static = if location == -1 then text "static" else empty location = maximum (-1 : map getNodeId elts) getNodeId s@(SW _ (NodeId n)) | isConst s = -1 | True = n genAsgn :: (SW, SBVExpr) -> (Int, Doc) genAsgn (sw, n) = (getNodeId sw, declSW typeWidth sw <+> text "=" <+> ppExpr cfg consts n <> semi) -- merge tables intermixed with assignments, paying attention to putting tables as -- early as possible.. 
Note that the assignment list (second argument) is sorted on its order merge :: [(Int, Doc)] -> [(Int, Doc)] -> [Doc] merge [] as = map snd as merge ts [] = map snd ts merge ts@((i, t):trest) as@((i', a):arest) | i < i' = t : merge trest as | True = a : merge ts arest ppExpr :: CgConfig -> [(SW, CW)] -> SBVExpr -> Doc ppExpr cfg consts (SBVApp op opArgs) = p op (map (showSW cfg consts) opArgs) where rtc = cgRTC cfg cBinOps = [ (Plus, "+"), (Times, "*"), (Minus, "-") , (Equal, "=="), (NotEqual, "!="), (LessThan, "<"), (GreaterThan, ">"), (LessEq, "<="), (GreaterEq, ">=") , (And, "&"), (Or, "|"), (XOr, "^") ] p (ArrRead _) _ = tbd "User specified arrays (ArrRead)" p (ArrEq _ _) _ = tbd "User specified arrays (ArrEq)" p (Uninterpreted s) [] = text "/* Uninterpreted constant */" <+> text s p (Uninterpreted s) as = text "/* Uninterpreted function */" <+> text s <> parens (fsep (punctuate comma as)) p (Extract i j) [a] = extract i j (head opArgs) a p Join [a, b] = join (let (s1 : s2 : _) = opArgs in (s1, s2, a, b)) p (Rol i) [a] = rotate True i a (head opArgs) p (Ror i) [a] = rotate False i a (head opArgs) p (Shl i) [a] = shift True i a (head opArgs) p (Shr i) [a] = shift False i a (head opArgs) p Not [a] = case kindOf (head opArgs) of -- be careful about booleans, bitwise complement is not correct for them! KBounded False 1 -> text "!" <> a _ -> text "~" <> a p Ite [a, b, c] = a <+> text "?" <+> b <+> text ":" <+> c p (LkUp (t, k, _, len) ind def) [] | not rtc = lkUp -- ignore run-time-checks per user request | needsCheckL && needsCheckR = cndLkUp checkBoth | needsCheckL = cndLkUp checkLeft | needsCheckR = cndLkUp checkRight | True = lkUp where [index, defVal] = map (showSW cfg consts) [ind, def] lkUp = text "table" <> int t <> brackets (showSW cfg consts ind) cndLkUp cnd = cnd <+> text "?" <+> defVal <+> text ":" <+> lkUp checkLeft = index <+> text "< 0" checkRight = index <+> text ">=" <+> int len checkBoth = parens (checkLeft <+> text "||" <+> checkRight) canOverflow True sz = (2::Integer)^(sz-1)-1 >= fromIntegral len canOverflow False sz = (2::Integer)^sz -1 >= fromIntegral len (needsCheckL, needsCheckR) = case k of KBounded sg sz -> (sg, canOverflow sg sz) KReal -> die "array index with real value" KFloat -> die "array index with float value" KDouble -> die "array index with double value" KUnbounded -> case cgInteger cfg of Nothing -> (True, True) -- won't matter, it'll be rejected later Just i -> (True, canOverflow True i) KUninterpreted s -> die $ "Uninterpreted sort: " ++ s -- Div/Rem should be careful on 0, in the SBV world x `div` 0 is 0, x `rem` 0 is x -- NB: Quot is supposed to truncate toward 0; Not clear to me if C guarantees this behavior. -- Brief googling suggests C99 does indeed truncate toward 0, but other C compilers might differ. p Quot [a, b] = parens (b <+> text "== 0") <+> text "?" <+> text "0" <+> text ":" <+> parens (a <+> text "/" <+> b) p Rem [a, b] = parens (b <+> text "== 0") <+> text "?" 
<+> a <+> text ":" <+> parens (a <+> text "%" <+> b) p o [a, b] | Just co <- lookup o cBinOps = a <+> text co <+> b p o args = die $ "Received operator " ++ show o ++ " applied to " ++ show args shift toLeft i a s | i < 0 = shift (not toLeft) (-i) a s | i == 0 = a | True = case kindOf s of KBounded _ sz | i >= sz -> mkConst cfg $ mkConstCW (kindOf s) (0::Integer) KReal -> tbd $ "Shift for real quantity: " ++ show (toLeft, i, s) _ -> a <+> text cop <+> int i where cop | toLeft = "<<" | True = ">>" rotate toLeft i a s | i < 0 = rotate (not toLeft) (-i) a s | i == 0 = a | True = case kindOf s of KBounded True _ -> tbd $ "Rotation of signed quantities: " ++ show (toLeft, i, s) KBounded False sz | i >= sz -> rotate toLeft (i `mod` sz) a s KBounded False sz -> parens (a <+> text cop <+> int i) <+> text "|" <+> parens (a <+> text cop' <+> int (sz - i)) KUnbounded -> shift toLeft i a s -- For SInteger, rotate is the same as shift in Haskell _ -> tbd $ "Rotation for unbounded quantity: " ++ show (toLeft, i, s) where (cop, cop') | toLeft = ("<<", ">>") | True = (">>", "<<") -- TBD: below we only support the values that SBV actually currently generates. -- we would need to add new ones if we generate others. (Check instances in Data/SBV/BitVectors/Splittable.hs). extract hi lo i a = case (hi, lo, kindOf i) of ( 0, 0, KUnbounded) -> text "(SReal)" <+> a -- special SInteger -> SReal conversion (63, 32, KBounded False 64) -> text "(SWord32)" <+> parens (a <+> text ">> 32") (31, 0, KBounded False 64) -> text "(SWord32)" <+> a (31, 16, KBounded False 32) -> text "(SWord16)" <+> parens (a <+> text ">> 16") (15, 0, KBounded False 32) -> text "(SWord16)" <+> a (15, 8, KBounded False 16) -> text "(SWord8)" <+> parens (a <+> text ">> 8") ( 7, 0, KBounded False 16) -> text "(SWord8)" <+> a -- the followings are used by sign-conversions. (Check instances in Data/SBV/BitVectors/SignCast.hs). (63, 0, KBounded False 64) -> text "(SInt64)" <+> a (63, 0, KBounded True 64) -> text "(SWord64)" <+> a (31, 0, KBounded False 32) -> text "(SInt32)" <+> a (31, 0, KBounded True 32) -> text "(SWord32)" <+> a (15, 0, KBounded False 16) -> text "(SInt16)" <+> a (15, 0, KBounded True 16) -> text "(SWord16)" <+> a ( 7, 0, KBounded False 8) -> text "(SInt8)" <+> a ( 7, 0, KBounded True 8) -> text "(SWord8)" <+> a ( _, _, k ) -> tbd $ "extract with " ++ show (hi, lo, k, i) -- TBD: ditto here for join, just like extract above join (i, j, a, b) = case (kindOf i, kindOf j) of (KBounded False 8, KBounded False 8) -> parens (parens (text "(SWord16)" <+> a) <+> text "<< 8") <+> text "|" <+> parens (text "(SWord16)" <+> b) (KBounded False 16, KBounded False 16) -> parens (parens (text "(SWord32)" <+> a) <+> text "<< 16") <+> text "|" <+> parens (text "(SWord32)" <+> b) (KBounded False 32, KBounded False 32) -> parens (parens (text "(SWord64)" <+> a) <+> text "<< 32") <+> text "|" <+> parens (text "(SWord64)" <+> b) (k1, k2) -> tbd $ "join with " ++ show ((k1, i), (k2, j)) -- same as doubleQuotes, except we have to make sure there are no line breaks.. -- Otherwise breaks the generated code.. sigh printQuotes :: Doc -> Doc printQuotes d = text $ '"' : ppSameLine d ++ "\"" -- Remove newlines.. Useful when generating Makefile and such ppSameLine :: Doc -> String ppSameLine = trim . render where trim "" = "" trim ('\n':cs) = ' ' : trim (dropWhile isSpace cs) trim (c:cs) = c : trim cs -- Align a bunch of docs to occupy the exact same length by padding in the left by space -- this is ugly and inefficient, but easy to code.. 
align :: [Doc] -> [Doc] align ds = map (text . pad) ss where ss = map render ds l = maximum (0 : map length ss) pad s = replicate (l - length s) ' ' ++ s -- | Merge a bunch of bundles to generate code for a library mergeToLib :: String -> [CgPgmBundle] -> CgPgmBundle mergeToLib libName bundles | length nubKinds /= 1 = error $ "Cannot merge programs with differing SInteger/SReal mappings. Received the following kinds:\n" ++ unlines (map show nubKinds) | True = CgPgmBundle bundleKind $ sources ++ libHeader : [libDriver | anyDriver] ++ [libMake | anyMake] where kinds = [k | CgPgmBundle k _ <- bundles] nubKinds = nub kinds bundleKind = head nubKinds files = concat [fs | CgPgmBundle _ fs <- bundles] sigs = concat [ss | (_, (CgHeader ss, _)) <- files] anyMake = not (null [() | (_, (CgMakefile{}, _)) <- files]) drivers = [ds | (_, (CgDriver, ds)) <- files] anyDriver = not (null drivers) mkFlags = nub (concat [xs | (_, (CgMakefile xs, _)) <- files]) sources = [(f, (CgSource, [pre, libHInclude, post])) | (f, (CgSource, [pre, _, post])) <- files] sourceNms = map fst sources libHeader = (libName ++ ".h", (CgHeader sigs, [genHeader bundleKind libName sigs empty])) libHInclude = text "#include" <+> text (show (libName ++ ".h")) libMake = ("Makefile", (CgMakefile mkFlags, [genLibMake anyDriver libName sourceNms mkFlags])) libDriver = (libName ++ "_driver.c", (CgDriver, mergeDrivers libName libHInclude (zip (map takeBaseName sourceNms) drivers))) -- | Create a Makefile for the library genLibMake :: Bool -> String -> [String] -> [String] -> Doc genLibMake ifdr libName fs ldFlags = foldr1 ($$) [l | (True, l) <- lns] where ifld = not (null ldFlags) ld | ifld = text "${LDFLAGS}" | True = empty lns = [ (True, text "# Makefile for" <+> nm <> text ". Automatically generated by SBV. Do not edit!") , (True, text "") , (True, text "# include any user-defined .mk file in the current directory.") , (True, text "-include *.mk") , (True, text "") , (True, text "CC=gcc") , (True, text "CCFLAGS?=-Wall -O3 -DNDEBUG -fomit-frame-pointer") , (ifld, text "LDFLAGS?=" <> text (unwords ldFlags)) , (True, text "AR=ar") , (True, text "ARFLAGS=cr") , (True, text "") , (not ifdr, text ("all: " ++ liba)) , (ifdr, text ("all: " ++ unwords [liba, libd])) , (True, text "") , (True, text liba <> text (": " ++ unwords os)) , (True, text "\t${AR} ${ARFLAGS} $@ $^") , (True, text "") , (ifdr, text libd <> text (": " ++ unwords [libd ++ ".c", libh])) , (ifdr, text ("\t${CC} ${CCFLAGS} $< -o $@ " ++ liba) <+> ld) , (ifdr, text "") , (True, vcat (zipWith mkObj os fs)) , (True, text "clean:") , (True, text "\trm -f *.o") , (True, text "") , (True, text "veryclean: clean") , (not ifdr, text "\trm -f" <+> text liba) , (ifdr, text "\trm -f" <+> text (unwords [liba, libd])) , (True, text "") ] nm = text libName liba = libName ++ ".a" libh = libName ++ ".h" libd = libName ++ "_driver" os = map (`replaceExtension` ".o") fs mkObj o f = text o <> text (": " ++ unwords [f, libh]) $$ text "\t${CC} ${CCFLAGS} -c $< -o $@" $$ text "" -- | Create a driver for a library mergeDrivers :: String -> Doc -> [(FilePath, [Doc])] -> [Doc] mergeDrivers libName inc ds = pre : concatMap mkDFun ds ++ [callDrivers (map fst ds)] where pre = text "/* Example driver program for" <+> text libName <> text ". */" $$ text "/* Automatically generated by SBV. Edit as you see fit! 
*/" $$ text "" $$ text "#include <inttypes.h>" $$ text "#include <stdint.h>" $$ text "#include <stdbool.h>" $$ text "#include <math.h>" $$ text "#include <stdio.h>" $$ inc mkDFun (f, [_pre, _header, body, _post]) = [header, body, post] where header = text "" $$ text ("void " ++ f ++ "_driver(void)") $$ text "{" post = text "}" mkDFun (f, _) = die $ "mergeDrivers: non-conforming driver program for " ++ show f callDrivers fs = text "" $$ text "int main(void)" $$ text "{" $+$ nest 2 (vcat (map call fs)) $$ nest 2 (text "return 0;") $$ text "}" call f = text psep $$ text ptag $$ text psep $$ text (f ++ "_driver();") $$ text "" where tag = "** Driver run for " ++ f ++ ":" ptag = "printf(\"" ++ tag ++ "\\n\");" lsep = replicate (length tag) '=' psep = "printf(\"" ++ lsep ++ "\\n\");"
dylanmc/cryptol
sbv/Data/SBV/Compilers/C.hs
Haskell
bsd-3-clause
39,515
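The backend above turns a symbolic SBV program into a C translation unit; the knobs it checks for (`cgIntegerSize` for SInteger, `cgSRealType` for SReal) are set from user code-generation scripts. A minimal, hedged sketch of such a script follows, assuming SBV's usual code-generation entry points (`compileToC`, `cgInput`, `cgReturn`); their exact module locations and signatures vary between SBV versions, and the function name `addTwo` is made up for illustration.

-- Sketch only: drive a C backend like the one above via SBV's public
-- code-generation API. compileToC/cgInput/cgReturn/cgIntegerSize are SBV's
-- usual entry points, but exact locations/signatures depend on the SBV version.
import Data.SBV

genAddTwo :: IO ()
genAddTwo = compileToC (Just "out") "addTwo" $ do
  cgIntegerSize 64                       -- fixed C type for any SInteger values
  x <- cgInput "x" :: SBVCodeGen SWord32
  y <- cgInput "y"                       -- inferred as SWord32 from (x + y)
  cgReturn (x + y)                       -- becomes the generated function's result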
{-# OPTIONS_GHC -Wall #-} {-# LANGUAGE TupleSections #-} module Algw.Infer where import Algw.Ast import Algw.Type import Algw.Env import State import Data.IORef import Data.Maybe import Control.Monad import qualified Data.Map as M import qualified Data.Set as S makeNewVar :: Infer (Infer TName) makeNewVar = do r <- newIORef 'a' -- return a closure like structure to mock a generator return $ do v <- readIORef r modifyIORef r succ return [v] generalize :: Env -> T -> Scheme generalize env t = let fvs = freeVars t `S.difference` freeVars env -- cause poly type here can only hold single quantified type var in S.fold Poly (Mono t) fvs replaceFreeVars :: Scheme -> Subrule -> T replaceFreeVars (Mono t) s = subst s t replaceFreeVars (Poly _ t) s = replaceFreeVars t s -- just replace quantified type variables by fresh ones to make it monomorphic instantiate :: Infer TName -> Scheme -> Infer T -- each poly type hold single quantified type variable is not really a good design, but just to be compatible with the origin paper -- τ ::= α | ι | τ → τ -- σ ::= τ | ∀α. σ instantiate newVar t = let boundVars = allVars t `S.difference` freeVars t -- update quantified type variable with fresh one update acc a = do fresh <- fmap TVar newVar return $ M.insert a fresh acc replace = foldM update M.empty boundVars -- applicative functor in pure (replaceFreeVars t) <*> replace occurs :: TName -> T -> Bool occurs a t = a `S.member` freeVars t makeSingleSubrule :: TName -> T -> Infer Subrule makeSingleSubrule a t | t == TVar a = return emptyRule | occurs a t = error "occurs check fails" | otherwise = return $ M.singleton a t -- find mgu(most general unifier) of two types unify :: T -> T -> Infer Subrule unify TInt TInt = return emptyRule unify TBool TBool = return emptyRule unify (TVar n) t = makeSingleSubrule n t unify t (TVar n) = makeSingleSubrule n t unify (TArrow tl1 tr1) (TArrow tl2 tr2) = do s1 <- unify tl1 tl2 s2 <- subst s1 tr1 `unify` subst s1 tr2 return $ s2 `compose` s1 unify t1 t2 = error $ "types do not unify: " ++ show t1 ++ " vs. " ++ show t2 -- just like assoc in clojure assocEnv :: TName -> Scheme -> Env -> Env assocEnv n v env = M.insert n v $ M.delete n env algw :: Infer TName -> Env -> Expr -> IO (Subrule, T) algw newVar env (EVar name) = (emptyRule,) <$> instantiate newVar t -- pure (emptyRule,) <*> instantiate newVar t is also fine where t = fromMaybe (error $ "unbound variable: " ++ name) $ M.lookup name env {- t <- fmap TVar newVar will work because instance Functor IO where fmap f action = do result <- action return (f result) -} algw newVar env (EAbs name expr) = do fresh <- fmap TVar newVar let env' = assocEnv name (Mono fresh) env (subrule, mono) <- algw newVar env' expr return (subrule, subst subrule fresh `TArrow` mono) algw newVar env (EApp e1 e2) = do (s1, m1) <- algw newVar env e1 (s2, m2) <- algw newVar (subst s1 env) e2 fresh <- fmap TVar newVar s3 <- unify (subst s2 m1) (TArrow m2 fresh) return (s3 `compose` s2 `compose` s1, subst s3 fresh) algw newVar env (ELet name value body) = do (s1, vmono) <- algw newVar env value let env' = subst s1 env g = generalize env' vmono env'' = assocEnv name g env' (s2, bmono) <- algw newVar env'' body return (s2 `compose` s1, bmono) -- environment is assumptions at the initial state infer :: Env -> Expr -> IO T infer env expr = do newVar <- makeNewVar (_, t) <- algw newVar env expr return t
zjhmale/HMF
src/Algw/Infer.hs
Haskell
bsd-3-clause
3,788
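A small usage sketch for the `infer` entry point above: the `Expr` constructors (`EVar`, `EAbs`, `EApp`, `ELet`) are the ones matched by `algw`, `Env` is assumed to be the `Data.Map` manipulated by `assocEnv`, and the `Show` instance for `T` (already relied on by `unify`'s error message) does the printing.

-- Sketch: infer the type of   let id = \x -> x in id id
-- using the algorithm above; expects a result of the shape  a -> a.
import qualified Data.Map as M

demo :: IO ()
demo = do
  let expr = ELet "id" (EAbs "x" (EVar "x"))
                       (EApp (EVar "id") (EVar "id"))
  t <- infer M.empty expr   -- empty environment: no initial assumptions
  print t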
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
module Mismi.SQS.Core.Data (
    QueueName (..)
  , Queue (..)
  , QueueUrl (..)
  , MessageId (..)
  ) where

import Mismi.Kernel.Data (MismiRegion)

import P

-- Queue names are limited to 80 characters. Alphanumeric characters
-- plus hyphens (-) and underscores (_) are allowed. Queue names must
-- be unique within an AWS account. After you delete a queue, you can
-- reuse the queue name.
newtype QueueName = QueueName {
    renderQueueName :: Text
  } deriving (Eq, Show)

data Queue = Queue {
    queueName :: QueueName
  , queueRegion :: MismiRegion
  } deriving (Eq, Show)

newtype QueueUrl = QueueUrl {
    renderQueueUrl :: Text
  } deriving (Eq, Show)

newtype MessageId = MessageId {
    renderMessageId :: Text
  } deriving (Eq, Show)
ambiata/mismi
mismi-sqs-core/src/Mismi/SQS/Core/Data.hs
Haskell
bsd-3-clause
867
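The comment above states the SQS queue-name rules (at most 80 characters, alphanumerics plus hyphens and underscores). A hypothetical helper, not part of mismi-sqs-core, that encodes those rules before a `QueueName` is constructed:

-- Hypothetical validation helper (not part of this module): checks the
-- naming rules documented above.
import qualified Data.Char as C
import qualified Data.Text as T

isValidQueueName :: T.Text -> Bool
isValidQueueName t =
     not (T.null t)
  && T.length t <= 80
  && T.all (\c -> C.isAlphaNum c || c == '-' || c == '_') t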
{-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE StandaloneDeriving #-} {-# LANGUAGE DataKinds #-} -- these two needed for help output {-# LANGUAGE TypeOperators #-} module Types where import Data.Hashable import qualified Data.HashMap.Strict as H import Data.Csv hiding (lookup) import Data.Text (Text, pack) import GHC.Generics import Data.List (intercalate) import Options.Generic import Data.FixedList import qualified Data.ByteString.Lazy as B import Data.ByteString.Internal as I -- type FilePath = String -- , geneSource :: Maybe GenBankSource <?> "Genbank ID, Genbank file, or csv file" -- commandline stuff data Seperator = Tab | Comma deriving (Show, Eq, Generic) deriving instance Read Seperator instance ParseField Seperator deriving instance Read RowType instance ParseField RowType data Options = Options { rowFilter :: [RowType] <?> "What show -- Insert, etc.." , sep :: First Seperator <?> "Comma or Tab output" , fasta :: FilePath <?> "Input aligned fasta file" , align :: Bool <?> "Align the sequence first?" , syn :: Bool <?> "Display synonymous AAs?"} deriving (Generic, Show) instance ParseRecord Options -- Primary types type Index = Int newtype Id = Id String data AA = K | N | T | R | S | I | M | Q | H | P | L | E | D | A | G | V | Z | Y | C | W | F deriving (Show, Eq, Enum, Generic) aaShow Z = "!" aaShow x = show x newtype Codon = Codon String deriving (Eq, Show) instance Hashable Codon where hashWithSalt salt (Codon s) = hashWithSalt salt s type CodonTable = H.HashMap Codon AA data RowType = Is_Gap | Has_N | Stop_Codon | Synonymous | Non_Synonymous deriving (Show, Eq, Generic) data Degen = Insert Codon Index | WithN Codon Index | StopCodon AA Index Codon [Index] | Synonymous' AA Index Codon [Index] | NonSynonymous [AA] Index Codon [Index] -- Codon index or AA Index? Should make newtypes | NormalCodon deriving (Show, Eq) -- type for the csv row list type FieldList = FixedList6 --instance ToRecord Degen where -- toRecord = record . toList . fieldList --data GenBankSource = GBFile FilePath | CSVFile FilePath | GBID String -- deriving (Show, Generic, ParseRecord) -- unused newtype CodonIndex = CodonIndex Int -- make this part of codon? -- unused newtype AAIndex = AAIndex Int -- make this part of codon? -- *** Exception: Data.Csv.Encoding.namedRecordToRecord: header contains name "RowType" which is not present in the named record
VDBWRAIR/Haskell-MAAPs
src/Types.hs
Haskell
bsd-3-clause
2,660
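The `Options` record above is set up for `optparse-generic`; a hypothetical driver, assuming that library's usual `getRecord`/`unHelpful` interface, would parse it from the command line as below (the program description string is made up).

-- Hypothetical driver (not part of this module): parse Options via
-- optparse-generic and unwrap the <?> help-annotation newtypes.
{-# LANGUAGE OverloadedStrings #-}
import Options.Generic (getRecord, unHelpful)

main :: IO ()
main = do
  opts <- getRecord "degen report" :: IO Options
  let rows    = unHelpful (rowFilter opts)   -- [RowType]
      doAlign = unHelpful (align opts)       -- Bool
  print (rows, doAlign)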
module Stream
  ( tests -- :: Int -> Tests
  ) where

import Control.Monad
import Data.Bits
import Data.ByteString (ByteString)
import qualified Data.ByteString as S

import Crypto.Encrypt.Stream
import Crypto.Key
import Crypto.Nonce

import Test.QuickCheck
import Util

--------------------------------------------------------------------------------
-- Streaming encryption

streamProp :: (SecretKey Stream -> Nonce Stream -> Bool) -> Property
streamProp k = ioProperty $ liftM2 k randomKey randomNonce

roundtrip :: ByteString -> Property
roundtrip xs = streamProp $ \key nonce ->
  let enc = encrypt nonce xs key
      dec = decrypt nonce enc key
  in dec == xs

streamXor :: ByteString -> Property
streamXor xs = streamProp $ \key nonce ->
  let xorBS x1 x2 = S.pack $ S.zipWith xor x1 x2
      enc = encrypt nonce xs key
      str = stream nonce (S.length xs) key
  in enc == (str `xorBS` xs)

tests :: Int -> Tests
tests ntests =
  [ ("xsalsa20 roundtrip",        wrap roundtrip)
  , ("xsalsa20 stream/enc equiv", wrap streamXor)
  ]
  where
    wrap :: Testable prop => prop -> IO (Bool, Int)
    wrap = mkArgTest ntests
thoughtpolice/hs-nacl
tests/Stream.hs
Haskell
bsd-3-clause
1,246
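The `streamXor` property above pins down the stream-cipher identity: `encrypt` is the keystream XORed into the plaintext, so applying the same XOR again recovers the input. A standalone illustration of that involution on plain ByteStrings, independent of the NaCl bindings:

-- XOR-keystream involution exercised by streamXor/roundtrip above:
-- xorBS ks (xorBS ks pt) == pt for any equal-length keystream ks.
import Data.Bits (xor)
import qualified Data.ByteString as S

xorBS :: S.ByteString -> S.ByteString -> S.ByteString
xorBS a b = S.pack (S.zipWith xor a b)

demo :: Bool
demo =
  let pt = S.pack [1 .. 16]      -- stand-in plaintext
      ks = S.pack [101 .. 116]   -- stand-in keystream
      ct = ks `xorBS` pt         -- "encrypt"
  in ks `xorBS` ct == pt         -- "decrypt" recovers the plaintext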
module Network.Email.Parser ( -- | Encodings quotedPrintable , base64 , crlfText , -- | Mail headers , mail ) where import Prelude hiding (takeWhile) import Data.List hiding (takeWhile) import Data.Maybe import Data.Monoid import Control.Applicative import Control.Monad.Catch import qualified Data.Map as M import qualified Data.ByteString as B import qualified Data.ByteString.Lazy as BL import qualified Data.ByteString.Builder as BL import qualified Data.ByteString.Base64.Lazy as B64 import qualified Data.Text.Lazy as TL import qualified Data.Text.Lazy.Encoding as TL import qualified Data.Text.Encoding.Error as T import qualified Data.Text.Lazy.Builder as TB import qualified Data.Attoparsec.Text.Lazy as TL import Data.Attoparsec.ByteString.Char8 import qualified Data.Attoparsec.Text as T import qualified Network.Email.Header.Parser as E import qualified Network.Email.Header.Read as E import Debug.Trace import Network.Email.Types eof :: Parser () eof = () <$ string "\r\n" <|> endOfInput -- | Parses lines delimited by @delim@. The builder is concatenated to each string. -- When @delim@ returns Nothing, read the next line, otherwise return its last -- state. delimited :: Show a => Parser (BL.Builder, Maybe a) -> Parser (BL.Builder, a) delimited delim = do this <- BL.byteString <$> takeTill (== '\r') (del, r') <- delim case r' of Nothing -> do (next, r) <- delimited delim return (this <> del <> next, r) Just r -> return (this <> del, r) -- | Run a 'Text' parser for a UTF-8 encoded 'ByteString' inside a 'ByteString' 'Parser'. parseUtf8 :: Show a => TL.Parser a -> BL.ByteString -> Parser a parseUtf8 p s = do t <- case TL.decodeUtf8' s of Left (T.DecodeError e _) -> fail e Right r -> return r case TL.parse (p <* T.endOfInput) t of TL.Fail _ ctx e -> let e' = fromMaybe e (stripPrefix "Failed reading: " e) in foldl (<?>) (fail e') ctx TL.Done _ r -> return r -- | Parse e-mail headers in bulk. Does not support obsolete quoting style -- (which allows \r\n to be quoted). headers :: Parser BL.Builder headers = fst <$> delimited newline where newline = (,) <$> (BL.byteString <$> string "\r\n") <*> optional (string "\r\n") -- | Parses MIME boundary. Returns 'True' if the end of multipart message was -- met, 'False' otherwise. boundary :: B.ByteString -> Parser (BL.Builder, Maybe Bool) boundary delim = do r <- string "\r\n" (mempty, ) <$> Just <$> bnd <|> return (BL.byteString r, Nothing) where bnd = do _ <- string "--" _ <- string delim False <$ eof <|> True <$ string "--" <* eof -- | Parses a part of a multi-part e-mail, reading body and final delimiter with @body@. mailPart :: Parser (BL.Builder, a) -> Parser (Mail, a) mailPart body = do hdr' <- BL.toLazyByteString <$> headers mailHeaders <- parseUtf8 E.headers hdr' case E.boundary mailHeaders of Nothing -> do (dat, r) <- body let mail = SimpleMail { mailBody = BL.toLazyByteString dat , .. } return (mail, r) Just bnd -> do let parser = delimited $ boundary bnd read True = return [] read False = do (part, r) <- mailPart parser (part:) <$> read r (_, r') <- parser mailParts <- read r' (_, r) <- body let mail = MultipartMail { .. } return (mail, r) -- | Parses e-mail according to RFC 5322. mail :: Parser Mail mail = fst <$> mailPart ((, ()) <$> BL.lazyByteString <$> takeLazyByteString) -- | Parses quoted-printable encoded bytestring. 
quotedPrintable :: Parser BL.Builder quotedPrintable = mconcat <$> intersperse (BL.byteString "\r\n") <$> line `sepBy` string "\r\n" where line = mconcat <$> (padding >> body) `sepBy` (char '=' >> padding >> string "\r\n") padding = skipWhile (inClass " \t") body = BL.byteString <$> takeWhile1 (/= '=') <|> BL.word8 <$ char '=' <*> E.hexPair -- | Parses base64 encoded bytestring. base64 :: Parser BL.ByteString base64 = do str <- BL.toLazyByteString <$> mconcat <$> (BL.byteString <$> takeWhile (/= '\r')) `sepBy` string "\r\n" E.parseEither $ B64.decode str -- | Parses Unicode text with CRLF endings, converting them to LF. crlfText :: T.Parser TL.Text crlfText = TB.toLazyText <$> mconcat <$> intersperse (TB.singleton '\n') <$> line `sepBy` T.string "\r\n" where line = TB.fromText <$> T.takeWhile1 (/= '\r')
abbradar/email
src/Network/Email/Parser.hs
Haskell
bsd-3-clause
4,533
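A quick way to exercise the top-level `mail` parser above is attoparsec's `parseOnly` on a small CRLF-terminated message. The `Mail` constructors live in `Network.Email.Types` (not shown here), so this sketch only reports whether parsing succeeded; the sample message is made up.

-- Sketch: run the 'mail' parser above on a minimal RFC 5322 message.
import Data.Attoparsec.ByteString.Char8 (parseOnly)
import qualified Data.ByteString.Char8 as B8

demo :: IO ()
demo =
  case parseOnly mail msg of
    Left err -> putStrLn ("parse failed: " ++ err)
    Right _  -> putStrLn "parsed a Mail value"
  where
    msg = B8.pack "From: alice@example.com\r\nSubject: hello\r\n\r\nBody text.\r\n"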
-- | -- Copyright : Anders Claesson 2013-2016 -- Maintainer : Anders Claesson <[email protected]> -- -- Components of permutations. -- module Sym.Perm.Component ( components , skewComponents , leftMaxima , leftMinima , rightMaxima , rightMinima ) where import Foreign import System.IO.Unsafe import Sym.Perm import qualified Sym.Perm.D8 as D8 -- Positions /i/ such that /max{ w[j] : j <= i } = i/. These positions -- mark the boundaries of components. comps :: Perm -> [Int] comps w = unsafePerformIO . unsafeWith w $ go [] 0 0 where n = size w go ks m i p | i >= n = return (reverse ks) | otherwise = do y <- fromIntegral `fmap` peek p let p' = advancePtr p 1 let i' = i+1 let m' = if y > m then y else m let ks' = if m' == i then i:ks else ks go ks' m' i' p' -- | The list of (plus) components. components :: Perm -> [Perm] components w = let ds = 0 : map (+1) (comps w) ks = zipWith (-) (tail ds) ds ws = slices ks w in zipWith (\d v -> imap (\_ x -> x - fromIntegral d) v) ds ws -- | The list of skew components, also called minus components. skewComponents :: Perm -> [Perm] skewComponents = map D8.complement . components . D8.complement records :: (a -> a -> Bool) -> [a] -> [a] records _ [] = [] records f (x:xs) = recs [x] xs where recs rs@(r:_) (y:ys) = recs ((if f r y then y else r):rs) ys recs rs _ = rs -- | For each position, left-to-right, records the largest value seen -- thus far. leftMaxima :: Perm -> [Int] leftMaxima w = map fromIntegral . reverse $ records (<) (toList w) -- | For each position, left-to-right, records the smallest value seen -- thus far. leftMinima :: Perm -> [Int] leftMinima w = map fromIntegral . reverse $ records (>) (toList w) -- | For each position, /right-to-left/, records the largest value seen -- thus far. rightMaxima :: Perm -> [Int] rightMaxima w = map fromIntegral $ records (<) (reverse (toList w)) -- | For each position, /right-to-left/, records the smallest value seen -- thus far. rightMinima :: Perm -> [Int] rightMinima w = map fromIntegral $ records (>) (reverse (toList w))
akc/sym
Sym/Perm/Component.hs
Haskell
bsd-3-clause
2,272
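The `records` helper above produces a running record value for every position, so on plain lists `leftMaxima` and `leftMinima` coincide with `scanl1 max` and `scanl1 min`. A list-level model (the `Perm`-based versions above additionally unpack the packed permutation representation):

-- List-level model of leftMaxima / leftMinima above.
leftMaximaL, leftMinimaL :: Ord a => [a] -> [a]
leftMaximaL = scanl1 max   -- running maximum at each position
leftMinimaL = scanl1 min   -- running minimum at each position

-- e.g. leftMaximaL [2,4,1,3,5] == [2,4,4,4,5]
--      leftMinimaL [2,4,1,3,5] == [2,2,1,1,1]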
#define IncludedcatIndices

#ifdef IncludedmakeIndicesNull
#else
#include "../Proofs/makeIndicesNull.hs"
#endif

#ifdef IncludedmergeIndices
#else
#include "../Proofs/mergeIndices.hs"
#endif

#ifdef IncludedconcatMakeIndices
#else
#include "../Proofs/concatMakeIndices.hs"
#endif

catIndices :: RString -> RString -> RString -> Integer -> Integer -> Proof
{-@ catIndices
      :: input:RString
      -> x:RString
      -> target:{RString | 0 <= stringLen input - stringLen target + 1}
      -> lo:{INat | lo <= stringLen input - stringLen target }
      -> hi:{Integer | (stringLen input - stringLen target) <= hi}
      -> { makeIndices input target lo hi ==
           makeIndices (input <+> x) target lo (stringLen input - stringLen target) }
  @-}
catIndices input x target lo hi
  =   makeIndices input target lo hi
  ==. append (makeIndices input target lo (stringLen input - stringLen target))
             (makeIndices input target (stringLen input - stringLen target + 1) hi)
      ? mergeIndices input target lo (stringLen input - stringLen target) hi
  ==. append (makeIndices input target lo (stringLen input - stringLen target)) N
      ? makeIndicesNull input target (stringLen input - stringLen target + 1) hi
  ==. makeIndices input target lo (stringLen input - stringLen target)
      ? listLeftId (makeIndices input target lo (stringLen input - stringLen target))
  ==. makeIndices (input <+> x) target lo (stringLen input - stringLen target)
      ? concatMakeIndices lo (stringLen input - stringLen target) target input x
  *** QED
nikivazou/verified_string_matching
src/Proofs/catIndices.hs
Haskell
bsd-3-clause
1,558
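The lemma above states that appending extra text `x` to `input` cannot change the computed match indices, provided the scan stops at `stringLen input - stringLen target`. A plain-Haskell stand-in (not the verified `RString` definitions used by the proof) that makes the statement executable for intuition:

-- Simplified executable model of the statement proved above; makeIndicesM is
-- an assumed stand-in for makeIndices, using ordinary Strings.
import Data.List (isPrefixOf)

makeIndicesM :: String -> String -> Int -> Int -> [Int]
makeIndicesM input target lo hi =
  [ i | i <- [lo .. hi], target `isPrefixOf` drop i input ]

-- The catIndices claim, in the model (expected to hold under the lemma's
-- side conditions lo <= length input - length target <= hi), e.g.:
--   makeIndicesM "abcab" "ab" 0 10            == [0,3]
--   makeIndicesM ("abcab" ++ "zzz") "ab" 0 3  == [0,3]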
{- (c) The GRASP/AQUA Project, Glasgow University, 1992-2006 \section[RnEnv]{Environment manipulation for the renamer monad} -} {-# LANGUAGE CPP #-} module ETA.Rename.RnEnv ( newTopSrcBinder, lookupLocatedTopBndrRn, lookupTopBndrRn, lookupLocatedOccRn, lookupOccRn, lookupOccRn_maybe, lookupLocalOccRn_maybe, lookupInfoOccRn, lookupLocalOccThLvl_maybe, lookupTypeOccRn, lookupKindOccRn, lookupGlobalOccRn, lookupGlobalOccRn_maybe, reportUnboundName, HsSigCtxt(..), lookupLocalTcNames, lookupSigOccRn, lookupSigCtxtOccRn, lookupFixityRn, lookupTyFixityRn, lookupInstDeclBndr, lookupSubBndrOcc, lookupFamInstName, greRdrName, lookupSubBndrGREs, lookupConstructorFields, lookupSyntaxName, lookupSyntaxNames, lookupIfThenElse, lookupGreRn, lookupGreRn_maybe, lookupGreLocalRn_maybe, getLookupOccRn, addUsedRdrNames, newLocalBndrRn, newLocalBndrsRn, bindLocalNames, bindLocalNamesFV, MiniFixityEnv, addLocalFixities, bindLocatedLocalsFV, bindLocatedLocalsRn, extendTyVarEnvFVRn, checkDupRdrNames, checkShadowedRdrNames, checkDupNames, checkDupAndShadowedNames, checkTupSize, addFvRn, mapFvRn, mapMaybeFvRn, mapFvRnCPS, warnUnusedMatches, warnUnusedTopBinds, warnUnusedLocalBinds, dataTcOccs, kindSigErr, perhapsForallMsg, HsDocContext(..), docOfHsDocContext ) where import ETA.Iface.LoadIface ( loadInterfaceForName, loadSrcInterface_maybe ) import ETA.Iface.IfaceEnv import ETA.HsSyn.HsSyn import ETA.BasicTypes.RdrName import ETA.Main.HscTypes import ETA.TypeCheck.TcEnv ( tcLookupDataCon, tcLookupField, isBrackStage ) import ETA.TypeCheck.TcRnMonad import ETA.BasicTypes.Id ( isRecordSelector ) import ETA.BasicTypes.Name import ETA.BasicTypes.NameSet import ETA.BasicTypes.NameEnv import ETA.BasicTypes.Avail import ETA.BasicTypes.Module import ETA.BasicTypes.ConLike import ETA.BasicTypes.DataCon ( dataConFieldLabels, dataConTyCon ) import ETA.Types.TyCon ( isTupleTyCon, tyConArity ) import ETA.Prelude.PrelNames ( mkUnboundName, isUnboundName, rOOT_MAIN, forall_tv_RDR ) import ETA.Main.ErrUtils ( MsgDoc ) import ETA.BasicTypes.BasicTypes ( Fixity(..), FixityDirection(..), minPrecedence, defaultFixity ) import ETA.BasicTypes.SrcLoc import ETA.Utils.Outputable import qualified ETA.Utils.Outputable as Outputable import ETA.Utils.Util import ETA.Utils.Maybes import ETA.BasicTypes.BasicTypes ( TopLevelFlag(..) ) import ETA.Utils.ListSetOps ( removeDups ) import ETA.Main.DynFlags import ETA.Utils.FastString import Control.Monad import Data.List import qualified Data.Set as Set import ETA.Utils.ListSetOps ( minusList ) import ETA.Main.Constants ( mAX_TUPLE_SIZE ) {- ********************************************************* * * Source-code binders * * ********************************************************* Note [Signature lazy interface loading] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ GHC's lazy interface loading can be a bit confusing, so this Note is an empirical description of what happens in one interesting case. When compiling a signature module against an its implementation, we do NOT load interface files associated with its names until after the type checking phase. For example: module ASig where data T f :: T -> T Suppose we compile this with -sig-of "A is ASig": module B where data T = T f T = T module A(module B) where import B During type checking, we'll load A.hi because we need to know what the RdrEnv for the module is, but we DO NOT load the interface for B.hi! It's wholly unnecessary: our local definition 'data T' in ASig is all the information we need to finish type checking. 
This is contrast to type checking of ordinary Haskell files, in which we would not have the local definition "data T" and would need to consult B.hi immediately. (Also, this situation never occurs for hs-boot files, since you're not allowed to reexport from another module.) After type checking, we then check that the types we provided are consistent with the backing implementation (in checkHiBootOrHsigIface). At this point, B.hi is loaded, because we need something to compare against. I discovered this behavior when trying to figure out why type class instances for Data.Map weren't in the EPS when I was type checking a test very much like ASig (sigof02dm): the associated interface hadn't been loaded yet! (The larger issue is a moot point, since an instance declared in a signature can never be a duplicate.) This behavior might change in the future. Consider this alternate module B: module B where {-# DEPRECATED T, f "Don't use" #-} data T = T f T = T One might conceivably want to report deprecation warnings when compiling ASig with -sig-of B, in which case we need to look at B.hi to find the deprecation warnings during renaming. At the moment, you don't get any warning until you use the identifier further downstream. This would require adjusting addUsedRdrName so that during signature compilation, we do not report deprecation warnings for LocalDef. See also Note [Handling of deprecations] -} newTopSrcBinder :: Located RdrName -> RnM Name newTopSrcBinder (L loc rdr_name) | Just name <- isExact_maybe rdr_name = -- This is here to catch -- (a) Exact-name binders created by Template Haskell -- (b) The PrelBase defn of (say) [] and similar, for which -- the parser reads the special syntax and returns an Exact RdrName -- We are at a binding site for the name, so check first that it -- the current module is the correct one; otherwise GHC can get -- very confused indeed. This test rejects code like -- data T = (,) Int Int -- unless we are in GHC.Tup if isExternalName name then do { this_mod <- getModule ; unless (this_mod == nameModule name) (addErrAt loc (badOrigBinding rdr_name)) ; return name } else -- See Note [Binders in Template Haskell] in Convert.hs do { let occ = nameOccName name ; occ `seq` return () -- c.f. seq in newGlobalBinder ; this_mod <- getModule ; updNameCache $ \ ns -> let name' = mkExternalName (nameUnique name) this_mod occ loc ns' = ns { nsNames = extendNameCache (nsNames ns) this_mod occ name' } in (ns', name') } | Just (rdr_mod, rdr_occ) <- isOrig_maybe rdr_name = do { this_mod <- getModule ; unless (rdr_mod == this_mod || rdr_mod == rOOT_MAIN) (addErrAt loc (badOrigBinding rdr_name)) -- When reading External Core we get Orig names as binders, -- but they should agree with the module gotten from the monad -- -- We can get built-in syntax showing up here too, sadly. If you type -- data T = (,,,) -- the constructor is parsed as a type, and then RdrHsSyn.tyConToDataCon -- uses setRdrNameSpace to make it into a data constructors. At that point -- the nice Exact name for the TyCon gets swizzled to an Orig name. -- Hence the badOrigBinding error message. -- -- Except for the ":Main.main = ..." definition inserted into -- the Main module; ugh! -- Because of this latter case, we call newGlobalBinder with a module from -- the RdrName, not from the environment. In principle, it'd be fine to -- have an arbitrary mixture of external core definitions in a single module, -- (apart from module-initialisation issues, perhaps). 
; newGlobalBinder rdr_mod rdr_occ loc } | otherwise = do { unless (not (isQual rdr_name)) (addErrAt loc (badQualBndrErr rdr_name)) -- Binders should not be qualified; if they are, and with a different -- module name, we we get a confusing "M.T is not in scope" error later ; stage <- getStage ; env <- getGblEnv ; if isBrackStage stage then -- We are inside a TH bracket, so make an *Internal* name -- See Note [Top-level Names in Template Haskell decl quotes] in RnNames do { uniq <- newUnique ; return (mkInternalName uniq (rdrNameOcc rdr_name) loc) } else case tcg_impl_rdr_env env of Just gr -> -- We're compiling --sig-of, so resolve with respect to this -- module. -- See Note [Signature parameters in TcGblEnv and DynFlags] do { case lookupGlobalRdrEnv gr (rdrNameOcc rdr_name) of -- Be sure to override the loc so that we get accurate -- information later [GRE{ gre_name = n }] -> do -- NB: Just adding this line will not work: -- addUsedRdrName True gre rdr_name -- see Note [Signature lazy interface loading] for -- more details. return (setNameLoc n loc) _ -> do { -- NB: cannot use reportUnboundName rdr_name -- because it looks up in the wrong RdrEnv -- ToDo: more helpful error messages ; addErr (unknownNameErr (pprNonVarNameSpace (occNameSpace (rdrNameOcc rdr_name))) rdr_name) ; return (mkUnboundName rdr_name) } } Nothing -> -- Normal case do { this_mod <- getModule ; newGlobalBinder this_mod (rdrNameOcc rdr_name) loc } } {- ********************************************************* * * Source code occurrences * * ********************************************************* Looking up a name in the RnEnv. Note [Type and class operator definitions] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We want to reject all of these unless we have -XTypeOperators (Trac #3265) data a :*: b = ... class a :*: b where ... data (:*:) a b = .... class (:*:) a b where ... The latter two mean that we are not just looking for a *syntactically-infix* declaration, but one that uses an operator OccName. We use OccName.isSymOcc to detect that case, which isn't terribly efficient, but there seems to be no better way. -} lookupTopBndrRn :: RdrName -> RnM Name lookupTopBndrRn n = do nopt <- lookupTopBndrRn_maybe n case nopt of Just n' -> return n' Nothing -> do traceRn $ (text "lookupTopBndrRn fail" <+> ppr n) unboundName WL_LocalTop n lookupLocatedTopBndrRn :: Located RdrName -> RnM (Located Name) lookupLocatedTopBndrRn = wrapLocM lookupTopBndrRn lookupTopBndrRn_maybe :: RdrName -> RnM (Maybe Name) -- Look up a top-level source-code binder. We may be looking up an unqualified 'f', -- and there may be several imported 'f's too, which must not confuse us. -- For example, this is OK: -- import Foo( f ) -- infix 9 f -- The 'f' here does not need to be qualified -- f x = x -- Nor here, of course -- So we have to filter out the non-local ones. -- -- A separate function (importsFromLocalDecls) reports duplicate top level -- decls, so here it's safe just to choose an arbitrary one. -- -- There should never be a qualified name in a binding position in Haskell, -- but there can be if we have read in an external-Core file. -- The Haskell parser checks for the illegal qualified name in Haskell -- source files, so we don't need to do so here. 
lookupTopBndrRn_maybe rdr_name | Just name <- isExact_maybe rdr_name = do { name' <- lookupExactOcc name; return (Just name') } | Just (rdr_mod, rdr_occ) <- isOrig_maybe rdr_name -- This deals with the case of derived bindings, where -- we don't bother to call newTopSrcBinder first -- We assume there is no "parent" name = do { loc <- getSrcSpanM ; n <- newGlobalBinder rdr_mod rdr_occ loc ; return (Just n)} | otherwise = do { -- Check for operators in type or class declarations -- See Note [Type and class operator definitions] let occ = rdrNameOcc rdr_name ; when (isTcOcc occ && isSymOcc occ) (do { op_ok <- xoptM Opt_TypeOperators ; unless op_ok (addErr (opDeclErr rdr_name)) }) ; mb_gre <- lookupGreLocalRn_maybe rdr_name ; case mb_gre of Nothing -> return Nothing Just gre -> return (Just $ gre_name gre) } ----------------------------------------------- -- | Lookup an @Exact@ @RdrName@. See Note [Looking up Exact RdrNames]. -- This adds an error if the name cannot be found. lookupExactOcc :: Name -> RnM Name lookupExactOcc name = do { result <- lookupExactOcc_either name ; case result of Left err -> do { addErr err ; return name } Right name' -> return name' } -- | Lookup an @Exact@ @RdrName@. See Note [Looking up Exact RdrNames]. -- This never adds an error, but it may return one. lookupExactOcc_either :: Name -> RnM (Either MsgDoc Name) -- See Note [Looking up Exact RdrNames] lookupExactOcc_either name | Just thing <- wiredInNameTyThing_maybe name , Just tycon <- case thing of ATyCon tc -> Just tc AConLike (RealDataCon dc) -> Just (dataConTyCon dc) _ -> Nothing , isTupleTyCon tycon = do { checkTupSize (tyConArity tycon) ; return (Right name) } | isExternalName name = return (Right name) | otherwise = do { env <- getGlobalRdrEnv ; let -- See Note [Splicing Exact names] main_occ = nameOccName name demoted_occs = case demoteOccName main_occ of Just occ -> [occ] Nothing -> [] gres = [ gre | occ <- main_occ : demoted_occs , gre <- lookupGlobalRdrEnv env occ , gre_name gre == name ] ; case gres of [] -> -- See Note [Splicing Exact names] do { lcl_env <- getLocalRdrEnv ; if name `inLocalRdrEnvScope` lcl_env then return (Right name) else #ifdef GHCI do { th_topnames_var <- fmap tcg_th_topnames getGblEnv ; th_topnames <- readTcRef th_topnames_var ; if name `elemNameSet` th_topnames then return (Right name) else return (Left exact_nm_err) } #else /* !GHCI */ return (Left exact_nm_err) #endif /* !GHCI */ } [gre] -> return (Right (gre_name gre)) _ -> return (Left dup_nm_err) -- We can get more than one GRE here, if there are multiple -- bindings for the same name. Sometimes they are caught later -- by findLocalDupsRdrEnv, like in this example (Trac #8932): -- $( [d| foo :: a->a; foo x = x |]) -- foo = True -- But when the names are totally identical, we panic (Trac #7241): -- $(newName "Foo" >>= \o -> return [DataD [] o [] [RecC o []] [''Show]]) -- So, let's emit an error here, even if it will lead to duplication in some cases. 
} where exact_nm_err = hang (ptext (sLit "The exact Name") <+> quotes (ppr name) <+> ptext (sLit "is not in scope")) 2 (vcat [ ptext (sLit "Probable cause: you used a unique Template Haskell name (NameU), ") , ptext (sLit "perhaps via newName, but did not bind it") , ptext (sLit "If that's it, then -ddump-splices might be useful") ]) dup_nm_err = hang (ptext (sLit "Duplicate exact Name") <+> quotes (ppr $ nameOccName name)) 2 (vcat [ ptext (sLit "Probable cause: you used a unique Template Haskell name (NameU), ") , ptext (sLit "perhaps via newName, but bound it multiple times") , ptext (sLit "If that's it, then -ddump-splices might be useful") ]) ----------------------------------------------- lookupInstDeclBndr :: Name -> SDoc -> RdrName -> RnM Name -- This is called on the method name on the left-hand side of an -- instance declaration binding. eg. instance Functor T where -- fmap = ... -- ^^^^ called on this -- Regardless of how many unqualified fmaps are in scope, we want -- the one that comes from the Functor class. -- -- Furthermore, note that we take no account of whether the -- name is only in scope qualified. I.e. even if method op is -- in scope as M.op, we still allow plain 'op' on the LHS of -- an instance decl -- -- The "what" parameter says "method" or "associated type", -- depending on what we are looking up lookupInstDeclBndr cls what rdr = do { when (isQual rdr) (addErr (badQualBndrErr rdr)) -- In an instance decl you aren't allowed -- to use a qualified name for the method -- (Although it'd make perfect sense.) ; lookupSubBndrOcc False -- False => we don't give deprecated -- warnings when a deprecated class -- method is defined. We only warn -- when it's used (ParentIs cls) doc rdr } where doc = what <+> ptext (sLit "of class") <+> quotes (ppr cls) ----------------------------------------------- lookupFamInstName :: Maybe Name -> Located RdrName -> RnM (Located Name) -- Used for TyData and TySynonym family instances only, -- See Note [Family instance binders] lookupFamInstName (Just cls) tc_rdr -- Associated type; c.f RnBinds.rnMethodBind = wrapLocM (lookupInstDeclBndr cls (ptext (sLit "associated type"))) tc_rdr lookupFamInstName Nothing tc_rdr -- Family instance; tc_rdr is an *occurrence* = lookupLocatedOccRn tc_rdr ----------------------------------------------- lookupConstructorFields :: Name -> RnM [Name] -- Look up the fields of a given constructor -- * For constructors from this module, use the record field env, -- which is itself gathered from the (as yet un-typechecked) -- data type decls -- -- * For constructors from imported modules, use the *type* environment -- since imported modles are already compiled, the info is conveniently -- right there lookupConstructorFields con_name = do { this_mod <- getModule ; if nameIsLocalOrFrom this_mod con_name then do { RecFields field_env _ <- getRecFieldEnv ; return (lookupNameEnv field_env con_name `orElse` []) } else do { con <- tcLookupDataCon con_name ; return (dataConFieldLabels con) } } ----------------------------------------------- -- Used for record construction and pattern matching -- When the -XDisambiguateRecordFields flag is on, take account of the -- constructor name to disambiguate which field to use; it's just the -- same as for instance decls -- -- NB: Consider this: -- module Foo where { data R = R { fld :: Int } } -- module Odd where { import Foo; fld x = x { fld = 3 } } -- Arguably this should work, because the reference to 'fld' is -- unambiguous because there is only one field id 'fld' in scope. 
-- But currently it's rejected. lookupSubBndrOcc :: Bool -> Parent -- NoParent => just look it up as usual -- ParentIs p => use p to disambiguate -> SDoc -> RdrName -> RnM Name lookupSubBndrOcc warnIfDeprec parent doc rdr_name | Just n <- isExact_maybe rdr_name -- This happens in derived code = lookupExactOcc n | Just (rdr_mod, rdr_occ) <- isOrig_maybe rdr_name = lookupOrig rdr_mod rdr_occ | otherwise -- Find all the things the rdr-name maps to = do { -- and pick the one with the right parent namep env <- getGlobalRdrEnv ; case lookupSubBndrGREs env parent rdr_name of -- NB: lookupGlobalRdrEnv, not lookupGRE_RdrName! -- The latter does pickGREs, but we want to allow 'x' -- even if only 'M.x' is in scope [gre] -> do { addUsedRdrName warnIfDeprec gre (used_rdr_name gre) -- Add a usage; this is an *occurrence* site ; return (gre_name gre) } [] -> do { addErr (unknownSubordinateErr doc rdr_name) ; return (mkUnboundName rdr_name) } gres -> do { addNameClashErrRn rdr_name gres ; return (gre_name (head gres)) } } where -- Note [Usage for sub-bndrs] used_rdr_name gre | isQual rdr_name = rdr_name | otherwise = greRdrName gre greRdrName :: GlobalRdrElt -> RdrName greRdrName gre = case gre_prov gre of LocalDef -> unqual_rdr Imported is -> used_rdr_name_from_is is where occ = nameOccName (gre_name gre) unqual_rdr = mkRdrUnqual occ used_rdr_name_from_is imp_specs -- rdr_name is unqualified | not (all (is_qual . is_decl) imp_specs) = unqual_rdr -- An unqualified import is available | otherwise = -- Only qualified imports available, so make up -- a suitable qualifed name from the first imp_spec --ASSERT( not (null imp_specs) ) mkRdrQual (is_as (is_decl (head imp_specs))) occ lookupSubBndrGREs :: GlobalRdrEnv -> Parent -> RdrName -> [GlobalRdrElt] -- If Parent = NoParent, just do a normal lookup -- If Parent = Parent p then find all GREs that -- (a) have parent p -- (b) for Unqual, are in scope qualified or unqualified -- for Qual, are in scope with that qualification lookupSubBndrGREs env parent rdr_name = case parent of NoParent -> pickGREs rdr_name gres ParentIs p | isUnqual rdr_name -> filter (parent_is p) gres | otherwise -> filter (parent_is p) (pickGREs rdr_name gres) where gres = lookupGlobalRdrEnv env (rdrNameOcc rdr_name) parent_is p (GRE { gre_par = ParentIs p' }) = p == p' parent_is _ _ = False {- Note [Family instance binders] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Consider data family F a data instance F T = X1 | X2 The 'data instance' decl has an *occurrence* of F (and T), and *binds* X1 and X2. (This is unlike a normal data type declaration which would bind F too.) So we want an AvailTC F [X1,X2]. Now consider a similar pair: class C a where data G a instance C S where data G S = Y1 | Y2 The 'data G S' *binds* Y1 and Y2, and has an *occurrence* of G. But there is a small complication: in an instance decl, we don't use qualified names on the LHS; instead we use the class to disambiguate. Thus: module M where import Blib( G ) class C a where data G a instance C S where data G S = Y1 | Y2 Even though there are two G's in scope (M.G and Blib.G), the occurrence of 'G' in the 'instance C S' decl is unambiguous, because C has only one associated type called G. This is exactly what happens for methods, and it is only consistent to do the same thing for types. That's the role of the function lookupTcdName; the (Maybe Name) give the class of the encloseing instance decl, if any. Note [Looking up Exact RdrNames] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Exact RdrNames are generated by Template Haskell. 
See Note [Binders in Template Haskell] in Convert. For data types and classes have Exact system Names in the binding positions for constructors, TyCons etc. For example [d| data T = MkT Int |] when we splice in and Convert to HsSyn RdrName, we'll get data (Exact (system Name "T")) = (Exact (system Name "MkT")) ... These System names are generated by Convert.thRdrName But, constructors and the like need External Names, not System Names! So we do the following * In RnEnv.newGlobalBinder we spot Exact RdrNames that wrap a non-External Name, and make an External name for it. This is the name that goes in the GlobalRdrEnv * When looking up an occurrence of an Exact name, done in RnEnv.lookupExactOcc, we find the Name with the right unique in the GlobalRdrEnv, and use the one from the envt -- it will be an External Name in the case of the data type/constructor above. * Exact names are also use for purely local binders generated by TH, such as \x_33. x_33 Both binder and occurrence are Exact RdrNames. The occurrence gets looked up in the LocalRdrEnv by RnEnv.lookupOccRn, and misses, because lookupLocalRdrEnv always returns Nothing for an Exact Name. Now we fall through to lookupExactOcc, which will find the Name is not in the GlobalRdrEnv, so we just use the Exact supplied Name. Note [Splicing Exact names] ~~~~~~~~~~~~~~~~~~~~~~~~~~~ Consider the splice $(do { x <- newName "x"; return (VarE x) }) This will generate a (HsExpr RdrName) term that mentions the Exact RdrName "x_56" (or whatever), but does not bind it. So when looking such Exact names we want to check that it's in scope, otherwise the type checker will get confused. To do this we need to keep track of all the Names in scope, and the LocalRdrEnv does just that; we consult it with RdrName.inLocalRdrEnvScope. There is another wrinkle. With TH and -XDataKinds, consider $( [d| data Nat = Zero data T = MkT (Proxy 'Zero) |] ) After splicing, but before renaming we get this: data Nat_77{tc} = Zero_78{d} data T_79{tc} = MkT_80{d} (Proxy 'Zero_78{tc}) |] ) The occurrence of 'Zero in the data type for T has the right unique, but it has a TcClsName name-space in its OccName. (This is set by the ctxt_ns argument of Convert.thRdrName.) When we check that is in scope in the GlobalRdrEnv, we need to look up the DataName namespace too. (An alternative would be to make the GlobalRdrEnv also have a Name -> GRE mapping.) Note [Usage for sub-bndrs] ~~~~~~~~~~~~~~~~~~~~~~~~~~ If you have this import qualified M( C( f ) ) instance M.C T where f x = x then is the qualified import M.f used? Obviously yes. But the RdrName used in the instance decl is unqualified. In effect, we fill in the qualification by looking for f's whose class is M.C But when adding to the UsedRdrNames we must make that qualification explicit (saying "used M.f"), otherwise we get "Redundant import of M.f". So we make up a suitable (fake) RdrName. But be careful import qualifed M import M( C(f) ) instance C T where f x = x Here we want to record a use of 'f', not of 'M.f', otherwise we'll miss the fact that the qualified import is redundant. -------------------------------------------------- -- Occurrences -------------------------------------------------- -} getLookupOccRn :: RnM (Name -> Maybe Name) getLookupOccRn = do local_env <- getLocalRdrEnv return (lookupLocalRdrOcc local_env . 
nameOccName) lookupLocatedOccRn :: Located RdrName -> RnM (Located Name) lookupLocatedOccRn = wrapLocM lookupOccRn lookupLocalOccRn_maybe :: RdrName -> RnM (Maybe Name) -- Just look in the local environment lookupLocalOccRn_maybe rdr_name = do { local_env <- getLocalRdrEnv ; return (lookupLocalRdrEnv local_env rdr_name) } lookupLocalOccThLvl_maybe :: Name -> RnM (Maybe (TopLevelFlag, ThLevel)) -- Just look in the local environment lookupLocalOccThLvl_maybe name = do { lcl_env <- getLclEnv ; return (lookupNameEnv (tcl_th_bndrs lcl_env) name) } -- lookupOccRn looks up an occurrence of a RdrName lookupOccRn :: RdrName -> RnM Name lookupOccRn rdr_name = do { mb_name <- lookupOccRn_maybe rdr_name ; case mb_name of Just name -> return name Nothing -> reportUnboundName rdr_name } lookupKindOccRn :: RdrName -> RnM Name -- Looking up a name occurring in a kind lookupKindOccRn rdr_name = do { mb_name <- lookupOccRn_maybe rdr_name ; case mb_name of Just name -> return name Nothing -> reportUnboundName rdr_name } -- lookupPromotedOccRn looks up an optionally promoted RdrName. lookupTypeOccRn :: RdrName -> RnM Name -- see Note [Demotion] lookupTypeOccRn rdr_name = do { mb_name <- lookupOccRn_maybe rdr_name ; case mb_name of { Just name -> return name ; Nothing -> lookup_demoted rdr_name } } lookup_demoted :: RdrName -> RnM Name lookup_demoted rdr_name | Just demoted_rdr <- demoteRdrName rdr_name -- Maybe it's the name of a *data* constructor = do { data_kinds <- xoptM Opt_DataKinds ; mb_demoted_name <- lookupOccRn_maybe demoted_rdr ; case mb_demoted_name of Nothing -> reportUnboundName rdr_name Just demoted_name | data_kinds -> do { whenWOptM Opt_WarnUntickedPromotedConstructors $ addWarn (untickedPromConstrWarn demoted_name) ; return demoted_name } | otherwise -> unboundNameX WL_Any rdr_name suggest_dk } | otherwise = reportUnboundName rdr_name where suggest_dk = ptext (sLit "A data constructor of that name is in scope; did you mean DataKinds?") untickedPromConstrWarn name = text "Unticked promoted constructor" <> colon <+> quotes (ppr name) <> dot $$ hsep [ text "Use" , quotes (char '\'' <> ppr name) , text "instead of" , quotes (ppr name) <> dot ] {- Note [Demotion] ~~~~~~~~~~~~~~~ When the user writes: data Nat = Zero | Succ Nat foo :: f Zero -> Int 'Zero' in the type signature of 'foo' is parsed as: HsTyVar ("Zero", TcClsName) When the renamer hits this occurrence of 'Zero' it's going to realise that it's not in scope. But because it is renaming a type, it knows that 'Zero' might be a promoted data constructor, so it will demote its namespace to DataName and do a second lookup. 
The final result (after the renamer) will be: HsTyVar ("Zero", DataName) -} -- Use this version to get tracing -- -- lookupOccRn_maybe, lookupOccRn_maybe' :: RdrName -> RnM (Maybe Name) -- lookupOccRn_maybe rdr_name -- = do { mb_res <- lookupOccRn_maybe' rdr_name -- ; gbl_rdr_env <- getGlobalRdrEnv -- ; local_rdr_env <- getLocalRdrEnv -- ; traceRn $ text "lookupOccRn_maybe" <+> -- vcat [ ppr rdr_name <+> ppr (getUnique (rdrNameOcc rdr_name)) -- , ppr mb_res -- , text "Lcl env" <+> ppr local_rdr_env -- , text "Gbl env" <+> ppr [ (getUnique (nameOccName (gre_name (head gres'))),gres') | gres <- occEnvElts gbl_rdr_env -- , let gres' = filter isLocalGRE gres, not (null gres') ] ] -- ; return mb_res } lookupOccRn_maybe :: RdrName -> RnM (Maybe Name) -- lookupOccRn looks up an occurrence of a RdrName lookupOccRn_maybe rdr_name = do { local_env <- getLocalRdrEnv ; case lookupLocalRdrEnv local_env rdr_name of { Just name -> return (Just name) ; Nothing -> do { mb_name <- lookupGlobalOccRn_maybe rdr_name ; case mb_name of { Just name -> return (Just name) ; Nothing -> do { ns <- lookupQualifiedNameGHCi rdr_name -- This test is not expensive, -- and only happens for failed lookups ; case ns of (n:_) -> return (Just n) -- Unlikely to be more than one...? [] -> return Nothing } } } } } lookupGlobalOccRn :: RdrName -> RnM Name -- lookupGlobalOccRn is like lookupOccRn, except that it looks in the global -- environment. Adds an error message if the RdrName is not in scope. lookupGlobalOccRn rdr_name = do { mb_name <- lookupGlobalOccRn_maybe rdr_name ; case mb_name of Just n -> return n Nothing -> do { traceRn (text "lookupGlobalOccRn" <+> ppr rdr_name) ; unboundName WL_Global rdr_name } } lookupInfoOccRn :: RdrName -> RnM [Name] -- lookupInfoOccRn is intended for use in GHCi's ":info" command -- It finds all the GREs that RdrName could mean, not complaining -- about ambiguity, but rather returning them all -- C.f. Trac #9881 lookupInfoOccRn rdr_name | Just n <- isExact_maybe rdr_name -- e.g. 
(->) = return [n] | Just (rdr_mod, rdr_occ) <- isOrig_maybe rdr_name = do { n <- lookupOrig rdr_mod rdr_occ ; return [n] } | otherwise = do { rdr_env <- getGlobalRdrEnv ; let ns = map gre_name (lookupGRE_RdrName rdr_name rdr_env) ; qual_ns <- lookupQualifiedNameGHCi rdr_name ; return (ns ++ (qual_ns `minusList` ns)) } lookupGlobalOccRn_maybe :: RdrName -> RnM (Maybe Name) -- No filter function; does not report an error on failure lookupGlobalOccRn_maybe rdr_name | Just n <- isExact_maybe rdr_name -- This happens in derived code = do { n' <- lookupExactOcc n; return (Just n') } | Just (rdr_mod, rdr_occ) <- isOrig_maybe rdr_name = do { n <- lookupOrig rdr_mod rdr_occ ; return (Just n) } | otherwise = do { mb_gre <- lookupGreRn_maybe rdr_name ; case mb_gre of Nothing -> return Nothing Just gre -> return (Just (gre_name gre)) } -------------------------------------------------- -- Lookup in the Global RdrEnv of the module -------------------------------------------------- lookupGreRn_maybe :: RdrName -> RnM (Maybe GlobalRdrElt) -- Just look up the RdrName in the GlobalRdrEnv lookupGreRn_maybe rdr_name = lookupGreRn_help rdr_name (lookupGRE_RdrName rdr_name) lookupGreRn :: RdrName -> RnM GlobalRdrElt -- If not found, add error message, and return a fake GRE lookupGreRn rdr_name = do { mb_gre <- lookupGreRn_maybe rdr_name ; case mb_gre of { Just gre -> return gre ; Nothing -> do { traceRn (text "lookupGreRn" <+> ppr rdr_name) ; name <- unboundName WL_Global rdr_name ; return (GRE { gre_name = name, gre_par = NoParent, gre_prov = LocalDef }) }}} lookupGreLocalRn_maybe :: RdrName -> RnM (Maybe GlobalRdrElt) -- Similar, but restricted to locally-defined things lookupGreLocalRn_maybe rdr_name = lookupGreRn_help rdr_name lookup_fn where lookup_fn env = filter isLocalGRE (lookupGRE_RdrName rdr_name env) lookupGreRn_help :: RdrName -- Only used in error message -> (GlobalRdrEnv -> [GlobalRdrElt]) -- Lookup function -> RnM (Maybe GlobalRdrElt) -- Checks for exactly one match; reports deprecations -- Returns Nothing, without error, if too few lookupGreRn_help rdr_name lookup = do { env <- getGlobalRdrEnv ; case lookup env of [] -> return Nothing [gre] -> do { addUsedRdrName True gre rdr_name ; return (Just gre) } gres -> do { addNameClashErrRn rdr_name gres ; return (Just (head gres)) } } {- ********************************************************* * * Deprecations * * ********************************************************* Note [Handling of deprecations] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ * We report deprecations at each *occurrence* of the deprecated thing (see Trac #5867) * We do not report deprectations for locally-definded names. For a start, we may be exporting a deprecated thing. Also we may use a deprecated thing in the defn of another deprecated things. We may even use a deprecated thing in the defn of a non-deprecated thing, when changing a module's interface. * addUsedRdrNames: we do not report deprecations for sub-binders: - the ".." completion for records - the ".." 
in an export item 'T(..)' - the things exported by a module export 'module M' -} addUsedRdrName :: Bool -> GlobalRdrElt -> RdrName -> RnM () -- Record usage of imported RdrNames addUsedRdrName warnIfDeprec gre rdr | isLocalGRE gre = return () -- No call to warnIfDeprecated -- See Note [Handling of deprecations] | otherwise = do { env <- getGblEnv ; when warnIfDeprec $ warnIfDeprecated gre ; updMutVar (tcg_used_rdrnames env) (\s -> Set.insert rdr s) } addUsedRdrNames :: [RdrName] -> RnM () -- Record used sub-binders -- We don't check for imported-ness here, because it's inconvenient -- and not stritly necessary. -- NB: no call to warnIfDeprecated; see Note [Handling of deprecations] addUsedRdrNames rdrs = do { env <- getGblEnv ; updMutVar (tcg_used_rdrnames env) (\s -> foldr Set.insert s rdrs) } warnIfDeprecated :: GlobalRdrElt -> RnM () warnIfDeprecated gre@(GRE { gre_name = name, gre_prov = Imported (imp_spec : _) }) = do { dflags <- getDynFlags ; when (wopt Opt_WarnWarningsDeprecations dflags) $ do { iface <- loadInterfaceForName doc name ; case lookupImpDeprec iface gre of Just txt -> addWarn (mk_msg txt) Nothing -> return () } } where mk_msg txt = sep [ sep [ ptext (sLit "In the use of") <+> pprNonVarNameSpace (occNameSpace (nameOccName name)) <+> quotes (ppr name) , parens imp_msg <> colon ] , ppr txt ] name_mod = {-ASSERT2( isExternalName name, ppr name )-} nameModule name imp_mod = importSpecModule imp_spec imp_msg = ptext (sLit "imported from") <+> ppr imp_mod <> extra extra | imp_mod == moduleName name_mod = Outputable.empty | otherwise = ptext (sLit ", but defined in") <+> ppr name_mod doc = ptext (sLit "The name") <+> quotes (ppr name) <+> ptext (sLit "is mentioned explicitly") warnIfDeprecated _ = return () -- No deprecations for things defined locally lookupImpDeprec :: ModIface -> GlobalRdrElt -> Maybe WarningTxt lookupImpDeprec iface gre = mi_warn_fn iface (gre_name gre) `mplus` -- Bleat if the thing, case gre_par gre of -- or its parent, is warn'd ParentIs p -> mi_warn_fn iface p NoParent -> Nothing {- Note [Used names with interface not loaded] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ It's (just) possible to find a used Name whose interface hasn't been loaded: a) It might be a WiredInName; in that case we may not load its interface (although we could). b) It might be GHC.Real.fromRational, or GHC.Num.fromInteger These are seen as "used" by the renamer (if -XRebindableSyntax) is on), but the typechecker may discard their uses if in fact the in-scope fromRational is GHC.Read.fromRational, (see tcPat.tcOverloadedLit), and the typechecker sees that the type is fixed, say, to GHC.Base.Float (see Inst.lookupSimpleInst). In that obscure case it won't force the interface in. In both cases we simply don't permit deprecations; this is, after all, wired-in stuff. ********************************************************* * * GHCi support * * ********************************************************* A qualified name on the command line can refer to any module at all: we try to load the interface if we don't already have it, just as if there was an "import qualified M" declaration for every module. If we fail we just return Nothing, rather than bleating about "attempting to use module ‘D’ (./D.hs) which is not loaded" which is what loadSrcInterface does. Note [Safe Haskell and GHCi] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ We DONT do this Safe Haskell as we need to check imports. 
We can and should instead check the qualified import but at the moment this requires some refactoring so leave as a TODO -} lookupQualifiedNameGHCi :: RdrName -> RnM [Name] lookupQualifiedNameGHCi rdr_name = -- We want to behave as we would for a source file import here, -- and respect hiddenness of modules/packages, hence loadSrcInterface. do { dflags <- getDynFlags ; is_ghci <- getIsGHCi ; go_for_it dflags is_ghci } where go_for_it dflags is_ghci | Just (mod,occ) <- isQual_maybe rdr_name , is_ghci , gopt Opt_ImplicitImportQualified dflags -- Enables this GHCi behaviour , not (safeDirectImpsReq dflags) -- See Note [Safe Haskell and GHCi] = do { res <- loadSrcInterface_maybe doc mod False Nothing ; case res of Succeeded ifaces -> return [ name | iface <- ifaces , avail <- mi_exports iface , name <- availNames avail , nameOccName name == occ ] _ -> -- Either we couldn't load the interface, or -- we could but we didn't find the name in it do { traceRn (text "lookupQualifiedNameGHCi" <+> ppr rdr_name) ; return [] } } | otherwise = return [] doc = ptext (sLit "Need to find") <+> ppr rdr_name {- Note [Looking up signature names] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ lookupSigOccRn is used for type signatures and pragmas Is this valid? module A import M( f ) f :: Int -> Int f x = x It's clear that the 'f' in the signature must refer to A.f The Haskell98 report does not stipulate this, but it will! So we must treat the 'f' in the signature in the same way as the binding occurrence of 'f', using lookupBndrRn However, consider this case: import M( f ) f :: Int -> Int g x = x We don't want to say 'f' is out of scope; instead, we want to return the imported 'f', so that later on the reanamer will correctly report "misplaced type sig". Note [Signatures for top level things] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ data HsSigCtxt = ... | TopSigCtxt NameSet Bool | .... * The NameSet says what is bound in this group of bindings. We can't use isLocalGRE from the GlobalRdrEnv, because of this: f x = x $( ...some TH splice... ) f :: Int -> Int When we encounter the signature for 'f', the binding for 'f' will be in the GlobalRdrEnv, and will be a LocalDef. Yet the signature is mis-placed * The Bool says whether the signature is ok for a class method or record selector. 
Consider infix 3 `f` -- Yes, ok f :: C a => a -> a -- No, not ok class C a where f :: a -> a -} data HsSigCtxt = TopSigCtxt NameSet Bool -- At top level, binding these names -- See Note [Signatures for top level things] -- Bool <=> ok to give sig for -- class method or record selctor | LocalBindCtxt NameSet -- In a local binding, binding these names | ClsDeclCtxt Name -- Class decl for this class | InstDeclCtxt Name -- Intsance decl for this class | HsBootCtxt -- Top level of a hs-boot file | RoleAnnotCtxt NameSet -- A role annotation, with the names of all types -- in the group lookupSigOccRn :: HsSigCtxt -> Sig RdrName -> Located RdrName -> RnM (Located Name) lookupSigOccRn ctxt sig = lookupSigCtxtOccRn ctxt (hsSigDoc sig) -- | Lookup a name in relation to the names in a 'HsSigCtxt' lookupSigCtxtOccRn :: HsSigCtxt -> SDoc -- ^ description of thing we're looking up, -- like "type family" -> Located RdrName -> RnM (Located Name) lookupSigCtxtOccRn ctxt what = wrapLocM $ \ rdr_name -> do { mb_name <- lookupBindGroupOcc ctxt what rdr_name ; case mb_name of Left err -> do { addErr err; return (mkUnboundName rdr_name) } Right name -> return name } lookupBindGroupOcc :: HsSigCtxt -> SDoc -> RdrName -> RnM (Either MsgDoc Name) -- Looks up the RdrName, expecting it to resolve to one of the -- bound names passed in. If not, return an appropriate error message -- -- See Note [Looking up signature names] lookupBindGroupOcc ctxt what rdr_name | Just n <- isExact_maybe rdr_name = lookupExactOcc_either n -- allow for the possibility of missing Exacts; -- see Note [dataTcOccs and Exact Names] -- Maybe we should check the side conditions -- but it's a pain, and Exact things only show -- up when you know what you are doing | Just (rdr_mod, rdr_occ) <- isOrig_maybe rdr_name = do { n' <- lookupOrig rdr_mod rdr_occ ; return (Right n') } | otherwise = case ctxt of HsBootCtxt -> lookup_top (const True) True TopSigCtxt ns meth_ok -> lookup_top (`elemNameSet` ns) meth_ok RoleAnnotCtxt ns -> lookup_top (`elemNameSet` ns) False LocalBindCtxt ns -> lookup_group ns ClsDeclCtxt cls -> lookup_cls_op cls InstDeclCtxt cls -> lookup_cls_op cls where lookup_cls_op cls = do { env <- getGlobalRdrEnv ; let gres = lookupSubBndrGREs env (ParentIs cls) rdr_name ; case gres of [] -> return (Left (unknownSubordinateErr doc rdr_name)) (gre:_) -> return (Right (gre_name gre)) } -- If there is more than one local GRE for the -- same OccName 'f', that will be reported separately -- as a duplicate top-level binding for 'f' where doc = ptext (sLit "method of class") <+> quotes (ppr cls) lookup_top keep_me meth_ok = do { env <- getGlobalRdrEnv ; let all_gres = lookupGlobalRdrEnv env (rdrNameOcc rdr_name) ; case filter (keep_me . 
gre_name) all_gres of [] | null all_gres -> bale_out_with Outputable.empty | otherwise -> bale_out_with local_msg (gre:_) | ParentIs {} <- gre_par gre , not meth_ok -> bale_out_with sub_msg | otherwise -> return (Right (gre_name gre)) } lookup_group bound_names -- Look in the local envt (not top level) = do { local_env <- getLocalRdrEnv ; case lookupLocalRdrEnv local_env rdr_name of Just n | n `elemNameSet` bound_names -> return (Right n) | otherwise -> bale_out_with local_msg Nothing -> bale_out_with Outputable.empty } bale_out_with msg = return (Left (sep [ ptext (sLit "The") <+> what <+> ptext (sLit "for") <+> quotes (ppr rdr_name) , nest 2 $ ptext (sLit "lacks an accompanying binding")] $$ nest 2 msg)) local_msg = parens $ ptext (sLit "The") <+> what <+> ptext (sLit "must be given where") <+> quotes (ppr rdr_name) <+> ptext (sLit "is declared") sub_msg = parens $ ptext (sLit "You cannot give a") <+> what <+> ptext (sLit "for a record selector or class method") --------------- lookupLocalTcNames :: HsSigCtxt -> SDoc -> RdrName -> RnM [Name] -- GHC extension: look up both the tycon and data con or variable. -- Used for top-level fixity signatures and deprecations. -- Complain if neither is in scope. -- See Note [Fixity signature lookup] lookupLocalTcNames ctxt what rdr_name = do { mb_gres <- mapM lookup (dataTcOccs rdr_name) ; let (errs, names) = splitEithers mb_gres ; when (null names) $ addErr (head errs) -- Bleat about one only ; return names } where lookup = lookupBindGroupOcc ctxt what dataTcOccs :: RdrName -> [RdrName] -- Return both the given name and the same name promoted to the TcClsName -- namespace. This is useful when we aren't sure which we are looking at. -- See also Note [dataTcOccs and Exact Names] dataTcOccs rdr_name | isDataOcc occ || isVarOcc occ = [rdr_name, rdr_name_tc] | otherwise = [rdr_name] where occ = rdrNameOcc rdr_name rdr_name_tc = setRdrNameSpace rdr_name tcName {- Note [dataTcOccs and Exact Names] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Exact RdrNames can occur in code generated by Template Haskell, and generally those references are, well, exact. However, the TH `Name` type isn't expressive enough to always track the correct namespace information, so we sometimes get the right Unique but wrong namespace. Thus, we still have to do the double-lookup for Exact RdrNames. There is also an awkward situation for built-in syntax. Example in GHCi :info [] This parses as the Exact RdrName for nilDataCon, but we also want the list type constructor. Note that setRdrNameSpace on an Exact name requires the Name to be External, which it always is for built in syntax. ********************************************************* * * Fixities * * ********************************************************* Note [Fixity signature lookup] ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ A fixity declaration like infixr 2 ? can refer to a value-level operator, e.g.: (?) :: String -> String -> String or a type-level operator, like: data (?) a b = A a | B b so we extend the lookup of the reader name '?' to the TcClsName namespace, as well as the original namespace. The extended lookup is also used in other places, like resolution of deprecation declarations, and lookup of names in GHCi. 
-} -------------------------------- type MiniFixityEnv = FastStringEnv (Located Fixity) -- Mini fixity env for the names we're about -- to bind, in a single binding group -- -- It is keyed by the *FastString*, not the *OccName*, because -- the single fixity decl infix 3 T -- affects both the data constructor T and the type constrctor T -- -- We keep the location so that if we find -- a duplicate, we can report it sensibly -------------------------------- -- Used for nested fixity decls to bind names along with their fixities. -- the fixities are given as a UFM from an OccName's FastString to a fixity decl addLocalFixities :: MiniFixityEnv -> [Name] -> RnM a -> RnM a addLocalFixities mini_fix_env names thing_inside = extendFixityEnv (mapMaybe find_fixity names) thing_inside where find_fixity name = case lookupFsEnv mini_fix_env (occNameFS occ) of Just (L _ fix) -> Just (name, FixItem occ fix) Nothing -> Nothing where occ = nameOccName name {- -------------------------------- lookupFixity is a bit strange. * Nested local fixity decls are put in the local fixity env, which we find with getFixtyEnv * Imported fixities are found in the HIT or PIT * Top-level fixity decls in this module may be for Names that are either Global (constructors, class operations) or Local/Exported (everything else) (See notes with RnNames.getLocalDeclBinders for why we have this split.) We put them all in the local fixity environment -} lookupFixityRn :: Name -> RnM Fixity lookupFixityRn name | isUnboundName name = return (Fixity minPrecedence InfixL) -- Minimise errors from ubound names; eg -- a>0 `foo` b>0 -- where 'foo' is not in scope, should not give an error (Trac #7937) | otherwise = do { local_fix_env <- getFixityEnv ; case lookupNameEnv local_fix_env name of { Just (FixItem _ fix) -> return fix ; Nothing -> do { this_mod <- getModule ; if nameIsLocalOrFrom this_mod name -- Local (and interactive) names are all in the -- fixity env, and don't have entries in the HPT then return defaultFixity else lookup_imported } } } where lookup_imported -- For imported names, we have to get their fixities by doing a -- loadInterfaceForName, and consulting the Ifaces that comes back -- from that, because the interface file for the Name might not -- have been loaded yet. Why not? Suppose you import module A, -- which exports a function 'f', thus; -- module CurrentModule where -- import A( f ) -- module A( f ) where -- import B( f ) -- Then B isn't loaded right away (after all, it's possible that -- nothing from B will be used). When we come across a use of -- 'f', we need to know its fixity, and it's then, and only -- then, that we load B.hi. That is what's happening here. -- -- loadInterfaceForName will find B.hi even if B is a hidden module, -- and that's what we want. = do { iface <- loadInterfaceForName doc name ; traceRn (text "lookupFixityRn: looking up name in iface cache and found:" <+> vcat [ppr name, ppr $ mi_fix_fn iface (nameOccName name)]) ; return (mi_fix_fn iface (nameOccName name)) } doc = ptext (sLit "Checking fixity for") <+> ppr name --------------- lookupTyFixityRn :: Located Name -> RnM Fixity lookupTyFixityRn (L _ n) = lookupFixityRn n {- ************************************************************************ * * Rebindable names Dealing with rebindable syntax is driven by the Opt_RebindableSyntax dynamic flag. 
In "deriving" code we don't want to use rebindable syntax so we switch off the flag locally * * ************************************************************************ Haskell 98 says that when you say "3" you get the "fromInteger" from the Standard Prelude, regardless of what is in scope. However, to experiment with having a language that is less coupled to the standard prelude, we're trying a non-standard extension that instead gives you whatever "Prelude.fromInteger" happens to be in scope. Then you can import Prelude () import MyPrelude as Prelude to get the desired effect. At the moment this just happens for * fromInteger, fromRational on literals (in expressions and patterns) * negate (in expressions) * minus (arising from n+k patterns) * "do" notation We store the relevant Name in the HsSyn tree, in * HsIntegral/HsFractional/HsIsString * NegApp * NPlusKPat * HsDo respectively. Initially, we just store the "standard" name (PrelNames.fromIntegralName, fromRationalName etc), but the renamer changes this to the appropriate user name if Opt_NoImplicitPrelude is on. That is what lookupSyntaxName does. We treat the orignal (standard) names as free-vars too, because the type checker checks the type of the user thing against the type of the standard thing. -} lookupIfThenElse :: RnM (Maybe (SyntaxExpr Name), FreeVars) -- Different to lookupSyntaxName because in the non-rebindable -- case we desugar directly rather than calling an existing function -- Hence the (Maybe (SyntaxExpr Name)) return type lookupIfThenElse = do { rebind <- xoptM Opt_RebindableSyntax ; if not rebind then return (Nothing, emptyFVs) else do { ite <- lookupOccRn (mkVarUnqual (fsLit "ifThenElse")) ; return (Just (HsVar ite), unitFV ite) } } lookupSyntaxName :: Name -- The standard name -> RnM (SyntaxExpr Name, FreeVars) -- Possibly a non-standard name lookupSyntaxName std_name = do { rebindable_on <- xoptM Opt_RebindableSyntax ; if not rebindable_on then return (HsVar std_name, emptyFVs) else -- Get the similarly named thing from the local environment do { usr_name <- lookupOccRn (mkRdrUnqual (nameOccName std_name)) ; return (HsVar usr_name, unitFV usr_name) } } lookupSyntaxNames :: [Name] -- Standard names -> RnM ([HsExpr Name], FreeVars) -- See comments with HsExpr.ReboundNames lookupSyntaxNames std_names = do { rebindable_on <- xoptM Opt_RebindableSyntax ; if not rebindable_on then return (map HsVar std_names, emptyFVs) else do { usr_names <- mapM (lookupOccRn . mkRdrUnqual . nameOccName) std_names ; return (map HsVar usr_names, mkFVs usr_names) } } {- ********************************************************* * * \subsection{Binding} * * ********************************************************* -} newLocalBndrRn :: Located RdrName -> RnM Name -- Used for non-top-level binders. These should -- never be qualified. 
newLocalBndrRn (L loc rdr_name) | Just name <- isExact_maybe rdr_name = return name -- This happens in code generated by Template Haskell -- See Note [Binders in Template Haskell] in Convert.lhs | otherwise = do { unless (isUnqual rdr_name) (addErrAt loc (badQualBndrErr rdr_name)) ; uniq <- newUnique ; return (mkInternalName uniq (rdrNameOcc rdr_name) loc) } newLocalBndrsRn :: [Located RdrName] -> RnM [Name] newLocalBndrsRn = mapM newLocalBndrRn --------------------- bindLocatedLocalsRn :: [Located RdrName] -> ([Name] -> RnM a) -> RnM a bindLocatedLocalsRn rdr_names_w_loc enclosed_scope = do { checkDupRdrNames rdr_names_w_loc ; checkShadowedRdrNames rdr_names_w_loc -- Make fresh Names and extend the environment ; names <- newLocalBndrsRn rdr_names_w_loc ; bindLocalNames names (enclosed_scope names) } bindLocalNames :: [Name] -> RnM a -> RnM a bindLocalNames names enclosed_scope = do { lcl_env <- getLclEnv ; let th_level = thLevel (tcl_th_ctxt lcl_env) th_bndrs' = extendNameEnvList (tcl_th_bndrs lcl_env) [ (n, (NotTopLevel, th_level)) | n <- names ] rdr_env' = extendLocalRdrEnvList (tcl_rdr lcl_env) names ; setLclEnv (lcl_env { tcl_th_bndrs = th_bndrs' , tcl_rdr = rdr_env' }) enclosed_scope } bindLocalNamesFV :: [Name] -> RnM (a, FreeVars) -> RnM (a, FreeVars) bindLocalNamesFV names enclosed_scope = do { (result, fvs) <- bindLocalNames names enclosed_scope ; return (result, delFVs names fvs) } ------------------------------------- -- binLocalsFVRn is the same as bindLocalsRn -- except that it deals with free vars bindLocatedLocalsFV :: [Located RdrName] -> ([Name] -> RnM (a,FreeVars)) -> RnM (a, FreeVars) bindLocatedLocalsFV rdr_names enclosed_scope = bindLocatedLocalsRn rdr_names $ \ names -> do (thing, fvs) <- enclosed_scope names return (thing, delFVs names fvs) ------------------------------------- extendTyVarEnvFVRn :: [Name] -> RnM (a, FreeVars) -> RnM (a, FreeVars) -- This function is used only in rnSourceDecl on InstDecl extendTyVarEnvFVRn tyvars thing_inside = bindLocalNamesFV tyvars thing_inside ------------------------------------- checkDupRdrNames :: [Located RdrName] -> RnM () -- Check for duplicated names in a binding group checkDupRdrNames rdr_names_w_loc = mapM_ (dupNamesErr getLoc) dups where (_, dups) = removeDups (\n1 n2 -> unLoc n1 `compare` unLoc n2) rdr_names_w_loc checkDupNames :: [Name] -> RnM () -- Check for duplicated names in a binding group checkDupNames names = check_dup_names (filterOut isSystemName names) -- See Note [Binders in Template Haskell] in Convert check_dup_names :: [Name] -> RnM () check_dup_names names = mapM_ (dupNamesErr nameSrcSpan) dups where (_, dups) = removeDups (\n1 n2 -> nameOccName n1 `compare` nameOccName n2) names --------------------- checkShadowedRdrNames :: [Located RdrName] -> RnM () checkShadowedRdrNames loc_rdr_names = do { envs <- getRdrEnvs ; checkShadowedOccs envs get_loc_occ filtered_rdrs } where filtered_rdrs = filterOut (isExact . 
unLoc) loc_rdr_names -- See Note [Binders in Template Haskell] in Convert get_loc_occ (L loc rdr) = (loc,rdrNameOcc rdr) checkDupAndShadowedNames :: (GlobalRdrEnv, LocalRdrEnv) -> [Name] -> RnM () checkDupAndShadowedNames envs names = do { check_dup_names filtered_names ; checkShadowedOccs envs get_loc_occ filtered_names } where filtered_names = filterOut isSystemName names -- See Note [Binders in Template Haskell] in Convert get_loc_occ name = (nameSrcSpan name, nameOccName name) ------------------------------------- checkShadowedOccs :: (GlobalRdrEnv, LocalRdrEnv) -> (a -> (SrcSpan, OccName)) -> [a] -> RnM () checkShadowedOccs (global_env,local_env) get_loc_occ ns = whenWOptM Opt_WarnNameShadowing $ do { traceRn (text "shadow" <+> ppr (map get_loc_occ ns)) ; mapM_ check_shadow ns } where check_shadow n | startsWithUnderscore occ = return () -- Do not report shadowing for "_x" -- See Trac #3262 | Just n <- mb_local = complain [ptext (sLit "bound at") <+> ppr (nameSrcLoc n)] | otherwise = do { gres' <- filterM is_shadowed_gre gres ; complain (map pprNameProvenance gres') } where (loc,occ) = get_loc_occ n mb_local = lookupLocalRdrOcc local_env occ gres = lookupGRE_RdrName (mkRdrUnqual occ) global_env -- Make an Unqualified RdrName and look that up, so that -- we don't find any GREs that are in scope qualified-only complain [] = return () complain pp_locs = addWarnAt loc (shadowedNameWarn occ pp_locs) is_shadowed_gre :: GlobalRdrElt -> RnM Bool -- Returns False for record selectors that are shadowed, when -- punning or wild-cards are on (cf Trac #2723) is_shadowed_gre gre@(GRE { gre_par = ParentIs _ }) = do { dflags <- getDynFlags ; if (xopt Opt_RecordPuns dflags || xopt Opt_RecordWildCards dflags) then do { is_fld <- is_rec_fld gre; return (not is_fld) } else return True } is_shadowed_gre _other = return True is_rec_fld gre -- Return True for record selector ids | isLocalGRE gre = do { RecFields _ fld_set <- getRecFieldEnv ; return (gre_name gre `elemNameSet` fld_set) } | otherwise = do { sel_id <- tcLookupField (gre_name gre) ; return (isRecordSelector sel_id) } {- ************************************************************************ * * What to do when a lookup fails * * ************************************************************************ -} data WhereLooking = WL_Any -- Any binding | WL_Global -- Any top-level binding (local or imported) | WL_LocalTop -- Any top-level binding in this module reportUnboundName :: RdrName -> RnM Name reportUnboundName rdr = unboundName WL_Any rdr unboundName :: WhereLooking -> RdrName -> RnM Name unboundName wl rdr = unboundNameX wl rdr Outputable.empty unboundNameX :: WhereLooking -> RdrName -> SDoc -> RnM Name unboundNameX where_look rdr_name extra = do { show_helpful_errors <- goptM Opt_HelpfulErrors ; let what = pprNonVarNameSpace (occNameSpace (rdrNameOcc rdr_name)) err = unknownNameErr what rdr_name $$ extra ; if not show_helpful_errors then addErr err else do { suggestions <- unknownNameSuggestErr where_look rdr_name ; addErr (err $$ suggestions) } ; return (mkUnboundName rdr_name) } unknownNameErr :: SDoc -> RdrName -> SDoc unknownNameErr what rdr_name = vcat [ hang (ptext (sLit "Not in scope:")) 2 (what <+> quotes (ppr rdr_name)) , extra ] where extra | rdr_name == forall_tv_RDR = perhapsForallMsg | otherwise = Outputable.empty type HowInScope = Either SrcSpan ImpDeclSpec -- Left loc => locally bound at loc -- Right ispec => imported as specified by ispec unknownNameSuggestErr :: WhereLooking -> RdrName -> RnM SDoc unknownNameSuggestErr 
where_look tried_rdr_name = do { local_env <- getLocalRdrEnv ; global_env <- getGlobalRdrEnv ; dflags <- getDynFlags ; let all_possibilities :: [(String, (RdrName, HowInScope))] all_possibilities = [ (showPpr dflags r, (r, Left loc)) | (r,loc) <- local_possibilities local_env ] ++ [ (showPpr dflags r, rp) | (r, rp) <- global_possibilities global_env ] suggest = fuzzyLookup (showPpr dflags tried_rdr_name) all_possibilities perhaps = ptext (sLit "Perhaps you meant") extra_err = case suggest of [] -> Outputable.empty [p] -> perhaps <+> pp_item p ps -> sep [ perhaps <+> ptext (sLit "one of these:") , nest 2 (pprWithCommas pp_item ps) ] ; return extra_err } where pp_item :: (RdrName, HowInScope) -> SDoc pp_item (rdr, Left loc) = pp_ns rdr <+> quotes (ppr rdr) <+> loc' -- Locally defined where loc' = case loc of UnhelpfulSpan l -> parens (ppr l) RealSrcSpan l -> parens (ptext (sLit "line") <+> int (srcSpanStartLine l)) pp_item (rdr, Right is) = pp_ns rdr <+> quotes (ppr rdr) <+> -- Imported parens (ptext (sLit "imported from") <+> ppr (is_mod is)) pp_ns :: RdrName -> SDoc pp_ns rdr | ns /= tried_ns = pprNameSpace ns | otherwise = Outputable.empty where ns = rdrNameSpace rdr tried_occ = rdrNameOcc tried_rdr_name tried_is_sym = isSymOcc tried_occ tried_ns = occNameSpace tried_occ tried_is_qual = isQual tried_rdr_name correct_name_space occ = nameSpacesRelated (occNameSpace occ) tried_ns && isSymOcc occ == tried_is_sym -- Treat operator and non-operators as non-matching -- This heuristic avoids things like -- Not in scope 'f'; perhaps you meant '+' (from Prelude) local_ok = case where_look of { WL_Any -> True; _ -> False } local_possibilities :: LocalRdrEnv -> [(RdrName, SrcSpan)] local_possibilities env | tried_is_qual = [] | not local_ok = [] | otherwise = [ (mkRdrUnqual occ, nameSrcSpan name) | name <- localRdrEnvElts env , let occ = nameOccName name , correct_name_space occ] gre_ok :: GlobalRdrElt -> Bool gre_ok = case where_look of WL_LocalTop -> isLocalGRE _ -> \_ -> True global_possibilities :: GlobalRdrEnv -> [(RdrName, (RdrName, HowInScope))] global_possibilities global_env | tried_is_qual = [ (rdr_qual, (rdr_qual, how)) | gre <- globalRdrEnvElts global_env , gre_ok gre , let name = gre_name gre occ = nameOccName name , correct_name_space occ , (mod, how) <- quals_in_scope name (gre_prov gre) , let rdr_qual = mkRdrQual mod occ ] | otherwise = [ (rdr_unqual, pair) | gre <- globalRdrEnvElts global_env , gre_ok gre , let name = gre_name gre prov = gre_prov gre occ = nameOccName name rdr_unqual = mkRdrUnqual occ , correct_name_space occ , pair <- case (unquals_in_scope name prov, quals_only occ prov) of (how:_, _) -> [ (rdr_unqual, how) ] ([], pr:_) -> [ pr ] -- See Note [Only-quals] ([], []) -> [] ] -- Note [Only-quals] -- The second alternative returns those names with the same -- OccName as the one we tried, but live in *qualified* imports -- e.g. if you have: -- -- > import qualified Data.Map as Map -- > foo :: Map -- -- then we suggest @Map.Map@. 
-------------------- unquals_in_scope :: Name -> Provenance -> [HowInScope] unquals_in_scope n LocalDef = [ Left (nameSrcSpan n) ] unquals_in_scope _ (Imported is) = [ Right ispec | i <- is, let ispec = is_decl i , not (is_qual ispec) ] -------------------- quals_in_scope :: Name -> Provenance -> [(ModuleName, HowInScope)] -- Ones for which the qualified version is in scope quals_in_scope n LocalDef = case nameModule_maybe n of Nothing -> [] Just m -> [(moduleName m, Left (nameSrcSpan n))] quals_in_scope _ (Imported is) = [ (is_as ispec, Right ispec) | i <- is, let ispec = is_decl i ] -------------------- quals_only :: OccName -> Provenance -> [(RdrName, HowInScope)] -- Ones for which *only* the qualified version is in scope quals_only _ LocalDef = [] quals_only occ (Imported is) = [ (mkRdrQual (is_as ispec) occ, Right ispec) | i <- is, let ispec = is_decl i, is_qual ispec ] {- ************************************************************************ * * \subsection{Free variable manipulation} * * ************************************************************************ -} -- A useful utility addFvRn :: FreeVars -> RnM (thing, FreeVars) -> RnM (thing, FreeVars) addFvRn fvs1 thing_inside = do { (res, fvs2) <- thing_inside ; return (res, fvs1 `plusFV` fvs2) } mapFvRn :: (a -> RnM (b, FreeVars)) -> [a] -> RnM ([b], FreeVars) mapFvRn f xs = do stuff <- mapM f xs case unzip stuff of (ys, fvs_s) -> return (ys, plusFVs fvs_s) mapMaybeFvRn :: (a -> RnM (b, FreeVars)) -> Maybe a -> RnM (Maybe b, FreeVars) mapMaybeFvRn _ Nothing = return (Nothing, emptyFVs) mapMaybeFvRn f (Just x) = do { (y, fvs) <- f x; return (Just y, fvs) } -- because some of the rename functions are CPSed: -- maps the function across the list from left to right; -- collects all the free vars into one set mapFvRnCPS :: (a -> (b -> RnM c) -> RnM c) -> [a] -> ([b] -> RnM c) -> RnM c mapFvRnCPS _ [] cont = cont [] mapFvRnCPS f (x:xs) cont = f x $ \ x' -> mapFvRnCPS f xs $ \ xs' -> cont (x':xs') {- ************************************************************************ * * \subsection{Envt utility functions} * * ************************************************************************ -} warnUnusedTopBinds :: [GlobalRdrElt] -> RnM () warnUnusedTopBinds gres = whenWOptM Opt_WarnUnusedBinds $ do env <- getGblEnv let isBoot = tcg_src env == HsBootFile let noParent gre = case gre_par gre of NoParent -> True ParentIs _ -> False -- Don't warn about unused bindings with parents in -- .hs-boot files, as you are sometimes required to give -- unused bindings (trac #3449). -- HOWEVER, in a signature file, you are never obligated to put a -- definition in the main text. Thus, if you define something -- and forget to export it, we really DO want to warn. 
gres' = if isBoot then filter noParent gres else gres warnUnusedGREs gres' warnUnusedLocalBinds, warnUnusedMatches :: [Name] -> FreeVars -> RnM () warnUnusedLocalBinds = check_unused Opt_WarnUnusedBinds warnUnusedMatches = check_unused Opt_WarnUnusedMatches check_unused :: WarningFlag -> [Name] -> FreeVars -> RnM () check_unused flag bound_names used_names = whenWOptM flag (warnUnusedLocals (filterOut (`elemNameSet` used_names) bound_names)) ------------------------- -- Helpers warnUnusedGREs :: [GlobalRdrElt] -> RnM () warnUnusedGREs gres = warnUnusedBinds [(n,p) | GRE {gre_name = n, gre_prov = p} <- gres] warnUnusedLocals :: [Name] -> RnM () warnUnusedLocals names = warnUnusedBinds [(n,LocalDef) | n<-names] warnUnusedBinds :: [(Name,Provenance)] -> RnM () warnUnusedBinds names = mapM_ warnUnusedName (filter reportable names) where reportable (name,_) | isWiredInName name = False -- Don't report unused wired-in names -- Otherwise we get a zillion warnings -- from Data.Tuple | otherwise = not (startsWithUnderscore (nameOccName name)) ------------------------- warnUnusedName :: (Name, Provenance) -> RnM () warnUnusedName (name, LocalDef) = addUnusedWarning name (nameSrcSpan name) (ptext (sLit "Defined but not used")) warnUnusedName (name, Imported is) = mapM_ warn is where warn spec = addUnusedWarning name span msg where span = importSpecLoc spec pp_mod = quotes (ppr (importSpecModule spec)) msg = ptext (sLit "Imported from") <+> pp_mod <+> ptext (sLit "but not used") addUnusedWarning :: Name -> SrcSpan -> SDoc -> RnM () addUnusedWarning name span msg = addWarnAt span $ sep [msg <> colon, nest 2 $ pprNonVarNameSpace (occNameSpace (nameOccName name)) <+> quotes (ppr name)] addNameClashErrRn :: RdrName -> [GlobalRdrElt] -> RnM () addNameClashErrRn rdr_name gres | all isLocalGRE gres -- If there are two or more *local* defns, we'll have reported = return () -- that already, and we don't want an error cascade | otherwise = addErr (vcat [ptext (sLit "Ambiguous occurrence") <+> quotes (ppr rdr_name), ptext (sLit "It could refer to") <+> vcat (msg1 : msgs)]) where (np1:nps) = gres msg1 = ptext (sLit "either") <+> mk_ref np1 msgs = [ptext (sLit " or") <+> mk_ref np | np <- nps] mk_ref gre = sep [quotes (ppr (gre_name gre)) <> comma, pprNameProvenance gre] shadowedNameWarn :: OccName -> [SDoc] -> SDoc shadowedNameWarn occ shadowed_locs = sep [ptext (sLit "This binding for") <+> quotes (ppr occ) <+> ptext (sLit "shadows the existing binding") <> plural shadowed_locs, nest 2 (vcat shadowed_locs)] perhapsForallMsg :: SDoc perhapsForallMsg = vcat [ ptext (sLit "Perhaps you intended to use ExplicitForAll or similar flag") , ptext (sLit "to enable explicit-forall syntax: forall <tvs>. 
<type>")] unknownSubordinateErr :: SDoc -> RdrName -> SDoc unknownSubordinateErr doc op -- Doc is "method of class" or -- "field of constructor" = quotes (ppr op) <+> ptext (sLit "is not a (visible)") <+> doc badOrigBinding :: RdrName -> SDoc badOrigBinding name = ptext (sLit "Illegal binding of built-in syntax:") <+> ppr (rdrNameOcc name) -- The rdrNameOcc is because we don't want to print Prelude.(,) dupNamesErr :: Outputable n => (n -> SrcSpan) -> [n] -> RnM () dupNamesErr get_loc names = addErrAt big_loc $ vcat [ptext (sLit "Conflicting definitions for") <+> quotes (ppr (head names)), locations] where locs = map get_loc names big_loc = foldr1 combineSrcSpans locs locations = ptext (sLit "Bound at:") <+> vcat (map ppr (sort locs)) kindSigErr :: Outputable a => a -> SDoc kindSigErr thing = hang (ptext (sLit "Illegal kind signature for") <+> quotes (ppr thing)) 2 (ptext (sLit "Perhaps you intended to use KindSignatures")) badQualBndrErr :: RdrName -> SDoc badQualBndrErr rdr_name = ptext (sLit "Qualified name in binding position:") <+> ppr rdr_name opDeclErr :: RdrName -> SDoc opDeclErr n = hang (ptext (sLit "Illegal declaration of a type or class operator") <+> quotes (ppr n)) 2 (ptext (sLit "Use TypeOperators to declare operators in type and declarations")) checkTupSize :: Int -> RnM () checkTupSize tup_size | tup_size <= mAX_TUPLE_SIZE = return () | otherwise = addErr (sep [ptext (sLit "A") <+> int tup_size <> ptext (sLit "-tuple is too large for GHC"), nest 2 (parens (ptext (sLit "max size is") <+> int mAX_TUPLE_SIZE)), nest 2 (ptext (sLit "Workaround: use nested tuples or define a data type"))]) {- ************************************************************************ * * \subsection{Contexts for renaming errors} * * ************************************************************************ -} data HsDocContext = TypeSigCtx SDoc | PatCtx | SpecInstSigCtx | DefaultDeclCtx | ForeignDeclCtx (Located RdrName) | DerivDeclCtx | RuleCtx FastString | TyDataCtx (Located RdrName) | TySynCtx (Located RdrName) | TyFamilyCtx (Located RdrName) | ConDeclCtx [Located RdrName] | ClassDeclCtx (Located RdrName) | ExprWithTySigCtx | TypBrCtx | HsTypeCtx | GHCiCtx | SpliceTypeCtx (LHsType RdrName) | ClassInstanceCtx | VectDeclCtx (Located RdrName) | GenericCtx SDoc -- Maybe we want to use this more! 
docOfHsDocContext :: HsDocContext -> SDoc docOfHsDocContext (GenericCtx doc) = doc docOfHsDocContext (TypeSigCtx doc) = text "In the type signature for" <+> doc docOfHsDocContext PatCtx = text "In a pattern type-signature" docOfHsDocContext SpecInstSigCtx = text "In a SPECIALISE instance pragma" docOfHsDocContext DefaultDeclCtx = text "In a `default' declaration" docOfHsDocContext (ForeignDeclCtx name) = ptext (sLit "In the foreign declaration for") <+> ppr name docOfHsDocContext DerivDeclCtx = text "In a deriving declaration" docOfHsDocContext (RuleCtx name) = text "In the transformation rule" <+> ftext name docOfHsDocContext (TyDataCtx tycon) = text "In the data type declaration for" <+> quotes (ppr tycon) docOfHsDocContext (TySynCtx name) = text "In the declaration for type synonym" <+> quotes (ppr name) docOfHsDocContext (TyFamilyCtx name) = text "In the declaration for type family" <+> quotes (ppr name) docOfHsDocContext (ConDeclCtx [name]) = text "In the definition of data constructor" <+> quotes (ppr name) docOfHsDocContext (ConDeclCtx names) = text "In the definition of data constructors" <+> interpp'SP names docOfHsDocContext (ClassDeclCtx name) = text "In the declaration for class" <+> ppr name docOfHsDocContext ExprWithTySigCtx = text "In an expression type signature" docOfHsDocContext TypBrCtx = ptext (sLit "In a Template-Haskell quoted type") docOfHsDocContext HsTypeCtx = text "In a type argument" docOfHsDocContext GHCiCtx = ptext (sLit "In GHCi input") docOfHsDocContext (SpliceTypeCtx hs_ty) = ptext (sLit "In the spliced type") <+> ppr hs_ty docOfHsDocContext ClassInstanceCtx = ptext (sLit "TcSplice.reifyInstances") docOfHsDocContext (VectDeclCtx tycon) = ptext (sLit "In the VECTORISE pragma for type constructor") <+> quotes (ppr tycon)
alexander-at-github/eta
compiler/ETA/Rename/RnEnv.hs
Haskell
bsd-3-clause
81,249
{-# OPTIONS_GHC -W #-}
module Type.Inference where

import qualified Data.Map as Map

import qualified Type.Type as T
import qualified Type.Environment as Env
import qualified Type.Constrain.Expression as TcExpr
import qualified Type.Solve as Solve

import SourceSyntax.Module as Module
import SourceSyntax.Annotation (noneNoDocs)
import SourceSyntax.Type (Type)
import Text.PrettyPrint
import qualified Type.State as TS
import qualified Type.ExtraChecks as Check
import Control.Monad.State (execStateT, forM)
import Control.Monad.Error (runErrorT, liftIO)
import qualified Type.Alias as Alias

import System.IO.Unsafe
-- Possible to switch over to the ST monad instead of
-- the IO monad. I don't think that'd be worthwhile.

infer :: Interfaces -> MetadataModule -> Either [Doc] (Map.Map String Type)
infer interfaces modul = unsafePerformIO $ do
    env <- Env.initialEnvironment
             (datatypes modul ++ concatMap iAdts (Map.elems interfaces))
             (aliases modul ++ concatMap iAliases (Map.elems interfaces))
    ctors <- forM (Map.keys (Env.constructor env)) $ \name -> do
               (_, vars, args, result) <- Env.freshDataScheme env name
               return (name, (vars, foldr (T.==>) result args))
    attemptConstraint <- runErrorT $ do
        importedVars <-
            forM (concatMap (Map.toList . iTypes) $ Map.elems interfaces) $ \(name,tipe) ->
                (,) name `fmap` Env.instantiateType env tipe Map.empty
        let allTypes = ctors ++ importedVars
            vars = concatMap (fst . snd) allTypes
            header = Map.map snd (Map.fromList allTypes)
            environ = noneNoDocs . T.CLet [ T.Scheme vars [] (noneNoDocs T.CTrue) header ]
        fvar <- liftIO $ T.var T.Flexible
        c <- TcExpr.constrain env (program modul) (T.VarN fvar)
        return (header, environ c)
    case attemptConstraint of
      Left err -> return $ Left err
      Right (header, constraint) -> do
        state <- execStateT (Solve.solve constraint) TS.initialState
        let rules = Alias.rules interfaces (aliases modul) (imports modul)
        case TS.sErrors state of
          errors@(_:_) -> Left `fmap` sequence (map ($ rules) (reverse errors))
          [] -> case Check.portTypes rules (program modul) of
                  Right () -> Check.mainType rules (Map.difference (TS.sSavedEnv state) header)
                  Left err -> return (Left err)
deadfoxygrandpa/Elm
compiler/Type/Inference.hs
Haskell
bsd-3-clause
2,377
{-# LANGUAGE LambdaCase #-} {-# LANGUAGE MultiWayIf #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE OverloadedStrings #-} module Lazyfoo.Lesson15 (main) where import Control.Applicative import Control.Monad import Data.Foldable import Data.Monoid import Data.Maybe import Foreign.C.Types import Linear import Linear.Affine import SDL (($=)) import qualified SDL import Paths_sdl2 (getDataFileName) screenWidth, screenHeight :: CInt (screenWidth, screenHeight) = (640, 480) data Texture = Texture SDL.Texture (V2 CInt) loadTexture :: SDL.Renderer -> FilePath -> IO Texture loadTexture r filePath = do surface <- getDataFileName filePath >>= SDL.loadBMP size <- SDL.surfaceDimensions surface format <- SDL.surfaceFormat surface key <- SDL.mapRGB format (V3 0 maxBound maxBound) SDL.colorKey surface $= Just key t <- SDL.createTextureFromSurface r surface SDL.freeSurface surface return (Texture t size) renderTexture :: SDL.Renderer -> Texture -> Point V2 CInt -> Maybe (SDL.Rectangle CInt) -> Maybe CDouble -> Maybe (Point V2 CInt) -> Maybe (V2 Bool) -> IO () renderTexture r (Texture t size) xy clip theta center flips = let dstSize = maybe size (\(SDL.Rectangle _ size') -> size') clip in SDL.renderCopyEx r t clip (Just (SDL.Rectangle xy dstSize)) (fromMaybe 0 theta) center (fromMaybe (pure False) flips) textureSize :: Texture -> V2 CInt textureSize (Texture _ sz) = sz main :: IO () main = do SDL.initialize [SDL.InitVideo] SDL.HintRenderScaleQuality $= SDL.ScaleLinear do renderQuality <- SDL.get SDL.HintRenderScaleQuality when (renderQuality /= SDL.ScaleLinear) $ putStrLn "Warning: Linear texture filtering not enabled!" window <- SDL.createWindow "SDL Tutorial" SDL.defaultWindow {SDL.windowInitialSize = V2 screenWidth screenHeight} SDL.showWindow window renderer <- SDL.createRenderer window (-1) (SDL.RendererConfig { SDL.rendererAccelerated = True , SDL.rendererSoftware = False , SDL.rendererTargetTexture = False , SDL.rendererPresentVSync = True }) SDL.renderDrawColor renderer $= V4 maxBound maxBound maxBound maxBound arrowTexture <- loadTexture renderer "examples/lazyfoo/arrow.bmp" let loop theta flips = do let collectEvents = do e <- SDL.pollEvent case e of Nothing -> return [] Just e' -> (e' :) <$> collectEvents events <- collectEvents let (Any quit, Sum phi, Last newFlips) = foldMap (\case SDL.QuitEvent -> (Any True, mempty, mempty) SDL.KeyboardEvent{..} -> (\(x,y) -> (mempty, x,y)) $ if | keyboardEventKeyMotion == SDL.KeyDown -> let scancode = SDL.keysymScancode keyboardEventKeysym in if | scancode == SDL.ScancodeQ -> (mempty, Last (Just (V2 True False))) | scancode == SDL.ScancodeW -> (mempty, Last (Just (V2 False False))) | scancode == SDL.ScancodeE -> (mempty, Last (Just (V2 False True))) | scancode == SDL.ScancodeA -> (Sum (-60), mempty) | scancode == SDL.ScancodeD -> (Sum 60, mempty) | otherwise -> mempty | otherwise -> mempty _ -> mempty) $ map SDL.eventPayload events SDL.renderDrawColor renderer $= V4 maxBound maxBound maxBound maxBound SDL.renderClear renderer let theta' = theta + phi flips' = fromMaybe flips newFlips renderTexture renderer arrowTexture (P (fmap (`div` 2) (V2 screenWidth screenHeight) - fmap (`div` 2) (textureSize arrowTexture))) Nothing (Just theta') Nothing (Just flips') SDL.renderPresent renderer unless quit (loop theta' flips') loop 0 (pure False) SDL.destroyRenderer renderer SDL.destroyWindow window SDL.quit
svenkeidel/sdl2
examples/lazyfoo/Lesson15.hs
Haskell
bsd-3-clause
4,292
{-# LANGUAGE CPP #-} {-# LANGUAGE DataKinds #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE PolyKinds #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE TypeApplications #-} {-# LANGUAGE TypeFamilies #-} {-# LANGUAGE TypeInType #-} {-# LANGUAGE TypeOperators #-} module Numeric.Subroutine.Sort ( SortBy (..), sortBy, sort , SortableDataFrame ) where import Control.Monad import Control.Monad.ST import Control.Monad.ST.Unsafe import Data.Kind import Data.Type.Lits import Numeric.DataFrame.Internal.PrimArray import Numeric.DataFrame.ST import Numeric.DataFrame.Type import Numeric.Dimensions import Unsafe.Coerce -- | Sort a @DataFrame@ along the first dimension. -- -- Note: the elements (which are of type @DataFrame t ns@) are compared -- lexicographically. sort :: forall (t :: Type) n ns . ( SortableDataFrame t (n ': ns), Ord t, SortBy n) => DataFrame t (n ': ns) -> DataFrame t (n ': ns) sort df = case dimKind @(KindOf n) of DimKNat -> case uniqueOrCumulDims df of Left _ -> df -- all equal, no need for sorting. Right steps | SomeDims (Dims :: Dims ms) <- fromSteps steps , Dict <- (unsafeCoerce (Dict @(ns ~ ns)) :: Dict (ns ~ ms)) -> sortBy compare df | otherwise -> error "sort/DimNat/uniqueOrCumulDims -- impossible pattern" DimKXNat | XFrame (df' :: DataFrame t ms) <- df , D :* Dims <- dims @ms -> XFrame (sortBy compare df') | otherwise -> error "sort/DimXNat -- impossible pattern" {-# ANN sort "HLint: ignore Use sort" #-} -- | Sort a @DataFrame@ along the first dimension using given comparison function. sortBy :: forall (t :: Type) n ns . ( SortableDataFrame t (n ': ns) , SortBy n) => (DataFrame t ns -> DataFrame t ns -> Ordering) -> DataFrame t (n ': ns) -> DataFrame t (n ': ns) sortBy cmp df = case dimKind @(KindOf n) of DimKNat -> runST $ flip (withThawDataFrame (const $ pure df)) df $ \mdf -> do sortByInplace (\x y -> cmp <$> unsafeFreezeDataFrame x <*> unsafeFreezeDataFrame y) mdf unsafeFreezeDataFrame mdf DimKXNat | XFrame dfN <- df , D :* Dims <- dims `inSpaceOf` dfN -> XFrame (sortBy (\a b -> cmp (XFrame a) (XFrame b)) dfN) | otherwise -> error "sortBy/DimXNat -- impossible pattern" -- | The required context for sorting a DataFrame is slightly different -- for @Nat@ and @XNat@ indexed arrays. -- This type family abstracts away the difference. type family SortableDataFrame (t :: Type) (ns :: [k]) :: Constraint where SortableDataFrame t ((n ': ns) :: [Nat]) = (PrimArray t (DataFrame t ns), PrimArray t (DataFrame t (n ': ns))) SortableDataFrame t ((n ': ns) :: [XNat]) = PrimBytes t class BoundedDim n => SortBy n where -- | Note, "Inplace" here means the input frame is modified. -- It does not mean the algorithm does not use extra space (it does use). sortByInplace :: PrimBytes t => (STDataFrame s t ns -> STDataFrame s t ns -> ST s Ordering) -- ^ must not modify state! 
-> STDataFrame s t (n ': ns) -> ST s () instance SortBy 0 where sortByInplace _ _ = pure () instance SortBy 1 where sortByInplace _ _ = pure () instance SortBy 2 where sortByInplace cmp xs = cmp a b >>= \case GT -> do tmp <- oneMoreDataFrame a swapDF tmp a b _ -> pure () where a = subDataFrameView' (Idx 0 :* U) xs b = subDataFrameView' (Idx 1 :* U) xs instance SortBy 3 where sortByInplace cmp xs = join $ go <$> unsafeDupableInterleaveST (oneMoreDataFrame a) <*> cmp a b <*> cmp b c <*> cmp a c where a = subDataFrameView' (Idx 0 :* U) xs b = subDataFrameView' (Idx 1 :* U) xs c = subDataFrameView' (Idx 2 :* U) xs go tmp GT LT GT -- b < c < a = swap3DF tmp a b c go tmp LT GT GT -- c < a < b = swap3DF tmp b a c go tmp GT bc ac | bc /= GT && ac /= GT = swapDF tmp a b go tmp ab GT ac | ab /= GT && ac /= GT = swapDF tmp b c go tmp ab bc GT | ab /= LT && bc /= LT = swapDF tmp a c go _ _ _ _ = pure () instance SortBy 4 where sortByInplace cmp xs = do tmp <- unsafeDupableInterleaveST (oneMoreDataFrame a) cmpSwap tmp a c cmpSwap tmp b d cmpSwap tmp a b cmpSwap tmp c d cmpSwap tmp b c where a = subDataFrameView' (Idx 0 :* U) xs b = subDataFrameView' (Idx 1 :* U) xs c = subDataFrameView' (Idx 2 :* U) xs d = subDataFrameView' (Idx 3 :* U) xs cmpSwap tmp x y = cmp x y >>= \case GT -> swapDF tmp x y _ -> pure () instance {-# INCOHERENT #-} KnownDim n => SortBy (n :: Nat) where sortByInplace cmp (xs :: STDataFrame s t (n ': ns)) = do tmp <- oneMoreDataFrame xs copyMutableDataFrame' U xs tmp mergeSort D tmp xs where mergeSort :: Dim (d :: Nat) -> STDataFrame s t (d ': ns) -> STDataFrame s t (d ': ns) -> ST s () mergeSort D0 _ _ = pure () mergeSort D1 _ _ = pure () mergeSort (d@D :: Dim d) b a = do d2l@D <- pure $ divDim d D2 Just d2r@D <- pure $ minusDimM d d2l d2li@D <- pure $ plusDim d2l D1 d2ri@D <- pure $ plusDim d2r D1 Just Dict <- pure $ sameDim (plusDim d D1) (plusDim d2li d2r) Just Dict <- pure $ sameDim (plusDim d D1) (plusDim d2ri d2l) let leA = subDataFrameView @t @d @(d - Div d 2 + 1) @(Div d 2) @'[] (Idx 0 :* U) a riA = subDataFrameView @t @d @(Div d 2 + 1) @(d - Div d 2) @'[] (Idx (dimVal d2l) :* U) a leB = subDataFrameView @t @d @(d - Div d 2 + 1) @(Div d 2) @'[] (Idx 0 :* U) b riB = subDataFrameView @t @d @(Div d 2 + 1) @(d - Div d 2) @'[] (Idx (dimVal d2l) :* U) b mergeSort d2l leA leB mergeSort d2r riA riB merge d2l d2r d leB riB a merge :: forall (a :: Nat) (b :: Nat) (ab :: Nat) . Dim a -> Dim b -> Dim ab -> STDataFrame s t (a ': ns) -> STDataFrame s t (b ': ns) -> STDataFrame s t (ab ': ns) -> ST s () merge da@D db@D dab@D a b ab = foldM_ f (Just (0,0)) [0 .. 
dimVal dab - 1] where f Nothing _ = pure Nothing f (Just (i,j)) k | i >= dimVal da , Dx dj@(D :: Dim j) <- someDimVal j , D <- plusDim dj D1 , Just bmj@D <- minusDimM db dj , Just bmji@D <- minusDimM (plusDim dab D1) bmj , Just Dict <- sameDim (plusDim dab D1) (plusDim bmji bmj) , Just Dict <- sameDim (plusDim db D1) (dj `plusDim` D1 `plusDim` bmj) = Nothing <$ copyMutableDataFrame @t @ab @(ab + 1 - (b - j)) @(b - j) (Idx k :* U) (subDataFrameView @t @b @(j + 1) @(b - j) (Idx j :* U) b) ab | j >= dimVal db , Dx di@(D :: Dim i) <- someDimVal i , D <- plusDim di D1 , Just bmi@D <- minusDimM da di , Just bmii@D <- minusDimM (plusDim dab D1) bmi , Just Dict <- sameDim (plusDim dab D1) (plusDim bmii bmi) , Just Dict <- sameDim (plusDim da D1) (di `plusDim` D1 `plusDim` bmi) = Nothing <$ copyMutableDataFrame (Idx k :* U) (subDataFrameView @t @a @(i + 1) @(a - i) (Idx i :* U) a) ab | otherwise = cmp (subDataFrameView' (Idx i :* U) a) (subDataFrameView' (Idx j :* U) b) >>= \case GT -> Just (i, j + 1) <$ copyMutableDataFrame' (Idx k :* U) (subDataFrameView' (Idx j :* U) b) ab _ -> Just (i + 1, j) <$ copyMutableDataFrame' (Idx k :* U) (subDataFrameView' (Idx i :* U) a) ab instance BoundedDim xn => SortBy (xn :: XNat) where sortByInplace cmp (XSTFrame xs) | D :* _ <- dims `inSpaceOf` xs = sortByInplace (\x y -> cmp (castDataFrame x) (castDataFrame y)) xs #if !MIN_VERSION_GLASGOW_HASKELL(9,0,0,0) | otherwise = error "sortByInplace: impossible pattern" #endif -- | Swap contents of two DataFrames swapDF :: forall (s :: Type) (t :: Type) (ns :: [Nat]) . PrimBytes t => STDataFrame s t ns -- ^ Temporary buffer -> STDataFrame s t ns -> STDataFrame s t ns -> ST s () swapDF tmp a b = do copyMutableDataFrame' U a tmp copyMutableDataFrame' U b a copyMutableDataFrame' U tmp b -- | Rotate left contents of three DataFrames swap3DF :: forall (s :: Type) (t :: Type) (ns :: [Nat]) . PrimBytes t => STDataFrame s t ns -- ^ Temporary buffer -> STDataFrame s t ns -> STDataFrame s t ns -> STDataFrame s t ns -> ST s () swap3DF tmp a b c = do copyMutableDataFrame' U a tmp copyMutableDataFrame' U b a copyMutableDataFrame' U c b copyMutableDataFrame' U tmp c
achirkin/easytensor
easytensor/src/Numeric/Subroutine/Sort.hs
Haskell
bsd-3-clause
9,742
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE TypeFamilies #-}

-- |
-- Module      : Data.Array.Repa.Mutable
-- Copyright   : (c) Geoffrey Mainland 2012
-- License     : BSD-style
--
-- Maintainer  : Geoffrey Mainland <[email protected]>
-- Stability   : experimental
-- Portability : non-portable
--
-- This module provides an interface for mutable arrays. It is like the 'Target'
-- type class, but maintains shape information.

module Data.Array.Repa.Mutable (Mutable(..)) where

import Data.Array.Repa

class Mutable r sh e where
    -- | Mutable representation of an array
    data MArray r sh e

    -- | Get extent of the mutable array.
    mextent :: MArray r sh e -> sh

    -- | Allocate a new mutable array of the given size.
    newMArray :: sh -> IO (MArray r sh e)

    -- | Write an element into the mutable array.
    unsafeWriteMArray :: MArray r sh e -> sh -> e -> IO ()

    -- | Freeze the mutable array into an immutable Repa array.
    unsafeFreezeMArray :: MArray r sh e -> IO (Array r sh e)
mainland/nikola
src/Data/Array/Repa/Mutable.hs
Haskell
bsd-3-clause
1,025
-- typeInference2.hs

module TypeInference2 where

f x y = x + y + 3
renevp/hello-haskell
src/typeInference2.hs
Haskell
bsd-3-clause
67
{-# OPTIONS_GHC -Wall #-} {-# LANGUAGE CPP #-} module Language.Sh.Glob ( expandGlob, matchPattern, removePrefix, removeSuffix ) where import Control.Monad.Trans ( MonadIO ) import Control.Monad.State ( runState, put ) import Data.List ( isPrefixOf, partition ) import Data.Maybe ( isJust, listToMaybe ) import Text.Regex.PCRE.Light.Char8 ( Regex, compileM, match, ungreedy ) import Language.Sh.Syntax ( Lexeme(..), Word ) -- we might get a bit fancier if older glob libraries will support -- a subset of what we want to do...? import Control.Monad.Trans ( liftIO ) #ifdef HAVE_GLOB import System.FilePath.Glob ( compileWith, compPosix, globDir, commonDirectory ) #else import Data.List ( sort, tails ) import System.Directory ( getDirectoryContents ) #endif expandGlob :: MonadIO m => Word -> m [FilePath] #ifdef HAVE_GLOB expandGlob w = case mkGlob w of Nothing -> return [] Just g -> let g' = compileWith compPosix g in liftIO $ do let (dir,g'') = commonPrefix g' liftIO $ putStrLn $ show (dir,g'') hits <- globDir [g''] dir return $ head $ fst $ hits -- By the time this is called, we should only have quotes and quoted -- literals to worry about. In the event of finding an unquoted glob -- char (and if the glob matches) we'll automatically remove quotes, etc. -- (since the next stage is, after all, quote removal). -- mkGlob :: Word -> Maybe String -- mkGlob w = case runState (mkG w) False of -- (s,True) -> Just s -- _ -> Nothing -- where mkG [] = return [] -- mkG (Literal '[':xs) = case mkClass xs of -- Just (g,xs') -> fmap (g++) $ mkG xs' -- Nothing -> fmap ((mkLit '[')++) $ mkG xs -- mkG (Literal '*':Literal '*':xs) = mkG $ Literal '*':xs -- mkG (Literal '*':xs) = put True >> fmap ('*':) (mkG xs) -- mkG (Literal '?':xs) = put True >> fmap ('?':) (mkG xs) -- mkG (Literal c:xs) = fmap (mkLit c++) $ mkG xs -- mkG (Quoted (Literal c):xs) = fmap (mkLit c++) $ mkG xs -- mkG (Quoted q:xs) = mkG $ q:xs -- mkG (Quote _:xs) = mkG xs -- mkG l = error $ "bad lexeme: "++show l -- mkLit c | c `elem` "[*?<" = ['[',c,']'] -- | otherwise = [c] #else expandGlob [] = return [] expandGlob w = do let breakd [] = [] breakd x = case break isd x of (p,[]) -> [p] (p,ps) -> p : breakd (dropWhile isd ps) isd (Literal '/') = True isd (Quoted l) = isd l isd _ = False l2c (Literal c) = c l2c (Quoted l) = l2c l l2c l = error $ "bad lexeme in l2gs: "++ show l isclose (Literal ']') = True isclose _ = False w2g [] = [] w2g (Literal '[':Literal '!':r) = case break isclose r of (m,_:r') -> NoneOf (map l2c m) : w2g r' _ -> Lit '[' : Lit '!' : w2g r -- not a range w2g (Literal '[':Literal '^':r) = case break isclose r of (m,_:r') -> NoneOf (map l2c m) : w2g r' _ -> Lit '[' : Lit '^' : w2g r w2g (Literal '[':r) = case break isclose r of (m,_:r') -> Alt (map l2c m) : w2g r' _ -> Lit '[' : w2g r w2g (Literal '*':r) = Many : w2g r w2g (Literal '?':r) = One : w2g r w2g (Literal c:r) = Lit c : w2g r w2g (Quote _:r) = w2g r w2g (Quoted (Quoted q):r) = w2g (Quoted q:r) w2g (Quoted (Literal c):r) = Lit c : w2g r w2g (Quoted (Quote _):r) = w2g r w2g (Quoted x:r) = w2g (x:r) -- only expansions left w2g l = error $ "bad lexeme: "++show l whichd = if isd $ head w then "/" else "." 
liftIO $ filePathMatches (map w2g $ breakd w) whichd data Glob = Lit Char | Many | One | Alt [Char] | NoneOf [Char] deriving ( Show ) simpleMatch :: [Glob] -> String -> Bool simpleMatch [] "" = True simpleMatch (Many:rest) s = any (simpleMatch rest) $ tails s simpleMatch (One:rest) (_:s) = simpleMatch rest s simpleMatch (Lit x:rest) (c:cs) | x == c = simpleMatch rest cs simpleMatch (Alt xs:rest) (c:cs) | c `elem` xs = simpleMatch rest cs simpleMatch (NoneOf xs:rest) (c:cs) | c `notElem` xs = simpleMatch rest cs simpleMatch _ _ = False filePathMatches :: [[Glob]] -> FilePath -> IO [FilePath] filePathMatches [] _ = return [] filePathMatches (g:gs) d = do xs <- filter (`notElem` [".",".."]) `fmap` (getDirectoryContents d `catch` \_ -> return []) let xs' = filter (simpleMatch g) $ case g of Lit _:_ -> xs _ -> filter notdot xs notdot ('.':_) = False notdot _ = True fpm x = map ((x++"/")++) `fmap` filePathMatches gs (d++'/':x) case gs of [] -> return $ sort xs' _ -> (sort . concat) `fmap` mapM fpm xs' #endif -- This is basically gratuitously copied from Glob's internals. mkClass :: Word -> Maybe (String,Word) mkClass xs = let (range, rest) = break (isLit ']') xs in if null rest then Nothing else if null range then let (range', rest') = break (isLit ']') (tail rest) in if null rest' then Nothing else do x <- cr' range' return (x,tail rest') else do x <- cr' range return (x,tail rest) where cr' s = Just $ "["++movedash (filter (not . isQuot) s)++"]" isLit c x = case x of { Literal c' -> c==c'; _ -> False } isQuot x = case x of { Quote _ -> True; _ -> False } quoted c x = case x of Quoted (Quoted x') -> quoted c $ Quoted x' Quoted (Literal c') -> c==c' _ -> False movedash s = let (d,nd) = partition (quoted '-') s bad = null d || (isLit '-' $ head $ reverse s) in map fromLexeme $ if bad then nd else nd++d fromLexeme x = case x of { Literal c -> c; Quoted q -> fromLexeme q; l -> error $ "bad lexeme "++show l } {- expandGlob :: MonadIO m => Word -> m [FilePath] expandGlob w = case mkGlob w of Nothing -> return [] Just g -> case G.unPattern g of (G.PathSeparator:_) -> liftIO $ do hits <- G.globDir [g] "/" -- unix...? let ps = [pathSeparator] return $ head $ fst $ hits _ -> liftIO $ do cwd <- getCurrentDirectory hits <- G.globDir [g] cwd let ps = [pathSeparator] return $ map (removePrefix $ cwd++ps) $ head $ fst $ hits where removePrefix pre s | pre `isPrefixOf` s = drop (length pre) s | otherwise = s -} -- Two issues: we can deal with them here... -- 1. if glob starts with a dirsep then we need to go relative to root... -- (what about in windows?) -- 2. if not, then we should remove the absolute path from the beginning of -- the results (should be easy w/ a map) {- -- This is a sort of default matcher, but needn't be used... matchGlob :: MonadIO m => Glob -> m [FilePath] matchGlob g = matchG' [] $ splitDir return $ do -- now we're in the list monad... where d = splitDir g splitDir (c:xs) | ips c = []:splitDir (dropWhile ips xs) splitDir xs = filter (not . null) $ filter (not . all ips) $ groupBy ((==) on ips) xs ips x = case x of { Lit c -> isPathSeparator c; _ -> False } -} ---------------------------------------------------------------------- -- This is copied from above, but it's used separately for non-glob -- -- pattern matching. Maybe we'll combine them someday. 
-- ---------------------------------------------------------------------- match' :: Regex -> String -> Maybe String match' regex s = listToMaybe =<< match regex s [] matchPattern :: Word -> String -> Bool matchPattern w s = case mkRegex False False "^" "$" w of Just r -> isJust $ match r s [] Nothing -> fromLit w == s removePrefix :: Bool -- ^greediness -> Word -- ^pattern -> String -- ^haystack -> String removePrefix g n h = case mkRegex g False "^" "" n of Just r -> case match' r h of Just m -> drop (length m) h Nothing -> h Nothing -> if l `isPrefixOf` h then drop (length l) h else h where l = fromLit n removeSuffix :: Bool -- ^greediness -> Word -- ^pattern -> String -- ^haystack -> String removeSuffix g n h = case mkRegex g True "^" "" n of Just r -> case match' r hr of Just m -> reverse $ drop (length m) hr Nothing -> h Nothing -> if l `isPrefixOf` hr then reverse $ drop (length l) hr else h where l = reverse $ fromLit n hr = reverse h mkRegex :: Bool -- ^greedy? -> Bool -- ^reverse? (before adding pre/suff) -> String -- ^prefix -> String -- ^suffix -> Word -- ^pattern -> Maybe Regex mkRegex g r pre suf w = case runState (mkR w) False of (s,True) -> mk' $ concat $ affix $ (if r then reverse else id) s _ -> Nothing where mkR [] = return [] mkR (Literal '[':xs) = case mkClass xs of Just (c,xs') -> fmap (c:) $ mkR xs' Nothing -> fmap ((mkLit '['):) $ mkR xs mkR (Literal '*':Literal '*':xs) = mkR $ Literal '*':xs mkR (Literal '*':xs) = put True >> fmap (".*":) (mkR xs) mkR (Literal '?':xs) = put True >> fmap (".":) (mkR xs) mkR (Literal c:xs) = fmap (mkLit c:) $ mkR xs mkR (Quoted (Literal c):xs) = fmap (mkLit c:) $ mkR xs mkR (Quoted q:xs) = mkR $ q:xs mkR (Quote _:xs) = mkR xs mkR l = error $ "bad lexeme: "++show l mkLit c | c `elem` "[](){}|^$.*+?\\" = ['\\',c] | otherwise = [c] affix s = pre:s++[suf] mk' s = case compileM s (if g then [] else [ungreedy]) of Left _ -> Nothing Right regex -> Just regex fromLit :: Word -> String fromLit = concatMap $ \l -> case l of Literal c -> [c] Quoted q -> fromLit [q] _ -> []
shicks/shsh
Language/Sh/Glob.hs
Haskell
bsd-3-clause
12,285
import Data.Array
import Data.List
import Data.Ord (comparing)

syrs n = a
  where a = listArray (1,n) $ 0:[1 + syr n x | x <- [2..n]]
        syr n x = if x' <= n then a ! x' else 1 + syr n x'
          where x' = if even x then x `div` 2 else 3 * x + 1

main = print $ maximumBy (comparing snd) $ assocs $ syrs 1000000
dterei/Scraps
euler/p14/p14_mem.hs
Haskell
bsd-3-clause
352
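The p14_mem.hs entry above memoizes Collatz chain lengths through a self-referential lazy array. Below is a minimal standalone sketch of the same idiom; it is not part of the original file, and the Fibonacci example and names are purely illustrative.

import Data.Array

-- The array is defined in terms of itself; laziness fills each cell on first
-- demand, giving memoised lookups without any mutation.
fibs :: Int -> Array Int Integer
fibs n = memo
  where
    memo = listArray (0, n) [f i | i <- [0 .. n]]
    f 0 = 0
    f 1 = 1
    f i = memo ! (i - 1) + memo ! (i - 2)

main :: IO ()
main = print (fibs 30 ! 30) -- 832040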
module PTS.Syntax.Algebra ( PreAlgebra , Algebra , fold , strip , allvars , allvarsAlgebra , freevars , freevarsAlgebra , freshvar , depZip ) where import Data.Set (Set) import qualified Data.Set as Set import PTS.Syntax.Names import PTS.Syntax.Term -- algebras type PreAlgebra alpha beta = TermStructure alpha -> beta type Algebra alpha = PreAlgebra alpha alpha fold :: Structure term => Algebra alpha -> term -> alpha fold algebra term = algebra (fmap (fold algebra) (structure term)) strip :: Structure term => term -> Term strip t = fold MkTerm t allvars :: Structure term => term -> Names allvars t = fold allvarsAlgebra t allvarsAlgebra :: Algebra Names allvarsAlgebra (Var x) = Set.singleton x allvarsAlgebra (App t1 t2) = t1 `Set.union` t2 allvarsAlgebra (IntOp _ t1 t2) = t1 `Set.union` t2 allvarsAlgebra (IfZero t1 t2 t3) = t1 `Set.union` t2 `Set.union` t3 allvarsAlgebra (Lam x t1 t2) = Set.insert x (t1 `Set.union` t2) allvarsAlgebra (Pi x t1 t2 _) = Set.insert x (t1 `Set.union` t2) allvarsAlgebra (Pos p t) = t allvarsAlgebra _ = Set.empty freevarsAlgebra :: Algebra Names freevarsAlgebra t = case t of Var x -> Set.singleton x App t1 t2 -> t1 `Set.union` t2 IntOp _ t1 t2 -> t1 `Set.union` t2 IfZero t1 t2 t3 -> Set.unions [t1, t2, t3] Lam x t1 t2 -> t1 `Set.union` (Set.delete x t2) Pi x t1 t2 _ -> t1 `Set.union` (Set.delete x t2) Pos p t -> t _ -> Set.empty freevars :: Structure term => term -> Names freevars = fold freevarsAlgebra freshvar :: Structure term => term -> Name -> Name freshvar t x = freshvarl (freevars t) x -- instance Arrow PreAlgebra? depZip :: PreAlgebra alpha alpha -> PreAlgebra (alpha, beta) beta -> PreAlgebra (alpha, beta) (alpha, beta) depZip f g x = (f (fmap fst x), g x)
Toxaris/pts
src-lib/PTS/Syntax/Algebra.hs
Haskell
bsd-3-clause
1,900
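PTS.Syntax.Algebra above expresses traversals as algebras folded over the term structure. The self-contained analogue below (not from the pts repository; the toy ExprF type is invented for illustration) shows the same catamorphism pattern in miniature.

data ExprF r = Lit Int | Add r r | Neg r

newtype Expr = Expr (ExprF Expr)

type Algebra a = ExprF a -> a

-- fold applies the algebra bottom-up, one layer at a time, mirroring
-- PTS.Syntax.Algebra.fold over TermStructure.
fold :: Algebra a -> Expr -> a
fold alg (Expr e) = alg (mapE (fold alg) e)
  where
    mapE _ (Lit n)   = Lit n
    mapE f (Add a b) = Add (f a) (f b)
    mapE f (Neg a)   = Neg (f a)

-- Two algebras in the style of allvarsAlgebra/freevarsAlgebra: one evaluates,
-- one counts nodes.
evalAlg :: Algebra Int
evalAlg (Lit n)   = n
evalAlg (Add a b) = a + b
evalAlg (Neg a)   = negate a

sizeAlg :: Algebra Int
sizeAlg (Lit _)   = 1
sizeAlg (Add a b) = 1 + a + b
sizeAlg (Neg a)   = 1 + a

main :: IO ()
main = do
  let e = Expr (Add (Expr (Lit 2)) (Expr (Neg (Expr (Lit 5)))))
  print (fold evalAlg e) -- -3
  print (fold sizeAlg e) -- 4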
module Main where import Prelude hiding ((+),(-)) import System.Environment import DeepGADT import IO main = do args <- getArgs let [inImg,outImg] = args img1 <- readImgAsVector inImg newImg <- printTimeDeep (run ((integer 30) + (blurY (blurX (image img1))))) writeVectorImage outImg newImg
robstewart57/small-image-processing-dsl-implementations
haskell/small-image-processing-dsl/app/deep-gadt/prog5.hs
Haskell
bsd-3-clause
314
{-| Module : Database.Test.MultiConnect Copyright : (c) 2004 Oleg Kiselyov, Alistair Bayley License : BSD-style Maintainer : [email protected], [email protected] Stability : experimental Portability : non-portable Tests Database.Enumerator code in the context of multiple database connections to different DBMS products. We should add tests to shift data between databases, too. -} {-# LANGUAGE OverlappingInstances #-} module Database.Test.MultiConnect (runTest) where import qualified Database.Sqlite.Enumerator as Sqlite import qualified Database.PostgreSQL.Enumerator as PG import Database.Sqlite.Test.Enumerator as SqlTest import Database.PostgreSQL.Test.Enumerator as PGTest import Database.Test.Performance as Perf import Database.Enumerator import System.Environment (getArgs) runTest :: Perf.ShouldRunTests -> [String] -> IO () runTest runPerf args = catchDB ( do let [ user, pswd, dbname ] = args withSession (PG.connect user pswd dbname) $ \sessPG -> do withSession (Sqlite.connect user pswd dbname) $ \sessSql -> do SqlTest.runTest runPerf args PGTest.runTest runPerf args ) basicDBExceptionReporter
bagl/takusen-oracle
Database/Test/MultiConnect.hs
Haskell
bsd-3-clause
1,200
----------------------------------------------------------------------------- -- | -- Module : Tuura.Plato.Translate.Translation -- Copyright : (c) 2015-2018, Tuura authors -- License : BSD (see the file LICENSE) -- Maintainer : [email protected] -- Stability : experimental -- -- Plato is a tool which embeds the Asynchronous Concepts language in Haskell. -- This language is used for the specification of asynchronous circuits, and -- is fully compositional and highly reusable, from individual concepts to -- entire concepts specifications. -- Plato can also compile and validate Asynchronous Concepts, with the help of -- the GHC. Compiled concepts can then be translated to existing modelling -- formalisms of Signal Transition Graphs (STGs) and State Graphs. These models -- feature a long history of theory and therefore several tools which can be -- used for verification and synthesis. STGs and State Graphs can be visualized -- in Workcraft (https://workcraft.org), where Plato and the tools for these -- models are all integrated. -- -- This module defines several functions which are common to the translation -- of Asynchronous Concepts to either STGs or State Graphs. -- ----------------------------------------------------------------------------- module Tuura.Plato.Translate.Translation where import Data.Char import Data.Monoid import Tuura.Concept.Circuit.Basic import Tuura.Concept.Circuit.Derived {- | 'ValidationResult' is a data type used to define whether the validation, performed by several functions in this module, was successful or not. 'Valid' indicates that validation was successful. 'Invalid' indicates that validation failed, and contains a list of the errors which can then be reported back to the user. The 'Monoid' instance is used to compose results. It contains some simple rules for the compositions, i.e. If two 'ValidationResults' objects are 'Valid' then the result of the composition is valid. If at least one is 'Invalid', then the result must be 'Invalid'. If both are 'Invalid' then the result is 'Invalid' and the list of errors is combined, to ensure that all errors are reported to the user. -} data ValidationResult a = Valid | Invalid [ValidationError a] deriving Eq instance Monoid (ValidationResult a) where mempty = Valid mappend Valid x = x mappend x Valid = x mappend (Invalid es) (Invalid fs) = Invalid (fs ++ es) {-| 'ValidationError' is a type used to define the type of error found during the validation performed by several functions in this module. 'UnusedSignal' occurs when a signal in the provided concept specification is not provided with an interface, i.e. It is not declared as either an 'Input', 'Output' or 'Internal' signal. The affected signals are stored as part of this. 'InconsistentInitialState' occurs when a signal has its initial state defined as both 'High' and 'Low'. The affected signals are stored as part of this. 'UndefinedInitialState' occurs when a signal has no initial state defined. The affected signals are stored as part of this. 'InvariantViolated' occurs when a state can be reached which is defined, using a 'never' concept, to not be part of the invariant. The transitions which are part of the associated 'never' concept are stored as part of this. -} data ValidationError a = UnusedSignal a | InconsistentInitialState a | UndefinedInitialState a | InvariantViolated [Transition a] deriving Eq {-| 'Signal' is a type which is used to refer to signals as defined in a Concept specification.
The number of signals in the specification is known and a signal can be referenced by an integer. The 'Show' instance converts the integer value of a signal to a letter for reference. This follows the alphabet for the first 26 signals, and then will be referred to as 'S' and the integer value after this. 'Ord' is used to compare the integer value of signals, to determine whether two signals are infact the same signal. -} data Signal = Signal Int deriving Eq instance Show Signal where show (Signal i) | i < 26 = [chr (ord 'A' + i)] | otherwise = 'S' : show i instance Ord Signal where compare (Signal x) (Signal y) = compare x y -- TODO: Tidy up function, it looks ugly. -- | Prepare output explaining errors found during validation to users. addErrors :: (Eq a, Show a) => [ValidationError a] -> String addErrors errs = "Error\n" ++ (if unused /= [] then "The following signals are not declared as input, " ++ "output or internal: \n" ++ unlines (map show unused) ++ "\n" else "") ++ (if incons /= [] then "The following signals have inconsistent inital states: \n" ++ unlines (map show incons) ++ "\n" else "") ++ (if undefd /= [] then "The following signals have undefined initial states: \n" ++ unlines (map show undefd) ++ "\n" else "") ++ (if invVio /= [] then "The following state(s) are reachable " ++ "but the invariant does not hold for them:\n" ++ unlines (map show invVio) ++ "\n" else "") where unused = [ a | UnusedSignal a <- errs ] incons = [ a | InconsistentInitialState a <- errs ] undefd = [ a | UndefinedInitialState a <- errs ] invVio = [ a | InvariantViolated a <- errs ] -- | Validate initial states and interface. validate :: [a] -> CircuitConcept a -> ValidationResult a validate signs circuit = validateInitialState signs circuit <> validateInterface signs circuit -- | Validate initial state - If there are any undefined or inconsistent -- initial states, then these will populate the list. validateInitialState :: [a] -> CircuitConcept a -> ValidationResult a validateInitialState signs circuit | null (undef ++ inconsistent) = Valid | otherwise = Invalid (map UndefinedInitialState undef ++ map InconsistentInitialState inconsistent) where undef = filter ((==Undefined) . initial circuit) signs inconsistent = filter ((==Inconsistent) . initial circuit) signs -- | Validate interface - If there are any unused signals then these -- will populate the list. validateInterface :: [a] -> CircuitConcept a -> ValidationResult a validateInterface signs circuit | null unused = Valid | otherwise = Invalid (map UnusedSignal unused) where unused = filter ((==Unused) . interface circuit) signs -- | Converts a 'Transition' to a 'Literal', a data type which is used by the --Tuura Boolean library. toLiteral :: [Transition a] -> [Literal a] toLiteral = map (\t -> Literal (signal t) (newValue t)) -- | Converts a 'Literal' to 'Transitions'. toTransitions :: [Literal a] -> [Transition a] toTransitions = map (\l -> Transition (variable l) (polarity l)) -- | Converts 'Causality' concepts into a tuple, containing a list of possible -- causes, for each effect. arcLists :: [Causality (Transition a)] -> [([Transition a], Transition a)] arcLists xs = [ (f, t) | Causality f t <- xs ] -- | Converts from a 'CircuitConcept' using the polymorphic @a@ type, to a -- CircuitConcept using the 'Signal' type. 
convert :: Enum a => CircuitConcept a -> CircuitConcept Signal convert c = mempty { initial = convertFunction (initial c), arcs = fmap convertCausality (arcs c), interface = convertFunction (interface c), invariant = fmap convertInvariant (invariant c) } -- | For every function, such as the 'initial' function or 'interface' -- function, Convert from using the polymorphic @a@ type to using the 'Signal' -- type. convertFunction :: Enum a => (a -> b) -> (Signal -> b) convertFunction f (Signal i) = f $ toEnum i -- | Converts all causalities from using the polymorphic @a@ type, to using the -- 'Signal' type. convertCausality :: Enum a => Causality (Transition a) -> Causality (Transition Signal) convertCausality (Causality f t) = Causality (map convertTrans f) (convertTrans t) -- | Converts all invariant concepts from using the polymorphic @a@ type, to -- using the 'Signal' type. convertInvariant :: Enum a => Invariant (Transition a) -> Invariant (Transition Signal) convertInvariant (NeverAll es) = NeverAll (map convertTrans es) -- | Converts an individual 'Transition' from using the polymorphic @a@ type, -- to using the 'Signal' type. convertTrans :: Enum a => Transition a -> Transition Signal convertTrans t = Transition (Signal $ fromEnum $ signal t) (newValue t)
tuura/concepts
src/Tuura/Plato/Translate/Translation.hs
Haskell
bsd-3-clause
8,661
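A short usage sketch for the validation API above; it is not part of the tuura/concepts sources and assumes the module (plus the CircuitConcept type from the concept library, whose exact import path is an assumption here) is available. Because ValidationResult is a Monoid with Valid as its identity, the individual checks compose with <>, and a failing composite can be rendered for the user with addErrors.

import Tuura.Concept.Circuit.Derived (CircuitConcept) -- assumed location of CircuitConcept
import Tuura.Plato.Translate.Translation

-- Run the combined checks and turn any failure into the report string that
-- addErrors builds for the user.
reportValidation :: (Eq a, Show a) => [a] -> CircuitConcept a -> String
reportValidation signs circuit =
  case validate signs circuit of
    Valid        -> "Validation passed"
    Invalid errs -> addErrors errs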
module Dotnet.System.Xml.XmlProcessingInstruction where import Dotnet import qualified Dotnet.System.Xml.XmlLinkedNode import Dotnet.System.Xml.XmlWriter import Dotnet.System.Xml.XmlNode import Dotnet.System.Xml.XmlNodeType data XmlProcessingInstruction_ a type XmlProcessingInstruction a = Dotnet.System.Xml.XmlLinkedNode.XmlLinkedNode (XmlProcessingInstruction_ a) foreign import dotnet "method Dotnet.System.Xml.XmlProcessingInstruction.WriteContentTo" writeContentTo :: Dotnet.System.Xml.XmlWriter.XmlWriter a0 -> XmlProcessingInstruction obj -> IO (()) foreign import dotnet "method Dotnet.System.Xml.XmlProcessingInstruction.WriteTo" writeTo :: Dotnet.System.Xml.XmlWriter.XmlWriter a0 -> XmlProcessingInstruction obj -> IO (()) foreign import dotnet "method Dotnet.System.Xml.XmlProcessingInstruction.set_InnerText" set_InnerText :: String -> XmlProcessingInstruction obj -> IO (()) foreign import dotnet "method Dotnet.System.Xml.XmlProcessingInstruction.get_InnerText" get_InnerText :: XmlProcessingInstruction obj -> IO (String) foreign import dotnet "method Dotnet.System.Xml.XmlProcessingInstruction.get_LocalName" get_LocalName :: XmlProcessingInstruction obj -> IO (String) foreign import dotnet "method Dotnet.System.Xml.XmlProcessingInstruction.CloneNode" cloneNode :: Bool -> XmlProcessingInstruction obj -> IO (Dotnet.System.Xml.XmlNode.XmlNode a1) foreign import dotnet "method Dotnet.System.Xml.XmlProcessingInstruction.get_NodeType" get_NodeType :: XmlProcessingInstruction obj -> IO (Dotnet.System.Xml.XmlNodeType.XmlNodeType a0) foreign import dotnet "method Dotnet.System.Xml.XmlProcessingInstruction.set_Value" set_Value :: String -> XmlProcessingInstruction obj -> IO (()) foreign import dotnet "method Dotnet.System.Xml.XmlProcessingInstruction.get_Value" get_Value :: XmlProcessingInstruction obj -> IO (String) foreign import dotnet "method Dotnet.System.Xml.XmlProcessingInstruction.get_Name" get_Name :: XmlProcessingInstruction obj -> IO (String) foreign import dotnet "method Dotnet.System.Xml.XmlProcessingInstruction.get_Target" get_Target :: XmlProcessingInstruction obj -> IO (String) foreign import dotnet "method Dotnet.System.Xml.XmlProcessingInstruction.get_Data" get_Data :: XmlProcessingInstruction obj -> IO (String) foreign import dotnet "method Dotnet.System.Xml.XmlProcessingInstruction.set_Data" set_Data :: String -> XmlProcessingInstruction obj -> IO (())
alekar/hugs
dotnet/lib/Dotnet/System/Xml/XmlProcessingInstruction.hs
Haskell
bsd-3-clause
2,475
{-# LANGUAGE BangPatterns #-} foo = case v of !(x : xs) -> x
mpickering/hlint-refactor
tests/examples/Structure19.hs
Haskell
bsd-3-clause
61
{- BackendCommon: Common code used by most backends Part of Flounder: a message passing IDL for Barrelfish Copyright (c) 2007-2010, ETH Zurich. All rights reserved. This file is distributed under the terms in the attached LICENSE file. If you do not find this file, copies can be found by writing to: ETH Zurich D-INFK, Universit\"atstr. 6, CH-8092 Zurich. Attn: Systems Group. -} module BackendCommon where import qualified CAbsSyntax as C import Syntax data Direction = TX | RX deriving (Show, Eq) ------------------------------------------------------------------------ -- Language mapping: C identifier names ------------------------------------------------------------------------ -- Scope a list of strings ifscope :: String -> String -> String --ifscope ifn s = ifn ++ "$" ++ s ifscope ifn s = ifn ++ "_" ++ s idscope :: String -> String -> String -> String idscope ifn s suffix = ifscope ifn (s ++ "__" ++ suffix) drvscope :: String -> String -> String -> String drvscope drv ifn s = ifscope ifn (drv ++ "_" ++ s) -- Name of the binding struct for an interface type intf_bind_type :: String -> String intf_bind_type ifn = ifscope ifn "binding" -- Variable used to refer to a binding intf_bind_var = "_binding" -- Name of the binding struct for an interface type intf_frameinfo_type :: String -> String intf_frameinfo_type ifn = ifscope ifn "frameinfo" -- Variable used to refer to a continuation intf_frameinfo_var = "_frameinfo" -- Name of the bind continuation function type for an interface type intf_bind_cont_type :: String -> String intf_bind_cont_type ifn = ifscope ifn "bind_continuation_fn" -- Variable used to refer to a continuation intf_cont_var = "_continuation" -- name of the export state struct export_type n = ifscope n "export" -- Name of the enumeration of message numbers msg_enum_name :: String -> String msg_enum_name ifn = ifscope ifn "msg_enum" -- Name of each element of the message number enumeration msg_enum_elem_name :: String -> String -> String msg_enum_elem_name ifn mn = idscope ifn mn "msgnum" -- Name of the type of a message function msg_sig_type :: String -> MessageDef -> Direction -> String msg_sig_type ifn m@(RPC _ _ _) _ = idscope ifn (msg_name m) "rpc_method_fn" msg_sig_type ifn m TX = idscope ifn (msg_name m) "tx_method_fn" msg_sig_type ifn m RX = idscope ifn (msg_name m) "rx_method_fn" -- Name of a given message definition msg_name :: MessageDef -> String msg_name (Message _ n _ _) = n msg_name (RPC n _ _) = n -- Name of the static inline wrapper for sending messages tx_wrapper_name :: String -> String -> String tx_wrapper_name ifn mn = idscope ifn mn "tx" -- Names of the underlying messages that are constructed from an RPC rpc_call_name n = n ++ "_call" rpc_resp_name n = n ++ "_response" -- Name of the struct holding message args for SAR msg_argstruct_name :: String -> String -> String msg_argstruct_name ifn n = idscope ifn n "args" -- Name of the union type holding all the arguments for a message binding_arg_union_type :: String -> String binding_arg_union_type ifn = ifscope ifn "arg_union" -- Name of the C type for a concrete flounder type, struct, or enum type_c_struct, type_c_enum :: String -> String -> String type_c_struct ifn n = "_" ++ idscope ifn n "struct" type_c_enum ifn e = ifscope ifn e type_c_name :: String -> TypeRef -> String type_c_name ifn (Builtin Cap) = undefined type_c_name ifn (Builtin GiveAwayCap) = undefined type_c_name ifn (Builtin String) = undefined type_c_name ifn (Builtin t) = (show t) ++ "_t" type_c_name ifn (TypeVar t) = type_c_name1 ifn t 
type_c_name ifn (TypeAlias t _) = type_c_name1 ifn t type_c_name1 :: String -> String -> String type_c_name1 ifn tn = (ifscope ifn tn) ++ "_t" type_c_type :: String -> TypeRef -> C.TypeSpec type_c_type ifn (Builtin Cap) = C.Struct "capref" type_c_type ifn (Builtin GiveAwayCap) = C.Struct "capref" type_c_type ifn (Builtin Char) = C.TypeName "char" type_c_type ifn (Builtin Bool) = C.TypeName "bool" type_c_type ifn (Builtin String) = C.Ptr $ C.TypeName "char" type_c_type ifn t = C.TypeName $ type_c_name ifn t -- TX pointers should be const type_c_type_dir :: Direction -> String -> TypeRef -> C.TypeSpec type_c_type_dir TX ifn tr = case type_c_type ifn tr of C.Ptr t -> C.Ptr $ C.ConstT t t -> t type_c_type_dir RX ifn tr = type_c_type ifn tr -- Array types in the msg args struct should only be pointers to the storage type_c_type_msgstruct :: String -> [TypeDef] -> TypeRef -> C.TypeSpec type_c_type_msgstruct ifn typedefs t = case lookup_typeref typedefs t of TArray tr n _ -> C.Ptr $ type_c_type ifn t _ -> type_c_type ifn t -- Name of the struct type for the method vtable intf_vtbl_type :: String -> Direction -> String intf_vtbl_type ifn TX = ifscope ifn "tx_vtbl" intf_vtbl_type ifn RX = ifscope ifn "rx_vtbl" connect_callback_name n = ifscope n "connect_fn" drv_connect_handler_name drv n = drvscope drv n "connect_handler" drv_connect_fn_name drv n = drvscope drv n "connect" drv_accept_fn_name drv n = drvscope drv n "accept" can_send_fn_name drv n = drvscope drv n "can_send" register_send_fn_name drv n = drvscope drv n "register_send" default_error_handler_fn_name drv n = drvscope drv n "default_error_handler" generic_control_fn_name drv n = drvscope drv n "control" can_send_fn_type ifn = ifscope ifn "can_send_fn" register_send_fn_type ifn = ifscope ifn "register_send_fn" change_waitset_fn_type ifn = ifscope ifn "change_waitset_fn" control_fn_type ifn = ifscope ifn "control_fn" error_handler_fn_type ifn = ifscope ifn "error_handler_fn" ------------------------------------------------------------------------ -- Code shared by backend implementations ------------------------------------------------------------------------ intf_preamble :: String -> String -> Maybe String -> C.Unit intf_preamble infile name descr = let dstr = case descr of Nothing -> "not specified" Just s -> s in C.MultiComment [ "Copyright (c) 2010, ETH Zurich.", "All rights reserved.", "", "INTERFACE NAME: " ++ name, "INTEFACE FILE: " ++ infile, "INTERFACE DESCRIPTION: " ++ dstr, "", "This file is distributed under the terms in the attached LICENSE", "file. If you do not find this file, copies can be found by", "writing to:", "ETH Zurich D-INFK, Universitaetstr.6, CH-8092 Zurich.", "Attn: Systems Group.", "", "THIS FILE IS AUTOMATICALLY GENERATED BY FLOUNDER: DO NOT EDIT!" 
] -- -- Convert each RPC definition to a pair of underlying call/response messages -- rpcs_to_msgs :: [MessageDef] -> [MessageDef] rpcs_to_msgs ml = concat $ map rpc_to_msgs ml rpc_to_msgs :: MessageDef -> [MessageDef] rpc_to_msgs (RPC n rpcargs bckargs) = [Message MCall (rpc_call_name n) inargs bckargs, Message MResponse (rpc_resp_name n) outargs bckargs] where (inargs, outargs) = partition_rpc_args rpcargs rpc_to_msgs m = [m] -- partition a list of RPC arguments to lists of input and output arguments partition_rpc_args :: [RPCArgument] -> ([MessageArgument], [MessageArgument]) partition_rpc_args [] = ([], []) partition_rpc_args (first:rest) = case first of RPCArgIn t v -> ((Arg t v):restin, restout) RPCArgOut t v -> (restin, (Arg t v):restout) where (restin, restout) = partition_rpc_args rest msg_argdecl :: Direction -> String -> MessageArgument -> [C.Param] msg_argdecl dir ifn (Arg tr (Name n)) = [ C.Param (type_c_type_dir dir ifn tr) n ] msg_argdecl RX ifn (Arg tr (DynamicArray n l)) = [ C.Param (C.Ptr $ type_c_type_dir RX ifn tr) n, C.Param (type_c_type_dir RX ifn size) l ] msg_argdecl TX ifn (Arg tr (DynamicArray n l)) = [ C.Param (C.Ptr $ C.ConstT $ type_c_type_dir TX ifn tr) n, C.Param (type_c_type_dir TX ifn size) l ] msg_argstructdecl :: String -> [TypeDef] -> MessageArgument -> [C.Param] msg_argstructdecl ifn typedefs (Arg tr (Name n)) = [ C.Param (type_c_type_msgstruct ifn typedefs tr) n ] msg_argstructdecl ifn typedefs a = msg_argdecl RX ifn a rpc_argdecl :: String -> RPCArgument -> [C.Param] rpc_argdecl ifn (RPCArgIn tr v) = msg_argdecl TX ifn (Arg tr v) rpc_argdecl ifn (RPCArgOut tr (Name n)) = [ C.Param (C.Ptr $ type_c_type ifn tr) n ] rpc_argdecl ifn (RPCArgOut tr (DynamicArray n l)) = [ C.Param (C.Ptr $ C.Ptr $ type_c_type ifn tr) n, C.Param (C.Ptr $ type_c_type ifn size) l ] -- XXX: kludge wrapper to pass array types by reference in RPC rpc_argdecl2 :: String -> [TypeDef] -> RPCArgument -> [C.Param] rpc_argdecl2 ifn typedefs arg@(RPCArgOut tr (Name n)) = case lookup_typeref typedefs tr of TArray _ _ _ -> [ C.Param (C.Ptr $ C.Ptr $ type_c_type ifn tr) n ] _ -> rpc_argdecl ifn arg rpc_argdecl2 ifn _ arg = rpc_argdecl ifn arg -- binding parameter for a function binding_param ifname = C.Param (C.Ptr $ C.Struct $ intf_bind_type ifname) intf_bind_var -- -- Generate the code to initialise/destroy a binding structure instance -- binding_struct_init :: String -> String -> C.Expr -> C.Expr -> C.Expr -> [C.Stmt] binding_struct_init drv ifn binding_var waitset_ex tx_vtbl_ex = [ C.Ex $ C.Assignment (C.FieldOf binding_var "st") (C.Variable "NULL"), C.Ex $ C.Assignment (C.FieldOf binding_var "waitset") waitset_ex, C.Ex $ C.Call "event_mutex_init" [C.AddressOf $ C.FieldOf binding_var "mutex", waitset_ex], C.Ex $ C.Assignment (C.FieldOf binding_var "can_send") (C.Variable $ can_send_fn_name drv ifn), C.Ex $ C.Assignment (C.FieldOf binding_var "register_send") (C.Variable $ register_send_fn_name drv ifn), C.Ex $ C.Assignment (C.FieldOf binding_var "error_handler") (C.Variable $ default_error_handler_fn_name drv ifn), C.Ex $ C.Assignment (C.FieldOf binding_var "tx_vtbl") tx_vtbl_ex, C.Ex $ C.Call "memset" [C.AddressOf $ C.FieldOf binding_var "rx_vtbl", C.NumConstant 0, C.Call "sizeof" [C.FieldOf binding_var "rx_vtbl"]], C.Ex $ C.Call "flounder_support_waitset_chanstate_init" [C.AddressOf $ C.FieldOf binding_var "register_chanstate"], C.Ex $ C.Call "flounder_support_waitset_chanstate_init" [C.AddressOf $ C.FieldOf binding_var "tx_cont_chanstate"], C.StmtList [C.Ex $ C.Assignment (C.FieldOf 
binding_var f) (C.NumConstant 0) | f <- ["tx_msgnum", "rx_msgnum", "tx_msg_fragment", "rx_msg_fragment", "tx_str_pos", "rx_str_pos", "tx_str_len", "rx_str_len"]], C.Ex $ C.Assignment (C.FieldOf binding_var "bind_cont") (C.Variable "NULL")] binding_struct_destroy :: String -> C.Expr -> [C.Stmt] binding_struct_destroy ifn binding_var = [C.Ex $ C.Call "flounder_support_waitset_chanstate_destroy" [C.AddressOf $ C.FieldOf binding_var "register_chanstate"], C.Ex $ C.Call "flounder_support_waitset_chanstate_destroy" [C.AddressOf $ C.FieldOf binding_var "tx_cont_chanstate"]] -- -- Generate a generic can_send function -- can_send_fn_def :: String -> String -> C.Unit can_send_fn_def drv ifn = C.FunctionDef C.Static (C.TypeName "bool") (can_send_fn_name drv ifn) params [ C.Return $ C.Binary C.Equals (bindvar `C.DerefField` "tx_msgnum") (C.NumConstant 0) ] where params = [ C.Param (C.Ptr $ C.Struct $ intf_bind_type ifn) "b" ] bindvar = C.Variable "b" -- -- generate a generic register_send function -- register_send_fn_def :: String -> String -> C.Unit register_send_fn_def drv ifn = C.FunctionDef C.Static (C.TypeName "errval_t") (register_send_fn_name drv ifn) params [ C.Return $ C.Call "flounder_support_register" [C.Variable "ws", C.AddressOf $ bindvar `C.DerefField` "register_chanstate", C.Variable intf_cont_var, C.Call (can_send_fn_name drv ifn) [bindvar]] ] where params = [ C.Param (C.Ptr $ C.Struct $ intf_bind_type ifn) "b", C.Param (C.Ptr $ C.Struct "waitset") "ws", C.Param (C.Struct "event_closure") intf_cont_var ] bindvar = C.Variable "b" -- -- generate a default error handler (which the user should replace!) -- default_error_handler_fn_def :: String -> String -> C.Unit default_error_handler_fn_def drv ifn = C.FunctionDef C.Static C.Void (default_error_handler_fn_name drv ifn) params [ C.Ex $ C.Call "DEBUG_ERR" [errvar, C.StringConstant $ "asynchronous error in Flounder-generated " ++ ifn ++ " " ++ drv ++ " binding (default handler)" ], C.Ex $ C.Call "abort" [] ] where params = [ C.Param (C.Ptr $ C.Struct $ intf_bind_type ifn) "b", C.Param (C.TypeName "errval_t") "err" ] -- -- generate a generic control function that does nothing -- generic_control_fn_def :: String -> String -> C.Unit generic_control_fn_def drv ifn = C.FunctionDef C.Static (C.TypeName "errval_t") (generic_control_fn_name drv ifn) params [ C.SComment "no control flags are supported", C.Return $ C.Variable "SYS_ERR_OK" ] where params = [C.Param (C.Ptr $ C.Struct $ intf_bind_type ifn) intf_bind_var, C.Param (C.TypeName "idc_control_t") "control"] -- register a transmit continuation register_txcont :: C.Expr -> [C.Stmt] register_txcont cont_ex = [ C.If (C.Binary C.NotEquals (cont_ex `C.FieldOf` "handler") (C.Variable "NULL")) [localvar (C.TypeName "errval_t") "_err" Nothing, C.Ex $ C.Assignment errvar $ C.Call "flounder_support_register" [bindvar `C.DerefField` "waitset", C.AddressOf $ bindvar `C.DerefField` "tx_cont_chanstate", cont_ex, C.Variable "false"], C.SComment "may fail if previous continuation hasn't fired yet", C.If (C.Call "err_is_fail" [errvar]) [C.If (C.Binary C.Equals (C.Call "err_no" [errvar]) (C.Variable "LIB_ERR_CHAN_ALREADY_REGISTERED")) [C.Return $ C.Variable "FLOUNDER_ERR_TX_BUSY"] [C.Ex $ C.Call "assert" [C.Unary C.Not $ C.StringConstant "shouldn't happen"], C.Return $ errvar] ] [] ] [] ] where errvar = C.Variable "_err" -- starting a send: just a debug hook start_send :: String -> String -> String -> [MessageArgument] -> [C.Stmt] start_send drvn ifn mn msgargs = [C.Ex $ C.Call "FL_DEBUG" [C.StringConstant $ 
drvn ++ " TX " ++ ifn ++ "." ++ mn ++ "\n"]] -- finished a send: clear msgnum, trigger pending waitsets/events finished_send :: [C.Stmt] finished_send = [ C.Ex $ C.Assignment tx_msgnum_field (C.NumConstant 0)] ++ [C.Ex $ C.Call "flounder_support_trigger_chan" [wsaddr ws] | ws <- ["tx_cont_chanstate", "register_chanstate"]] where tx_msgnum_field = C.DerefField bindvar "tx_msgnum" wsaddr ws = C.AddressOf $ bindvar `C.DerefField` ws -- start receiving: allocate space for any static arrays in message start_recv :: String -> String -> [TypeDef] -> String -> [MessageArgument] -> [C.Stmt] start_recv drvn ifn typedefs mn msgargs = concat [ [C.Ex $ C.Assignment (field fn) $ C.Call "malloc" [C.SizeOfT $ type_c_type ifn tr], C.Ex $ C.Call "assert" [C.Binary C.NotEquals (field fn) (C.Variable "NULL")] ] | Arg tr (Name fn) <- msgargs, is_array tr] where field fn = rx_union_elem mn fn is_array tr = case lookup_typeref typedefs tr of TArray _ _ _ -> True _ -> False -- finished recv: debug, run handler and clean up finished_recv :: String -> String -> [TypeDef] -> String -> [MessageArgument] -> [C.Stmt] finished_recv drvn ifn typedefs mn msgargs = [C.Ex $ C.Call "FL_DEBUG" [C.StringConstant $ drvn ++ " RX " ++ ifn ++ "." ++ mn ++ "\n"], C.Ex $ C.Call "assert" [C.Binary C.NotEquals handler (C.Variable "NULL")], C.Ex $ C.CallInd handler (bindvar:args), C.Ex $ C.Assignment rx_msgnum_field (C.NumConstant 0)] where rx_msgnum_field = C.DerefField bindvar "rx_msgnum" handler = C.DerefField bindvar "rx_vtbl" `C.FieldOf` mn args = concat [mkargs tr a | Arg tr a <- msgargs] mkargs tr (Name n) = case lookup_typeref typedefs tr of TArray _ _ _ -> [C.DerefPtr $ rx_union_elem mn n] _ -> [rx_union_elem mn n] mkargs _ (DynamicArray n l) = [rx_union_elem mn n, rx_union_elem mn l] tx_arg_assignment :: String -> [TypeDef] -> String -> MessageArgument -> C.Stmt tx_arg_assignment ifn typedefs mn (Arg tr v) = case v of Name an -> C.Ex $ C.Assignment (tx_union_elem mn an) (srcarg an) DynamicArray an len -> C.StmtList [ C.Ex $ C.Assignment (tx_union_elem mn an) (C.Cast (C.Ptr typespec) (C.Variable an)), C.Ex $ C.Assignment (tx_union_elem mn len) (C.Variable len)] where typespec = type_c_type ifn tr srcarg an = case lookup_typeref typedefs tr of -- XXX: I have no idea why GCC requires a cast for the array type TArray _ _ _ -> C.Cast (C.Ptr typespec) (C.Variable an) _ -> case typespec of -- we may need to cast away the const on a pointer C.Ptr _ -> C.Cast typespec (C.Variable an) _ -> C.Variable an tx_union_elem :: String -> String -> C.Expr tx_union_elem mn fn = bindvar `C.DerefField` "tx_union" `C.FieldOf` mn `C.FieldOf` fn rx_union_elem :: String -> String -> C.Expr rx_union_elem mn fn = bindvar `C.DerefField` "rx_union" `C.FieldOf` mn `C.FieldOf` fn -- misc common bits of C localvar = C.VarDecl C.NoScope C.NonConst errvar = C.Variable "err" bindvar = C.Variable intf_bind_var report_user_err ex = C.Ex $ C.CallInd (C.DerefField bindvar "error_handler") [bindvar, ex] report_user_tx_err ex = C.StmtList [ report_user_err ex, C.Ex $ C.Assignment tx_msgnum_field (C.NumConstant 0), C.Ex $ C.Call "flounder_support_trigger_chan" [wsaddr "register_chanstate"], C.Ex $ C.Call "flounder_support_deregister_chan" [wsaddr "tx_cont_chanstate"] ] where tx_msgnum_field = C.DerefField bindvar "tx_msgnum" wsaddr ws = C.AddressOf $ bindvar `C.DerefField` ws
utsav2601/cmpe295A
tools/flounder/BackendCommon.hs
Haskell
mit
18,497
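The identifier helpers at the top of BackendCommon are plain string concatenation. The standalone sketch below duplicates two of its one-line definitions (with made-up interface and message names "xmpl" and "ping") to show the C names they generate; it is illustrative only and not part of the Barrelfish sources.

ifscope :: String -> String -> String
ifscope ifn s = ifn ++ "_" ++ s

idscope :: String -> String -> String -> String
idscope ifn s suffix = ifscope ifn (s ++ "__" ++ suffix)

main :: IO ()
main = do
  putStrLn (ifscope "xmpl" "binding")       -- "xmpl_binding", as produced by intf_bind_type
  putStrLn (idscope "xmpl" "ping" "msgnum") -- "xmpl_ping__msgnum", as produced by msg_enum_elem_name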
module HN.Curl where import Network.Curl -- | Download a string from a URI. downloadString :: String -> IO (Either (CurlCode,String) String) downloadString uri = do withCurlDo $ do (code,resp) <- curlGetString_ uri opts case code of CurlOK -> return (Right resp) _ -> return (Left (code,resp)) -- Some silly servers think they're super smart by disallowing the -- "Curl" user-agent. Aw. ^_^ where opts = [CurlUserAgent "Chrome"]
lwm/haskellnews
src/HN/Curl.hs
Haskell
bsd-3-clause
462
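A brief usage sketch for downloadString above; it is not part of the haskellnews sources, the URL is a placeholder, and the module is assumed to be on the import path.

import HN.Curl (downloadString)

main :: IO ()
main = do
  result <- downloadString "http://example.com/"
  case result of
    Right body        -> putStrLn ("fetched " ++ show (length body) ++ " characters")
    Left (code, body) -> putStrLn ("curl failed: " ++ show code ++ " - " ++ body)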
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd"> <helpset version="2.0" xml:lang="id-ID"> <title>Encode/Decode/Hash Add-on</title> <maps> <homeID>encoder</homeID> <mapref location="map.jhm"/> </maps> <view> <name>TOC</name> <label>Contents</label> <type>org.zaproxy.zap.extension.help.ZapTocView</type> <data>toc.xml</data> </view> <view> <name>Index</name> <label>Index</label> <type>javax.help.IndexView</type> <data>index.xml</data> </view> <view> <name>Search</name> <label>Search</label> <type>javax.help.SearchView</type> <data engine="com.sun.java.help.search.DefaultSearchEngine"> JavaHelpSearch </data> </view> <view> <name>Favorites</name> <label>Favorites</label> <type>javax.help.FavoritesView</type> </view> </helpset>
thc202/zap-extensions
addOns/encoder/src/main/javahelp/org/zaproxy/addon/encoder/resources/help_id_ID/helpset_id_ID.hs
Haskell
apache-2.0
974
{-# LANGUAGE TypeOperators #-} -- #2993 module T2993 where foo b a = a <**> b . b
sdiehl/ghc
testsuite/tests/rename/should_fail/T2993.hs
Haskell
bsd-3-clause
86
module HAD.Y2014.M03.D18.Exercise where -- $setup -- >>> import Data.Maybe -- >>> let backPartner = (>>= partner) . (>>= partner) data Person a = Single a | Married a (Person a) partner :: Person a -> Maybe (Person a) partner (Married _ p) = Just p partner _ = Nothing get :: Person a -> a get (Single x) = x get (Married x _) = x -- | wedding -- Marry single people, linking them together -- Nothing if one is married -- -- If you're used to Haskell, this one should be VERY easy. -- But remember how strange it was the first time... -- And see you tomorrow! -- -- Examples: -- -- >>> isNothing $ wedding (Married "foo" (Single "foobar")) (Single "bar") -- True -- -- prop> \(x,y) -> (fmap get . backPartner . fmap fst $ wedding (Single x) (Single y)) == Just (x :: String) -- prop> \(x,y) -> (fmap get . backPartner . fmap snd $ wedding (Single x) (Single y)) == Just (y :: String) wedding :: Person a -> Person a -> Maybe (Person a, Person a) wedding = undefined
1HaskellADay/1HAD
exercises/HAD/Y2014/M03/D18/Exercise.hs
Haskell
mit
975
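One possible solution sketch for the wedding exercise above, replacing the undefined stub; the published 1HAD solution may differ. Laziness lets the two Married values refer to each other, so each partner's partner is the other, which is exactly what the backPartner properties check.

wedding :: Person a -> Person a -> Maybe (Person a, Person a)
wedding (Single x) (Single y) = Just (xp, yp)
  where
    -- tie the knot: xp's partner is yp and yp's partner is xp
    xp = Married x yp
    yp = Married y xp
wedding _ _ = Nothing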
{-# LANGUAGE CPP, OverloadedStrings #-} module LoadCache where import Haxl.Core import ExampleDataSource #include "LoadCache.txt"
GaussDing/Haxl
tests/LoadCache.hs
Haskell
bsd-3-clause
132
module Distribution.Client.Dependency.Modular.Validate (validateTree) where -- Validation of the tree. -- -- The task here is to make sure all constraints hold. After validation, any -- assignment returned by exploration of the tree should be a complete valid -- assignment, i.e., actually constitute a solution. import Control.Applicative import Control.Monad.Reader hiding (sequence) import Data.List as L import Data.Map as M import Data.Traversable import Prelude hiding (sequence) import Distribution.Client.Dependency.Modular.Assignment import Distribution.Client.Dependency.Modular.Dependency import Distribution.Client.Dependency.Modular.Flag import Distribution.Client.Dependency.Modular.Index import Distribution.Client.Dependency.Modular.Package import Distribution.Client.Dependency.Modular.PSQ as P import Distribution.Client.Dependency.Modular.Tree import Distribution.Client.ComponentDeps (Component) -- In practice, most constraints are implication constraints (IF we have made -- a number of choices, THEN we also have to ensure that). We call constraints -- that for which the preconditions are fulfilled ACTIVE. We maintain a set -- of currently active constraints that we pass down the node. -- -- We aim at detecting inconsistent states as early as possible. -- -- Whenever we make a choice, there are two things that need to happen: -- -- (1) We must check that the choice is consistent with the currently -- active constraints. -- -- (2) The choice increases the set of active constraints. For the new -- active constraints, we must check that they are consistent with -- the current state. -- -- We can actually merge (1) and (2) by saying the the current choice is -- a new active constraint, fixing the choice. -- -- If a test fails, we have detected an inconsistent state. We can -- disable the current subtree and do not have to traverse it any further. -- -- We need a good way to represent the current state, i.e., the current -- set of active constraints. Since the main situation where we have to -- search in it is (1), it seems best to store the state by package: for -- every package, we store which versions are still allowed. If for any -- package, we have inconsistent active constraints, we can also stop. -- This is a particular way to read task (2): -- -- (2, weak) We only check if the new constraints are consistent with -- the choices we've already made, and add them to the active set. -- -- (2, strong) We check if the new constraints are consistent with the -- choices we've already made, and the constraints we already have. -- -- It currently seems as if we're implementing the weak variant. However, -- when used together with 'preferEasyGoalChoices', we will find an -- inconsistent state in the very next step. -- -- What do we do about flags? -- -- Like for packages, we store the flag choices we have already made. -- Now, regarding (1), we only have to test whether we've decided the -- current flag before. Regarding (2), the interesting bit is in discovering -- the new active constraints. To this end, we look up the constraints for -- the package the flag belongs to, and traverse its flagged dependencies. -- Wherever we find the flag in question, we start recording dependencies -- underneath as new active dependencies. If we encounter other flags, we -- check if we've chosen them already and either proceed or stop. -- | The state needed during validation. 
data ValidateState = VS { index :: Index, saved :: Map QPN (FlaggedDeps Component QPN), -- saved, scoped, dependencies pa :: PreAssignment, qualifyOptions :: QualifyOptions } type Validate = Reader ValidateState validate :: Tree QGoalReasonChain -> Validate (Tree QGoalReasonChain) validate = cata go where go :: TreeF QGoalReasonChain (Validate (Tree QGoalReasonChain)) -> Validate (Tree QGoalReasonChain) go (PChoiceF qpn gr ts) = PChoice qpn gr <$> sequence (P.mapWithKey (goP qpn gr) ts) go (FChoiceF qfn gr b m ts) = do -- Flag choices may occur repeatedly (because they can introduce new constraints -- in various places). However, subsequent choices must be consistent. We thereby -- collapse repeated flag choice nodes. PA _ pfa _ <- asks pa -- obtain current flag-preassignment case M.lookup qfn pfa of Just rb -> -- flag has already been assigned; collapse choice to the correct branch case P.lookup rb ts of Just t -> goF qfn gr rb t Nothing -> return $ Fail (toConflictSet (Goal (F qfn) gr)) (MalformedFlagChoice qfn) Nothing -> -- flag choice is new, follow both branches FChoice qfn gr b m <$> sequence (P.mapWithKey (goF qfn gr) ts) go (SChoiceF qsn gr b ts) = do -- Optional stanza choices are very similar to flag choices. PA _ _ psa <- asks pa -- obtain current stanza-preassignment case M.lookup qsn psa of Just rb -> -- stanza choice has already been made; collapse choice to the correct branch case P.lookup rb ts of Just t -> goS qsn gr rb t Nothing -> return $ Fail (toConflictSet (Goal (S qsn) gr)) (MalformedStanzaChoice qsn) Nothing -> -- stanza choice is new, follow both branches SChoice qsn gr b <$> sequence (P.mapWithKey (goS qsn gr) ts) -- We don't need to do anything for goal choices or failure nodes. go (GoalChoiceF ts) = GoalChoice <$> sequence ts go (DoneF rdm ) = pure (Done rdm) go (FailF c fr ) = pure (Fail c fr) -- What to do for package nodes ... goP :: QPN -> QGoalReasonChain -> POption -> Validate (Tree QGoalReasonChain) -> Validate (Tree QGoalReasonChain) goP qpn@(Q _pp pn) gr (POption i _) r = do PA ppa pfa psa <- asks pa -- obtain current preassignment idx <- asks index -- obtain the index svd <- asks saved -- obtain saved dependencies qo <- asks qualifyOptions -- obtain dependencies and index-dictated exclusions introduced by the choice let (PInfo deps _ mfr) = idx ! pn ! i -- qualify the deps in the current scope let qdeps = qualifyDeps qo qpn deps -- the new active constraints are given by the instance we have chosen, -- plus the dependency information we have for that instance let goal = Goal (P qpn) gr let newactives = Dep qpn (Fixed i goal) : L.map (resetGoal goal) (extractDeps pfa psa qdeps) -- We now try to extend the partial assignment with the new active constraints. let mnppa = extend (P qpn) ppa newactives -- In case we continue, we save the scoped dependencies let nsvd = M.insert qpn qdeps svd case mfr of Just fr -> -- The index marks this as an invalid choice. We can stop. return (Fail (toConflictSet goal) fr) _ -> case mnppa of Left (c, d) -> -- We have an inconsistency. We can stop. return (Fail c (Conflicting d)) Right nppa -> -- We have an updated partial assignment for the recursive validation. local (\ s -> s { pa = PA nppa pfa psa, saved = nsvd }) r -- What to do for flag nodes ... 
goF :: QFN -> QGoalReasonChain -> Bool -> Validate (Tree QGoalReasonChain) -> Validate (Tree QGoalReasonChain) goF qfn@(FN (PI qpn _i) _f) gr b r = do PA ppa pfa psa <- asks pa -- obtain current preassignment svd <- asks saved -- obtain saved dependencies -- Note that there should be saved dependencies for the package in question, -- because while building, we do not choose flags before we see the packages -- that define them. let qdeps = svd ! qpn -- We take the *saved* dependencies, because these have been qualified in the -- correct scope. -- -- Extend the flag assignment let npfa = M.insert qfn b pfa -- We now try to get the new active dependencies we might learn about because -- we have chosen a new flag. let newactives = extractNewDeps (F qfn) gr b npfa psa qdeps -- As in the package case, we try to extend the partial assignment. case extend (F qfn) ppa newactives of Left (c, d) -> return (Fail c (Conflicting d)) -- inconsistency found Right nppa -> local (\ s -> s { pa = PA nppa npfa psa }) r -- What to do for stanza nodes (similar to flag nodes) ... goS :: QSN -> QGoalReasonChain -> Bool -> Validate (Tree QGoalReasonChain) -> Validate (Tree QGoalReasonChain) goS qsn@(SN (PI qpn _i) _f) gr b r = do PA ppa pfa psa <- asks pa -- obtain current preassignment svd <- asks saved -- obtain saved dependencies -- Note that there should be saved dependencies for the package in question, -- because while building, we do not choose flags before we see the packages -- that define them. let qdeps = svd ! qpn -- We take the *saved* dependencies, because these have been qualified in the -- correct scope. -- -- Extend the flag assignment let npsa = M.insert qsn b psa -- We now try to get the new active dependencies we might learn about because -- we have chosen a new flag. let newactives = extractNewDeps (S qsn) gr b pfa npsa qdeps -- As in the package case, we try to extend the partial assignment. case extend (S qsn) ppa newactives of Left (c, d) -> return (Fail c (Conflicting d)) -- inconsistency found Right nppa -> local (\ s -> s { pa = PA nppa pfa npsa }) r -- | We try to extract as many concrete dependencies from the given flagged -- dependencies as possible. We make use of all the flag knowledge we have -- already acquired. extractDeps :: FAssignment -> SAssignment -> FlaggedDeps comp QPN -> [Dep QPN] extractDeps fa sa deps = do d <- deps case d of Simple sd _ -> return sd Flagged qfn _ td fd -> case M.lookup qfn fa of Nothing -> mzero Just True -> extractDeps fa sa td Just False -> extractDeps fa sa fd Stanza qsn td -> case M.lookup qsn sa of Nothing -> mzero Just True -> extractDeps fa sa td Just False -> [] -- | We try to find new dependencies that become available due to the given -- flag or stanza choice. We therefore look for the choice in question, and then call -- 'extractDeps' for everything underneath. 
extractNewDeps :: Var QPN -> QGoalReasonChain -> Bool -> FAssignment -> SAssignment -> FlaggedDeps comp QPN -> [Dep QPN] extractNewDeps v gr b fa sa = go where go :: FlaggedDeps comp QPN -> [Dep QPN] -- Type annotation necessary (polymorphic recursion) go deps = do d <- deps case d of Simple _ _ -> mzero Flagged qfn' _ td fd | v == F qfn' -> L.map (resetGoal (Goal v gr)) $ if b then extractDeps fa sa td else extractDeps fa sa fd | otherwise -> case M.lookup qfn' fa of Nothing -> mzero Just True -> go td Just False -> go fd Stanza qsn' td | v == S qsn' -> L.map (resetGoal (Goal v gr)) $ if b then extractDeps fa sa td else [] | otherwise -> case M.lookup qsn' sa of Nothing -> mzero Just True -> go td Just False -> [] -- | Interface. validateTree :: Index -> Tree QGoalReasonChain -> Tree QGoalReasonChain validateTree idx t = runReader (validate t) VS { index = idx , saved = M.empty , pa = PA M.empty M.empty M.empty , qualifyOptions = defaultQualifyOptions idx }
enolan/cabal
cabal-install/Distribution/Client/Dependency/Modular/Validate.hs
Haskell
bsd-3-clause
12,175
{-# LANGUAGE TypeFamilies, GeneralizedNewtypeDeriving #-} module ShouldCompile where import Control.Applicative (Applicative) data family S a newtype instance S Int = S Int deriving Eq data family S2 a b newtype instance S2 Int b = S2 (IO b) deriving (Functor, Applicative, Monad)
snoyberg/ghc
testsuite/tests/indexed-types/should_compile/DerivingNewType.hs
Haskell
bsd-3-clause
337
{-# OPTIONS_GHC -XLiberalTypeSynonyms #-} module ShouldCompile where type T a b = a type S m = m () f :: S (T Int) f = undefined
urbanslug/ghc
testsuite/tests/typecheck/should_compile/tc234.hs
Haskell
bsd-3-clause
136
{-# LANGUAGE NoMonomorphismRestriction #-} {-# LANGUAGE ScopedTypeVariables #-} module Floating ( floating ) where import Fractional (fractional) import System.Random (Random) import Test.QuickCheck.Checkers (EqProp, (=-=), inverseL) import Test.QuickCheck.Extra (UnitInterval(..), Tiny(..), BiunitInterval) import Test.Tasty (testGroup, TestTree) import Test.Tasty.QuickCheck (testProperty, NonNegative(..), Positive(..), Arbitrary, (==>)) import Test.Tasty.HUnit (testCase, (@?=)) floating :: forall a. (Arbitrary a, EqProp a, Show a, Floating a, Ord a, Random a) => a -> TestTree floating _ = testGroup "Test Floating instance" ts where e = exp 1 ts = [ fractional (undefined :: a) , testCase "π/4 = atan 1" ((pi::a) @?= 4 * atan 1) , testProperty "log == logBase e" (log =-= logBase (e :: Positive a)) , testProperty "exp == (e **)" (exp =-= ((e::a) **)) , testProperty "sqrt x * sqrt x = x" (\(NonNegative (x :: a)) -> let r = sqrt x in r * r == x) , testProperty "law of exponents" (\(Positive (base :: a)) x y -> base ** (x + y) =-= base ** x * base ** y) , testProperty "logarithm definition" (\(Positive (b :: a)) (Tiny c) -> let x = b ** c in b /= 1 ==> c =-= logBase b x) , testProperty "sine cosine definition" (\x (y :: a) -> cos (x - y) =-= cos x * cos y + sin x * sin y) -- TODO: Use open interval , testProperty "0 < x cos x" (\(x::UnitInterval a) -> 0 <= x * cos x) -- Use <= here because of precision issues :( , testProperty "x cos x < sin x" (\(x::UnitInterval a) -> x * cos x <= sin x) , testProperty "sin x < x" (\(x::UnitInterval a) -> sin x <= x) , testProperty "tangent definition" (\(x::a) -> cos x /= 0 ==> tan x =-= sin x / cos x) , testProperty "asin left inverse" (inverseL sin (asin :: BiunitInterval a -> BiunitInterval a)) , testProperty "acos left inverse" (inverseL cos (acos :: BiunitInterval a -> BiunitInterval a)) , testProperty "atan left inverse" (inverseL tan (atan :: a -> a)) , testProperty "sinh definition" (\(x::a) -> sinh x =-= (exp x - exp (-x)) / 2) , testProperty "cosh definition" (\(x::a) -> cosh x =-= (exp x + exp (-x)) / 2) , testProperty "tanh definition" (\(x::a) -> tanh x =-= sinh x / cosh x) , testProperty "sinh left inverse" (inverseL asinh (sinh :: a -> a)) , testProperty "cosh left inverse" (acosh . cosh =-= (abs :: a -> a)) , testProperty "tanh left inverse" (inverseL atanh (tanh :: Tiny a -> Tiny a)) ]
expipiplus1/exact-real
test/Floating.hs
Haskell
mit
3,332
import Data.List (foldl1') import Math.NumberTheory.Primes.Testing (isPrime) --import Control.Parallel.Strategies (using, parList, rseq) -- isPrime :: Integer -> Bool -- isPrime n -- | n < 2 = False -- | n == 2 = True -- | otherwise = -- let sqn = (floor (sqrt (fromIntegral n))) + 1 -- in and [ n `mod` d /= 0 | d <- [2..sqn] ] formula :: Integer -> Integer -> Integer -> Integer formula a b n = n*n + a*n + b test :: Integer -> Integer -> Integer -> Bool test a b n = isPrime $ formula a b n primeSequence :: Integer -> Integer -> [Integer] primeSequence a b = takeWhile (test a b) [0..] sequenceLength :: Integer -> Integer -> Int sequenceLength a b = length $ primeSequence a b maximum' = foldl1' max sequenceLenghts = [ (sequenceLength a b, a, b) | a <- [(-999)..999] , b <- [(-999)..999] ] -- `using` parList rseq euler27 = maximum' sequenceLenghts main = print $ a * b where (len, a, b) = euler27 --(71,-61,971) --(30.64 secs, 5773882468 bytes) -- using my isPrime implementation --(15.15 secs, 2572417912 bytes) -- using Math.NumberTheory.Primes.Testing.isPrime
feliposz/project-euler-solutions
haskell/euler27.hs
Haskell
mit
1,153
-- ($) :: (a -> b) -> a -> b -- f $ x = f x -- f a b c === (((f a) b) c) test1 = sum (map sqrt [1..130]) test2 = sum $ map sqrt [1..130] test3 = sqrt (3 + 4 + 9) test4 = sqrt $ 3 + 4 + 9 test5 = sum (filter (> 10) (map (*2) [2..10])) test6 = sum $ filter (> 10) (map (*2) [2..10]) test7 = sum $ filter (> 10) $ map (*2) [2..10] test8 = map ($ 3) [(/4), (10*), (^2), sqrt]
v0lkan/learning-haskell
session-archive/007-dollar.hs
Haskell
mit
377
main = putStrLn "สวัสดีครับ"
merxer/kata
haskell/14.hs
Haskell
mit
49
{-# LANGUAGE TupleSections #-} module SoOSiM.Components.ResourceManager.Behaviour where import Control.Arrow (first,second) import Data.Char (toLower) import qualified Data.HashMap.Strict as HashMap import Data.List (mapAccumL,intersect,(\\),partition) import SoOSiM import SoOSiM.Components.ResourceDescriptor import SoOSiM.Components.ResourceManager.Interface import SoOSiM.Components.ResourceManager.Types behaviour :: RM_State -> Input RM_Cmd -> Sim RM_State behaviour s (Message _ (AddResource rId rd) retAddr) = do let rs = HashMap.insert rId rd (resources s) rsI = HashMap.insertWith (flip (++)) rd [rId] (resources_inv s) s' = s { resources = rs, resources_inv = rsI, free_resources = (free_resources s) ++ [rId] } yield s' behaviour s (Message _ (RequestResources appId rsList) retAddr) = do let (free',ids) = assignFree s rsList busy = map (,appId) ids s' = s { free_resources = free', busy_resources = (busy_resources s) ++ busy } traceMsg ("REQ: " ++ show (rsList,ids)) respond ResourceManager retAddr (RM_Resources ids) yield s' behaviour s (Message _ (FreeAllResources appId) retAddr) = do let (freed,busy') = first (map fst) $ partition ((== appId) . snd) (busy_resources s) s' = s { free_resources = (free_resources s) ++ freed, busy_resources = busy' } yield s' behaviour s (Message _ (FreeResources appId rIds) retAddr) = do let (freed,busy') = first (map fst) $ partition (\(rId,aId) -> aId == appId && rId `elem` rIds) (busy_resources s) s' = s { free_resources = (free_resources s) ++ freed, busy_resources = busy' } yield s' behaviour s (Message _ (GetResourceDescription rId) retAddr) = do let rdM = HashMap.lookup rId (resources s) respond ResourceManager retAddr (RM_Descriptor rdM) yield s behaviour s _ = yield s checkFree :: String -> ResourceFreeList -> ([ResourceId],Int) -> (ResourceFreeList,[ResourceId]) checkFree dm free (keys,needed) = let keys' = intersect keys free keys'' = take needed $ case dm of "all" -> keys' "half" -> take (ceiling $ (fromIntegral $ length keys') / 2) keys' _ -> keys' free' = free \\ keys'' in (free',keys'') assignFree :: RM_State -> ResourceRequestList -> (ResourceFreeList,[ResourceId]) assignFree s rsList = (free',givenIds) where available = map (\(rTy,_) -> concat $ HashMap.elems $ HashMap.filterWithKey (\k _ -> isComplient k rTy) (resources_inv s) ) rsList wanted = zip available (map snd rsList) dm = map toLower (dist_method s) (free',givenIds) = second concat $ mapAccumL (checkFree dm) (free_resources s) wanted
christiaanb/SoOSiM-components
src/SoOSiM/Components/ResourceManager/Behaviour.hs
Haskell
mit
2,912
module Unison.Typechecker.Components (components, minimize, minimize') where import Data.Bifunctor (first) import qualified Data.Graph as Graph import qualified Data.Map as Map import Data.Maybe import Data.Set (Set) import qualified Data.Set as Set import qualified Unison.ABT as ABT import Unison.Term (AnnotatedTerm') import qualified Unison.Term as Term import Unison.Var (Var) components :: Var v => [(v, ABT.Term f v a)] -> [[(v, ABT.Term f v a)]] components = components' ABT.freeVars -- | Order bindings by dependencies and group into components. -- Each component consists of > 1 bindings, each of which depends -- transitively on all other bindings in the component. -- -- 1-element components may or may not depend on themselves. -- -- The order is such that a component at index i will not depend -- on components and indexes > i. But a component at index i does not -- _necessarily_ depend on any components at earlier indices. -- -- Example: -- -- let rec -- ping n = pong (n + 1); -- pong n = ping (n + 1); -- g = id 42; -- y = id "hi" -- id x = x; -- in ping g -- -- `components` would produce `[[ping,pong], [id], [g], [y]]` -- Notice that `id` comes before `g` and `y` in the output, since -- both `g` and `y` depend on `id`. -- -- Uses Tarjan's algorithm: -- https://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm components' :: Var v => (t -> Set v) -> [(v, t)] -> [[(v, t)]] components' freeVars bs = let varIds = Map.fromList (map fst bs `zip` [(0::Int)..]) varId v = fromJust $ Map.lookup v varIds -- something horribly wrong if this bombs -- use ints as keys for graph to preserve original source order as much as possible graph = [ ((v,b), varId v, deps b) | (v,b) <- bs ] vars = Set.fromList (map fst bs) deps b = varId <$> Set.toList (Set.intersection vars (freeVars b)) in Graph.flattenSCC <$> Graph.stronglyConnComp graph -- | Algorithm for minimizing cycles of a `let rec`. This can -- improve generalization during typechecking and may also be more -- efficient for execution. -- -- For instance: -- -- minimize (let rec id x = x; g = id 42; y = id "hi" in g) -- ==> -- Just (let id x = x; g = id 42; y = id "hi" in g) -- -- Gets rid of the let rec and replaces it with an ordinary `let`, such -- that `id` is suitably generalized. minimize :: Var v => AnnotatedTerm' vt v a -> Maybe (AnnotatedTerm' vt v a) minimize (Term.LetRecNamedAnnotated' ann bs e) = case components (first snd <$> bs) of [_single] -> Nothing cs -> let varAnnotations = Map.fromList ((\((a,v),_) -> (v,a)) <$> bs) annotationFor v = fromJust $ Map.lookup v varAnnotations annotatedVar v = (annotationFor v, v) -- When introducing a nested let/let rec, we use the annotation of the -- variable that starts off that let/let rec mklet [(hdv,hdb)] e | Set.member hdv (ABT.freeVars hdb) = Term.letRec (annotationFor hdv) [(annotatedVar hdv, hdb)] e | otherwise = Term.let1 [(annotatedVar hdv,hdb)] e mklet cycle@((hdv,_):_) e = Term.letRec (annotationFor hdv) (first annotatedVar <$> cycle) e mklet [] e = e in -- The outer annotation is going to be meaningful, so we make -- sure to preserve it, whereas the annotations at intermediate Abs nodes -- aren't necessarily meaningful Just $ ABT.annotate ann (foldr mklet e cs) where minimize _ = Nothing minimize' :: Var v => AnnotatedTerm' vt v a -> AnnotatedTerm' vt v a minimize' term = fromMaybe term (minimize term)
paulp/unison
parser-typechecker/src/Unison/Typechecker/Components.hs
Haskell
mit
3,618
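A self-contained illustration (not from the Unison sources) of the grouping that components/components' performs, using the same Data.Graph machinery on the ping/pong example from the Haddock comment. Variable names stand in for real bindings, and the relative order of independent components may differ from the listing in the comment.

import qualified Data.Graph as Graph

-- Each binding becomes a graph node keyed by its variable name; edges point at
-- the variables it mentions. Strongly connected components recover the
-- mutually recursive groups, listed dependencies-first.
componentsDemo :: [(String, [String])] -> [[String]]
componentsDemo bs =
  map (map fst . Graph.flattenSCC) (Graph.stronglyConnComp graph)
  where
    graph = [ ((v, deps), v, deps) | (v, deps) <- bs ]

main :: IO ()
main = print (componentsDemo
  [ ("ping", ["pong"]), ("pong", ["ping"])
  , ("g", ["id"]), ("y", ["id"]), ("id", []) ])
-- groups ping/pong into one component and places id before g and y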
{-# LANGUAGE OverloadedStrings, QuasiQuotes #-} module Y2017.M11.D27.Exercise where {-- Okay, we have a set of recommended articles, now we want to add some new articles to the list. So, given an article id as a basis and a set of article ids to add, add those articles to the source article's recommended list. Yes: it isn't rocket science. --} import Database.PostgreSQL.Simple import Database.PostgreSQL.Simple.SqlQQ import Database.PostgreSQL.Simple.ToRow import Store.SQL.Connection import Store.SQL.Util.Pivots {-- Actually, when you get right down to it: since added articles are not scored, adding rows to the recommendation table becomes a pivot table exercise. But even then, since we're only working with one source article to add articles to, this is just an insert-in-context. --} insertRecsStmt :: Query insertRecsStmt = [sql|INSERT INTO recommendation (for_article_id,recommended_article_id) VALUES (?,?)|] insertRec :: Connection -> Integer -> [Integer] -> IO () insertRec conn srcId recs = undefined {-- BONUS ----------------------------------------------------------------- Write a program that takes a source article ID and a list of recommended article ids an inserts that set into recommendation. --} main' :: [String] -> IO () main' artIds = undefined
geophf/1HaskellADay
exercises/HAD/Y2017/M11/D27/Exercise.hs
Haskell
mit
1,299
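One possible body for insertRec; the exercise deliberately leaves it undefined, and the published solution may differ. With the exercise module's imports in scope, executeMany from postgresql-simple expands the VALUES (?,?) statement once per (source, recommended) pair.

insertRec :: Connection -> Integer -> [Integer] -> IO ()
insertRec conn srcId recs =
  () <$ executeMany conn insertRecsStmt [(srcId, recId) | recId <- recs]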
{-# OPTIONS_GHC -fno-warn-orphans #-} {-# LANGUAGE DeriveAnyClass #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE StandaloneDeriving #-} {-# LANGUAGE TemplateHaskell #-} module TimeUnitsJSONInstance where import Data.Aeson.TH (Options (omitNothingFields), defaultOptions, deriveJSON) import Data.Time import Data.Time.Clock.POSIX import Data.Time.Units import GHC.Generics import Protolude hiding (FilePath) import Text.PrettyPrint.GenericPretty -- TODO NOT used anymore, delete it $(deriveJSON defaultOptions {omitNothingFields = True} ''Microsecond) -- deriving instance Generic Microsecond -- deriving instance Pretty Microsecond -- instance Pretty Microsecond where -- pretty ( Microsecond m) = -- to use for the ssLastMarketSubscriptionMessageSentAt timeInMicroseconds :: IO Microsecond timeInMicroseconds = fromMicroseconds . fromIntegral . numerator . toRational . (* 1000000) <$> getPOSIXTime
joe9/streaming-betfair-api
src/TimeUnitsJSONInstance.hs
Haskell
mit
1,044
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE StrictData #-} {-# LANGUAGE TupleSections #-} -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iotanalytics-dataset-deltatime.html module Stratosphere.ResourceProperties.IoTAnalyticsDatasetDeltaTime where import Stratosphere.ResourceImports -- | Full data type definition for IoTAnalyticsDatasetDeltaTime. See -- 'ioTAnalyticsDatasetDeltaTime' for a more convenient constructor. data IoTAnalyticsDatasetDeltaTime = IoTAnalyticsDatasetDeltaTime { _ioTAnalyticsDatasetDeltaTimeOffsetSeconds :: Val Integer , _ioTAnalyticsDatasetDeltaTimeTimeExpression :: Val Text } deriving (Show, Eq) instance ToJSON IoTAnalyticsDatasetDeltaTime where toJSON IoTAnalyticsDatasetDeltaTime{..} = object $ catMaybes [ (Just . ("OffsetSeconds",) . toJSON) _ioTAnalyticsDatasetDeltaTimeOffsetSeconds , (Just . ("TimeExpression",) . toJSON) _ioTAnalyticsDatasetDeltaTimeTimeExpression ] -- | Constructor for 'IoTAnalyticsDatasetDeltaTime' containing required fields -- as arguments. ioTAnalyticsDatasetDeltaTime :: Val Integer -- ^ 'itaddtOffsetSeconds' -> Val Text -- ^ 'itaddtTimeExpression' -> IoTAnalyticsDatasetDeltaTime ioTAnalyticsDatasetDeltaTime offsetSecondsarg timeExpressionarg = IoTAnalyticsDatasetDeltaTime { _ioTAnalyticsDatasetDeltaTimeOffsetSeconds = offsetSecondsarg , _ioTAnalyticsDatasetDeltaTimeTimeExpression = timeExpressionarg } -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iotanalytics-dataset-deltatime.html#cfn-iotanalytics-dataset-deltatime-offsetseconds itaddtOffsetSeconds :: Lens' IoTAnalyticsDatasetDeltaTime (Val Integer) itaddtOffsetSeconds = lens _ioTAnalyticsDatasetDeltaTimeOffsetSeconds (\s a -> s { _ioTAnalyticsDatasetDeltaTimeOffsetSeconds = a }) -- | http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-iotanalytics-dataset-deltatime.html#cfn-iotanalytics-dataset-deltatime-timeexpression itaddtTimeExpression :: Lens' IoTAnalyticsDatasetDeltaTime (Val Text) itaddtTimeExpression = lens _ioTAnalyticsDatasetDeltaTimeTimeExpression (\s a -> s { _ioTAnalyticsDatasetDeltaTimeTimeExpression = a })
frontrowed/stratosphere
library-gen/Stratosphere/ResourceProperties/IoTAnalyticsDatasetDeltaTime.hs
Haskell
mit
2,243
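A hedged usage sketch for the generated property type above. It assumes stratosphere's umbrella module re-exports this type and that `Literal` is the library's plain-value constructor for `Val`; the time expression is only a placeholder.

{-# LANGUAGE OverloadedStrings #-}
module DeltaTimeExample where

import Stratosphere  -- assumed umbrella re-export of Val(..) and the generated modules

-- Build the property with the convenience constructor; toJSON on the
-- result yields the CloudFormation fragment with the OffsetSeconds and
-- TimeExpression keys shown in the ToJSON instance above.
deltaTime :: IoTAnalyticsDatasetDeltaTime
deltaTime =
  ioTAnalyticsDatasetDeltaTime
    (Literal 60)                                        -- OffsetSeconds
    (Literal "from_received_timestamp(activity) - 60")  -- TimeExpression (placeholder)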
module Main where import Filter.Abstract (abstract) import Filter.Attr (simplifyAttr) import Filter.Bib (bibliography) import Filter.Float (float) import Filter.Hyperref (hyperref) import Filter.LinksAsNotes (linksAsNotes) import Filter.Macros (processMacros) import Filter.MultiBib (multibib) import Filter.Multicol (multicol) import Filter.NumberRef (numberRef) import Filter.WrapFloat (wrapFloat) import Paths_ppp (version) import PostProcess (trim) import PreProcess (include) import Reader (toPandoc) import Writer (toTex, toPdf) import Control.Monad (forM_) import qualified Data.ByteString.Lazy.Char8 as BS import Data.Monoid ((<>)) import qualified Data.Text as T import qualified Data.Text.IO as T import Data.Version (showVersion) import Options.Applicative import System.FilePath ((-<.>), takeExtension) ppp :: FilePath -> IO T.Text ppp fp = fmap trim . toTex =<< processMacros False . hyperref . float . wrapFloat . multibib =<< bibliography . linksAsNotes . numberRef . multicol . abstract . simplifyAttr =<< processMacros True =<< toPandoc =<< include fp data Options = Options { targetTex :: Bool , sourceFiles :: [FilePath] } parser :: Parser Options parser = infoOption ( "ppp version " ++ showVersion version ) ( long "version" <> help "show version information and exit" ) <*> abortOption ShowHelpText ( long "help" <> help "show usage information and exit" ) <*> ( Options <$> switch ( long "tex" <> help "convert to latex instead of pdf" ) <*> some ( strArgument ( metavar "<file>..." ) ) ) main :: IO () main = do opts <- execParser $ info parser idm forM_ (sourceFiles opts) $ \file -> case (targetTex opts, takeExtension file) of (False, ".md" ) -> do putStrLn $ "rendering " ++ (file -<.> "pdf") ++ "..." BS.writeFile (file -<.> "pdf") =<< toPdf =<< ppp file (False, ".tex") -> do putStrLn $ "rendering " ++ (file -<.> "pdf") ++ "..." BS.writeFile (file -<.> "pdf") =<< toPdf =<< T.readFile file (True, ".md" ) -> do putStrLn $ "rendering " ++ (file -<.> "tex") ++ "..." T.writeFile (file -<.> "tex") =<< ppp file (True, ".tex") -> error $ file ++ ": is already in tex format" (_, ext ) -> error $ file ++ ": unrecognised file extension `" ++ ext ++ "'"
Thhethssmuz/ppp
src/Main.hs
Haskell
mit
2,510
-- Programming paradigms, lab 2: recursive functions -- 1. The factorial of a given number, unrestricted and tail recursive. -- -- The number ADT resembles the list ADT: -- - 0 (base case) <- 0! = 1 -- - n (inductive step) <- n! = n * (n - 1)! factorial 0 = 1 factorial n = n * factorial (n - 1) -- How is factorial n evaluated for an arbitrary n? -- -- > factorial n -- > n * factorial (n - 1) -- > n * (n - 1) * factorial (n - 2) -- > ... -- -- We have the same problem with space, and the tail-recursive optimisation -- can be made by adding an "accumulator" parameter, which will -- have the initial value 1 (the value of the base case). Note that -- this way the computation is carried out on the way forward. factorialTail n = let factorialAux acc 0 = acc factorialAux acc n = factorialAux (acc * n) (n - 1) in factorialAux 1 n -- 2. The n-th number in the Fibonacci sequence. The problem is similar to the -- previous one, except that we are dealing with a recurrence of order 2: -- -- - The Fibonacci number at index 0 is 0 -- - The Fibonacci number at index 1 is 1 -- - The Fibonacci number at index n is the sum of the numbers at -- indices n - 1 and n - 2. fibonacci 0 = 0 fibonacci 1 = 1 fibonacci n = fibonacci (n - 1) + fibonacci (n - 2) -- Evaluating fibonacci 4: -- > fibonacci 4 -- > fibonacci 3 + fibonacci 2 -- > (fibonacci 2 + fibonacci 1) + (fibonacci 1 + fibonacci 0) -- > ((fibonacci 1 + fibonacci 0) + 1) + (1 + 0) -- > ((1 + 0) + 1) + 1 -- > (1 + 1) + 1 -- > 2 + 1 -- > 3 -- -- The tail-recursive implementation is not as obvious. At every step -- we must remember n - 1 and n - 2, and at the next step n - 1 becomes -- n - 2 and n becomes n - 1. -- -- We write down the first elements of the Fibonacci sequence: -- -- 0 1 1 2 3 5 8 13 -- -- and we want to compute them as we go. For that we must keep two -- accumulators, one for each partial result of the Fibonacci -- sequence. We "move" the accumulator associated with (n - 1) into the one -- associated with (n - 2) after computing the new partial result: -- -- acc1_0 = 1; acc1_1 = (1 + 0) = 1; acc1_2 = 2; acc1_3 = 3; ... -- acc2_0 = 0; acc2_1 = acc1_0 = 1; acc2_2 = 1; acc2_3 = 2; ... fibonacciTail n = let fibonacciAux acc1 acc2 0 = acc2 fibonacciAux acc1 acc2 n = fibonacciAux (acc1 + acc2) acc1 (n - 1) in fibonacciAux 1 0 n -- Note: the base case fibonacciAux acc1 acc2 1 is not needed; it is -- handled naturally when going from 1 to 0 (acc2 receives the old -- value of acc1 and is returned by the function). -- 3. We have two functions to implement: concatenating two lists and -- reversing a list. The two functions are used to illustrate -- the fact that recursion can sometimes be done "naturally" in -- tail position. -- -- 3.a. Concatenating two lists. Let us take an example: -- cat [1,2,3,4] [5,6,7] = [1,2,3,4,5,6,7] -- -- We can view it as attaching [5,6,7] "at the tail of [1,2,3,4]". -- Given that we cannot access the tail of [1,2,3,4] directly, -- we have to traverse it until we reach the empty list. -- -- In natural language: -- - the concatenation of the empty list l1 with a list l2 is the list l2 -- - the concatenation of a list made of the element h and the list l1 is -- the list made of the element h and the concatenation of l1 to l2. cat [] l2 = l2 cat (h : l1) l2 = h : cat l1 l2 -- Evaluation: -- > cat [1,2,3,4] [5,6,7] -- > 1 : cat [2,3,4] [5,6,7] -- > 1 : 2 : cat [3,4] [5,6,7] -- > 1 : 2 : 3 : cat [4] [5,6,7] -- > 1 : 2 : 3 : 4 : cat [] [5,6,7] -- > 1 : 2 : 3 : 4 : [5,6,7] -- -- Although cat is not tail-recursive, it is what is called "tail-recursive -- modulo cons", i.e. the last call is a cons. Functions that are -- tail-recursive modulo cons can in turn be optimised to -- use constant stack space. -- 3.b. Reversing the order of the elements of a list. At first sight, -- the most intuitive way to implement the function would use append, -- i.e. for the recursive step we append at the tail of the -- list. The drawback of this approach is that it runs in O(n^2). -- -- A natural implementation, however, is the one in which we cons onto an -- accumulator, and in the base case we return the accumulator: -- - The reverse of the empty list is the accumulator -- - The reverse of a list made of h and l is the same as the reverse -- of l when the accumulator has h as its first element. inv l = let invAux acc [] = acc invAux acc (h : l) = invAux (h : acc) l in invAux [] l -- Evaluation: -- > inv [1,2,3,4] -- > invAux [] [1,2,3,4] -- > invAux (1 : []) [2,3,4] -- > invAux (2 : [1]) [3,4] -- > invAux (3 : [2,1]) [4] -- > invAux (4 : [3,2,1]) [] -- > [4,3,2,1] -- 4. Sorting lists. -- 4.a. Merge sort -- -- We consider two base cases: the empty list and the list with a single -- element. The second case is used to let the recursion stop -- naturally when the list is split in two. mergeSort [] = [] mergeSort [x] = [x] mergeSort l = let untilSplit = length l `div` 2 -- the merging function merge l1 [] = l1 merge [] l2 = l2 merge (h1 : l1) (h2 : l2) = if h1 < h2 then h1 : merge l1 (h2 : l2) else h2 : merge (h1 : l1) l2 -- split the list into two halves (based on -- the number of elements) left = take untilSplit l right = drop untilSplit l -- sort the partial results and merge them in merge (mergeSort left) (mergeSort right) -- 4.b. Insertion sort -- -- It is somewhat similar to bubble sort, except that instead of swapping -- it traverses the list and inserts the elements in the desired order. insertionSort [] = [] insertionSort (h : l) = let insert e [] = [e] insert e (h : l) = if e < h -- if the element is -- smaller than the head -- of the list, insert it -- at the head then e : h : l -- otherwise look for another place for it else h : insert e l -- insert the element into the sorted list in insert h (insertionSort l) -- 4.c. QuickSort -- -- The idea behind the algorithm: -- - Pick a pivot element -- - Split the list into two sublists: -- + The sublist containing the elements < pivot -- + The sublist containing the elements >= pivot -- - Concatenate the resulting lists plus the pivot quickSort [] = [] quickSort (p : l) = let left = filter (< p) l right = filter (>= p) l in quickSort left ++ [p] ++ quickSort right -- 5. The number of inversions in a list -- -- We use the same definition as in the lab: given a list l, -- with l[i] being the element at position i (where i starts at 0 and -- ends at the length of the list - 1), find the number of elements of -- the list that satisfy the property l[i] > l[j] and i < j. -- -- (Or, intuitively, the number of elements that are not "in the position -- they should be in" relative to a sorted list.) -- -- Intuitively, we have to compare every two elements of the list together -- with their positions, and add 1 to the partial result (a.k.a. the -- "accumulator") whenever the property holds numberOfInversions l = let -- finished traversing l1, return the result go [] l2 n1 n2 acc = acc -- finished traversing l2, traverse it again for the rest of l1 go (h1 : l1) [] n1 n2 acc = go l1 l (n1 + 1) 0 acc go (h1 : l1) (h2 : l2) n1 n2 acc = -- if there is an inversion, increment acc, otherwise leave it as it is let acc1 = if h1 > h2 && n1 < n2 then acc + 1 else acc in go (h1 : l1) l2 n1 (n2 + 1) acc1 in go l l 0 0 0 -- Alternatively, mergeSort can be modified to count the inversions -- in a list (in the merge step).
spyked/slides
misc-notes/pp-cb-labs/lab-02/lab-02.hs
Haskell
cc0-1.0
8,319
-- Project Euler Problem 30 - digit fifth powers -- -- sum all numbers that are the sum of the fifth powers of their digits -- -- (does not include 1) -- import Data.List import Data.Char digits x = [ digitToInt y | y <- (show x)] p5dsum x = sum [ y^5 | y <- digits x] max_possible = (9^5)*10 -- safe upper bound: a d-digit number's digit fifth-power sum is at most d*9^5 = d*59049, and for d >= 7 that is already smaller than the smallest d-digit number 10^(d-1), so no solution has more than 6 digits (bound not tight) main = do print ( sum [ x | x<-[2..max_possible], x==(p5dsum x)] )
yunwilliamyu/programming-exercises
project_euler/p030_digit_fifth_powers.hs
Haskell
cc0-1.0
468
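The bound argument in the comment on max_possible above can be checked mechanically; the tiny standalone sketch below does so (the name boundIsSafe is mine).

-- For d >= 7 digits, the largest possible digit fifth-power sum (d * 9^5)
-- is already smaller than the smallest d-digit number (10^(d-1)), so the
-- search limit of 10 * 9^5 = 590490 cannot miss a solution.
boundIsSafe :: Bool
boundIsSafe = and [ d * 9 ^ 5 < 10 ^ (d - 1) | d <- [7 .. 10 :: Integer] ]
-- ghci> boundIsSafe
-- True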
module Problem011 where {- The 20x20 grid from problem-011.txt is read row-major into xs, so index i corresponds to row (i `div` 20), column (i `mod` 20). The guard "last is < 400" only keeps indices inside the grid; it does not stop a horizontal or diagonal run from wrapping across a row boundary. -} main = do str <- readFile "problem-011.txt" let xs = map (read :: String -> Int) $ words str print $ maximum $ [prod xs is | i <- [0..400], o <- offsets, let is = map (+ i) o, last is < 400] {- index offsets of a run of four cells in each direction -} offsets = [[0, 1, 2, 3] {- right -} ,[0,20,40,60] {- down -} ,[0,21,42,63] {- down-right -} ,[3,22,41,60] {- down-left -} ] prod xs is = product $ map (xs !!) is
vasily-kartashov/playground
euler/problem-011.hs
Haskell
apache-2.0
384
{-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE DeriveGeneric #-} module Main where import Data.Monoid import Data.String import Web.Spock.Safe import Data.Aeson import GHC.Generics data User = User { name :: String, height :: Int } deriving Generic instance ToJSON User main :: IO () main = runSpock 8080 $ spockT id $ do get root $ Web.Spock.Safe.json (map (User "10") [4,5,6,6]) get ("hello" <//> var) $ \name -> text ("Hello " <> name <> "!")
justinholmes/haskell-playground
src/Main.hs
Haskell
apache-2.0
491
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd"> <helpset version="2.0" xml:lang="ru-RU"> <title>Form Handler | ZAP Extension</title> <maps> <homeID>top</homeID> <mapref location="map.jhm"/> </maps> <view> <name>TOC</name> <label>Contents</label> <type>org.zaproxy.zap.extension.help.ZapTocView</type> <data>toc.xml</data> </view> <view> <name>Index</name> <label>Index</label> <type>javax.help.IndexView</type> <data>index.xml</data> </view> <view> <name>Search</name> <label>Search</label> <type>javax.help.SearchView</type> <data engine="com.sun.java.help.search.DefaultSearchEngine"> JavaHelpSearch </data> </view> <view> <name>Favorites</name> <label>Favorites</label> <type>javax.help.FavoritesView</type> </view> </helpset>
secdec/zap-extensions
addOns/formhandler/src/main/javahelp/org/zaproxy/zap/extension/formhandler/resources/help_ru_RU/helpset_ru_RU.hs
Haskell
apache-2.0
973
module AlecSequences.A271328Spec (main, spec) where import Test.Hspec import AlecSequences.A271328 (a271328) main :: IO () main = hspec spec spec = describe "A271328" $ it "correctly computes the first 20 elements" $ take 20 (map a271328 [1..]) `shouldBe` expectedValue where expectedValue = [1,5,10,17,28,37,50,65,82,106,122,145,170,197,228,257,294,325,362,406]
peterokagey/haskellOEIS
test/AlecSequences/A271328Spec.hs
Haskell
apache-2.0
377
module Web.Socdiff.Github.Github where import Control.Applicative import qualified Data.Text as T import Haxl.Core import Web.Socdiff.Github.DataSource -- | Fetch a list of followers for the given username getFollowers :: T.Text -> GenHaxl u [T.Text] getFollowers u = dataFetch (GetFollowers u) -- | Fetch a list of repos for the given username getRepos :: T.Text -> GenHaxl u [T.Text] getRepos u = dataFetch (GetRepos u) -- | Fetch a list of stargazers for the given repository getStargazers :: T.Text -> T.Text -> GenHaxl u (T.Text, [T.Text]) getStargazers u r = (,) r <$> dataFetch (GetStargazers u r) -- | Fetch a list of watchers for the given repository getWatchers :: T.Text -> T.Text -> GenHaxl u (T.Text, [T.Text]) getWatchers u r = (,) r <$> dataFetch (GetWatchers u r)
relrod/socdiff
src/Web/Socdiff/Github/Github.hs
Haskell
bsd-2-clause
785
module GTKMainWindow where import GTKContext import Control.Monad.Trans import Control.Monad.Trans.Reader import qualified GI.Gtk as Gtk setupMainWindow :: ReaderT GTKContext IO () setupMainWindow = do o <- gtkGetObj Gtk.Window "window1" _ <- liftIO $ Gtk.widgetShow o _ <- liftIO $ Gtk.onWidgetDestroy o Gtk.mainQuit return ()
nbrk/ld
executable/GTKMainWindow.hs
Haskell
bsd-2-clause
342
import Network.Wai.Handler.Snap import Controller main :: IO () main = putStrLn "Loaded" >> withLounge (run 3000)
fortytools/lounge
snap-server.hs
Haskell
bsd-2-clause
116
{-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE RankNTypes #-} {-# LANGUAGE ScopedTypeVariables #-} module HERMIT.Dictionary.Function ( externals , appArgM , buildAppM , buildAppsM , buildCompositionT , buildFixT , buildIdT , staticArgR , staticArgPosR , staticArgPredR , staticArgTypesR ) where import Control.Arrow import Control.Monad import Control.Monad.IO.Class import Data.List (nub, intercalate, intersect, partition, transpose) import Data.Maybe (isNothing) import Data.String (fromString) import HERMIT.Context import HERMIT.Core import HERMIT.External import HERMIT.GHC import HERMIT.Kure import HERMIT.Monad import HERMIT.Name import HERMIT.Dictionary.Common externals :: [External] externals = [ external "static-arg" (promoteDefR staticArgR :: RewriteH LCore) [ "perform the static argument transformation on a recursive function." ] , external "static-arg-types" (promoteDefR staticArgTypesR :: RewriteH LCore) [ "perform the static argument transformation on a recursive function, only transforming type arguments." ] , external "static-arg-pos" (promoteDefR . staticArgPosR :: [Int] -> RewriteH LCore) [ "perform the static argument transformation on a recursive function, only transforming the arguments specified (by index)." ] ] ------------------------------------------------------------------------------------------------------ -- | Traditional Static Argument Transformation staticArgR :: (AddBindings c, ExtendPath c Crumb, HasEmptyContext c, ReadPath c Crumb, MonadCatch m, MonadUnique m) => Rewrite c m CoreDef staticArgR = staticArgPredR (return . map fst) -- | Static Argument Transformation that only considers type arguments to be static. staticArgTypesR :: (AddBindings c, ExtendPath c Crumb, HasEmptyContext c, ReadPath c Crumb, MonadCatch m, MonadUnique m) => Rewrite c m CoreDef staticArgTypesR = staticArgPredR (return . map fst . filter (isTyVar . snd)) -- | Static Argument Transformation which requires that arguments in the given positions are static. staticArgPosR :: (AddBindings c, ExtendPath c Crumb, HasEmptyContext c, ReadPath c Crumb, MonadCatch m, MonadUnique m) => [Int] -> Rewrite c m CoreDef staticArgPosR is' = staticArgPredR $ \ss' -> let is = nub is' ss = map fst ss' in if is == (is `intersect` ss) then return is else fail $ "args " ++ commas (filter (`notElem` ss) is) ++ " are not static." -- | Generalized Static Argument Transformation, which allows static arguments to be filtered. staticArgPredR :: forall c m. (AddBindings c, ExtendPath c Crumb, HasEmptyContext c, ReadPath c Crumb , MonadCatch m, MonadUnique m) => ([(Int, Var)] -> m [Int]) -- ^ given list of static args and positions, decided which to transform -> Rewrite c m CoreDef staticArgPredR decide = prefixFailMsg "static-arg failed: " $ do Def f rhs <- idR let (bnds, body) = collectBinders rhs guardMsg (notNull bnds) "rhs is not a function" contextonlyT $ \ c -> do let bodyContext = foldl (flip addLambdaBinding) c bnds callPatsT :: Transform c m CoreExpr [[CoreExpr]] callPatsT = extractT $ collectPruneT (promoteExprT $ callPredT (const . (== f)) >>> arr snd :: Transform c m Core [CoreExpr]) callPats <- applyT callPatsT bodyContext body let argExprs = transpose callPats numCalls = length callPats allBinds = zip [0..]
bnds staticBinds = [ (i,b) | ((i,b),exprs) <- zip allBinds $ argExprs ++ repeat [] , length exprs == numCalls && isStatic b exprs ] -- ensure argument is present in every call (partial applications boo) isStatic _ [] = True -- all were static isStatic b ((Var b'):es) | b == b' = isStatic b es isStatic b ((Type (TyVarTy v)):es) | b == v = isStatic b es isStatic b ((Coercion (CoVarCo v)):es) | b == v = isStatic b es isStatic _ _ = False -- not a simple repass, so dynamic chosen <- decide staticBinds let choices = map fst staticBinds guardMsg (notNull chosen) "no arguments selected for transformation." guardMsg (all (`elem` choices) chosen) $ "args " ++ commas choices ++ " are static, but " ++ commas chosen ++ " were selected." let (chosenBinds, dynBinds) = partition ((`elem` chosen) . fst) allBinds (ps, dbnds) = unzip dynBinds unboundTys = concat [ [ (i,i') | (i',b') <- dynBinds, i' < i , b' `elem` fvs ] | (i,b) <- chosenBinds, let fvs = varSetElems (varTypeTyVars b) ] guardMsg (null unboundTys) $ "type variables in args " ++ commas (nub $ map fst unboundTys) ++ " would become unbound unless args " ++ commas (nub $ map snd unboundTys) ++ " are included in the transformation." wkr <- newIdH (unqualifiedName f ++ "'") (exprType (mkCoreLams dbnds body)) let replaceCall :: Monad m => Rewrite c m CoreExpr replaceCall = do (_,exprs) <- callPredT (const . (== f)) return $ mkApps (Var wkr) [ e | (p,e) <- zip [0..] exprs, (p::Int) `elem` ps ] body' <- applyT (extractR $ prunetdR (promoteExprR replaceCall :: Rewrite c m Core)) bodyContext body return $ Def f $ mkCoreLams bnds $ Let (Rec [(wkr, mkCoreLams dbnds body')]) $ mkApps (Var wkr) (varsToCoreExprs dbnds) ------------------------------------------------------------------------------ -- | Get the nth argument of an application. Arg 0 is the function being applied. appArgM :: Monad m => Int -> CoreExpr -> m CoreExpr appArgM n e | n < 0 = fail "appArgM: arg must be non-negative" | otherwise = let (fn,args) = collectArgs e l = fn : args in if n > length args then fail "appArgM: not enough arguments" else return $ l !! n -- | Build composition of two functions. buildCompositionT :: (BoundVars c, HasHermitMEnv m, LiftCoreM m, MonadCatch m, MonadIO m, MonadThings m) => CoreExpr -> CoreExpr -> Transform c m x CoreExpr buildCompositionT f g = do composeId <- findIdT $ fromString "Data.Function.." fDot <- prefixFailMsg "building (.) f failed:" $ buildAppM (varToCoreExpr composeId) f prefixFailMsg "building f . g failed:" $ buildAppM fDot g buildAppsM :: MonadCatch m => CoreExpr -> [CoreExpr] -> m CoreExpr buildAppsM = foldM buildAppM -- | Given expression for f and for x, build f x, figuring out the type arguments. buildAppM :: MonadCatch m => CoreExpr -> CoreExpr -> m CoreExpr buildAppM f x = do (vsF, domF, _) <- splitFunTypeM (exprType f) let (vsX, xTy) = splitForAllTys (exprType x) allTvs = vsF ++ vsX bindFn v = if v `elem` allTvs then BindMe else Skolem sub <- maybe (fail "buildAppM - domain of f and type of x do not unify") return (tcUnifyTys bindFn [domF] [xTy]) f' <- substOrApply f [ (v, Type $ substTyVar sub v) | v <- vsF ] x' <- substOrApply x [ (v, Type $ substTyVar sub v) | v <- vsX ] let vs = [ v | v <- vsF ++ vsX, isNothing $ lookupTyVar sub v ] -- things we should stick back on as foralls -- TODO: make sure vsX don't capture anything in f' -- and vsF' doesn't capture anything in x' return $ mkCoreLams vs $ mkCoreApp f' x' -- | Given expression for f, build fix f. 
buildFixT :: (BoundVars c, LiftCoreM m, HasHermitMEnv m, MonadCatch m, MonadIO m, MonadThings m) => CoreExpr -> Transform c m x CoreExpr buildFixT f = do (tvs, ty) <- endoFunExprTypeM f fixId <- findIdT $ fromString "Data.Function.fix" f' <- substOrApply f [ (v, varToCoreExpr v) | v <- tvs ] return $ mkCoreLams tvs $ mkCoreApps (varToCoreExpr fixId) [Type ty, f'] -- | Build an expression that is the monomorphic id function for given type. buildIdT :: (BoundVars c, LiftCoreM m, HasHermitMEnv m, MonadCatch m, MonadIO m, MonadThings m) => Type -> Transform c m x CoreExpr buildIdT ty = do idId <- findIdT $ fromString "Data.Function.id" return $ mkCoreApp (varToCoreExpr idId) (Type ty) ------------------------------------------------------------------------------ commas :: Show a => [a] -> String commas = intercalate "," . map show -- | Like mkCoreApps, but automatically beta-reduces when possible. substOrApply :: Monad m => CoreExpr -> [(Var,CoreExpr)] -> m CoreExpr substOrApply e [] = return e substOrApply (Lam b e) ((v,ty):r) = if b == v then substOrApply e r >>= return . substCoreExpr b ty else fail $ "substOrApply: unexpected binder - " ++ unqualifiedName b ++ " - " ++ unqualifiedName v substOrApply e rest = return $ mkCoreApps e (map snd rest) ------------------------------------------------------------------------------
beni55/hermit
src/HERMIT/Dictionary/Function.hs
Haskell
bsd-2-clause
9,522
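HERMIT's staticArgR above mechanises the static argument transformation. As a plain-Haskell illustration of what that transformation does to a definition, independent of HERMIT's API, here is a hand-written before/after with invented names:

module StaticArgExample where

-- Before: the function argument f is passed unchanged through every
-- recursive call, i.e. it is a static argument.
mapBefore :: (a -> b) -> [a] -> [b]
mapBefore _ []       = []
mapBefore f (x : xs) = f x : mapBefore f xs

-- After the static argument transformation: the recursion goes through a
-- local worker that closes over f, so f is no longer threaded through
-- each call and the definition can be specialised at its call sites.
mapAfter :: (a -> b) -> [a] -> [b]
mapAfter f = go
  where
    go []       = []
    go (x : xs) = f x : go xs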
module REPL.REPL ( repl ) where import System.IO (hFlush, stdout) import Control.Monad (when) import System.Exit (exitSuccess) import Data.Char (toLower) import Rating import REPL.Commands import REPL.Exit import REPL.List import REPL.NPC import REPL.Set import REPL.Unset import REPL.Lock import REPL.Update import REPL.Stock import REPL.Suggest repl :: Rating -> [Int]-> IO () repl _ [] = error "empty list given." repl r (l:ls) = do putStr $ "AlbanKnights(" ++ show l ++ "): " hFlush stdout input <- getLine when (isExit input) $ do putExitMessage r exitSuccess case words $ map toLower input of [] -> repl r ls (command:args) -> case dispatch command args r of Left str -> do putStrLn str repl r ls Right r' -> repl r' ls dispatch :: String -> [String] -> Rating -> Either String Rating dispatch cmd args r | isList cmd = list r | isSet cmd = set args r | isUnset cmd = unset args r | isUpdate cmd = update r | isLock cmd = lock args r | isStock cmd = stock args r | isSuggest cmd = suggest args r | isNPC cmd = npc cmd args r | otherwise = Left $ "unknown command: '" ++ cmd ++ "'"
sandmark/AlbanKnights
src/REPL/REPL.hs
Haskell
bsd-3-clause
1,203
module Purescript.Ide.CodecJSON where import Purescript.Ide.Externs (ExternDecl(..)) import Data.Aeson instance ToJSON ExternDecl where toJSON (FunctionDecl n t) = object ["name" .= n, "type" .= t] toJSON (ModuleDecl n t) = object ["name" .= n, "type" .= t] toJSON (DataDecl n t) = object ["name" .= n, "type" .= t] toJSON (Dependency n names) = object ["module" .= n, "names" .= names] toJSON (FixityDeclaration f p n) = object ["name" .= n, "fixity" .= show f, "precedence" .= p]
passy/psc-ide
lib/Purescript/Ide/CodecJSON.hs
Haskell
bsd-3-clause
526
{-# LANGUAGE DataKinds #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE TypeApplications #-} module HaskellCI.Config.ConstraintSet where import HaskellCI.Prelude import qualified Distribution.FieldGrammar as C import HaskellCI.Newtypes import HaskellCI.OptionsGrammar data ConstraintSet = ConstraintSet { csName :: String , csGhcVersions :: VersionRange , csConstraints :: [String] -- we parse these simply as strings , csTests :: Bool , csRunTests :: Bool , csDocspec :: Bool , csBenchmarks :: Bool , csHaddock :: Bool } deriving (Show, Generic) emptyConstraintSet :: String -> ConstraintSet emptyConstraintSet n = ConstraintSet n anyVersion [] False False False False False ------------------------------------------------------------------------------- -- Grammar ------------------------------------------------------------------------------- constraintSetGrammar :: ( OptionsGrammar c g, Applicative (g ConstraintSet) ) => String -> g ConstraintSet ConstraintSet constraintSetGrammar name = ConstraintSet name <$> C.optionalFieldDef "ghc" (field @"csGhcVersions") anyVersion <*> C.monoidalFieldAla "constraints" (C.alaList' C.CommaVCat NoCommas) (field @"csConstraints") <*> C.booleanFieldDef "tests" (field @"csTests") False <*> C.booleanFieldDef "run-tests" (field @"csRunTests") False <*> C.booleanFieldDef "docspec" (field @"csDocspec") False <*> C.booleanFieldDef "benchmarks" (field @"csBenchmarks") False <*> C.booleanFieldDef "haddock" (field @"csHaddock") False
hvr/multi-ghc-travis
src/HaskellCI/Config/ConstraintSet.hs
Haskell
bsd-3-clause
1,838
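For orientation, the field grammar above describes the fields of a constraint-set stanza in a haskell-ci configuration. The stanza and the value below are only a sketch inferred from that grammar; the set name and the constraint are invented.

module ConstraintSetExample where

import Distribution.Version (mkVersion, orLaterVersion)
import HaskellCI.Config.ConstraintSet

-- A stanza along the lines of
--
--   constraint-set no-vector
--     ghc: >= 8.4
--     constraints: vector < 0
--     tests: True
--     run-tests: True
--
-- would correspond to a value like this:
exampleSet :: ConstraintSet
exampleSet = (emptyConstraintSet "no-vector")
  { csGhcVersions = orLaterVersion (mkVersion [8,4])
  , csConstraints = ["vector < 0"]
  , csTests       = True
  , csRunTests    = True
  }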
-- Compiler Toolkit: finite maps -- -- Author : Manuel M. T. Chakravarty -- Created: 23 March 95 -- -- Version $Revision: 1.12 $ from $Date: 2003/04/16 11:11:46 $ -- -- Copyright (c) [1995..2000] Manuel M. T. Chakravarty -- -- This file is free software; you can redistribute it and/or modify -- it under the terms of the GNU General Public License as published by -- the Free Software Foundation; either version 2 of the License, or -- (at your option) any later version. -- -- This file is distributed in the hope that it will be useful, -- but WITHOUT ANY WARRANTY; without even the implied warranty of -- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -- GNU General Public License for more details. -- --- DESCRIPTION --------------------------------------------------------------- -- -- This module provides finite maps as an abstract data type. The idea is -- taken from the GHC module `FiniteMap' and the implementation follows -- closely the ideas found in ``Efficient sets---a balancing act'' from -- Stephan Adams in ``Journal of Functional Programming'', 3(4), 1993, -- drawing also from the longer exposition in ``Implementing Sets Efficiently -- in a Functional Language'' also from Stephan Adams, CSTR 92-10 in Technical -- Report Series, Unversity of Southampton, Department of Electronics and -- Computer Science, U.K. -- --- DOCU ---------------------------------------------------------------------- -- -- language: Haskell 98 -- -- * This implementation is based in bounded balance binary trees. They -- achieve good balancing while being simpler to maintain than AVL trees. -- -- * The implementation design is based on the idea of smart constructors, -- i.e., constructors that guarantee the compliance of the result with some -- constraints applied to the construction of the data type. -- --- TODO ---------------------------------------------------------------------- -- -- * `joinFM' would be a bit more efficient if the ``hedge union'' algorithm -- of the above mentioned technical report would be implemented. -- module Text.CTK.FiniteMaps (FiniteMap, zeroFM, unitFM, listToFM, listToCombFM, joinFM, joinCombFM, sizeFM, addToFM, addToCombFM, delFromFM, diffFM, intersectFM, intersectCombFM, mapFM, foldFM, filterFM, lookupFM, lookupDftFM, toListFM, domFM, imageFM) where -- finite maps are represented as ordered binary trees; each node represents -- a key-element pair in the map, its children contain pair with smaller and -- greater keys respectively (this requires an ordering relation on the keys); -- all keys in a tree are distinct -- data (Ord key) => FiniteMap key elem = Leaf | Node key -- this key elem -- assoc with key Int -- size >= 1 (FiniteMap key elem) -- smaller keys (FiniteMap key elem) -- greater keys -- we define two finite maps to be equal if they range over the same domain -- --instance Ord k => Eq (FiniteMap k e) where -- fm1 == fm2 = ((map fst . toListFM) $ fm1) == ((map fst . toListFM) $ fm2) instance (Ord k, Eq e) => Eq (FiniteMap k e) where fm1 == fm2 = (toListFM fm1) == (toListFM fm2) -- we define a total ordering on finite maps by lifting the lexicographical -- ordering over their domains (which we assume to be sorted) -- --instance Ord k => Ord (FiniteMap k e) where -- fm1 <= fm2 = ((map fst . toListFM) $ fm1) <= ((map fst . 
toListFM) $ fm2) instance (Ord k, Ord e) => Ord (FiniteMap k e) where fm1 <= fm2 = (toListFM fm1) <= (toListFM fm2) instance (Show k, Show e, Ord k) => Show (FiniteMap k e) where showsPrec = toShowS -- defined below -- weight ratio is respected by the balanced tree, i.e., no subtree will ever -- contain `ratio' times more elements than its sister -- ratio :: Int ratio = 5 -- this gives us an empty map -- zeroFM :: Ord k => FiniteMap k e zeroFM = Leaf -- a map with a single element -- unitFM :: Ord k => k -> e -> FiniteMap k e unitFM k e = Node k e 1 Leaf Leaf -- makes a list of key-element pairs into a finite map -- -- in case of duplicates, the last is taken -- listToFM :: Ord k => [(k, e)] -> FiniteMap k e listToFM = listToCombFM const -- makes a list of key-element pairs into a finite map where collisions are -- resolved by an explicit combiner fun -- -- the combiner expects the new element as its first argument -- listToCombFM :: Ord k => (e -> e -> e) -> [(k, e)] -> FiniteMap k e listToCombFM c = foldl addOnePair zeroFM where addOnePair m (k, e) = addToCombFM c k e m -- the number of elements in the map -- sizeFM :: Ord k => FiniteMap k e -> Int sizeFM Leaf = 0 sizeFM (Node _ _ s _ _) = s -- builds a node that automagically contains the right size -- smartNode :: Ord k => k -> e -> (FiniteMap k e) -> (FiniteMap k e) -> (FiniteMap k e) smartNode k e sm gr = Node k e (1 + sizeFM sm + sizeFM gr) sm gr -- builds a node that automagically balances the tree if necessary and inserts -- the right size; ONLY ONE of the subtrees is allowed to be off balance and -- only by ONE element -- smarterNode :: Ord k => k -> e -> (FiniteMap k e) -> (FiniteMap k e) -> (FiniteMap k e) smarterNode k e sm gr = let sm_n = sizeFM sm gr_n = sizeFM gr in if (sm_n + gr_n) < 2 -- very small tree (one part is a leaf) then smartNode k e sm gr -- => construct directly else if gr_n > (ratio * sm_n) -- child with greater keys is too big then -- => rotate left let Node _ _ _ gr_sm gr_gr = gr gr_sm_n = sizeFM gr_sm gr_gr_n = sizeFM gr_gr in if gr_sm_n < gr_gr_n then single_L k e sm gr else double_L k e sm gr else if sm_n > (ratio * gr_n) -- child with smaller keys is too big then -- => rotate right let Node _ _ _ sm_sm sm_gr = sm sm_sm_n = sizeFM sm_sm sm_gr_n = sizeFM sm_gr in if sm_gr_n < sm_sm_n then single_R k e sm gr else double_R k e sm gr else smartNode k e sm gr -- else nearly balanced => construct directly where single_L ka ea x (Node kb eb _ y z) = smartNode kb eb (smartNode ka ea x y) z double_L ka ea x (Node kc ec _ (Node kb eb _ y1 y2) z) = smartNode kb eb (smartNode ka ea x y1) (smartNode kc ec y2 z) single_R kb eb (Node ka ea _ x y) z = smartNode ka ea x (smartNode kb eb y z) double_R kc ec (Node ka ea _ x (Node kb eb _ y1 y2)) z = smartNode kb eb (smartNode ka ea x y1) (smartNode kc ec y2 z) -- add the given key-element pair to the map -- -- overrides previous entries -- addToFM :: Ord k => k -> e -> FiniteMap k e -> FiniteMap k e addToFM = addToCombFM const -- add the given key-element pair to the map where collisions are resolved by -- an explicit combiner fun -- -- the combiner expects the new element as its first argument -- addToCombFM :: Ord k => (e -> e -> e) -> k -> e -> FiniteMap k e -> FiniteMap k e addToCombFM c k e Leaf = unitFM k e addToCombFM c k e (Node k' e' n sm gr) | k < k' = smarterNode k' e' (addToCombFM c k e sm) gr | k > k' = smarterNode k' e' sm (addToCombFM c k e gr) | otherwise = Node k (c e e') n sm gr -- removes the key-element pair specified by the given key from a map -- -- 
does not complain if the key is not in the map -- delFromFM :: Ord k => k -> FiniteMap k e -> FiniteMap k e delFromFM k Leaf = Leaf delFromFM k (Node k' e' n sm gr) | k < k' = smarterNode k' e' (delFromFM k sm) gr | k > k' = smarterNode k' e' sm (delFromFM k gr) | otherwise = smartGlue sm gr -- given two maps where all keys in the left are smaller than those in the -- right and they are not too far out of balance (within ratio), glue them -- into one map -- smartGlue :: Ord k => FiniteMap k e -> FiniteMap k e -> FiniteMap k e smartGlue Leaf gr = gr smartGlue sm Leaf = sm smartGlue sm gr = let (k, e, gr') = extractMin gr in smarterNode k e sm gr' -- extract the association with the minimal key (i.e., leftmost in the tree) -- and simultaneously return the map without this association -- extractMin :: Ord k => FiniteMap k e -> (k, e, FiniteMap k e) extractMin (Node k e _ Leaf gr) = (k, e, gr) extractMin (Node k e _ sm gr) = let (minK, minE, sm') = extractMin sm in (minK, minE, smarterNode k e sm' gr) -- given two maps where all keys in the left are smaller than those in the -- right, glue them into one map -- glue :: Ord k => FiniteMap k e -> FiniteMap k e -> FiniteMap k e glue Leaf gr = gr glue sm Leaf = sm glue sm@(Node k_sm e_sm n_sm sm_sm gr_sm) gr@(Node k_gr e_gr n_gr sm_gr gr_gr) | (ratio * n_sm) < n_gr = smarterNode k_gr e_gr (glue sm sm_gr) gr_gr | (ratio * n_gr) < n_sm = smarterNode k_sm e_sm sm_sm (glue gr_sm gr) | otherwise = let (k, e, gr') = extractMin gr in smarterNode k e sm gr' -- builds a node that automagically balances the tree if necessary and inserts -- the right size (just as `smarterNode'), BUT which is only applicable if the -- two given maps do not overlap (in their key values) and the new, given key -- lies between the keys in the first and the second map -- -- its time complexity is proportional to the _difference_ in the height of -- the two trees representing the given maps -- smartestNode :: Ord k => k -> e -> (FiniteMap k e) -> (FiniteMap k e) -> (FiniteMap k e) -- -- if any of both trees is too big (with respect to the ratio), we insert -- into the other; otherwise, a simple creation of a new node is sufficient -- smartestNode k e Leaf gr = addToFM k e gr smartestNode k e sm Leaf = addToFM k e sm smartestNode k e sm@(Node k_sm e_sm n_sm sm_sm gr_sm) gr@(Node k_gr e_gr n_gr sm_gr gr_gr) | (ratio * n_sm) < n_gr = smarterNode k_gr e_gr (smartestNode k e sm sm_gr) gr_gr | (ratio * n_gr) < n_sm = smarterNode k_sm e_sm sm_sm (smartestNode k e gr_sm gr) | otherwise = smartNode k e sm gr -- joins two maps -- -- entries in the left map shadow those in the right -- joinFM :: Ord k => FiniteMap k e -> FiniteMap k e -> FiniteMap k e -- -- explicitly coded, instead of using `joinCombFM', to avoid the `lookupFM' -- for each element in the left map, which is unnecessary in this case -- joinFM m Leaf = m joinFM Leaf m = m joinFM (Node k e _ sm gr) m = smartestNode k e sm' gr' where sm' = joinFM sm (smaller k m) gr' = joinFM gr (greater k m) -- joins two maps where collisions are resolved by an explicit combiner fun -- joinCombFM :: Ord k => (e -> e -> e) -> FiniteMap k e -> FiniteMap k e -> FiniteMap k e joinCombFM c m Leaf = m joinCombFM c Leaf m = m joinCombFM c (Node k e _ sm gr) m = smartestNode k e' sm' gr' where sm' = joinCombFM c sm (smaller k m) gr' = joinCombFM c gr (greater k m) e' = case lookupFM m k of Just f -> c e f Nothing -> e -- cut the part of the tree that is smaller than the given key out of the -- map -- smaller :: Ord k => k -> FiniteMap k e -> FiniteMap 
k e smaller _ Leaf = Leaf smaller k (Node k' e _ sm gr) | k < k' = smaller k sm | k > k' = smartestNode k' e sm (smaller k gr) | otherwise = sm -- cut the part of the tree that is greater than the given key out of the -- map -- greater :: Ord k => k -> FiniteMap k e -> FiniteMap k e greater _ Leaf = Leaf greater k (Node k' e _ sm gr) | k > k' = greater k gr | k < k' = smartestNode k' e (greater k sm) gr | otherwise = gr -- given two finite maps, yields a finite map containg all elements of the -- first argument except those having a key that is contained in the second -- map -- diffFM :: Ord k => FiniteMap k e -> FiniteMap k e' -> FiniteMap k e diffFM Leaf _ = Leaf diffFM m Leaf = m diffFM m (Node k _ _ sm gr) = glue (diffFM sm' sm) (diffFM gr' gr) where sm' = smaller k m gr' = greater k m -- given two finite maps, yield the map containing only entries of which the -- keys are in both maps -- -- the elements are taken from the left map -- intersectFM :: Ord k => FiniteMap k e -> FiniteMap k e -> FiniteMap k e intersectFM = intersectCombFM const -- given two finite maps, yield the map containing only entries of which the -- keys are in both maps -- -- the corresponding elements of the two maps are combined using the given, -- function -- intersectCombFM :: Ord k => (e -> e -> e) -> FiniteMap k e -> FiniteMap k e -> FiniteMap k e intersectCombFM c _ Leaf = Leaf intersectCombFM c Leaf _ = Leaf intersectCombFM c (Node k e _ sm gr) m | contained = smartestNode k (c e e') sm' gr' | otherwise = glue sm' gr' where sm' = intersectCombFM c sm (smaller k m) gr' = intersectCombFM c gr (greater k m) (contained, e') = case lookupFM m k of Just f -> (True, f) Nothing -> (False, undefined) undefined = error "FiniteMaps: intersectCombFM: Undefined" -- given a function on a finite maps elements and a finite map, yield the -- finite map where every element is replaced as specified by the function -- mapFM :: Ord k => (k -> e -> e') -> FiniteMap k e -> FiniteMap k e' mapFM f Leaf = Leaf mapFM f (Node k e n sm gr) = Node k (f k e) n (mapFM f sm) (mapFM f gr) -- folds a finite map according to a given function and _neutral_ value (with -- respect to the function) that is used for an empty map -- foldFM :: Ord k => (k -> e -> a -> a) -> a -> FiniteMap k e -> a foldFM f z Leaf = z foldFM f z (Node k e _ sm gr) = foldFM f (f k e (foldFM f z gr)) sm -- given a predicate and a finite map, yields the finite map containing all -- key-element pairs satisfying the predicate -- filterFM :: Ord k => (k -> e -> Bool) -> FiniteMap k e -> FiniteMap k e filterFM p Leaf = Leaf filterFM p (Node k e _ sm gr) | p k e = smartestNode k e sm' gr' | otherwise = glue sm' gr' where sm' = filterFM p sm gr' = filterFM p gr -- given a map and a key, returns `Just e' iff the key associates to `e'; -- if the key is not in the map, `Nothing' is returned -- lookupFM :: Ord k => FiniteMap k e -> k -> Maybe e lookupFM Leaf _ = Nothing lookupFM (Node k e _ sm gr) k' | k' == k = Just e | k' < k = lookupFM sm k' | k' > k = lookupFM gr k' -- just as `lookupFM', but instead of returning a `Maybe' type, a default -- value to be returned in case that the key is not in the map has to be -- specified -- lookupDftFM :: Ord k => FiniteMap k e -> e -> k -> e lookupDftFM map e k = case lookupFM map k of Just e' -> e' Nothing -> e -- given a finite map, yields a list of the key-element pairs -- toListFM :: Ord k => FiniteMap k e -> [(k, e)] toListFM = foldFM (\k e kes -> (k, e):kes) [] -- |Yield the domain of a finite map as a list -- domFM :: Ord k => 
FiniteMap k e -> [k] domFM = map fst . toListFM -- |Yield the image of a finite map as a list -- imageFM :: Ord k => FiniteMap k e -> [e] imageFM = map snd . toListFM -- pretty print routine (used as a method in FiniteMap's instance of `Show') -- toShowS :: (Show a, Show b, Ord a) => Int -> FiniteMap a b -> ShowS toShowS _ fm = format fm 0 where format Leaf _ = id format (Node k e n sm gr) indent = let this = showString (take indent (repeat ' ')) . shows k . showString " --> " . shows e . showString " (size: " . shows n . showString ")\n" in this . format sm (indent + 2) . format gr (indent + 2)
mwotton/ctkl
src/Text/CTK/FiniteMaps.hs
Haskell
bsd-3-clause
16,301
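A small usage sketch for the finite-map API above, using only functions exported by Text.CTK.FiniteMaps; the keys and values are invented for the example.

module FiniteMapsExample where

import Text.CTK.FiniteMaps

-- Build a map from an association list (on duplicate keys the last entry
-- wins, as documented for listToFM), extend it, and read it back out.
phoneBook :: FiniteMap String Int
phoneBook = addToFM "carol" 3 (listToFM [("alice", 1), ("bob", 2)])

lookups :: (Maybe Int, Int)
lookups = ( lookupFM phoneBook "bob"          -- Just 2
          , lookupDftFM phoneBook 0 "nobody"  -- 0, the supplied default
          )

-- joinFM keeps entries from its left argument on key collisions.
merged :: [(String, Int)]
merged = toListFM (joinFM phoneBook (unitFM "alice" 99))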
import GL import Compile0 example :: GCM () example = do a <- createPort :: GCM (Port Int) b <- createPort component $ do assert $ val a === val b + 1 output a "a" output b "b"
GRACeFUL-project/DSL-WP
deliverables/d4.3/test.hs
Haskell
bsd-3-clause
194
{-| Module : Idris.Core.Evaluate Description : Evaluate Idris expressions. Copyright : License : BSD3 Maintainer : The Idris Community. -} {-# LANGUAGE BangPatterns, DeriveGeneric, FlexibleInstances, MultiParamTypeClasses, PatternGuards #-} {-# OPTIONS_GHC -fwarn-incomplete-patterns #-} module Idris.Core.Evaluate(normalise, normaliseTrace, normaliseC, normaliseAll, normaliseBlocking, toValue, quoteTerm, rt_simplify, simplify, specialise, hnf, convEq, convEq', Def(..), CaseInfo(..), CaseDefs(..), Accessibility(..), Injectivity, Totality(..), PReason(..), MetaInformation(..), Context, initContext, ctxtAlist, next_tvar, addToCtxt, setAccess, setInjective, setTotal, setMetaInformation, addCtxtDef, addTyDecl, addDatatype, addCasedef, simplifyCasedef, addOperator, lookupNames, lookupTyName, lookupTyNameExact, lookupTy, lookupTyExact, lookupP, lookupP_all, lookupDef, lookupNameDef, lookupDefExact, lookupDefAcc, lookupDefAccExact, lookupVal, mapDefCtxt, lookupTotal, lookupTotalExact, lookupInjectiveExact, lookupNameTotal, lookupMetaInformation, lookupTyEnv, isTCDict, isCanonical, isDConName, canBeDConName, isTConName, isConName, isFnName, Value(..), Quote(..), initEval, uniqueNameCtxt, uniqueBindersCtxt, definitions, isUniverse) where import Idris.Core.CaseTree import Idris.Core.TT import Control.Applicative hiding (Const) import Control.Monad.State import Data.Binary hiding (get, put) import qualified Data.Binary as B import Data.Maybe (listToMaybe) import Debug.Trace import GHC.Generics (Generic) data EvalState = ES { limited :: [(Name, Int)], nexthole :: Int, blocking :: Bool } deriving Show type Eval a = State EvalState a data EvalOpt = Spec | HNF | Simplify | AtREPL | RunTT deriving (Show, Eq) initEval = ES [] 0 False -- VALUES (as HOAS) --------------------------------------------------------- -- | A HOAS representation of values data Value = VP NameType Name Value | VV Int -- True for Bool indicates safe to reduce | VBind Bool Name (Binder Value) (Value -> Eval Value) -- For frozen let bindings when simplifying | VBLet Int Name Value Value Value | VApp Value Value | VType UExp | VUType Universe | VErased | VImpossible | VConstant Const | VProj Value Int -- | VLazy Env [Value] Term | VTmp Int instance Show Value where show x = show $ evalState (quote 100 x) initEval instance Show (a -> b) where show x = "<<fn>>" -- THE EVALUATOR ------------------------------------------------------------ -- The environment is assumed to be "locally named" - i.e., not de Bruijn -- indexed. -- i.e. it's an intermediate environment that we have while type checking or -- while building a proof. 
-- | Normalise fully type checked terms (so, assume all names/let bindings resolved) normaliseC :: Context -> Env -> TT Name -> TT Name normaliseC ctxt env t = evalState (do val <- eval False ctxt [] (map finalEntry env) t [] quote 0 val) initEval -- | Normalise everything, whether abstract, private or public normaliseAll :: Context -> Env -> TT Name -> TT Name normaliseAll ctxt env t = evalState (do val <- eval False ctxt [] (map finalEntry env) t [AtREPL] quote 0 val) initEval -- | As normaliseAll, but with an explicit list of names *not* to reduce normaliseBlocking :: Context -> Env -> [Name] -> TT Name -> TT Name normaliseBlocking ctxt env blocked t = evalState (do val <- eval False ctxt (map (\n -> (n, 0)) blocked) (map finalEntry env) t [AtREPL] quote 0 val) initEval normalise :: Context -> Env -> TT Name -> TT Name normalise = normaliseTrace False normaliseTrace :: Bool -> Context -> Env -> TT Name -> TT Name normaliseTrace tr ctxt env t = evalState (do val <- eval tr ctxt [] (map finalEntry env) (finalise t) [] quote 0 val) initEval toValue :: Context -> Env -> TT Name -> Value toValue ctxt env t = evalState (eval False ctxt [] (map finalEntry env) t []) initEval quoteTerm :: Value -> TT Name quoteTerm val = evalState (quote 0 val) initEval -- Return a specialised name, and an updated list of reductions available, -- so that the caller can tell how much specialisation was achieved. specialise :: Context -> Env -> [(Name, Int)] -> TT Name -> (TT Name, [(Name, Int)]) specialise ctxt env limits t = let (tm, st) = runState (do val <- eval False ctxt [] (map finalEntry env) (finalise t) [Spec] quote 0 val) (initEval { limited = limits }) in (tm, limited st) -- | Like normalise, but we only reduce functions that are marked as okay to -- inline (and probably shouldn't reduce lets?) -- 20130908: now only used to reduce for totality checking. Inlining should -- be done elsewhere. simplify :: Context -> Env -> TT Name -> TT Name simplify ctxt env t = evalState (do val <- eval False ctxt [(sUN "lazy", 0), (sUN "force", 0), (sUN "Force", 0), (sUN "assert_smaller", 0), (sUN "assert_total", 0), (sUN "par", 0), (sUN "prim__syntactic_eq", 0), (sUN "fork", 0)] (map finalEntry env) (finalise t) [Simplify] quote 0 val) initEval -- | Simplify for run-time (i.e. basic inlining) rt_simplify :: Context -> Env -> TT Name -> TT Name rt_simplify ctxt env t = evalState (do val <- eval False ctxt [(sUN "lazy", 0), (sUN "force", 0), (sUN "Force", 0), (sUN "par", 0), (sUN "prim__syntactic_eq", 0), (sUN "prim_fork", 0)] (map finalEntry env) (finalise t) [RunTT] quote 0 val) initEval -- | Reduce a term to head normal form hnf :: Context -> Env -> TT Name -> TT Name hnf ctxt env t = evalState (do val <- eval False ctxt [] (map finalEntry env) (finalise t) [HNF] quote 0 val) initEval -- unbindEnv env (quote 0 (eval ctxt (bindEnv env t))) finalEntry :: (Name, Binder (TT Name)) -> (Name, Binder (TT Name)) finalEntry (n, b) = (n, fmap finalise b) bindEnv :: EnvTT n -> TT n -> TT n bindEnv [] tm = tm bindEnv ((n, Let t v):bs) tm = Bind n (NLet t v) (bindEnv bs tm) bindEnv ((n, b):bs) tm = Bind n b (bindEnv bs tm) unbindEnv :: EnvTT n -> TT n -> TT n unbindEnv [] tm = tm unbindEnv (_:bs) (Bind n b sc) = unbindEnv bs sc unbindEnv env tm = error "Impossible case occurred: couldn't unbind env." 
usable :: Bool -- specialising -> Int -- Reduction depth limit (when simplifying/at REPL) -> Name -> [(Name, Int)] -> Eval (Bool, [(Name, Int)]) -- usable _ _ ns@((MN 0 "STOP", _) : _) = return (False, ns) usable False depthlimit n [] = return (True, []) usable True depthlimit n ns = do ES ls num b <- get if b then return (False, ns) else case lookup n ls of Just 0 -> return (False, ns) Just i -> return (True, ns) _ -> return (False, ns) usable False depthlimit n ns = case lookup n ns of Just 0 -> return (False, ns) Just i -> return $ (True, (n, abs (i-1)) : filter (\ (n', _) -> n/=n') ns) _ -> return $ (True, (n, depthlimit) : filter (\ (n', _) -> n/=n') ns) fnCount :: Int -> Name -> Eval () fnCount inc n = do ES ls num b <- get case lookup n ls of Just i -> do put $ ES ((n, (i - inc)) : filter (\ (n', _) -> n/=n') ls) num b _ -> return () setBlock :: Bool -> Eval () setBlock b = do ES ls num _ <- get put (ES ls num b) deduct = fnCount 1 reinstate = fnCount (-1) -- | Evaluate in a context of locally named things (i.e. not de Bruijn indexed, -- such as we might have during construction of a proof) -- The (Name, Int) pair in the arguments is the maximum depth of unfolding of -- a name. The corresponding pair in the state is the maximum number of -- unfoldings overall. eval :: Bool -> Context -> [(Name, Int)] -> Env -> TT Name -> [EvalOpt] -> Eval Value eval traceon ctxt ntimes genv tm opts = ev ntimes [] True [] tm where spec = Spec `elem` opts simpl = Simplify `elem` opts runtime = RunTT `elem` opts atRepl = AtREPL `elem` opts hnf = HNF `elem` opts -- returns 'True' if the function should block -- normal evaluation should return false blockSimplify (CaseInfo inl always dict) n stk | runtime = if always then False else not (inl || dict) || elem n stk | simpl = (not (inl || dict) || elem n stk) || (n == sUN "prim__syntactic_eq") | otherwise = False getCases cd | simpl = cases_totcheck cd | runtime = cases_runtime cd | otherwise = cases_compiletime cd ev ntimes stk top env (P _ n ty) | Just (Let t v) <- lookup n genv = ev ntimes stk top env v ev ntimes_in stk top env (P Ref n ty) | not top && hnf = liftM (VP Ref n) (ev ntimes stk top env ty) | otherwise = do let limit = if simpl then 100 else 10000 (u, ntimes) <- usable spec limit n ntimes_in let red = u && (tcReducible n ctxt || spec || atRepl || runtime || sUN "assert_total" `elem` stk) if red then do let val = lookupDefAcc n (spec || atRepl || runtime) ctxt case val of [(Function _ tm, Public)] -> ev ntimes (n:stk) True env tm [(TyDecl nt ty, _)] -> do vty <- ev ntimes stk True env ty return $ VP nt n vty [(CaseOp ci _ _ _ _ cd, acc)] | (acc == Public || acc == Hidden || sUN "assert_total" `elem` stk) && null (fst (cases_totcheck cd)) -> -- unoptimised version let (ns, tree) = getCases cd in if blockSimplify ci n stk then liftM (VP Ref n) (ev ntimes stk top env ty) else -- traceWhen runtime (show (n, ns, tree)) $ do c <- evCase ntimes n (n:stk) top env ns [] tree case c of (Nothing, _) -> liftM (VP Ref n) (ev ntimes stk top env ty) (Just v, _) -> return v _ -> liftM (VP Ref n) (ev ntimes stk top env ty) else liftM (VP Ref n) (ev ntimes stk top env ty) ev ntimes stk top env (P nt n ty) = liftM (VP nt n) (ev ntimes stk top env ty) ev ntimes stk top env (V i) | i < length env && i >= 0 = return $ snd (env !! 
i) | otherwise = return $ VV i ev ntimes stk top env (Bind n (Let t v) sc) | not runtime || occurrences n sc < 2 = do v' <- ev ntimes stk top env v --(finalise v) sc' <- ev ntimes stk top ((n, v') : env) sc wknV (-1) sc' | otherwise = do t' <- ev ntimes stk top env t v' <- ev ntimes stk top env v --(finalise v) -- use Tmp as a placeholder, then make it a variable reference -- again when evaluation finished hs <- get let vd = nexthole hs put (hs { nexthole = vd + 1 }) sc' <- ev ntimes stk top ((n, VP Bound (sMN vd "vlet") VErased) : env) sc return $ VBLet vd n t' v' sc' ev ntimes stk top env (Bind n (NLet t v) sc) = do t' <- ev ntimes stk top env (finalise t) v' <- ev ntimes stk top env (finalise v) sc' <- ev ntimes stk top ((n, v') : env) sc return $ VBind True n (Let t' v') (\x -> return sc') ev ntimes stk top env (Bind n b sc) = do b' <- vbind env b let n' = uniqueName n (map fst genv ++ map fst env) return $ VBind True -- (vinstances 0 sc < 2) n' b' (\x -> ev ntimes stk False ((n', x):env) sc) where vbind env t = fmapMB (\tm -> ev ntimes stk top env (finalise tm)) t -- block reduction immediately under codata (and not forced) ev ntimes stk top env (App _ (App _ (App _ d@(P _ (UN dly) _) l@(P _ (UN lco) _)) t) arg) | dly == txt "Delay" && lco == txt "Infinite" && not simpl = do let (f, _) = unApply arg let ntimes' = case f of P _ fn _ -> (fn, 0) : ntimes _ -> ntimes when spec $ setBlock True d' <- ev ntimes' stk False env d l' <- ev ntimes' stk False env l t' <- ev ntimes' stk False env t arg' <- ev ntimes' stk False env arg when spec $ setBlock False evApply ntimes' stk top env [l',t',arg'] d' -- Treat "assert_total" specially, as long as it's defined! ev ntimes stk top env (App _ (App _ (P _ n@(UN at) _) _) arg) | [(CaseOp _ _ _ _ _ _, _)] <- lookupDefAcc n (spec || atRepl || runtime) ctxt, at == txt "assert_total" && not simpl = ev ntimes (n : stk) top env arg ev ntimes stk top env (App _ f a) = do f' <- ev ntimes stk False env f a' <- ev ntimes stk False env a evApply ntimes stk top env [a'] f' ev ntimes stk top env (Proj t i) = do -- evaluate dictionaries if it means the projection works t' <- ev ntimes stk top env t -- tfull' <- reapply ntimes stk top env t' [] return (doProj t' (getValArgs t')) where doProj t' (VP (DCon _ _ _) _ _, args) | i >= 0 && i < length args = args!!i doProj t' _ = VProj t' i ev ntimes stk top env (Constant c) = return $ VConstant c ev ntimes stk top env Erased = return VErased ev ntimes stk top env Impossible = return VImpossible ev ntimes stk top env (TType i) = return $ VType i ev ntimes stk top env (UType u) = return $ VUType u evApply ntimes stk top env args (VApp f a) = evApply ntimes stk top env (a:args) f evApply ntimes stk top env args f = apply ntimes stk top env f args reapply ntimes stk top env f@(VP Ref n ty) args = let val = lookupDefAcc n (spec || atRepl || runtime) ctxt in case val of [(CaseOp ci _ _ _ _ cd, acc)] -> let (ns, tree) = getCases cd in do c <- evCase ntimes n (n:stk) top env ns args tree case c of (Nothing, _) -> return $ unload env (VP Ref n ty) args (Just v, rest) -> evApply ntimes stk top env rest v _ -> case args of (a : as) -> return $ unload env f (a : as) [] -> return f reapply ntimes stk top env (VApp f a) args = reapply ntimes stk top env f (a : args) reapply ntimes stk top env v args = return v apply ntimes stk top env (VBind True n (Lam t) sc) (a:as) = do a' <- sc a app <- apply ntimes stk top env a' as wknV 1 app apply ntimes_in stk top env f@(VP Ref n ty) args | not top && hnf = case args of [] -> return f _ -> 
return $ unload env f args | otherwise = do let limit = if simpl then 100 else 10000 (u, ntimes) <- usable spec limit n ntimes_in let red = u && (tcReducible n ctxt || spec || atRepl || runtime || sUN "assert_total" `elem` stk) if red then do let val = lookupDefAcc n (spec || atRepl || runtime) ctxt case val of [(CaseOp ci _ _ _ _ cd, acc)] | acc == Public || acc == Hidden -> -- unoptimised version let (ns, tree) = getCases cd in if blockSimplify ci n stk then return $ unload env (VP Ref n ty) args else -- traceWhen runtime (show (n, ns, tree)) $ do c <- evCase ntimes n (n:stk) top env ns args tree case c of (Nothing, _) -> return $ unload env (VP Ref n ty) args (Just v, rest) -> evApply ntimes stk top env rest v [(Operator _ i op, _)] -> if (i <= length args) then case op (take i args) of Nothing -> return $ unload env (VP Ref n ty) args Just v -> evApply ntimes stk top env (drop i args) v else return $ unload env (VP Ref n ty) args _ -> case args of [] -> return f _ -> return $ unload env f args else case args of (a : as) -> return $ unload env f (a:as) [] -> return f apply ntimes stk top env f (a:as) = return $ unload env f (a:as) apply ntimes stk top env f [] = return f -- specApply stk env f@(VP Ref n ty) args -- = case lookupCtxt n statics of -- [as] -> if or as -- then trace (show (n, map fst (filter (\ (_, s) -> s) (zip args as)))) $ -- return $ unload env f args -- else return $ unload env f args -- _ -> return $ unload env f args -- specApply stk env f args = return $ unload env f args unload :: [(Name, Value)] -> Value -> [Value] -> Value unload env f [] = f unload env f (a:as) = unload env (VApp f a) as evCase ntimes n stk top env ns args tree | length ns <= length args = do let args' = take (length ns) args let rest = drop (length ns) args when spec $ deduct n t <- evTree ntimes stk top env (zip ns args') tree when spec $ case t of Nothing -> reinstate n -- Blocked, count n again Just _ -> return () -- (zipWith (\n , t) -> (n, t)) ns args') tree return (t, rest) | otherwise = return (Nothing, args) evTree :: [(Name, Int)] -> [Name] -> Bool -> [(Name, Value)] -> [(Name, Value)] -> SC -> Eval (Maybe Value) evTree ntimes stk top env amap (UnmatchedCase str) = return Nothing evTree ntimes stk top env amap (STerm tm) = do let etm = pToVs (map fst amap) tm etm' <- ev ntimes stk (not (conHeaded tm)) (amap ++ env) etm return $ Just etm' evTree ntimes stk top env amap (ProjCase t alts) = do t' <- ev ntimes stk top env t doCase ntimes stk top env amap t' alts evTree ntimes stk top env amap (Case _ n alts) = case lookup n amap of Just v -> doCase ntimes stk top env amap v alts _ -> return Nothing evTree ntimes stk top env amap ImpossibleCase = return Nothing doCase ntimes stk top env amap v alts = do c <- chooseAlt env v (getValArgs v) alts amap case c of Just (altmap, sc) -> evTree ntimes stk top env altmap sc _ -> do c' <- chooseAlt' ntimes stk env v (getValArgs v) alts amap case c' of Just (altmap, sc) -> evTree ntimes stk top env altmap sc _ -> return Nothing conHeaded tm@(App _ _ _) | (P (DCon _ _ _) _ _, args) <- unApply tm = True conHeaded t = False chooseAlt' ntimes stk env _ (f, args) alts amap = do f' <- apply ntimes stk True env f args chooseAlt env f' (getValArgs f') alts amap chooseAlt :: [(Name, Value)] -> Value -> (Value, [Value]) -> [CaseAlt] -> [(Name, Value)] -> Eval (Maybe ([(Name, Value)], SC)) chooseAlt env _ (VP (DCon i a _) _ _, args) alts amap | Just (ns, sc) <- findTag i alts = return $ Just (updateAmap (zip ns args) amap, sc) | Just v <- findDefault alts = 
return $ Just (amap, v) chooseAlt env _ (VP (TCon i a) _ _, args) alts amap | Just (ns, sc) <- findTag i alts = return $ Just (updateAmap (zip ns args) amap, sc) | Just v <- findDefault alts = return $ Just (amap, v) chooseAlt env _ (VConstant c, []) alts amap | Just v <- findConst c alts = return $ Just (amap, v) | Just (n', sub, sc) <- findSuc c alts = return $ Just (updateAmap [(n',sub)] amap, sc) | Just v <- findDefault alts = return $ Just (amap, v) chooseAlt env _ (VP _ n _, args) alts amap | Just (ns, sc) <- findFn n alts = return $ Just (updateAmap (zip ns args) amap, sc) chooseAlt env _ (VBind _ _ (Pi i s k) t, []) alts amap | Just (ns, sc) <- findFn (sUN "->") alts = do t' <- t (VV 0) -- we know it's not in scope or it's not a pattern return $ Just (updateAmap (zip ns [s, t']) amap, sc) chooseAlt _ _ _ alts amap | Just v <- findDefault alts = if (any fnCase alts) then return $ Just (amap, v) else return Nothing | otherwise = return Nothing fnCase (FnCase _ _ _) = True fnCase _ = False -- Replace old variable names in the map with new matches -- (This is possibly unnecessary since we make unique names and don't -- allow repeated variables...?) updateAmap newm amap = newm ++ filter (\ (x, _) -> not (elem x (map fst newm))) amap findTag i [] = Nothing findTag i (ConCase n j ns sc : xs) | i == j = Just (ns, sc) findTag i (_ : xs) = findTag i xs findFn fn [] = Nothing findFn fn (FnCase n ns sc : xs) | fn == n = Just (ns, sc) findFn fn (_ : xs) = findFn fn xs findDefault [] = Nothing findDefault (DefaultCase sc : xs) = Just sc findDefault (_ : xs) = findDefault xs findSuc c [] = Nothing findSuc (BI val) (SucCase n sc : _) | val /= 0 = Just (n, VConstant (BI (val - 1)), sc) findSuc c (_ : xs) = findSuc c xs findConst c [] = Nothing findConst c (ConstCase c' v : xs) | c == c' = Just v findConst (AType (ATInt ITNative)) (ConCase n 1 [] v : xs) = Just v findConst (AType ATFloat) (ConCase n 2 [] v : xs) = Just v findConst (AType (ATInt ITChar)) (ConCase n 3 [] v : xs) = Just v findConst StrType (ConCase n 4 [] v : xs) = Just v findConst (AType (ATInt ITBig)) (ConCase n 6 [] v : xs) = Just v findConst (AType (ATInt (ITFixed ity))) (ConCase n tag [] v : xs) | tag == 7 + fromEnum ity = Just v findConst c (_ : xs) = findConst c xs getValArgs tm = getValArgs' tm [] getValArgs' (VApp f a) as = getValArgs' f (a:as) getValArgs' f as = (f, as) -- tmpToV i vd (VLetHole j) | vd == j = return $ VV i -- tmpToV i vd (VP nt n v) = liftM (VP nt n) (tmpToV i vd v) -- tmpToV i vd (VBind n b sc) = do b' <- fmapMB (tmpToV i vd) b -- let sc' = \x -> do x' <- sc x -- tmpToV (i + 1) vd x' -- return (VBind n b' sc') -- tmpToV i vd (VApp f a) = liftM2 VApp (tmpToV i vd f) (tmpToV i vd a) -- tmpToV i vd x = return x instance Eq Value where (==) x y = getTT x == getTT y where getTT v = evalState (quote 0 v) initEval class Quote a where quote :: Int -> a -> Eval (TT Name) instance Quote Value where quote i (VP nt n v) = liftM (P nt n) (quote i v) quote i (VV x) = return $ V x quote i (VBind _ n b sc) = do sc' <- sc (VTmp i) b' <- quoteB b liftM (Bind n b') (quote (i+1) sc') where quoteB t = fmapMB (quote i) t quote i (VBLet vd n t v sc) = do sc' <- quote i sc t' <- quote i t v' <- quote i v let sc'' = pToV (sMN vd "vlet") (addBinder sc') return (Bind n (Let t' v') sc'') quote i (VApp f a) = liftM2 (App MaybeHoles) (quote i f) (quote i a) quote i (VType u) = return (TType u) quote i (VUType u) = return (UType u) quote i VErased = return Erased quote i VImpossible = return Impossible quote i (VProj v j) = do v' <- quote 
i v return (Proj v' j) quote i (VConstant c) = return $ Constant c quote i (VTmp x) = return $ V (i - x - 1) wknV :: Int -> Value -> Eval Value wknV i (VV x) | x >= i = return $ VV (x - 1) wknV i (VBind red n b sc) = do b' <- fmapMB (wknV i) b return $ VBind red n b' (\x -> do x' <- sc x wknV (i + 1) x') wknV i (VApp f a) = liftM2 VApp (wknV i f) (wknV i a) wknV i t = return t isUniverse :: Term -> Bool isUniverse (TType _) = True isUniverse (UType _) = True isUniverse _ = False isUsableUniverse :: Term -> Bool isUsableUniverse (UType NullType) = False isUsableUniverse x = isUniverse x convEq' ctxt hs x y = evalStateT (convEq ctxt hs x y) (0, []) convEq :: Context -> [Name] -> TT Name -> TT Name -> StateT UCs TC Bool convEq ctxt holes topx topy = ceq [] topx topy where ceq :: [(Name, Name)] -> TT Name -> TT Name -> StateT UCs TC Bool ceq ps (P xt x _) (P yt y _) | x `elem` holes || y `elem` holes = return True | x == y || (x, y) `elem` ps || (y,x) `elem` ps = return True | otherwise = sameDefs ps x y ceq ps x (Bind n (Lam t) (App _ y (V 0))) = ceq ps x (substV (P Bound n t) y) ceq ps (Bind n (Lam t) (App _ x (V 0))) y = ceq ps (substV (P Bound n t) x) y ceq ps x (Bind n (Lam t) (App _ y (P Bound n' _))) | n == n' = ceq ps x y ceq ps (Bind n (Lam t) (App _ x (P Bound n' _))) y | n == n' = ceq ps x y ceq ps (Bind n (PVar t) sc) y = ceq ps sc y ceq ps x (Bind n (PVar t) sc) = ceq ps x sc ceq ps (Bind n (PVTy t) sc) y = ceq ps sc y ceq ps x (Bind n (PVTy t) sc) = ceq ps x sc ceq ps (V x) (V y) = return (x == y) ceq ps (V x) (P _ y _) | x >= 0 && length ps > x = return (fst (ps!!x) == y) | otherwise = return False ceq ps (P _ x _) (V y) | y >= 0 && length ps > y = return (x == snd (ps!!y)) | otherwise = return False ceq ps (Bind n xb xs) (Bind n' yb ys) = liftM2 (&&) (ceqB ps xb yb) (ceq ((n,n'):ps) xs ys) where ceqB ps (Let v t) (Let v' t') = liftM2 (&&) (ceq ps v v') (ceq ps t t') ceqB ps (Guess v t) (Guess v' t') = liftM2 (&&) (ceq ps v v') (ceq ps t t') ceqB ps (Pi i v t) (Pi i' v' t') = liftM2 (&&) (ceq ps v v') (ceq ps t t') ceqB ps b b' = ceq ps (binderTy b) (binderTy b') -- Special case for 'case' blocks - size of scope causes complications, -- we only want to check the blocks themselves are valid and identical -- in the current scope. So, just check the bodies, and the additional -- arguments the case blocks are applied to. 
ceq ps x@(App _ _ _) y@(App _ _ _) | (P _ cx _, xargs) <- unApply x, (P _ cy _, yargs) <- unApply y, caseName cx && caseName cy = sameCase ps cx cy xargs yargs ceq ps (App _ fx ax) (App _ fy ay) = liftM2 (&&) (ceq ps fx fy) (ceq ps ax ay) ceq ps (Constant x) (Constant y) = return (x == y) ceq ps (TType x) (TType y) | x == y = return True ceq ps (TType (UVal 0)) (TType y) = return True ceq ps (TType x) (TType y) = do (v, cs) <- get put (v, ULE x y : cs) return True ceq ps (UType AllTypes) x = return (isUsableUniverse x) ceq ps x (UType AllTypes) = return (isUsableUniverse x) ceq ps (UType u) (UType v) = return (u == v) ceq ps Erased _ = return True ceq ps _ Erased = return True ceq ps x y = return False caseeq ps (Case _ n cs) (Case _ n' cs') = caseeqA ((n,n'):ps) cs cs' where caseeqA ps (ConCase x i as sc : rest) (ConCase x' i' as' sc' : rest') = do q1 <- caseeq (zip as as' ++ ps) sc sc' q2 <- caseeqA ps rest rest' return $ x == x' && i == i' && q1 && q2 caseeqA ps (ConstCase x sc : rest) (ConstCase x' sc' : rest') = do q1 <- caseeq ps sc sc' q2 <- caseeqA ps rest rest' return $ x == x' && q1 && q2 caseeqA ps (DefaultCase sc : rest) (DefaultCase sc' : rest') = liftM2 (&&) (caseeq ps sc sc') (caseeqA ps rest rest') caseeqA ps [] [] = return True caseeqA ps _ _ = return False caseeq ps (STerm x) (STerm y) = ceq ps x y caseeq ps (UnmatchedCase _) (UnmatchedCase _) = return True caseeq ps _ _ = return False sameDefs ps x y = case (lookupDef x ctxt, lookupDef y ctxt) of ([Function _ xdef], [Function _ ydef]) -> ceq ((x,y):ps) xdef ydef ([CaseOp _ _ _ _ _ xd], [CaseOp _ _ _ _ _ yd]) -> let (_, xdef) = cases_compiletime xd (_, ydef) = cases_compiletime yd in caseeq ((x,y):ps) xdef ydef _ -> return False sameCase :: [(Name, Name)] -> Name -> Name -> [Term] -> [Term] -> StateT UCs TC Bool sameCase ps x y xargs yargs = case (lookupDef x ctxt, lookupDef y ctxt) of ([Function _ xdef], [Function _ ydef]) -> ceq ((x,y):ps) xdef ydef ([CaseOp _ _ _ _ _ xd], [CaseOp _ _ _ _ _ yd]) -> let (xin, xdef) = cases_compiletime xd (yin, ydef) = cases_compiletime yd in do liftM2 (&&) (do ok <- zipWithM (ceq ps) (drop (length xin) xargs) (drop (length yin) yargs) return (and ok)) (caseeq ((x,y):ps) xdef ydef) _ -> return False -- SPECIALISATION ----------------------------------------------------------- -- We need too much control to be able to do this by tweaking the main -- evaluator spec :: Context -> Ctxt [Bool] -> Env -> TT Name -> Eval (TT Name) spec ctxt statics genv tm = error "spec undefined" -- CONTEXTS ----------------------------------------------------------------- {-| A definition is either a simple function (just an expression with a type), a constant, which could be a data or type constructor, an axiom or as an yet undefined function, or an Operator. An Operator is a function which explains how to reduce. 
A CaseOp is a function defined by a simple case tree -} data Def = Function !Type !Term | TyDecl NameType !Type | Operator Type Int ([Value] -> Maybe Value) | CaseOp CaseInfo !Type ![(Type, Bool)] -- argument types, whether canonical ![Either Term (Term, Term)] -- original definition ![([Name], Term, Term)] -- simplified for totality check definition !CaseDefs deriving Generic -- [Name] SC -- Compile time case definition -- [Name] SC -- Run time cae definitions data CaseDefs = CaseDefs { cases_totcheck :: !([Name], SC), cases_compiletime :: !([Name], SC), cases_inlined :: !([Name], SC), cases_runtime :: !([Name], SC) } deriving Generic data CaseInfo = CaseInfo { case_inlinable :: Bool, -- decided by machine case_alwaysinline :: Bool, -- decided by %inline flag tc_dictionary :: Bool } deriving Generic {-! deriving instance Binary Def !-} {-! deriving instance Binary CaseInfo !-} {-! deriving instance Binary CaseDefs !-} instance Show Def where show (Function ty tm) = "Function: " ++ show (ty, tm) show (TyDecl nt ty) = "TyDecl: " ++ show nt ++ " " ++ show ty show (Operator ty _ _) = "Operator: " ++ show ty show (CaseOp (CaseInfo inlc inla inlr) ty atys ps_in ps cd) = let (ns, sc) = cases_compiletime cd (ns_t, sc_t) = cases_totcheck cd (ns', sc') = cases_runtime cd in "Case: " ++ show ty ++ " " ++ show ps ++ "\n" ++ "TOTALITY CHECK TIME:\n\n" ++ show ns_t ++ " " ++ show sc_t ++ "\n\n" ++ "COMPILE TIME:\n\n" ++ show ns ++ " " ++ show sc ++ "\n\n" ++ "RUN TIME:\n\n" ++ show ns' ++ " " ++ show sc' ++ "\n\n" ++ if inlc then "Inlinable" else "Not inlinable" ++ if inla then " Aggressively\n" else "\n" ------- -- Hidden => Programs can't access the name at all -- Public => Programs can access the name and use at will -- Frozen => Programs can access the name, which doesn't reduce -- Private => Programs can't access the name, doesn't reduce internally data Accessibility = Hidden | Public | Frozen | Private deriving (Eq, Ord, Generic) instance Show Accessibility where show Public = "public export" show Frozen = "export" show Private = "private" show Hidden = "hidden" type Injectivity = Bool -- | The result of totality checking data Totality = Total [Int] -- ^ well-founded arguments | Productive -- ^ productive | Partial PReason | Unchecked | Generated deriving (Eq, Generic) -- | Reasons why a function may not be total data PReason = Other [Name] | Itself | NotCovering | NotPositive | UseUndef Name | ExternalIO | BelieveMe | Mutual [Name] | NotProductive deriving (Show, Eq, Generic) instance Show Totality where show (Total args)= "Total" -- ++ show args ++ " decreasing arguments" show Productive = "Productive" -- ++ show args ++ " decreasing arguments" show Unchecked = "not yet checked for totality" show (Partial Itself) = "possibly not total as it is not well founded" show (Partial NotCovering) = "not total as there are missing cases" show (Partial NotPositive) = "not strictly positive" show (Partial ExternalIO) = "an external IO primitive" show (Partial NotProductive) = "not productive" show (Partial BelieveMe) = "not total due to use of believe_me in proof" show (Partial (Other ns)) = "possibly not total due to: " ++ showSep ", " (map show ns) show (Partial (Mutual ns)) = "possibly not total due to recursive path " ++ showSep " --> " (map show ns) show (Partial (UseUndef n)) = "possibly not total because it uses the undefined name " ++ show n show Generated = "auto-generated" {-! deriving instance Binary Accessibility !-} {-! deriving instance Binary Totality !-} {-! 
deriving instance Binary PReason !-} -- Possible attached meta-information for a definition in context data MetaInformation = EmptyMI -- ^ No meta-information | DataMI [Int] -- ^ Meta information for a data declaration with position of parameters deriving (Eq, Show, Generic) -- | Contexts used for global definitions and for proof state. They contain -- universe constraints and existing definitions. data Context = MkContext { next_tvar :: Int, definitions :: Ctxt (Def, Injectivity, Accessibility, Totality, MetaInformation) } deriving (Show, Generic) -- | The initial empty context initContext = MkContext 0 emptyContext mapDefCtxt :: (Def -> Def) -> Context -> Context mapDefCtxt f (MkContext t !defs) = MkContext t (mapCtxt f' defs) where f' (!d, i, a, t, m) = f' (f d, i, a, t, m) -- | Get the definitions from a context ctxtAlist :: Context -> [(Name, Def)] ctxtAlist ctxt = map (\(n, (d, i, a, t, m)) -> (n, d)) $ toAlist (definitions ctxt) veval ctxt env t = evalState (eval False ctxt [] env t []) initEval addToCtxt :: Name -> Term -> Type -> Context -> Context addToCtxt n tm ty uctxt = let ctxt = definitions uctxt !ctxt' = addDef n (Function ty tm, False, Public, Unchecked, EmptyMI) ctxt in uctxt { definitions = ctxt' } setAccess :: Name -> Accessibility -> Context -> Context setAccess n a uctxt = let ctxt = definitions uctxt !ctxt' = updateDef n (\ (d, i, _, t, m) -> (d, i, a, t, m)) ctxt in uctxt { definitions = ctxt' } setInjective :: Name -> Injectivity -> Context -> Context setInjective n i uctxt = let ctxt = definitions uctxt !ctxt' = updateDef n (\ (d, _, a, t, m) -> (d, i, a, t, m)) ctxt in uctxt { definitions = ctxt' } setTotal :: Name -> Totality -> Context -> Context setTotal n t uctxt = let ctxt = definitions uctxt !ctxt' = updateDef n (\ (d, i, a, _, m) -> (d, i, a, t, m)) ctxt in uctxt { definitions = ctxt' } setMetaInformation :: Name -> MetaInformation -> Context -> Context setMetaInformation n m uctxt = let ctxt = definitions uctxt !ctxt' = updateDef n (\ (d, i, a, t, _) -> (d, i, a, t, m)) ctxt in uctxt { definitions = ctxt' } addCtxtDef :: Name -> Def -> Context -> Context addCtxtDef n d c = let ctxt = definitions c !ctxt' = addDef n (d, False, Public, Unchecked, EmptyMI) $! ctxt in c { definitions = ctxt' } addTyDecl :: Name -> NameType -> Type -> Context -> Context addTyDecl n nt ty uctxt = let ctxt = definitions uctxt !ctxt' = addDef n (TyDecl nt ty, False, Public, Unchecked, EmptyMI) ctxt in uctxt { definitions = ctxt' } addDatatype :: Datatype Name -> Context -> Context addDatatype (Data n tag ty unique cons) uctxt = let ctxt = definitions uctxt ty' = normalise uctxt [] ty !ctxt' = addCons 0 cons (addDef n (TyDecl (TCon tag (arity ty')) ty, True, Public, Unchecked, EmptyMI) ctxt) in uctxt { definitions = ctxt' } where addCons tag [] ctxt = ctxt addCons tag ((n, ty) : cons) ctxt = let ty' = normalise uctxt [] ty in addCons (tag+1) cons (addDef n (TyDecl (DCon tag (arity ty') unique) ty, True, Public, Unchecked, EmptyMI) ctxt) -- FIXME: Too many arguments! Refactor all these Bools. -- -- Issue #1724 on the issue tracker. 
-- https://github.com/idris-lang/Idris-dev/issues/1724 addCasedef :: Name -> ErasureInfo -> CaseInfo -> Bool -> SC -> -- default case Bool -> Bool -> [(Type, Bool)] -> -- argument types, whether canonical [Int] -> -- inaccessible arguments [Either Term (Term, Term)] -> [([Name], Term, Term)] -> -- totality [([Name], Term, Term)] -> -- compile time [([Name], Term, Term)] -> -- inlined [([Name], Term, Term)] -> -- run time Type -> Context -> TC Context addCasedef n ei ci@(CaseInfo inline alwaysInline tcdict) tcase covering reflect asserted argtys inacc ps_in ps_tot ps_inl ps_ct ps_rt ty uctxt = do let ctxt = definitions uctxt access = case lookupDefAcc n False uctxt of [(_, acc)] -> acc _ -> Public totalityTime <- simpleCase tcase covering reflect CompileTime emptyFC inacc argtys ps_tot ei compileTime <- simpleCase tcase covering reflect CompileTime emptyFC inacc argtys ps_ct ei inlined <- simpleCase tcase covering reflect CompileTime emptyFC inacc argtys ps_inl ei runtime <- simpleCase tcase covering reflect RunTime emptyFC inacc argtys ps_rt ei ctxt' <- case (totalityTime, compileTime, inlined, runtime) of (CaseDef args_tot sc_tot _, CaseDef args_ct sc_ct _, CaseDef args_inl sc_inl _, CaseDef args_rt sc_rt _) -> let inl = alwaysInline -- tcdict inlc = (inl || small n args_ct sc_ct) && (not asserted) inlr = inl || small n args_rt sc_rt cdef = CaseDefs (args_tot, sc_tot) (args_ct, sc_ct) (args_inl, sc_inl) (args_rt, sc_rt) op = (CaseOp (ci { case_inlinable = inlc }) ty argtys ps_in ps_tot cdef, False, access, Unchecked, EmptyMI) in return $ addDef n op ctxt -- other -> tfail (Msg $ "Error adding case def: " ++ show other) return uctxt { definitions = ctxt' } -- simplify a definition for totality checking simplifyCasedef :: Name -> ErasureInfo -> Context -> TC Context simplifyCasedef n ei uctxt = do let ctxt = definitions uctxt ctxt' <- case lookupCtxt n ctxt of [(CaseOp ci ty atys [] ps _, inj, acc, tot, metainf)] -> return ctxt -- nothing to simplify (or already done...) 
[(CaseOp ci ty atys ps_in ps cd, inj, acc, tot, metainf)] -> do let ps_in' = map simpl ps_in pdef = map debind ps_in' CaseDef args sc _ <- simpleCase False (STerm Erased) False CompileTime emptyFC [] atys pdef ei return $ addDef n (CaseOp ci ty atys ps_in' ps (cd { cases_totcheck = (args, sc) }), inj, acc, tot, metainf) ctxt _ -> return ctxt return uctxt { definitions = ctxt' } where depat acc (Bind n (PVar t) sc) = depat (n : acc) (instantiate (P Bound n t) sc) depat acc x = (acc, x) debind (Right (x, y)) = let (vs, x') = depat [] x (_, y') = depat [] y in (vs, x', y') debind (Left x) = let (vs, x') = depat [] x in (vs, x', Impossible) simpl (Right (x, y)) = Right (x, simplify uctxt [] y) simpl t = t addOperator :: Name -> Type -> Int -> ([Value] -> Maybe Value) -> Context -> Context addOperator n ty a op uctxt = let ctxt = definitions uctxt ctxt' = addDef n (Operator ty a op, False, Public, Unchecked, EmptyMI) ctxt in uctxt { definitions = ctxt' } tfst (a, _, _, _, _) = a lookupNames :: Name -> Context -> [Name] lookupNames n ctxt = let ns = lookupCtxtName n (definitions ctxt) in map fst ns -- | Get the list of pairs of fully-qualified names and their types that match some name lookupTyName :: Name -> Context -> [(Name, Type)] lookupTyName n ctxt = do (name, def) <- lookupCtxtName n (definitions ctxt) ty <- case tfst def of (Function ty _) -> return ty (TyDecl _ ty) -> return ty (Operator ty _ _) -> return ty (CaseOp _ ty _ _ _ _) -> return ty return (name, ty) -- | Get the pair of a fully-qualified name and its type, if there is a unique one matching the name used as a key. lookupTyNameExact :: Name -> Context -> Maybe (Name, Type) lookupTyNameExact n ctxt = listToMaybe [ (nm, v) | (nm, v) <- lookupTyName n ctxt, nm == n ] -- | Get the types that match some name lookupTy :: Name -> Context -> [Type] lookupTy n ctxt = map snd (lookupTyName n ctxt) -- | Get the single type that matches some name precisely lookupTyExact :: Name -> Context -> Maybe Type lookupTyExact n ctxt = fmap snd (lookupTyNameExact n ctxt) -- | Return true if the given type is a concrete type familyor primitive -- False it it's a function to compute a type or a variable isCanonical :: Type -> Context -> Bool isCanonical t ctxt = case unApply t of (P _ n _, _) -> isConName n ctxt (Constant _, _) -> True _ -> False isConName :: Name -> Context -> Bool isConName n ctxt = isTConName n ctxt || isDConName n ctxt isTConName :: Name -> Context -> Bool isTConName n ctxt = case lookupDefExact n ctxt of Just (TyDecl (TCon _ _) _) -> True _ -> False -- | Check whether a resolved name is certainly a data constructor isDConName :: Name -> Context -> Bool isDConName n ctxt = case lookupDefExact n ctxt of Just (TyDecl (DCon _ _ _) _) -> True _ -> False -- | Check whether any overloading of a name is a data constructor canBeDConName :: Name -> Context -> Bool canBeDConName n ctxt = or $ do def <- lookupCtxt n (definitions ctxt) case tfst def of (TyDecl (DCon _ _ _) _) -> return True _ -> return False isFnName :: Name -> Context -> Bool isFnName n ctxt = case lookupDefExact n ctxt of Just (Function _ _) -> True Just (Operator _ _ _) -> True Just (CaseOp _ _ _ _ _ _) -> True _ -> False isTCDict :: Name -> Context -> Bool isTCDict n ctxt = case lookupDefExact n ctxt of Just (Function _ _) -> False Just (Operator _ _ _) -> False Just (CaseOp ci _ _ _ _ _) -> tc_dictionary ci _ -> False lookupP :: Name -> Context -> [Term] lookupP = lookupP_all False False lookupP_all :: Bool -> Bool -> Name -> Context -> [Term] lookupP_all all exact n ctxt 
= do (n', def) <- names p <- case def of (Function ty tm, inj, a, _, _) -> return (P Ref n' ty, a) (TyDecl nt ty, _, a, _, _) -> return (P nt n' ty, a) (CaseOp _ ty _ _ _ _, inj, a, _, _) -> return (P Ref n' ty, a) (Operator ty _ _, inj, a, _, _) -> return (P Ref n' ty, a) case snd p of Hidden -> if all then return (fst p) else [] Private -> if all then return (fst p) else [] _ -> return (fst p) where names = let ns = lookupCtxtName n (definitions ctxt) in if exact then filter (\ (n', d) -> n' == n) ns else ns lookupDefExact :: Name -> Context -> Maybe Def lookupDefExact n ctxt = tfst <$> lookupCtxtExact n (definitions ctxt) lookupDef :: Name -> Context -> [Def] lookupDef n ctxt = tfst <$> lookupCtxt n (definitions ctxt) lookupNameDef :: Name -> Context -> [(Name, Def)] lookupNameDef n ctxt = mapSnd tfst $ lookupCtxtName n (definitions ctxt) where mapSnd f [] = [] mapSnd f ((x,y):xys) = (x, f y) : mapSnd f xys lookupDefAcc :: Name -> Bool -> Context -> [(Def, Accessibility)] lookupDefAcc n mkpublic ctxt = map mkp $ lookupCtxt n (definitions ctxt) -- io_bind a special case for REPL prettiness where mkp (d, inj, a, _, _) = if mkpublic && (not (n == sUN "io_bind" || n == sUN "io_pure")) then (d, Public) else (d, a) lookupDefAccExact :: Name -> Bool -> Context -> Maybe (Def, Accessibility) lookupDefAccExact n mkpublic ctxt = fmap mkp $ lookupCtxtExact n (definitions ctxt) -- io_bind a special case for REPL prettiness where mkp (d, inj, a, _, _) = if mkpublic && (not (n == sUN "io_bind" || n == sUN "io_pure")) then (d, Public) else (d, a) lookupTotal :: Name -> Context -> [Totality] lookupTotal n ctxt = map mkt $ lookupCtxt n (definitions ctxt) where mkt (d, inj, a, t, m) = t lookupTotalExact :: Name -> Context -> Maybe Totality lookupTotalExact n ctxt = fmap mkt $ lookupCtxtExact n (definitions ctxt) where mkt (d, inj, a, t, m) = t lookupInjectiveExact :: Name -> Context -> Maybe Injectivity lookupInjectiveExact n ctxt = fmap mkt $ lookupCtxtExact n (definitions ctxt) where mkt (d, inj, a, t, m) = inj -- Check if a name is reducible in the type checker. Partial definitions -- are not reducible (so treated as a constant) tcReducible :: Name -> Context -> Bool tcReducible n ctxt = case lookupTotalExact n ctxt of Nothing -> True Just (Partial _) -> False _ -> True lookupMetaInformation :: Name -> Context -> [MetaInformation] lookupMetaInformation n ctxt = map mkm $ lookupCtxt n (definitions ctxt) where mkm (d, inj, a, t, m) = m lookupNameTotal :: Name -> Context -> [(Name, Totality)] lookupNameTotal n = map (\(n, (_, _, _, t, _)) -> (n, t)) . lookupCtxtName n . 
definitions lookupVal :: Name -> Context -> [Value] lookupVal n ctxt = do def <- lookupCtxt n (definitions ctxt) case tfst def of (Function _ htm) -> return (veval ctxt [] htm) (TyDecl nt ty) -> return (VP nt n (veval ctxt [] ty)) _ -> [] lookupTyEnv :: Name -> Env -> Maybe (Int, Type) lookupTyEnv n env = li n 0 env where li n i [] = Nothing li n i ((x, b): xs) | n == x = Just (i, binderTy b) | otherwise = li n (i+1) xs -- | Create a unique name given context and other existing names uniqueNameCtxt :: Context -> Name -> [Name] -> Name uniqueNameCtxt ctxt n hs | n `elem` hs = uniqueNameCtxt ctxt (nextName n) hs | [_] <- lookupTy n ctxt = uniqueNameCtxt ctxt (nextName n) hs | otherwise = n uniqueBindersCtxt :: Context -> [Name] -> TT Name -> TT Name uniqueBindersCtxt ctxt ns (Bind n b sc) = let n' = uniqueNameCtxt ctxt n ns in Bind n' (fmap (uniqueBindersCtxt ctxt (n':ns)) b) (uniqueBindersCtxt ctxt ns sc) uniqueBindersCtxt ctxt ns (App s f a) = App s (uniqueBindersCtxt ctxt ns f) (uniqueBindersCtxt ctxt ns a) uniqueBindersCtxt ctxt ns t = t
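-- Illustrative usage sketch (added for this edit, not part of the original
-- module): how client code might query a 'Context' with the lookup helpers
-- defined above. The name 'describeName' is made up for the example; only
-- 'lookupTyExact', 'isDConName' and 'lookupTotalExact' from this module are used.
describeName :: Name -> Context -> String
describeName n ctxt =
  case lookupTyExact n ctxt of
    Nothing -> show n ++ " is not defined"
    Just ty ->
      show n ++ " : " ++ show ty
             ++ (if isDConName n ctxt then "  (data constructor)" else "")
             ++ (case lookupTotalExact n ctxt of
                   Just t  -> "  -- " ++ show t
                   Nothing -> "")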
ben-schulz/Idris-dev
src/Idris/Core/Evaluate.hs
Haskell
bsd-3-clause
52,168
module Codec.Crypto.DSA( module Codec.Crypto.DSA.Exceptions ) where

import Codec.Crypto.DSA.Exceptions
GaloisInc/DSA
src/Codec/Crypto/DSA.hs
Haskell
bsd-3-clause
121
-- -- Copyright © 2013-2014 Anchor Systems, Pty Ltd and Others -- -- The code in this file, and the program it is a part of, is -- made available to you by its authors as open source software: -- you can redistribute it and/or modify it under the terms of -- the 3-clause BSD licence. -- {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE ScopedTypeVariables #-} module Selecto.NagiosSearch where import Chevalier.Types (SourceQuery (..)) import Chevalier.Util (buildFuzzyRequestFromPairs,buildFuzzyRequestTag) import Control.Applicative import Control.Concurrent hiding (yield) import Control.Monad.IO.Class import Data.ByteString.Lazy.Builder (stringUtf8) import Data.Maybe import Selecto.Util import Snap.Core import System.Timeout (timeout) nagiosSearch :: MVar SourceQuery -> Snap () nagiosSearch chevalier_mvar = do host <- utf8Or400 =<< fromMaybe "*" <$> getParam "host" service <- utf8Or400 =<< fromMaybe "*" <$> getParam "service" origin_alias <- getParam "origin" >>= (\o -> case o of Just bs -> utf8Or400 bs Nothing -> writeError 400 $ stringUtf8 "Must specify 'origin'") address <- utf8Or400 =<< fromMaybe "*" <$> getParam "address" page <- toInt <$> fromMaybe "0" <$> getParam "page" page_size <- toInt <$> fromMaybe "64" <$> getParam "page_size" let origin = case origin_alias of "SYD1" -> "R82KX1" "LAX1" -> "LMRH8C" let query = buildFuzzyRequestTag "host" host --FromPairs [("host", host), ("service",service)] maybe_response <- liftIO $ do response_mvar <- newEmptyMVar putMVar chevalier_mvar $ SourceQuery query address page page_size origin response_mvar timeout chevalierTimeout $ takeMVar response_mvar either_response <- maybe timeoutError return maybe_response either chevalierError writeJSON either_response where chevalierTimeout = 10000000 -- 10 seconds chevalierError e = writeError 500 $ stringUtf8 ("Exception talking to chevalier backend" ++ show e) timeoutError = do let msg = "Timed out talking to chevalier backend" writeError 500 $ stringUtf8 msg
glasnt/selecto
src/Selecto/NagiosSearch.hs
Haskell
bsd-3-clause
2,187
{-# LANGUAGE CPP #-} {-# LANGUAGE TemplateHaskell #-} module Test.Async.IO ( ioTestGroup ) where import Control.Monad (when, void) import Data.Maybe (isJust, isNothing) import Control.Concurrent.Lifted import Control.Exception.Lifted as E #if MIN_VERSION_monad_control(1, 0, 0) import Control.Concurrent.Async.Lifted.Safe #else import Control.Concurrent.Async.Lifted #endif import Test.Async.Common ioTestGroup :: TestTree ioTestGroup = $(testGroupGenerator) case_async_waitCatch :: Assertion case_async_waitCatch = do a <- async (return value) r <- waitCatch a case r of Left _ -> assertFailure "" Right e -> e @?= value case_async_wait :: Assertion case_async_wait = do a <- async (return value) r <- wait a assertEqual "async_wait" r value case_async_exwaitCatch :: Assertion case_async_exwaitCatch = do a <- async (throwIO TestException) r <- waitCatch a case r of Left e -> fromException e @?= Just TestException Right _ -> assertFailure "" case_async_exwait :: Assertion case_async_exwait = do a <- async (throwIO TestException) (wait a >> assertFailure "") `E.catch` \e -> e @?= TestException case_withAsync_waitCatch :: Assertion case_withAsync_waitCatch = do withAsync (return value) $ \a -> do r <- waitCatch a case r of Left _ -> assertFailure "" Right e -> e @?= value case_withAsync_wait2 :: Assertion case_withAsync_wait2 = do a <- withAsync (threadDelay 1000000) $ return r <- waitCatch a case r of Left e -> fromException e @?= Just ThreadKilled Right _ -> assertFailure "" case_async_cancel :: Assertion case_async_cancel = sequence_ $ replicate 1000 run where run = do a <- async (return value) cancelWith a TestException r <- waitCatch a case r of Left e -> fromException e @?= Just TestException Right r' -> r' @?= value case_async_poll :: Assertion case_async_poll = do a <- async (threadDelay 1000000) r <- poll a when (isJust r) $ assertFailure "" r' <- poll a -- poll twice, just to check we don't deadlock when (isJust r') $ assertFailure "" case_async_poll2 :: Assertion case_async_poll2 = do a <- async (return value) void $ wait a r <- poll a when (isNothing r) $ assertFailure "" r' <- poll a -- poll twice, just to check we don't deadlock when (isNothing r') $ assertFailure ""
dmjio/lifted-async
tests/Test/Async/IO.hs
Haskell
bsd-3-clause
2,374
module CodeGeneration.CodeGenerator (generateCode) where

import Control.Monad.Reader
import Control.Monad.Writer
import Tree.HtmlTree
import CodeGeneration.JavascriptCode
import qualified Data.Map as Map

type CodeGeneration = WriterT String (Reader JavascriptCode) ()

-- | Generate the JS code to create the nodes
generateCode :: JavascriptCode -> [HtmlNode] -> String
generateCode js nodes =
  let (_, code) = runReader (runWriterT $ mapM_ (writeNode Nothing) nodes) js
  in code

writeNode :: Maybe HtmlNode -> HtmlNode -> CodeGeneration

-- | Write a comment node. Writes the comment as a JS comment
writeNode _ (HtmlComment text) = writeLine $ "/*" ++ text ++ " */"

-- | Write a text node. Creates the text node and appends it to its parent
writeNode parent HtmlText { textVarName = Just varName, text = text'} = do
  js <- lift ask
  writeLine $ "var " ++ varName ++ " = document.createTextNode(" ++ stringTemplate js text' ++ ");"
  appendToParent parent varName

-- | Write an element node
writeNode parent node@HtmlElement { elementVarName = Just varName
                                  , tag = tag'
                                  , children = children'
                                  , attributes = attributes' } = do
  js <- lift ask
  writeLine $ "var " ++ varName ++ " = document.createElement(\"" ++ tag' ++ "\");"
  writeAttributes varName attributes'
  forM_ children' (writeNode (Just node))
  appendToParent parent varName

-- | Write a repeated node
writeNode parent HtmlRepeatedElement { repeater = repeater'
                                     , variable = variable'
                                     , node = node' } = do
  js <- lift ask
  (_, body) <- lift (runWriterT $ writeNode parent node')
  writeLine $ repeater' ++ ".forEach(" ++ writeFunction js [variable'] body ++ ");"

-- | Write the code to append a node to its parent
appendToParent :: Maybe HtmlNode -> String -> CodeGeneration
appendToParent (Just HtmlElement { elementVarName = Just varName }) child =
  writeLine $ varName ++ ".appendChild(" ++ child ++ ");"
appendToParent Nothing _ = return ()

-- | Write the code for the attributes of the node
writeAttributes :: String -> Map.Map String String -> CodeGeneration
writeAttributes varName =
  sequence_ . Map.foldrWithKey
    (\k v acc -> (writeLine (varName ++ ".setAttribute(\"" ++ k ++ "\", \"" ++ v ++ "\");") : acc)) []

-- | Append a line to the current code
writeLine :: String -> CodeGeneration
writeLine = tell . (++ "\n")
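-- Hedged usage sketch (not part of the original module): wrapping the generated
-- statements in an IIFE so they can be dropped into a <script> tag. Only
-- 'generateCode' from above is used; 'renderScript' is a hypothetical helper name.
renderScript :: JavascriptCode -> [HtmlNode] -> String
renderScript js nodes = "(function() {\n" ++ generateCode js nodes ++ "})();\n"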
sergioifg94/Hendoman
src/CodeGeneration/CodeGenerator.hs
Haskell
bsd-3-clause
2,414
module Horbits.Orbit (module X, module Horbits.Orbit) where

import Control.Lens

import Horbits.Body
import Horbits.Orbit.Class      as X
import Horbits.Orbit.Data       as X
import Horbits.Orbit.Geometry   as X
import Horbits.Orbit.Position   as X
import Horbits.Orbit.Properties as X
import Horbits.Orbit.Velocity   as X

parentBodyId :: Fold BodyId BodyId
parentBodyId = bodyOrbit . orbitBodyId
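-- Hedged usage sketch (not part of the original module): the fold above can be
-- used with ordinary lens combinators such as (^?) to look up a body's parent,
-- if it has one. 'parentOf' is a made-up name for the example.
parentOf :: BodyId -> Maybe BodyId
parentOf b = b ^? parentBodyId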
chwthewke/horbits
src/horbits/Horbits/Orbit.hs
Haskell
bsd-3-clause
481
{-# LANGUAGE TemplateHaskell #-} {-# LANGUAGE DeriveDataTypeable #-} module Harihara.Options ( parseOptions , HariharaOptions (..) , HariharaException (..) ) where import Control.Exception import Data.Foldable (foldrM) import qualified Data.Set as S import Data.Typeable (Typeable()) import System.Console.GetOpt import System.Directory import System.FilePath.Posix import System.Exit import Harihara.Log -- Harihara Exceptions {{{ data HariharaException = CantFreshDB String | FileDoesNotExist String | InvalidPath String | NoFlagParse String String | MissingLastfmConfig deriving (Typeable) instance Show HariharaException where show e = case e of CantFreshDB fp -> "Can't make a fresh database with path " ++ show fp FileDoesNotExist fp -> "File does not exist: " ++ show fp InvalidPath fp -> "Not a valid filepath: " ++ show fp NoFlagParse flag arg -> "Couldn't parse arg for flag " ++ show flag ++ ": " ++ show arg MissingLastfmConfig -> "Can't use Lastfm function, config didn't define API key or Secret" instance Exception HariharaException -- }}} -- HariharaOptions {{{ data HariharaOptions = HariharaOptions { optsLogLevel :: LogLevel , optsFiles :: S.Set FilePath , optsDBPath :: FilePath , optsDBFresh :: Bool } deriving (Show) defaultOptions :: HariharaOptions defaultOptions = HariharaOptions { optsLogLevel = LogInfo , optsFiles = S.empty , optsDBPath = ".harihara.db" , optsDBFresh = False } -------- onOptsLogLevel :: (LogLevel -> LogLevel) -> OptionsBuilder onOptsLogLevel f o = return $ o { optsLogLevel = f $ optsLogLevel o } setOptsLogLevel :: LogLevel -> OptionsBuilder setOptsLogLevel ll = onOptsLogLevel $ const ll -------- onOptsFiles :: (S.Set FilePath -> S.Set FilePath) -> OptionsBuilder onOptsFiles f o = return $ o { optsFiles = f $ optsFiles o } -------- onOptsDBPath :: (FilePath -> FilePath) -> OptionsBuilder onOptsDBPath f o = return $ o { optsDBPath = f $ optsDBPath o } setOptsDBPath :: FilePath -> OptionsBuilder setOptsDBPath fp = onOptsDBPath $ const fp -------- onOptsDBFresh :: (Bool -> Bool) -> OptionsBuilder onOptsDBFresh f o = return $ o { optsDBFresh = f $ optsDBFresh o } setOptsDBFresh :: Bool -> OptionsBuilder setOptsDBFresh b = onOptsDBFresh $ const b -- }}} -- GetOpt {{{ type OptionsBuilder = HariharaOptions -> IO HariharaOptions parseOptions :: [String] -> IO HariharaOptions parseOptions args = case getOpt Permute testOpts args of (fs, ps, []) -> mkOpts fs ps defaultOptions (_ , _ , es) -> mapM_ putStrLn es >> usage usage :: IO a usage = do putStrLn "Usage:" putStr $ usageInfo "harihara [FLAGS] file1 file2 ..." 
testOpts exitFailure testOpts :: [OptDescr OptionsBuilder] testOpts = [ Option ['l'] ["log"] (ReqArg logArg "NUM") "Log level: 0/silent, 1/error, 2/warn, 3/info, 4/debug" , Option ['d'] ["database"] (ReqArg dbArg "FILE") "Path to database" , Option [] ["fresh-db"] (NoArg freshArg) "Drop the current database entirely" ] mkOpts :: [OptionsBuilder] -> [FilePath] -> OptionsBuilder mkOpts fs ps = appBldrs $ fileBldr : fs where fileBldr = appBldrs $ map fileArg ps appBldrs :: [OptionsBuilder] -> OptionsBuilder appBldrs = flip $ foldrM ($) freshArg :: OptionsBuilder freshArg = setOptsDBFresh True -- | Handle a file argument fileArg :: String -> OptionsBuilder fileArg fp o = do fileExists <- doesFileExist fp if fileExists then onOptsFiles (S.insert fp) o else throwIO $ FileDoesNotExist fp -- | Handle a log level argument logArg :: String -> OptionsBuilder logArg arg o = case arg of "0" -> setOptsLogLevel LogSilent o "silent" -> setOptsLogLevel LogSilent o "1" -> setOptsLogLevel LogError o "error" -> setOptsLogLevel LogError o "2" -> setOptsLogLevel LogWarn o "warn" -> setOptsLogLevel LogWarn o "3" -> setOptsLogLevel LogInfo o "info" -> setOptsLogLevel LogInfo o "4" -> setOptsLogLevel LogDebug o "debug" -> setOptsLogLevel LogDebug o _ -> throwIO $ NoFlagParse "LogLevel" arg -- | Handle a DB path argument dbArg :: String -> OptionsBuilder dbArg fp o = if isValid fp then setOptsDBPath fp o else throwIO $ InvalidPath fp -- }}}
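-- Hedged usage sketch (not part of the original module): parsing a fixed
-- argument list. 'demoParse' is a made-up name; the flags used here are the
-- ones declared in 'testOpts' above, and no file arguments are given, so no
-- existence checks are triggered.
demoParse :: IO ()
demoParse = do
  opts <- parseOptions ["--log", "debug", "--database", "/tmp/test.db"]
  putStrLn $ "Log level: " ++ show (optsLogLevel opts)
  putStrLn $ "DB path:   " ++ optsDBPath opts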
kylcarte/harihara
src/Harihara/Options.hs
Haskell
bsd-3-clause
4,324
{-# LANGUAGE OverloadedStrings #-}

module Application where

import Web.Routes.Nested
import Network.HTTP.Types

defApp :: Application
defApp _ respond = respond (textOnlyStatus status404 "404 Not Found")
athanclark/clark-mining-tech
src/Application.hs
Haskell
bsd-3-clause
213
module Test.Collision (testCollision) where

import Test.Framework
import Test.Framework.Providers.QuickCheck2
import Test.QuickCheck
import Data.List hiding (insert)

import Collision
import Environment
import Test.ArbitraryInstances

testCollision = testGroup "Collision"
  [testProperty "same result as naive" pNaive]

naiveDetection :: AABB -> [AABB] -> [AABB]
naiveDetection aabb aabbs = filter (intersectAABB aabb) aabbs

pNaive :: AABB -> [AABB] -> Property
pNaive aabb aabbs =
  let qtree       = foldr insert (empty worldSize 4) aabbs
      naiveResult = sort $ naiveDetection aabb aabbs
      qtreeResult = sort $ queryIntersecting aabb qtree
      showResults = do
        putStrLn $ "QuadTree: " ++ show qtree
        putStrLn $ "Expected: " ++ show naiveResult ++ ", actual: " ++ show qtreeResult
  in whenFail showResults $ naiveResult == qtreeResult
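-- Hedged helper sketch (not part of the original module): the naive check
-- reduced to a hit count, occasionally convenient when shrinking a failing case.
-- 'naiveHitCount' is a made-up name; it only uses 'naiveDetection' from above.
naiveHitCount :: AABB -> [AABB] -> Int
naiveHitCount aabb = length . naiveDetection aabb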
alexisVallet/haskell-shmup
Test/Collision.hs
Haskell
bsd-3-clause
872
{-# LANGUAGE FlexibleContexts #-}
module LogMan.Processor
  ( run
  ) where

import Control.Monad.State

import LogMan.Filters
import LogMan.LogEntry
import LogMan.LogFile
import LogMan.Options
import LogMan.Output

processEntries :: (MonadIO m, MonadState Options m) => [String] -> m ()
processEntries n = do
  es <- readLogEntries n
  applyFilters es >>= writeOutput

run :: [String] -> IO ()
run argv = do
  (options, n) <- parseOptions argv
  runStateT (processEntries n) options
  return ()
cwmunn/logman
src/LogMan/Processor.hs
Haskell
bsd-3-clause
500
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE DeriveFunctor #-}

module Parser241.Parser.ProductionRule.Internal.Manager where

import Parser241.Parser.ProductionRule.Internal.Maker
import Control.Monad.Writer (Writer(..), runWriter, tell, MonadWriter(..))
import Parser241.Parser.ProductionRule.Internal
import Data.Set as S (Set)
import Data.Map as M (Map, fromList)
import qualified Data.Set as S (fromList)
import Control.Monad (mzero)

newtype Manager' a x = Manager { unManager :: Writer [Maker a] x }
   deriving (Functor, Applicative, Monad, MonadWriter [Maker a])

type Manager a = Manager' a ()

getMakers :: Manager a -> [Maker a]
getMakers m = snd $ runWriter $ unManager m

addMakers :: [Maker a] -> Manager a -> Manager a
addMakers ls m = m >> tell ls

empty :: Manager a
empty = Manager $ tell []

singleton :: Maker a -> Manager a
singleton a = addMakers [a] empty

getRules :: (Ord a) => Manager a -> Set a -> [Rule a]
getRules a nts = do
   maker <- getMakers a
   let (lhs, rhsLs) = unMaker maker
   return $ rule lhs [ reverse $ map (`setSym` nts) rhs | rhs <- rhsLs ]

getNTs :: (Ord a) => Manager a -> Set a
getNTs a = S.fromList $ do
   make <- getMakers a
   case unMaker make of
      (NT x, _) -> return x
      _         -> mzero

-- | Collects the defined syntax and produces a list of production rules.
rules :: (Ord a) => Manager a -> [Rule a]
rules a = getRules a $ getNTs a

-- | Collects the defined syntax and produces a map of production rules.
--
--   This is equivalent to `Map.fromList . rules`.
ruleMap :: (Ord a) => Manager a -> RuleMap a
ruleMap = M.fromList . rules

instance FromMaker Manager' where
   fromMaker = singleton
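-- Hedged usage sketch (not part of the original module): counting the distinct
-- non-terminals a Manager defines, using only 'getNTs' from above ('Set' is
-- Foldable, so 'length' applies directly). 'countNTs' is a made-up name.
countNTs :: Ord a => Manager a -> Int
countNTs = length . getNTs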
YLiLarry/parser241-production-rule
src/Parser241/Parser/ProductionRule/Internal/Manager.hs
Haskell
bsd-3-clause
1,721
-- Copyright 2021 Google LLC -- -- Use of this source code is governed by a BSD-style -- license that can be found in the LICENSE file or at -- https://developers.google.com/open-source/licenses/bsd {-# LANGUAGE GeneralizedNewtypeDeriving #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE ConstraintKinds #-} module Err (Err (..), Errs (..), ErrType (..), Except (..), ErrCtx (..), SrcPosCtx, SrcTextCtx, SrcPos, Fallible (..), Catchable (..), catchErrExcept, FallibleM (..), HardFailM (..), CtxReader (..), runFallibleM, runHardFail, throw, throwErr, throwIf, addContext, addSrcContext, addSrcTextContext, catchIOExcept, liftExcept, liftMaybe, liftMaybeErr, assertEq, ignoreExcept, isSuccess, exceptToMaybe, pprint, docAsStr, asCompilerErr, FallibleApplicativeWrapper, traverseMergingErrs, liftFallibleM, SearcherM (..), Searcher (..), runSearcherM) where import Control.Exception hiding (throw) import Control.Applicative import Control.Monad import Control.Monad.Trans.Maybe import Control.Monad.Identity import Control.Monad.Writer.Strict import Control.Monad.State.Strict import Control.Monad.Reader import Data.Text (unpack) import Data.Text.Prettyprint.Doc.Render.Text import Data.Text.Prettyprint.Doc import GHC.Stack import System.Environment import System.IO.Unsafe -- === core API === data Err = Err ErrType ErrCtx String deriving (Show, Eq) newtype Errs = Errs [Err] deriving (Eq, Semigroup, Monoid) data ErrType = NoErr | ParseErr | TypeErr | KindErr | LinErr | VarDefErr | UnboundVarErr | AmbiguousVarErr | RepeatedVarErr | RepeatedPatVarErr | InvalidPatternErr | CompilerErr | IRVariantErr | NotImplementedErr | DataIOErr | MiscErr | RuntimeErr | ZipErr | EscapedNameErr | ModuleImportErr | MonadFailErr deriving (Show, Eq) type SrcPosCtx = Maybe SrcPos type SrcTextCtx = Maybe (Int, String) -- Int is the offset in the source file data ErrCtx = ErrCtx { srcTextCtx :: SrcTextCtx , srcPosCtx :: SrcPosCtx , messageCtx :: [String] , stackCtx :: Maybe [String] } deriving (Show, Eq) type SrcPos = (Int, Int) class MonadFail m => Fallible m where throwErrs :: Errs -> m a addErrCtx :: ErrCtx -> m a -> m a class Fallible m => Catchable m where catchErr :: m a -> (Errs -> m a) -> m a catchErrExcept :: Catchable m => m a -> m (Except a) catchErrExcept m = catchErr (Success <$> m) (\e -> return $ Failure e) -- We have this in its own class because IO and `Except` can't implement it -- (but FallibleM can) class Fallible m => CtxReader m where getErrCtx :: m ErrCtx -- We have this in its own class because StateT can't implement it -- (but FallibleM, Except and IO all can) class Fallible m => FallibleApplicative m where mergeErrs :: m a -> m b -> m (a, b) newtype FallibleM a = FallibleM { fromFallibleM :: ReaderT ErrCtx Except a } deriving (Functor, Applicative, Monad) instance Fallible FallibleM where throwErrs (Errs errs) = FallibleM $ ReaderT \ambientCtx -> throwErrs $ Errs [Err errTy (ambientCtx <> ctx) s | Err errTy ctx s <- errs] addErrCtx ctx (FallibleM m) = FallibleM $ local (<> ctx) m instance Catchable FallibleM where FallibleM m `catchErr` handler = FallibleM $ ReaderT \ctx -> case runReaderT m ctx of Failure errs -> runReaderT (fromFallibleM $ handler errs) ctx Success ans -> return ans instance FallibleApplicative FallibleM where mergeErrs (FallibleM (ReaderT f1)) (FallibleM (ReaderT f2)) = FallibleM $ ReaderT \ctx -> mergeErrs (f1 ctx) (f2 ctx) instance CtxReader FallibleM where getErrCtx = FallibleM ask instance Fallible IO where throwErrs errs = throwIO errs addErrCtx 
ctx m = do result <- catchIOExcept m liftExcept $ addErrCtx ctx result instance Catchable IO where catchErr cont handler = catchIOExcept cont >>= \case Success result -> return result Failure errs -> handler errs instance FallibleApplicative IO where mergeErrs m1 m2 = do result1 <- catchIOExcept m1 result2 <- catchIOExcept m2 liftExcept $ mergeErrs result1 result2 runFallibleM :: FallibleM a -> Except a runFallibleM m = runReaderT (fromFallibleM m) mempty -- === Except type === -- Except is isomorphic to `Either Errs` but having a distinct type makes it -- easier to debug type errors. data Except a = Failure Errs | Success a deriving (Show, Eq) instance Functor Except where fmap = liftM instance Applicative Except where pure = return liftA2 = liftM2 instance Monad Except where return = Success Failure errs >>= _ = Failure errs Success x >>= f = f x -- === FallibleApplicativeWrapper === -- Wraps a Fallible monad, presenting an applicative interface that sequences -- actions using the error-concatenating `mergeErrs` instead of the default -- abort-on-failure sequencing. newtype FallibleApplicativeWrapper m a = FallibleApplicativeWrapper { fromFallibleApplicativeWrapper :: m a } deriving (Functor) instance FallibleApplicative m => Applicative (FallibleApplicativeWrapper m) where pure x = FallibleApplicativeWrapper $ pure x liftA2 f (FallibleApplicativeWrapper m1) (FallibleApplicativeWrapper m2) = FallibleApplicativeWrapper $ fmap (uncurry f) (mergeErrs m1 m2) -- === HardFail === -- Implements Fallible by crashing. Used in type querying when we want to avoid -- work by trusting decl annotations and skipping the checks. newtype HardFailM a = HardFailM { runHardFail' :: Identity a } deriving (Functor, Applicative, Monad) runHardFail :: HardFailM a -> a runHardFail m = runIdentity $ runHardFail' m instance MonadFail HardFailM where fail s = error s instance Fallible HardFailM where throwErrs errs = error $ pprint errs addErrCtx _ cont = cont instance FallibleApplicative HardFailM where mergeErrs cont1 cont2 = (,) <$> cont1 <*> cont2 -- === convenience layer === throw :: Fallible m => ErrType -> String -> m a throw errTy s = throwErrs $ Errs [addCompilerStackCtx $ Err errTy mempty s] throwErr :: Fallible m => Err -> m a throwErr err = throwErrs $ Errs [addCompilerStackCtx err] addCompilerStackCtx :: Err -> Err addCompilerStackCtx (Err ty ctx msg) = Err ty ctx{stackCtx = compilerStack} msg #ifdef DEX_DEBUG where compilerStack = Just $! 
reverse $ unsafePerformIO currentCallStack #else where compilerStack = stackCtx ctx #endif throwIf :: Fallible m => Bool -> ErrType -> String -> m () throwIf True e s = throw e s throwIf False _ _ = return () addContext :: Fallible m => String -> m a -> m a addContext s m = addErrCtx (mempty {messageCtx = [s]}) m addSrcContext :: Fallible m => SrcPosCtx -> m a -> m a addSrcContext ctx m = addErrCtx (mempty {srcPosCtx = ctx}) m addSrcTextContext :: Fallible m => Int -> String -> m a -> m a addSrcTextContext offset text m = addErrCtx (mempty {srcTextCtx = Just (offset, text)}) m catchIOExcept :: MonadIO m => IO a -> m (Except a) catchIOExcept m = liftIO $ (liftM Success m) `catches` [ Handler \(e::Errs) -> return $ Failure e , Handler \(e::IOError) -> return $ Failure $ Errs [Err DataIOErr mempty $ show e] , Handler \(e::SomeException) -> return $ Failure $ Errs [Err CompilerErr mempty $ show e] ] liftMaybe :: MonadFail m => Maybe a -> m a liftMaybe Nothing = fail "" liftMaybe (Just x) = return x liftMaybeErr :: Fallible m => ErrType -> String -> Maybe a -> m a liftMaybeErr err s Nothing = throw err s liftMaybeErr _ _ (Just x) = return x liftExcept :: Fallible m => Except a -> m a liftExcept (Failure errs) = throwErrs errs liftExcept (Success ans) = return ans liftFallibleM :: Fallible m => FallibleM a -> m a liftFallibleM m = liftExcept $ runFallibleM m ignoreExcept :: HasCallStack => Except a -> a ignoreExcept (Failure e) = error $ pprint e ignoreExcept (Success x) = x isSuccess :: Except a -> Bool isSuccess (Success _) = True isSuccess (Failure _) = False exceptToMaybe :: Except a -> Maybe a exceptToMaybe (Success a) = Just a exceptToMaybe (Failure _) = Nothing assertEq :: (HasCallStack, Fallible m, Show a, Pretty a, Eq a) => a -> a -> String -> m () assertEq x y s = if x == y then return () else throw CompilerErr msg where msg = "assertion failure (" ++ s ++ "):\n" ++ pprint x ++ " != " ++ pprint y ++ "\n\n" ++ prettyCallStack callStack ++ "\n" -- TODO: think about the best way to handle these. This is just a -- backwards-compatibility shim. asCompilerErr :: Fallible m => m a -> m a asCompilerErr cont = addContext "(This is a compiler error!)" cont -- === search monad === infix 0 <!> class (Monad m, Alternative m) => Searcher m where -- Runs the second computation when the first yields an empty set of results. -- This is just `<|>` for greedy searchers like `Maybe`, but in other cases, -- like the list monad, it matters that the second computation isn't run if -- the first succeeds. (<!>) :: m a -> m a -> m a -- Adds an extra error case to `FallibleM` so we can give it an Alternative -- instance with an identity element. 
newtype SearcherM a = SearcherM { runSearcherM' :: MaybeT FallibleM a } deriving (Functor, Applicative, Monad) runSearcherM :: SearcherM a -> Except (Maybe a) runSearcherM m = runFallibleM $ runMaybeT (runSearcherM' m) instance MonadFail SearcherM where fail _ = SearcherM $ MaybeT $ return Nothing instance Fallible SearcherM where throwErrs e = SearcherM $ lift $ throwErrs e addErrCtx ctx (SearcherM (MaybeT m)) = SearcherM $ MaybeT $ addErrCtx ctx $ m instance Alternative SearcherM where empty = SearcherM $ MaybeT $ return Nothing SearcherM (MaybeT m1) <|> SearcherM (MaybeT m2) = SearcherM $ MaybeT do m1 >>= \case Just ans -> return $ Just ans Nothing -> m2 instance Searcher SearcherM where (<!>) = (<|>) instance CtxReader SearcherM where getErrCtx = SearcherM $ lift getErrCtx instance Searcher [] where [] <!> m = m m <!> _ = m instance (Monoid w, Searcher m) => Searcher (WriterT w m) where WriterT m1 <!> WriterT m2 = WriterT (m1 <!> m2) instance (Monoid w, Fallible m) => Fallible (WriterT w m) where throwErrs errs = lift $ throwErrs errs addErrCtx ctx (WriterT m) = WriterT $ addErrCtx ctx m instance Fallible [] where throwErrs _ = [] addErrCtx _ m = m instance Fallible Maybe where throwErrs _ = Nothing addErrCtx _ m = m -- === small pretty-printing utils === -- These are here instead of in PPrint.hs for import cycle reasons pprint :: Pretty a => a -> String pprint x = docAsStr $ pretty x docAsStr :: Doc ann -> String docAsStr doc = unpack $ renderStrict $ layoutPretty layout $ doc layout :: LayoutOptions layout = if unbounded then LayoutOptions Unbounded else defaultLayoutOptions where unbounded = unsafePerformIO $ (Just "1"==) <$> lookupEnv "DEX_PPRINT_UNBOUNDED" traverseMergingErrs :: (Traversable f, FallibleApplicative m) => (a -> m b) -> f a -> m (f b) traverseMergingErrs f xs = fromFallibleApplicativeWrapper $ traverse (\x -> FallibleApplicativeWrapper $ f x) xs -- === instances === instance MonadFail FallibleM where fail s = throw MonadFailErr s instance Fallible Except where throwErrs errs = Failure errs addErrCtx _ (Success ans) = Success ans addErrCtx ctx (Failure (Errs errs)) = Failure $ Errs [Err errTy (ctx <> ctx') s | Err errTy ctx' s <- errs] instance FallibleApplicative Except where mergeErrs (Success x) (Success y) = Success (x, y) mergeErrs x y = Failure (getErrs x <> getErrs y) where getErrs :: Except a -> Errs getErrs = \case Failure e -> e Success _ -> mempty instance MonadFail Except where fail s = Failure $ Errs [Err CompilerErr mempty s] instance Exception Errs instance Show Errs where show errs = pprint errs instance Pretty Err where pretty (Err e ctx s) = pretty e <> pretty s <> prettyCtx -- TODO: figure out a more uniform way to newlines where prettyCtx = case ctx of ErrCtx _ Nothing [] Nothing -> mempty _ -> hardline <> pretty ctx instance Pretty ErrCtx where pretty (ErrCtx maybeTextCtx maybePosCtx messages stack) = -- The order of messages is outer-scope-to-inner-scope, but we want to print -- them starting the other way around (Not for a good reason. It's just what -- we've always done.) 
prettyLines (reverse messages) <> highlightedSource <> prettyStack where highlightedSource = case (maybeTextCtx, maybePosCtx) of (Just (offset, text), Just (start, stop)) -> hardline <> pretty (highlightRegion (start - offset, stop - offset) text) _ -> mempty prettyStack = case stack of Nothing -> mempty Just s -> hardline <> "Compiler stack trace:" <> nest 2 (hardline <> prettyLines s) instance Pretty a => Pretty (Except a) where pretty (Success x) = "Success:" <+> pretty x pretty (Failure e) = "Failure:" <+> pretty e instance Pretty ErrType where pretty e = case e of -- NoErr tags a chunk of output that was promoted into the Err ADT -- by appending Results. NoErr -> "" ParseErr -> "Parse error:" TypeErr -> "Type error:" KindErr -> "Kind error:" LinErr -> "Linearity error: " IRVariantErr -> "Internal IR validation error: " VarDefErr -> "Error in (earlier) definition of variable: " UnboundVarErr -> "Error: variable not in scope: " AmbiguousVarErr -> "Error: ambiguous variable: " RepeatedVarErr -> "Error: variable already defined: " RepeatedPatVarErr -> "Error: variable already defined within pattern: " InvalidPatternErr -> "Error: not a valid pattern: " NotImplementedErr -> "Not implemented:" <> line <> "Please report this at github.com/google-research/dex-lang/issues\n" <> line CompilerErr -> "Compiler bug!" <> line <> "Please report this at github.com/google-research/dex-lang/issues\n" <> line DataIOErr -> "IO error: " MiscErr -> "Error:" RuntimeErr -> "Runtime error" ZipErr -> "Zipping error" EscapedNameErr -> "Leaked local variables:" ModuleImportErr -> "Module import error: " MonadFailErr -> "MonadFail error (internal error)" instance Fallible m => Fallible (ReaderT r m) where throwErrs errs = lift $ throwErrs errs addErrCtx ctx (ReaderT f) = ReaderT \r -> addErrCtx ctx $ f r instance Catchable m => Catchable (ReaderT r m) where ReaderT f `catchErr` handler = ReaderT \r -> f r `catchErr` \e -> runReaderT (handler e) r instance FallibleApplicative m => FallibleApplicative (ReaderT r m) where mergeErrs (ReaderT f1) (ReaderT f2) = ReaderT \r -> mergeErrs (f1 r) (f2 r) instance CtxReader m => CtxReader (ReaderT r m) where getErrCtx = lift getErrCtx instance Pretty Errs where pretty (Errs [err]) = pretty err pretty (Errs errs) = prettyLines errs instance Fallible m => Fallible (StateT s m) where throwErrs errs = lift $ throwErrs errs addErrCtx ctx (StateT f) = StateT \s -> addErrCtx ctx $ f s instance Catchable m => Catchable (StateT s m) where StateT f `catchErr` handler = StateT \s -> f s `catchErr` \e -> runStateT (handler e) s instance CtxReader m => CtxReader (StateT s m) where getErrCtx = lift getErrCtx instance Semigroup ErrCtx where ErrCtx text pos ctxStrs stk <> ErrCtx text' pos' ctxStrs' stk' = ErrCtx (leftmostJust text text') (rightmostJust pos pos' ) (ctxStrs <> ctxStrs') (leftmostJust stk stk') -- We usually extend errors form the right instance Monoid ErrCtx where mempty = ErrCtx Nothing Nothing [] Nothing -- === misc util stuff === leftmostJust :: Maybe a -> Maybe a -> Maybe a leftmostJust (Just x) _ = Just x leftmostJust Nothing y = y rightmostJust :: Maybe a -> Maybe a -> Maybe a rightmostJust = flip leftmostJust prettyLines :: (Foldable f, Pretty a) => f a -> Doc ann prettyLines xs = foldMap (\d -> pretty d <> hardline) xs highlightRegion :: (Int, Int) -> String -> String highlightRegion pos@(low, high) s | low > high || high > length s = error $ "Bad region: \n" ++ show pos ++ "\n" ++ s | otherwise = -- TODO: flag to control line numbers -- (disabling for now because it 
makes quine tests tricky) -- "Line " ++ show (1 + lineNum) ++ "\n" allLines !! lineNum ++ "\n" ++ take start (repeat ' ') ++ take (stop - start) (repeat '^') ++ "\n" where allLines = lines s (lineNum, start, stop) = getPosTriple pos allLines getPosTriple :: (Int, Int) -> [String] -> (Int, Int, Int) getPosTriple (start, stop) lines_ = (lineNum, start - offset, stop') where lineLengths = map ((+1) . length) lines_ lineOffsets = cumsum lineLengths lineNum = maxLT lineOffsets start offset = lineOffsets !! lineNum stop' = min (stop - offset) (lineLengths !! lineNum) cumsum :: [Int] -> [Int] cumsum xs = scanl (+) 0 xs maxLT :: Ord a => [a] -> a -> Int maxLT [] _ = 0 maxLT (x:xs) n = if n < x then -1 else 1 + maxLT xs n
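-- Hedged usage sketch (not part of the original module): a fallible computation
-- run with 'runFallibleM'. 'safeDiv' and 'demoErr' are made-up names; only
-- functions defined above ('throw', 'addContext', 'runFallibleM',
-- 'exceptToMaybe') are used.
safeDiv :: Fallible m => Int -> Int -> m Int
safeDiv _ 0 = throw MiscErr "division by zero"
safeDiv x y = return (x `div` y)

demoErr :: (Maybe Int, Maybe Int)
demoErr =
  ( exceptToMaybe $ runFallibleM $ addContext "dividing" $ safeDiv 10 2  -- Just 5
  , exceptToMaybe $ runFallibleM $ safeDiv 1 0 )                         -- Nothing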
google-research/dex-lang
src/lib/Err.hs
Haskell
bsd-3-clause
17,510
{-# LANGUAGE GADTs #-}
{-# LANGUAGE KindSignatures #-}

module Signal.Core.Stream where

import Control.Monad.Operational.Compositional
import Control.Applicative
import Control.Monad
import Prelude ((.), ($))

--------------------------------------------------------------------------------
-- * Streams
--------------------------------------------------------------------------------

-- | Imperative model of co-iterative streams
data Stream (instr :: (* -> *) -> * -> *) (a :: *)
  where
    Stream :: Program instr (Program instr a) -> Stream instr a

-- | `Shorthand` for streams which produce values of type `exp a`
type Str instr a = Stream instr (IExp instr a)

--------------------------------------------------------------------------------
-- **

-- | ...
repeat :: (e ~ IExp instr) => e a -> Str instr a
repeat = Stream . return . return

-- | ...
map :: (e ~ IExp instr) => (e a -> e b) -> Str instr a -> Str instr b
map f (Stream s) = Stream $ fmap (fmap f) s

--------------------------------------------------------------------------------
-- **

-- | Run stream to produce transition action
run :: Stream instr a -> Program instr a
run (Stream init) = join init

--------------------------------------------------------------------------------
markus-git/signal
src/Signal/Core/Stream.hs
Haskell
bsd-3-clause
1,276
module Sound.Synthesis (module X) where

import Sound.Synthesis.Amplitude as X
import Sound.Synthesis.Combinator as X
import Sound.Synthesis.Constant as X
import Sound.Synthesis.Envelope as X
import Sound.Synthesis.Frequency as X
import Sound.Synthesis.Interval as X
import Sound.Synthesis.Type as X
import Sound.Synthesis.Wave as X
import Sound.Synthesis.Write as X
pskrz/Synthesis
src/Sound/Synthesis.hs
Haskell
bsd-3-clause
366
module OpenCV.Unsafe
    ( unsafeCoerceMat
    , unsafeCoerceMatM
      -- * Mutable Matrix
    , unsafeFreeze
    , unsafeThaw
    , unsafeRead
    , unsafeWrite
    ) where

import "base" Foreign.Storable ( Storable, peek, poke )
import "primitive" Control.Monad.Primitive ( PrimMonad, PrimState, unsafePrimToPrim )
import "this" OpenCV.Internal.Core.Types.Mat
import "this" OpenCV.Internal.Mutable

unsafeRead
    :: (PrimMonad m, Storable value)
    => Mut (Mat shape channels depth) (PrimState m) -> [Int] -> m value
unsafeRead matM pos =
    unsafePrimToPrim $ withMatData (unMut matM) $ \step dataPtr ->
      let elemPtr = matElemAddress dataPtr (fromIntegral <$> step) pos
      in peek elemPtr

unsafeWrite
    :: (PrimMonad m, Storable value)
    => Mut (Mat shape channels depth) (PrimState m) -> [Int] -> value -> m ()
unsafeWrite matM pos value =
    unsafePrimToPrim $ withMatData (unMut matM) $ \step dataPtr ->
      let elemPtr = matElemAddress dataPtr (fromIntegral <$> step) pos
      in poke elemPtr value
Cortlandd/haskell-opencv
src/OpenCV/Unsafe.hs
Haskell
bsd-3-clause
1,043
-- | The main prover loop. {-# LANGUAGE RecordWildCards, MultiParamTypeClasses, GADTs, BangPatterns, OverloadedStrings, ScopedTypeVariables, GeneralizedNewtypeDeriving, PatternGuards, TypeFamilies #-} module Twee where import Twee.Base import Twee.Rule hiding (normalForms) import qualified Twee.Rule as Rule import Twee.Equation import qualified Twee.Proof as Proof import Twee.Proof(Axiom(..), Proof(..), ProvedGoal(..), provedGoal, certify, derivation) import Twee.CP hiding (Config) import qualified Twee.CP as CP import Twee.Join hiding (Config, defaultConfig) import qualified Twee.Join as Join import qualified Twee.Rule.Index as RuleIndex import Twee.Rule.Index(RuleIndex(..)) import qualified Twee.Index as Index import Twee.Index(Index) import Twee.Constraints import Twee.Utils import Twee.Task import qualified Twee.PassiveQueue as Queue import Twee.PassiveQueue(Queue, Passive(..)) import qualified Data.IntMap.Strict as IntMap import Data.IntMap(IntMap) import Data.Maybe import Data.List import Data.Function import qualified Data.Map.Strict as Map import Data.Map(Map) import Data.Int import Data.Ord import Control.Monad import Control.Monad.IO.Class import Control.Monad.Trans.Class import qualified Control.Monad.Trans.State.Strict as StateM ---------------------------------------------------------------------- -- * Configuration and prover state. ---------------------------------------------------------------------- -- | The prover configuration. data Config f = Config { cfg_accept_term :: Maybe (Term f -> Bool), cfg_max_critical_pairs :: Int64, cfg_max_cp_depth :: Int, cfg_simplify :: Bool, cfg_renormalise_percent :: Int, cfg_cp_sample_size :: Int, cfg_renormalise_threshold :: Int, cfg_set_join_goals :: Bool, cfg_always_simplify :: Bool, cfg_critical_pairs :: CP.Config, cfg_join :: Join.Config, cfg_proof_presentation :: Proof.Config f } -- | The prover state. data State f = State { st_rules :: !(RuleIndex f (ActiveRule f)), st_active_ids :: !(IntMap (Active f)), st_rule_ids :: !(IntMap (ActiveRule f)), st_joinable :: !(Index f (Equation f)), st_goals :: ![Goal f], st_queue :: !(Queue Params), st_next_active :: {-# UNPACK #-} !Id, st_next_rule :: {-# UNPACK #-} !RuleId, st_considered :: {-# UNPACK #-} !Int64, st_simplified_at :: {-# UNPACK #-} !Id, st_cp_sample :: ![Maybe (Overlap f)], st_cp_next_sample :: ![(Integer, Int)], st_num_cps :: !Integer, st_messages_rev :: ![Message f] } -- | The default prover configuration. defaultConfig :: Config f defaultConfig = Config { cfg_accept_term = Nothing, cfg_max_critical_pairs = maxBound, cfg_max_cp_depth = maxBound, cfg_simplify = True, cfg_renormalise_percent = 5, cfg_renormalise_threshold = 20, cfg_cp_sample_size = 100, cfg_set_join_goals = True, cfg_always_simplify = False, cfg_critical_pairs = CP.defaultConfig, cfg_join = Join.defaultConfig, cfg_proof_presentation = Proof.defaultConfig } -- | Does this configuration run the prover in a complete mode? configIsComplete :: Config f -> Bool configIsComplete Config{..} = isNothing (cfg_accept_term) && cfg_max_critical_pairs == maxBound && cfg_max_cp_depth == maxBound -- | The initial state. 
initialState :: Config f -> State f initialState Config{..} = State { st_rules = RuleIndex.empty, st_active_ids = IntMap.empty, st_rule_ids = IntMap.empty, st_joinable = Index.empty, st_goals = [], st_queue = Queue.empty, st_next_active = 1, st_next_rule = 0, st_considered = 0, st_simplified_at = 1, st_cp_sample = [], st_cp_next_sample = reservoir cfg_cp_sample_size, st_num_cps = 0, st_messages_rev = [] } ---------------------------------------------------------------------- -- * Messages. ---------------------------------------------------------------------- -- | A message which is produced by the prover when something interesting happens. data Message f = -- | A new rule. NewActive !(Active f) -- | A new joinable equation. | NewEquation !(Equation f) -- | A rule was deleted. | DeleteActive !(Active f) -- | The CP queue was simplified. | SimplifyQueue -- | The rules were reduced wrt each other. | Interreduce -- | Status update: how many queued critical pairs there are. | Status !Int instance Function f => Pretty (Message f) where pPrint (NewActive rule) = pPrint rule pPrint (NewEquation eqn) = text " (hard)" <+> pPrint eqn pPrint (DeleteActive rule) = text " (delete rule " <#> pPrint (active_id rule) <#> text ")" pPrint SimplifyQueue = text " (simplifying queued critical pairs...)" pPrint Interreduce = text " (simplifying rules with respect to one another...)" pPrint (Status n) = text " (" <#> pPrint n <+> text "queued critical pairs)" -- | Emit a message. message :: PrettyTerm f => Message f -> State f -> State f message !msg state@State{..} = state { st_messages_rev = msg:st_messages_rev } -- | Forget about all emitted messages. clearMessages :: State f -> State f clearMessages state@State{..} = state { st_messages_rev = [] } -- | Get all emitted messages. messages :: State f -> [Message f] messages state = reverse (st_messages_rev state) ---------------------------------------------------------------------- -- * The CP queue. ---------------------------------------------------------------------- data Params instance Queue.Params Params where type Score Params = Int type Id Params = RuleId type PackedId Params = Int32 type PackedScore Params = Int32 packScore _ = fromIntegral unpackScore _ = fromIntegral packId _ = fromIntegral unpackId _ = fromIntegral -- | Compute all critical pairs from a rule. {-# INLINEABLE makePassives #-} {-# SCC makePassives #-} makePassives :: Function f => Config f -> State f -> ActiveRule f -> [Passive Params] makePassives Config{..} State{..} rule = [ Passive (fromIntegral (score cfg_critical_pairs o)) (rule_rid rule1) (rule_rid rule2) (fromIntegral (overlap_pos o)) | (rule1, rule2, o) <- overlaps (Depth cfg_max_cp_depth) (index_oriented st_rules) rules rule ] where rules = IntMap.elems st_rule_ids -- | Turn a Passive back into an overlap. -- Doesn't try to simplify it. {-# INLINEABLE findPassive #-} {-# SCC findPassive #-} findPassive :: forall f. Function f => State f -> Passive Params -> Maybe (ActiveRule f, ActiveRule f, Overlap f) findPassive State{..} Passive{..} = do rule1 <- IntMap.lookup (fromIntegral passive_rule1) st_rule_ids rule2 <- IntMap.lookup (fromIntegral passive_rule2) st_rule_ids let !depth = 1 + max (the rule1) (the rule2) overlap <- overlapAt (fromIntegral passive_pos) depth (renameAvoiding (the rule2 :: Rule f) (the rule1)) (the rule2) return (rule1, rule2, overlap) -- | Renormalise a queued Passive. 
{-# INLINEABLE simplifyPassive #-} {-# SCC simplifyPassive #-} simplifyPassive :: Function f => Config f -> State f -> Passive Params -> Maybe (Passive Params) simplifyPassive Config{..} state@State{..} passive = do (_, _, overlap) <- findPassive state passive overlap <- simplifyOverlap (index_oriented st_rules) overlap return passive { passive_score = fromIntegral $ fromIntegral (passive_score passive) `intMin` score cfg_critical_pairs overlap } -- | Check if we should renormalise the queue. {-# INLINEABLE shouldSimplifyQueue #-} shouldSimplifyQueue :: Function f => Config f -> State f -> Bool shouldSimplifyQueue Config{..} State{..} = length (filter isNothing st_cp_sample) * 100 >= cfg_renormalise_threshold * cfg_cp_sample_size -- | Renormalise the entire queue. {-# INLINEABLE simplifyQueue #-} {-# SCC simplifyQueue #-} simplifyQueue :: Function f => Config f -> State f -> State f simplifyQueue config state = resetSample config state { st_queue = simp (st_queue state) } where simp = Queue.mapMaybe (simplifyPassive config state) -- | Enqueue a set of critical pairs. {-# INLINEABLE enqueue #-} {-# SCC enqueue #-} enqueue :: Function f => State f -> RuleId -> [Passive Params] -> State f enqueue state rule passives = state { st_queue = Queue.insert rule passives (st_queue state) } -- | Dequeue a critical pair. -- -- Also takes care of: -- -- * removing any orphans from the head of the queue -- * ignoring CPs that are too big {-# INLINEABLE dequeue #-} {-# SCC dequeue #-} dequeue :: Function f => Config f -> State f -> (Maybe (CriticalPair f, ActiveRule f, ActiveRule f), State f) dequeue Config{..} state@State{..} = case deq 0 st_queue of -- Explicitly make the queue empty, in case it e.g. contained a -- lot of orphans Nothing -> (Nothing, state { st_queue = Queue.empty }) Just (overlap, n, queue) -> (Just overlap, state { st_queue = queue, st_considered = st_considered + n }) where deq !n queue = do (passive, queue) <- Queue.removeMin queue case findPassive state passive of Just (rule1, rule2, overlap@Overlap{overlap_eqn = t :=: u}) | fromMaybe True (cfg_accept_term <*> pure t), fromMaybe True (cfg_accept_term <*> pure u), cp <- makeCriticalPair rule1 rule2 overlap -> return ((cp, rule1, rule2), n+1, queue) _ -> deq (n+1) queue ---------------------------------------------------------------------- -- * Active rewrite rules. ---------------------------------------------------------------------- data Active f = Active { active_id :: {-# UNPACK #-} !Id, active_depth :: {-# UNPACK #-} !Depth, active_rule :: {-# UNPACK #-} !(Rule f), active_top :: !(Maybe (Term f)), active_proof :: {-# UNPACK #-} !(Proof f), -- A model in which the rule is false (used when reorienting) active_model :: !(Model f), active_rules :: ![ActiveRule f] } active_cp :: Active f -> CriticalPair f active_cp Active{..} = CriticalPair { cp_eqn = unorient active_rule, cp_depth = active_depth, cp_top = active_top, cp_proof = derivation active_proof } -- An active oriented in a particular direction. 
data ActiveRule f = ActiveRule { rule_active :: {-# UNPACK #-} !Id, rule_rid :: {-# UNPACK #-} !RuleId, rule_depth :: {-# UNPACK #-} !Depth, rule_rule :: {-# UNPACK #-} !(Rule f), rule_positions :: !(Positions f) } instance PrettyTerm f => Symbolic (ActiveRule f) where type ConstantOf (ActiveRule f) = f termsDL ActiveRule{..} = termsDL rule_rule subst_ sub r@ActiveRule{..} = r { rule_rule = rule', rule_positions = positions (lhs rule') } where rule' = subst_ sub rule_rule instance Eq (Active f) where (==) = (==) `on` active_id instance Eq (ActiveRule f) where (==) = (==) `on` rule_rid instance Function f => Pretty (Active f) where pPrint Active{..} = pPrint active_id <#> text "." <+> pPrint (canonicalise active_rule) instance Has (ActiveRule f) Id where the = rule_active instance Has (ActiveRule f) RuleId where the = rule_rid instance Has (ActiveRule f) Depth where the = rule_depth instance f ~ g => Has (ActiveRule f) (Rule g) where the = rule_rule instance f ~ g => Has (ActiveRule f) (Positions g) where the = rule_positions newtype RuleId = RuleId Id deriving (Eq, Ord, Show, Num, Real, Integral, Enum) -- Add a new active. {-# INLINEABLE addActive #-} {-# SCC addActive #-} addActive :: Function f => Config f -> State f -> (Id -> RuleId -> RuleId -> Active f) -> State f addActive config state@State{..} active0 = let active@Active{..} = active0 st_next_active st_next_rule (succ st_next_rule) state' = message (NewActive active) $ addActiveOnly state{st_next_active = st_next_active+1, st_next_rule = st_next_rule+2} active in if subsumed st_joinable st_rules (unorient active_rule) then state else normaliseGoals config $ foldl' enqueueRule state' active_rules where enqueueRule state rule = sample config (length passives) passives $ enqueue state (the rule) passives where passives = makePassives config state rule -- Update the list of sampled critical pairs. {-# INLINEABLE sample #-} sample :: Function f => Config f -> Int -> [Passive Params] -> State f -> State f sample cfg m passives state@State{st_cp_next_sample = ((n, pos):rest), ..} | idx < fromIntegral m = sample cfg m passives state { st_cp_next_sample = rest, st_cp_sample = take pos st_cp_sample ++ [find (passives !! fromIntegral idx)] ++ drop (pos+1) st_cp_sample } | otherwise = state{st_num_cps = st_num_cps + fromIntegral m} where idx = n - st_num_cps find passive = do (_, _, overlap) <- findPassive state passive simplifyOverlap (index_oriented st_rules) overlap -- Reset the list of sampled critical pairs. {-# INLINEABLE resetSample #-} resetSample :: Function f => Config f -> State f -> State f resetSample cfg@Config{..} state@State{..} = foldl' sample1 state' (Queue.toList st_queue) where state' = state { st_num_cps = 0, st_cp_next_sample = reservoir cfg_cp_sample_size, st_cp_sample = [] } sample1 state (n, passives) = sample cfg n passives state -- Simplify the sampled critical pairs. -- (A sampled critical pair is replaced with Nothing if it can be -- simplified.) {-# INLINEABLE simplifySample #-} simplifySample :: Function f => State f -> State f simplifySample state@State{..} = state{st_cp_sample = map (>>= simp) st_cp_sample} where simp overlap = do overlap' <- simplifyOverlap (index_oriented st_rules) overlap guard (overlap_eqn overlap == overlap_eqn overlap') return overlap -- Add an active without generating critical pairs. Used in interreduction. 
{-# INLINEABLE addActiveOnly #-} addActiveOnly :: Function f => State f -> Active f -> State f addActiveOnly state@State{..} active@Active{..} = state { st_rules = foldl' insertRule st_rules active_rules, st_active_ids = IntMap.insert (fromIntegral active_id) active st_active_ids, st_rule_ids = foldl' insertRuleId st_rule_ids active_rules } where insertRule rules rule@ActiveRule{..} = RuleIndex.insert (lhs rule_rule) rule rules insertRuleId rules rule@ActiveRule{..} = IntMap.insert (fromIntegral rule_rid) rule rules -- Delete an active. Used in interreduction, not suitable for general use. {-# INLINE deleteActive #-} deleteActive :: Function f => State f -> Active f -> State f deleteActive state@State{..} Active{..} = state { st_rules = foldl' deleteRule st_rules active_rules, st_active_ids = IntMap.delete (fromIntegral active_id) st_active_ids, st_rule_ids = foldl' deleteRuleId st_rule_ids active_rules } where deleteRule rules rule = RuleIndex.delete (lhs (rule_rule rule)) rule rules deleteRuleId rules ActiveRule{..} = IntMap.delete (fromIntegral rule_rid) rules -- Try to join a critical pair. {-# INLINEABLE consider #-} consider :: Function f => Config f -> State f -> CriticalPair f -> State f consider config state cp = considerUsing (st_rules state) config state cp -- Try to join a critical pair, but using a different set of critical -- pairs for normalisation. {-# INLINEABLE considerUsing #-} {-# SCC considerUsing #-} considerUsing :: Function f => RuleIndex f (ActiveRule f) -> Config f -> State f -> CriticalPair f -> State f considerUsing rules config@Config{..} state@State{..} cp0 = -- Important to canonicalise the rule so that we don't get -- bigger and bigger variable indices over time let cp = canonicalise cp0 in case joinCriticalPair cfg_join st_joinable rules Nothing cp of Right (mcp, cps) -> let state' = foldl' (considerUsing rules config) state cps in case mcp of Just cp -> addJoinable state' (cp_eqn cp) Nothing -> state' Left (cp, model) -> foldl' (addCP config model) state (split cp) {-# INLINEABLE addCP #-} addCP :: Function f => Config f -> Model f -> State f -> CriticalPair f -> State f addCP config model state@State{..} CriticalPair{..} = let pf = certify cp_proof rule = orient cp_eqn (Proof.simpleLemma pf) makeRule n k r = ActiveRule { rule_active = n, rule_rid = k, rule_depth = cp_depth, rule_rule = r rule, rule_positions = positions (lhs (r rule)) } in addActive config state $ \n k1 k2 -> Active { active_id = n, active_depth = cp_depth, active_rule = rule, active_model = model, active_top = cp_top, active_proof = pf, active_rules = usortBy (comparing (canonicalise . rule_rule)) $ makeRule n k1 id: [ makeRule n k2 backwards | not (oriented (orientation rule)) ] } -- Add a new equation. {-# INLINEABLE addAxiom #-} addAxiom :: Function f => Config f -> State f -> Axiom f -> State f addAxiom config state axiom = consider config state $ CriticalPair { cp_eqn = axiom_eqn axiom, cp_depth = 0, cp_top = Nothing, cp_proof = Proof.axiom axiom } -- Record an equation as being joinable. {-# INLINEABLE addJoinable #-} addJoinable :: Function f => State f -> Equation f -> State f addJoinable state eqn@(t :=: u) = message (NewEquation eqn) $ state { st_joinable = Index.insert t (t :=: u) $ Index.insert u (u :=: t) (st_joinable state) } -- For goal terms we store the set of all their normal forms. -- Name and number are for information only. 
data Goal f = Goal { goal_name :: String, goal_number :: Int, goal_eqn :: Equation f, goal_lhs :: Map (Term f) (Reduction f), goal_rhs :: Map (Term f) (Reduction f) } -- Add a new goal. {-# INLINEABLE addGoal #-} addGoal :: Function f => Config f -> State f -> Goal f -> State f addGoal config state@State{..} goal = normaliseGoals config state { st_goals = goal:st_goals } -- Normalise all goals. {-# INLINEABLE normaliseGoals #-} normaliseGoals :: Function f => Config f -> State f -> State f normaliseGoals Config{..} state@State{..} = state { st_goals = map (goalMap (nf (rewrite reduces (index_all st_rules)))) st_goals } where goalMap f goal@Goal{..} = goal { goal_lhs = f (eqn_lhs goal_eqn) goal_lhs, goal_rhs = f (eqn_rhs goal_eqn) goal_rhs } nf reduce t0 goals | cfg_set_join_goals = Rule.normalForms reduce goals | otherwise = Map.fromList $ [ (result t0 q, q) | (t, r) <- Map.toList goals, let q = r `trans` Rule.normaliseWith (const True) reduce t ] -- Recompute all normal forms of all goals. Starts from the original goal term. -- Different from normalising all goals, because there may be an intermediate -- term on one of the reduction paths which we can now rewrite in a different -- way. {-# INLINEABLE recomputeGoals #-} recomputeGoals :: Function f => Config f -> State f -> State f recomputeGoals config state = -- Make this strict so that newTask can time it correctly forceList (map goal_lhs (st_goals state')) `seq` forceList (map goal_rhs (st_goals state')) `seq` state' where state' = normaliseGoals config (state { st_goals = map reset (st_goals state) }) reset goal@Goal{goal_eqn = t :=: u, ..} = goal { goal_lhs = Map.singleton t [], goal_rhs = Map.singleton u [] } forceList [] = () forceList (x:xs) = x `seq` forceList xs -- Create a goal. {-# INLINE goal #-} goal :: Int -> String -> Equation f -> Goal f goal n name (t :=: u) = Goal { goal_name = name, goal_number = n, goal_eqn = t :=: u, goal_lhs = Map.singleton t [], goal_rhs = Map.singleton u [] } ---------------------------------------------------------------------- -- Interreduction. ---------------------------------------------------------------------- -- Simplify all rules. {-# INLINEABLE interreduce #-} {-# SCC interreduce #-} interreduce :: Function f => Config f -> State f -> State f interreduce _ state@State{..} | st_simplified_at == st_next_active = state interreduce config@Config{..} state = let state' = foldl' (interreduce1 config) -- Clear out st_joinable, since we don't know which -- equations have made use of each active. state { st_joinable = Index.empty } (IntMap.elems (st_active_ids state)) in state' { st_joinable = st_joinable state, st_simplified_at = st_next_active state' } {-# INLINEABLE interreduce1 #-} interreduce1 :: Function f => Config f -> State f -> Active f -> State f interreduce1 config@Config{..} state active = -- Exclude the active from the rewrite rules when testing -- joinability, otherwise it will be trivially joinable. 
case joinCriticalPair cfg_join Index.empty -- (st_joinable state) (st_rules (deleteActive state active)) (Just (active_model active)) (active_cp active) of Right (_, cps) -> flip (foldl' (consider config)) cps $ message (DeleteActive active) $ deleteActive state active Left (cp, model) | cp_eqn cp `simplerThan` cp_eqn (active_cp active) -> flip (foldl' (consider config)) (split cp) $ message (DeleteActive active) $ deleteActive state active | model /= active_model active -> flip addActiveOnly active { active_model = model } $ deleteActive state active | otherwise -> state ---------------------------------------------------------------------- -- The main loop. ---------------------------------------------------------------------- data Output m f = Output { output_message :: Message f -> m () } {-# INLINE complete #-} complete :: (Function f, MonadIO m) => Output m f -> Config f -> State f -> m (State f) complete Output{..} config@Config{..} state = flip StateM.execStateT state $ do tasks <- sequence [newTask 10 (fromIntegral cfg_renormalise_percent / 100) $ do state <- StateM.get when (shouldSimplifyQueue config state) $ do lift $ output_message SimplifyQueue StateM.put $! simplifyQueue config state, newTask 1 0.05 $ do when cfg_simplify $ do lift $ output_message Interreduce state <- StateM.get StateM.put $! simplifySample $! interreduce config state, newTask 1 0.02 $ do state <- StateM.get StateM.put $! recomputeGoals config state, newTask 60 0.01 $ do State{..} <- StateM.get let !n = Queue.queueSize st_queue lift $ output_message (Status n)] let loop = do progress <- StateM.state (complete1 config) when cfg_always_simplify $ do lift $ output_message Interreduce state <- StateM.get StateM.put $! simplifySample $! interreduce config state state <- StateM.get lift $ mapM_ output_message (messages state) StateM.put (clearMessages state) mapM_ checkTask tasks when progress loop loop {-# INLINEABLE complete1 #-} complete1 :: Function f => Config f -> State f -> (Bool, State f) complete1 config@Config{..} state | st_considered state >= cfg_max_critical_pairs = (False, state) | solved state = (False, state) | otherwise = case dequeue config state of (Nothing, state) -> (False, state) (Just (overlap, _, _), state) -> (True, consider config state overlap) {-# INLINEABLE solved #-} solved :: Function f => State f -> Bool solved = not . null . solutions -- Return whatever goals we have proved and their proofs. {-# INLINEABLE solutions #-} {-# SCC solutions #-} solutions :: Function f => State f -> [ProvedGoal f] solutions State{..} = do Goal{goal_lhs = ts, goal_rhs = us, ..} <- st_goals let sols = Map.keys (Map.intersection ts us) guard (not (null sols)) let sol:_ = sols let t = ts Map.! sol u = us Map.! sol -- Strict so that we check the proof before returning a solution !p = Proof.certify $ reductionProof (eqn_lhs goal_eqn) t `Proof.trans` Proof.symm (reductionProof (eqn_rhs goal_eqn) u) return (provedGoal goal_number goal_name p) -- Return all current rewrite rules. {-# INLINEABLE rules #-} rules :: Function f => State f -> [Rule f] rules = map active_rule . IntMap.elems . st_active_ids ---------------------------------------------------------------------- -- For code which uses twee as a library. 
---------------------------------------------------------------------- {-# INLINEABLE completePure #-} completePure :: Function f => Config f -> State f -> State f completePure cfg state | progress = completePure cfg (clearMessages state') | otherwise = state' where (progress, state') = complete1 cfg state {-# INLINEABLE normaliseTerm #-} normaliseTerm :: Function f => State f -> Term f -> Reduction f normaliseTerm State{..} t = normaliseWith (const True) (rewrite reduces (index_all st_rules)) t {-# INLINEABLE normalForms #-} normalForms :: Function f => State f -> Term f -> Map (Term f) (Reduction f) normalForms State{..} t = Rule.normalForms (rewrite reduces (index_all st_rules)) (Map.singleton t []) {-# INLINEABLE simplifyTerm #-} simplifyTerm :: Function f => State f -> Term f -> Term f simplifyTerm State{..} t = simplify (index_oriented st_rules) t
nick8325/kbc
src/Twee.hs
Haskell
bsd-3-clause
25,575
type Matrix a = [Row a]
type Row a = [a]
type Grid = Matrix Digit
type Digit = Char

digits :: [Char]
digits = ['1' .. '9']

blank :: Digit -> Bool
blank = (== '0')

solve :: Grid -> [Grid]
solve = filter valid . completions

completions :: Grid -> [Grid]
completions d = []

valid :: Grid -> Bool
valid d = False
trymilix/cookbooks
Software/haskell/sudoku.hs
Haskell
apache-2.0
316
module EditWebhook where

import Github.Repos.Webhooks
import qualified Github.Auth as Auth
import Github.Data.Definitions

main :: IO ()
main = do
  let auth = Auth.OAuth "oauthtoken"
  let editWebhookDef = EditRepoWebhook {
        editRepoWebhookRemoveEvents = Just [WebhookWildcardEvent],
        editRepoWebhookAddEvents = Just [WebhookCommitCommentEvent, WebhookGollumEvent],
        editRepoWebhookConfig = Nothing,
        editRepoWebhookEvents = Nothing,
        editRepoWebhookActive = Just True
      }
  newWebhook <- editRepoWebhook' auth "repoOwner" "repoName" 123 editWebhookDef
  case newWebhook of
    (Left err) -> putStrLn $ "Error: " ++ (show err)
    (Right webhook) -> putStrLn $ formatRepoWebhook webhook

formatRepoWebhook :: RepoWebhook -> String
formatRepoWebhook (RepoWebhook _ _ _ name _ _ _ _ _ _) = show name
jwiegley/github
samples/Repos/Webhooks/EditWebhook.hs
Haskell
bsd-3-clause
839
module Jana.ErrorMessages where import Text.Printf import Jana.Error import Jana.Ast aliasError :: Ident -> Ident -> Message aliasError id1 id2 = Message $ printf "Identifiers `%s' and `%s' are aliases" (ident id1) (ident id2) unboundVar :: String -> Message unboundVar name = Message $ printf "Variable `%s' has not been declared" name alreadyBound :: String -> Message alreadyBound name = Message $ printf "Variable name `%s' is already bound" name typeError :: String -> Message typeError = Message typeMismatch :: [String] -> String -> Message typeMismatch expTypes actualType = Message $ printf "Couldn't match expected type %s\n\ \ with actual type `%s'" (join expTypes) actualType where join [] = "" join [x] = quote x join [x, y] = quote x ++ " or " ++ quote y join (x:xs) = quote x ++ ", " ++ join xs quote s = "`" ++ s ++ "'" swapTypeError :: String -> String -> Message swapTypeError typ1 typ2 = Message $ printf "Can't swap variables of type `%s' and `%s'" typ1 typ2 outOfBounds :: (PrintfArg a) => a -> a -> Message outOfBounds index size = Message $ printf "Array index `%d' was out of bounds (array size was %d)" index size emptyStack :: Message emptyStack = Message "Can't pop from empty stack" popToNonZero :: Ident -> Message popToNonZero id = Message $ printf "Can't pop to non-zero variable `%s'" (ident id) assertionFail :: String -> Message assertionFail s = Message $ "Assertion failed: " ++ s delocalNameMismatch :: Ident -> Ident -> Message delocalNameMismatch id1 id2 = Message $ printf "Variable names does not match in local declaration:\n\ \ `%s' in `local'\n\ \ `%s' in `delocal'\n\ \`delocal' statements must come in reverse order of the `local' statments" (ident id1) (ident id2) delocalTypeMismatch :: Ident -> String -> String -> Message delocalTypeMismatch id locType delocType = Message $ printf "Type of variable `%s' does not match local declaration:\n\ \ `%s' in `local'\n\ \ `%s' in `delocal'" (ident id) locType delocType wrongDelocalValue :: Ident -> String -> String -> Message wrongDelocalValue id expect actual = Message $ printf "Expected value to be `%s' for local variable `%s'\n\ \ but actual value is `%s'" expect (ident id) actual undefProc :: String -> Message undefProc name = Message $ printf "Procedure `%s' is not defined" name procDefined :: (Identifiable a) => a -> Message procDefined id = Message $ printf "Procedure `%s' is already defined" (ident id) callingMainError :: Message callingMainError = Message "It is not allowed to call the `main' procedure" argumentError :: (Identifiable a, PrintfArg b) => a -> b -> b -> Message argumentError id expect actual = Message $ printf "Procedure `%s' expects %d argument(s) but got %d" (ident id) expect actual arraySize :: Message arraySize = Message "Array size must be greater than or equal to one" arraySizeMissing :: Ident -> Message arraySizeMissing id = Message $ printf "Array size missing for variable `%s'" (ident id) arraySizeMismatch :: (PrintfArg a, PrintfArg b) => a -> b -> Message arraySizeMismatch exp actual = Message $ printf "Expecting array of size %d\n\ \ but got size %d" exp actual divisionByZero :: Message divisionByZero = Message "Division by zero" noMainProc :: Message noMainProc = Message "No main procedure has been defined" multipleMainProcs :: Message multipleMainProcs = Message "Multiple main procedures has been defined" procDuplicateArgs :: Proc -> Message procDuplicateArgs id = Message $ printf "Procedure `%s' has duplicate arguments" (ident id) userError :: String -> Message userError msg = Message $ "User 
error: " ++ msg printfTypeMismatch :: Char -> String -> String -> Message printfTypeMismatch char expected given = Message $ printf "Type mismatch for `%%%c' format specifier\n\ \Expected argument of type `%s'\n\ \ but actual type was `%s'" char expected given printfTooManyArgs :: Message printfTooManyArgs = Message $ "Not all arguments where used during string formatting" printfNotEnoughArgs :: Message printfNotEnoughArgs = Message $ "Not enough arguments for format string" printfUnrecognizedType :: Char -> Message printfUnrecognizedType char = Message $ printf "Unrecognized format specifier: `%%%c'" char
mbudde/jana
src/Jana/ErrorMessages.hs
Haskell
bsd-3-clause
4,465
{-# LANGUAGE Haskell98 #-} {-# LINE 1 "Data/Text/Internal/Encoding/Fusion.hs" #-} {-# LANGUAGE BangPatterns, CPP, Rank2Types #-} -- | -- Module : Data.Text.Internal.Encoding.Fusion -- Copyright : (c) Tom Harper 2008-2009, -- (c) Bryan O'Sullivan 2009, -- (c) Duncan Coutts 2009 -- -- License : BSD-style -- Maintainer : [email protected] -- Stability : experimental -- Portability : portable -- -- /Warning/: this is an internal module, and does not have a stable -- API or name. Functions in this module may not check or enforce -- preconditions expected by public modules. Use at your own risk! -- -- Fusible 'Stream'-oriented functions for converting between 'Text' -- and several common encodings. module Data.Text.Internal.Encoding.Fusion ( -- * Streaming streamASCII , streamUtf8 , streamUtf16LE , streamUtf16BE , streamUtf32LE , streamUtf32BE -- * Unstreaming , unstream , module Data.Text.Internal.Encoding.Fusion.Common ) where import Data.ByteString.Internal (ByteString(..), mallocByteString, memcpy) import Data.Text.Internal.Fusion (Step(..), Stream(..)) import Data.Text.Internal.Fusion.Size import Data.Text.Encoding.Error import Data.Text.Internal.Encoding.Fusion.Common import Data.Text.Internal.Unsafe.Char (unsafeChr, unsafeChr8, unsafeChr32) import Data.Text.Internal.Unsafe.Shift (shiftL, shiftR) import Data.Word (Word8, Word16, Word32) import Foreign.ForeignPtr (withForeignPtr, ForeignPtr) import Foreign.Storable (pokeByteOff) import qualified Data.ByteString as B import qualified Data.ByteString.Unsafe as B import qualified Data.Text.Internal.Encoding.Utf8 as U8 import qualified Data.Text.Internal.Encoding.Utf16 as U16 import qualified Data.Text.Internal.Encoding.Utf32 as U32 import Data.Text.Unsafe (unsafeDupablePerformIO) streamASCII :: ByteString -> Stream Char streamASCII bs = Stream next 0 (maxSize l) where l = B.length bs {-# INLINE next #-} next i | i >= l = Done | otherwise = Yield (unsafeChr8 x1) (i+1) where x1 = B.unsafeIndex bs i {-# DEPRECATED streamASCII "Do not use this function" #-} {-# INLINE [0] streamASCII #-} -- | /O(n)/ Convert a 'ByteString' into a 'Stream Char', using UTF-8 -- encoding. streamUtf8 :: OnDecodeError -> ByteString -> Stream Char streamUtf8 onErr bs = Stream next 0 (maxSize l) where l = B.length bs next i | i >= l = Done | U8.validate1 x1 = Yield (unsafeChr8 x1) (i+1) | i+1 < l && U8.validate2 x1 x2 = Yield (U8.chr2 x1 x2) (i+2) | i+2 < l && U8.validate3 x1 x2 x3 = Yield (U8.chr3 x1 x2 x3) (i+3) | i+3 < l && U8.validate4 x1 x2 x3 x4 = Yield (U8.chr4 x1 x2 x3 x4) (i+4) | otherwise = decodeError "streamUtf8" "UTF-8" onErr (Just x1) (i+1) where x1 = idx i x2 = idx (i + 1) x3 = idx (i + 2) x4 = idx (i + 3) idx = B.unsafeIndex bs {-# INLINE [0] streamUtf8 #-} -- | /O(n)/ Convert a 'ByteString' into a 'Stream Char', using little -- endian UTF-16 encoding. streamUtf16LE :: OnDecodeError -> ByteString -> Stream Char streamUtf16LE onErr bs = Stream next 0 (maxSize (l `shiftR` 1)) where l = B.length bs {-# INLINE next #-} next i | i >= l = Done | i+1 < l && U16.validate1 x1 = Yield (unsafeChr x1) (i+2) | i+3 < l && U16.validate2 x1 x2 = Yield (U16.chr2 x1 x2) (i+4) | otherwise = decodeError "streamUtf16LE" "UTF-16LE" onErr Nothing (i+1) where x1 = idx i + (idx (i + 1) `shiftL` 8) x2 = idx (i + 2) + (idx (i + 3) `shiftL` 8) idx = fromIntegral . B.unsafeIndex bs :: Int -> Word16 {-# INLINE [0] streamUtf16LE #-} -- | /O(n)/ Convert a 'ByteString' into a 'Stream Char', using big -- endian UTF-16 encoding. 
streamUtf16BE :: OnDecodeError -> ByteString -> Stream Char streamUtf16BE onErr bs = Stream next 0 (maxSize (l `shiftR` 1)) where l = B.length bs {-# INLINE next #-} next i | i >= l = Done | i+1 < l && U16.validate1 x1 = Yield (unsafeChr x1) (i+2) | i+3 < l && U16.validate2 x1 x2 = Yield (U16.chr2 x1 x2) (i+4) | otherwise = decodeError "streamUtf16BE" "UTF-16BE" onErr Nothing (i+1) where x1 = (idx i `shiftL` 8) + idx (i + 1) x2 = (idx (i + 2) `shiftL` 8) + idx (i + 3) idx = fromIntegral . B.unsafeIndex bs :: Int -> Word16 {-# INLINE [0] streamUtf16BE #-} -- | /O(n)/ Convert a 'ByteString' into a 'Stream Char', using big -- endian UTF-32 encoding. streamUtf32BE :: OnDecodeError -> ByteString -> Stream Char streamUtf32BE onErr bs = Stream next 0 (maxSize (l `shiftR` 2)) where l = B.length bs {-# INLINE next #-} next i | i >= l = Done | i+3 < l && U32.validate x = Yield (unsafeChr32 x) (i+4) | otherwise = decodeError "streamUtf32BE" "UTF-32BE" onErr Nothing (i+1) where x = shiftL x1 24 + shiftL x2 16 + shiftL x3 8 + x4 x1 = idx i x2 = idx (i+1) x3 = idx (i+2) x4 = idx (i+3) idx = fromIntegral . B.unsafeIndex bs :: Int -> Word32 {-# INLINE [0] streamUtf32BE #-} -- | /O(n)/ Convert a 'ByteString' into a 'Stream Char', using little -- endian UTF-32 encoding. streamUtf32LE :: OnDecodeError -> ByteString -> Stream Char streamUtf32LE onErr bs = Stream next 0 (maxSize (l `shiftR` 2)) where l = B.length bs {-# INLINE next #-} next i | i >= l = Done | i+3 < l && U32.validate x = Yield (unsafeChr32 x) (i+4) | otherwise = decodeError "streamUtf32LE" "UTF-32LE" onErr Nothing (i+1) where x = shiftL x4 24 + shiftL x3 16 + shiftL x2 8 + x1 x1 = idx i x2 = idx $ i+1 x3 = idx $ i+2 x4 = idx $ i+3 idx = fromIntegral . B.unsafeIndex bs :: Int -> Word32 {-# INLINE [0] streamUtf32LE #-} -- | /O(n)/ Convert a 'Stream' 'Word8' to a 'ByteString'. unstream :: Stream Word8 -> ByteString unstream (Stream next s0 len) = unsafeDupablePerformIO $ do let mlen = upperBound 4 len mallocByteString mlen >>= loop mlen 0 s0 where loop !n !off !s fp = case next s of Done -> trimUp fp n off Skip s' -> loop n off s' fp Yield x s' | off == n -> realloc fp n off s' x | otherwise -> do withForeignPtr fp $ \p -> pokeByteOff p off x loop n (off+1) s' fp {-# NOINLINE realloc #-} realloc fp n off s x = do let n' = n+n fp' <- copy0 fp n n' withForeignPtr fp' $ \p -> pokeByteOff p off x loop n' (off+1) s fp' {-# NOINLINE trimUp #-} trimUp fp _ off = return $! PS fp 0 off copy0 :: ForeignPtr Word8 -> Int -> Int -> IO (ForeignPtr Word8) copy0 !src !srcLen !destLen = do dest <- mallocByteString destLen withForeignPtr src $ \src' -> withForeignPtr dest $ \dest' -> memcpy dest' src' (fromIntegral srcLen) return dest decodeError :: forall s. String -> String -> OnDecodeError -> Maybe Word8 -> s -> Step s Char decodeError func kind onErr mb i = case onErr desc mb of Nothing -> Skip i Just c -> Yield c i where desc = "Data.Text.Internal.Encoding.Fusion." ++ func ++ ": Invalid " ++ kind ++ " stream"
phischu/fragnix
tests/packages/scotty/Data.Text.Internal.Encoding.Fusion.hs
Haskell
bsd-3-clause
7,791
-- | Create a bundle to be uploaded to Stackage Server. {-# LANGUAGE NoImplicitPrelude #-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE RecordWildCards #-} module Stackage.ServerBundle ( serverBundle , epochTime , bpAllPackages , docsListing ) where import qualified Codec.Archive.Tar as Tar import qualified Codec.Archive.Tar.Entry as Tar import qualified Codec.Compression.GZip as GZip import qualified Data.Map as M import qualified Data.Yaml as Y import Filesystem (isFile) import Foreign.C.Types (CTime (CTime)) import Stackage.BuildConstraints import Stackage.BuildPlan import Stackage.Prelude import qualified System.PosixCompat.Time as PC import qualified Text.XML as X import Text.XML.Cursor -- | Get current time epochTime :: IO Tar.EpochTime epochTime = (\(CTime t) -> fromIntegral t) <$> PC.epochTime -- | All package/versions in a build plan, including core packages. -- -- Note that this may include packages not available on Hackage. bpAllPackages :: BuildPlan -> Map PackageName Version bpAllPackages BuildPlan {..} = siCorePackages bpSystemInfo ++ map ppVersion bpPackages serverBundle :: Tar.EpochTime -> Text -- ^ title -> Text -- ^ slug -> BuildPlan -> LByteString serverBundle time title slug bp@BuildPlan {..} = GZip.compress $ Tar.write [ fe "build-plan.yaml" (fromStrict $ Y.encode bp) , fe "hackage" hackage , fe "slug" (fromStrict $ encodeUtf8 slug) , fe "desc" (fromStrict $ encodeUtf8 title) , fe "core" corePackagesList ] where fe name contents = case Tar.toTarPath False name of Left s -> error s Right name' -> (Tar.fileEntry name' contents) { Tar.entryTime = time } hackage = builderToLazy $ foldMap goPair $ mapToList packageMap -- need to remove some packages that don't exist on Hackage packageMap = foldr deleteMap (bpAllPackages bp) $ map PackageName [ "bin-package-db" , "ghc" , "rts" ] goPair (name, version) = toBuilder (display name) ++ toBuilder (asText "-") ++ toBuilder (display version) ++ toBuilder (asText "\n") corePackagesList = builderToLazy $ toBuilder $ unlines $ map (\(PackageName name) -> name) (M.keys $ siCorePackages bpSystemInfo) docsListing :: BuildPlan -> FilePath -- ^ docs directory -> IO ByteString docsListing bp docsDir = fmap (Y.encode . fold) $ mapM go $ mapToList $ bpAllPackages bp where go :: (PackageName, Version) -> IO (Map Text Y.Value) go (package, version) = do -- handleAny (const $ return mempty) $ do let dirname = fpFromText (concat [ display package , "-" , display version ]) indexFP = (docsDir </> dirname </> "index.html") ie <- isFile indexFP if ie then do doc <- flip X.readFile indexFP X.def { X.psDecodeEntities = X.decodeHtmlEntities } let cursor = fromDocument doc getPair x = take 1 $ do href <- attribute "href" x let name = concat $ x $// content guard $ not $ null name return (href, name) pairs = cursor $// attributeIs "class" "module" &/ laxElement "a" >=> getPair m <- fmap fold $ forM pairs $ \(href, name) -> do let suffix = dirname </> fpFromText href e <- isFile $ docsDir </> suffix return $ if e then asMap $ singletonMap name [fpToText dirname, href] else mempty return $ singletonMap (display package) $ Y.object [ "version" Y..= display version , "modules" Y..= m ] else return mempty
myfreeweb/stackage
Stackage/ServerBundle.hs
Haskell
mit
4,251
{-
  Copyright 2015 Google Inc. All rights reserved.

  Licensed under the Apache License, Version 2.0 (the "License");
  you may not use this file except in compliance with the License.
  You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

  Unless required by applicable law or agreed to in writing, software
  distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
-}

{-# LANGUAGE PackageImports #-}
{-# LANGUAGE NoImplicitPrelude #-}

module GHC.Exception (module M) where

import "base" GHC.Exception as M
Ye-Yong-Chi/codeworld
codeworld-base/src/GHC/Exception.hs
Haskell
apache-2.0
741
module WaiAppStatic.Types ( -- * Pieces Piece , toPiece , fromPiece , unsafeToPiece , Pieces , toPieces -- * Caching , MaxAge (..) -- * File\/folder serving , FolderName , Folder (..) , File (..) , LookupResult (..) , Listing -- * Settings , StaticSettings (..) ) where import Data.Text (Text) import qualified Network.HTTP.Types as H import qualified Network.Wai as W import Data.ByteString (ByteString) import System.Posix.Types (EpochTime) import qualified Data.Text as T import Data.ByteString.Builder (Builder) import Network.Mime (MimeType) -- | An individual component of a path, or of a filepath. -- -- This is the core type used by wai-app-static for doing lookups. It provides -- a smart constructor to avoid the possibility of constructing unsafe path -- segments (though @unsafeToPiece@ can get around that as necessary). -- -- Individual file lookup backends must know how to convert from a @Piece@ to -- their storage system. newtype Piece = Piece { fromPiece :: Text } deriving (Show, Eq, Ord) -- | Smart constructor for a @Piece@. Won\'t allow unsafe components, such as -- pieces beginning with a period or containing a slash. This /will/, however, -- allow null pieces. toPiece :: Text -> Maybe Piece toPiece t | T.null t = Just $ Piece t | T.head t == '.' = Nothing | T.any (== '/') t = Nothing | otherwise = Just $ Piece t -- | Construct a @Piece@ without input validation. unsafeToPiece :: Text -> Piece unsafeToPiece = Piece -- | Call @toPiece@ on a list. -- -- > toPieces = mapM toPiece toPieces :: [Text] -> Maybe Pieces toPieces = mapM toPiece -- | Request coming from a user. Corresponds to @pathInfo@. -- -- The root path is the empty list. type Pieces = [Piece] -- | Values for the max-age component of the cache-control response header. data MaxAge = NoMaxAge -- ^ no cache-control set | MaxAgeSeconds Int -- ^ set to the given number of seconds | MaxAgeForever -- ^ essentially infinite caching; in reality, probably one year -- | Just the name of a folder. type FolderName = Piece -- | Represent contents of a single folder, which can be itself either a file -- or a folder. data Folder = Folder { folderContents :: [Either FolderName File] } -- | Information on an individual file. data File = File { -- | Size of file in bytes fileGetSize :: Integer -- | How to construct a WAI response for this file. Some files are stored -- on the filesystem and can use @ResponseFile@, while others are stored -- in memory and should use @ResponseBuilder@. , fileToResponse :: H.Status -> H.ResponseHeaders -> W.Response -- | Last component of the filename. , fileName :: Piece -- | Calculate a hash of the contents of this file, such as for etag. , fileGetHash :: IO (Maybe ByteString) -- | Last modified time, used for both display in listings and if-modified-since. , fileGetModified :: Maybe EpochTime } -- | Result of looking up a file in some storage backend. -- -- The lookup is either a file or folder, or does not exist. data LookupResult = LRFile File | LRFolder Folder | LRNotFound -- | How to construct a directory listing page for the given request path and -- the resulting folder. type Listing = Pieces -> Folder -> IO Builder -- | All of the settings available to users for tweaking wai-app-static. -- -- Note that you should use the settings type approach for modifying values. -- See <http://www.yesodweb.com/book/settings-types> for more information. data StaticSettings = StaticSettings { -- | Lookup a single file or folder. 
This is how you can control storage -- backend (filesystem, embedded, etc) and where to lookup. ssLookupFile :: Pieces -> IO LookupResult -- | Determine the mime type of the given file. Note that this function -- lives in @IO@ in case you want to perform more complicated mimetype -- analysis, such as via the @file@ utility. , ssGetMimeType :: File -> IO MimeType -- | Ordered list of filenames to be used for indices. If the user -- requests a folder, and a file with the given name is found in that -- folder, that file is served. This supercedes any directory listing. , ssIndices :: [Piece] -- | How to perform a directory listing. Optional. Will be used when the -- user requested a folder. , ssListing :: Maybe Listing -- | Value to provide for max age in the cache-control. , ssMaxAge :: MaxAge -- | Given a requested path and a new destination, construct a string -- that will go there. Default implementation will use relative paths. , ssMkRedirect :: Pieces -> ByteString -> ByteString -- | If @True@, send a redirect to the user when a folder is requested -- and an index page should be displayed. When @False@, display the -- content immediately. , ssRedirectToIndex :: Bool -- | Prefer usage of etag caching to last-modified caching. , ssUseHash :: Bool -- | Force a trailing slash at the end of directories , ssAddTrailingSlash :: Bool -- | Optional `W.Application` to be used in case of 404 errors -- -- Since 3.1.3 , ss404Handler :: Maybe W.Application }
sordina/wai
wai-app-static/WaiAppStatic/Types.hs
Haskell
bsd-2-clause
5,370
{-# LANGUAGE RankNTypes #-}

module Main (main) where

import Common (commonMain)
import Control.DeepSeq
import Control.Monad.Identity
import qualified Control.Monad.Trans.Reader as R
import qualified Control.Monad.Trans.State.Strict as S
import Criterion.Main
import Data.Monoid
import Pipes
import Pipes.Lift

defaultMax :: Int
defaultMax = 10000

instance NFData a => NFData (Sum a)

main :: IO ()
main = commonMain defaultMax liftBenchmarks

iter :: forall m a . (Monad m , Ord a, Num a) => (a -> m a) -> a -> Effect m a
iter a vmax = loop 0
  where
    loop n
        | n > vmax  = return vmax
        | otherwise = do
            x <- lift $ a n
            loop $! x

s_bench :: Int -> Effect (S.StateT Int Identity) Int
s_bench = iter (\n -> S.get >>= (\a -> S.put $! a + n) >> return (n + 1))

r_bench :: Int -> Effect (R.ReaderT Int Identity) Int
r_bench = iter (\n -> R.ask >>= (\a -> return $ n + a))

-- Run before Proxy
runB :: (a -> Effect Identity r) -> a -> r
runB f a = runIdentity $ runEffect $ f a

-- Run after Proxy
runA :: (Monad m) => (m r -> Identity a) -> Effect m r -> a
runA f a = runIdentity $ f (runEffect a)

liftBenchmarks :: Int -> [Benchmark]
liftBenchmarks vmax =
    let applyBench = map ($ vmax)
    in
    [ bgroup "ReaderT" $
        let defT f = (\d -> f d 1)
        in applyBench
            [ bench "runReaderP_B" . whnf (runB (runReaderP 1) . r_bench)
            , bench "runReaderP_A" . whnf (runA (defT R.runReaderT) . r_bench)
            ]
    , bgroup "StateT" $
        let defT f = (\s -> f s 0)
        in applyBench
            [ bench "runStateP_B"  . nf   (runB (runStateP 0) . s_bench)
            , bench "runStateP_A"  . nf   (runA (defT S.runStateT) . s_bench)
            , bench "evalStateP_B" . whnf (runB (evalStateP 0) . s_bench)
            , bench "evalStateP_A" . whnf (runA (defT S.evalStateT) . s_bench)
            , bench "execStateP_B" . whnf (runB (execStateP 0) . s_bench)
            , bench "execStateP_A" . whnf (runA (defT S.execStateT) . s_bench)
            ]
    ]
FranklinChen/Haskell-Pipes-Library
benchmarks/LiftBench.hs
Haskell
bsd-3-clause
2,050
{-# OPTIONS -w #-} module Plugin.Free.Theorem where import Plugin.Free.Type import Plugin.Free.Expr import Plugin.Free.Util data Theorem = ThForall Var Type Theorem | ThImplies Theorem Theorem | ThEqual Expr Expr | ThAnd Theorem Theorem deriving (Eq,Show) precIMPLIES, precAND :: Int precIMPLIES = 5 precAND = 3 instance Pretty Theorem where prettyP p t = prettyTheorem p False t prettyTheorem :: Int -> Bool -> Theorem -> Doc prettyTheorem p fa th@(ThForall v t p1) | fa = prettyForall p [v] p1 | otherwise = prettyP p p1 prettyTheorem p fa (ThImplies p1 p2) = prettyParenIndent (p > precIMPLIES) ( prettyTheorem (precIMPLIES+1) True p1 $$ nest (-1) (text "=>") $$ prettyTheorem precIMPLIES fa p2 ) prettyTheorem _ _ (ThEqual e1 e2) = prettyP 0 e1 <+> text "=" <+> prettyP 0 e2 prettyTheorem p fa (ThAnd e1 e2) = prettyParenIndent (p > precAND) ( prettyTheorem (precAND+1) fa e1 $$ text "&&" $$ prettyTheorem precAND fa e2 ) prettyForall :: Int -> [Var] -> Theorem -> Doc prettyForall p vs (ThForall v t p1) = prettyForall p (v:vs) p1 prettyForall p vs th = parens ( text "forall" <+> hsep [ text v | v <- reverse vs ] <> text "." <+> prettyTheorem 0 True th ) varInTheorem :: Var -> Theorem -> Bool varInTheorem v (ThForall v' t p) = v /= v' && varInTheorem v p varInTheorem v (ThImplies p1 p2) = varInTheorem v p1 || varInTheorem v p2 varInTheorem v (ThEqual e1 e2) = varInExpr v e1 || varInExpr v e2 varInTheorem v (ThAnd e1 e2) = varInTheorem v e1 || varInTheorem v e2 applySimplifierTheorem :: (Theorem -> Theorem) -> (Theorem -> Theorem) applySimplifierTheorem s (ThForall v t p) = ThForall v t (s p) applySimplifierTheorem s (ThImplies p1 p2) = ThImplies (s p1) (s p2) applySimplifierTheorem s p@(ThEqual _ _) = p applySimplifierTheorem s p@(ThAnd p1 p2) = ThAnd (s p1) (s p2) peepholeSimplifyTheorem :: Theorem -> Theorem peepholeSimplifyTheorem = peepholeSimplifyTheorem' . applySimplifierTheorem peepholeSimplifyTheorem peepholeSimplifyTheorem' :: Theorem -> Theorem peepholeSimplifyTheorem' (ThForall v t p) = case varInTheorem v p of True -> ThForall v t p False -> p peepholeSimplifyTheorem' p@(ThAnd e1 e2) = foldr1 ThAnd (flattenAnd e1 . flattenAnd e2 $ []) where flattenAnd (ThAnd e1 e2) = flattenAnd e1 . flattenAnd e2 flattenAnd e = (e:) peepholeSimplifyTheorem' p = p peepholeSimplifyExpr :: Expr -> Expr peepholeSimplifyExpr = peepholeSimplifyExpr' . 
applySimplifierExpr peepholeSimplifyExpr peepholeSimplifyExpr' :: Expr -> Expr peepholeSimplifyExpr' (EApp (EBuiltin BId) e2) = e2 peepholeSimplifyExpr' (EApp (EBuiltin (BMap _)) (EBuiltin BId)) = EBuiltin BId peepholeSimplifyExpr' e = e foldEquality :: Theorem -> Theorem foldEquality p@(ThForall _ _ _) = case foldEquality' p [] of Just p' -> p' Nothing -> applySimplifierTheorem foldEquality p where foldEquality' (ThForall v t p) vts = foldEquality' p ((v,t):vts) foldEquality' (ThImplies (ThEqual (EVar v) e2) p) vts | v `elem` map fst vts = foldEquality'' vts (theoremSubst v e2 p) foldEquality' (ThImplies (ThEqual e1 (EVar v)) p) vts | v `elem` map fst vts = foldEquality'' vts (theoremSubst v e1 p) foldEquality' _ vts = Nothing foldEquality'' [] e = Just e foldEquality'' ((v,t):vts) e = foldEquality'' vts (ThForall v t e) foldEquality p = applySimplifierTheorem foldEquality p tryCurrying :: Theorem -> Theorem tryCurrying p@(ThForall _ _ _) = case tryCurrying' p [] of Just p' -> p' Nothing -> applySimplifierTheorem tryCurrying p where tryCurrying' (ThForall v t p) vts = tryCurrying' p ((v,t):vts) tryCurrying' (ThEqual e1 e2) vts = case (traverseRight ECDot e1, traverseRight ECDot e2) of ((ctx1, EVar v1), (ctx2, EVar v2)) | v1 == v2 && v1 `elem` map fst vts && not (varInCtx v1 ctx1) && not (varInCtx v2 ctx2) -> tryCurrying'' vts (ThEqual (untraverse ctx1) (untraverse ctx2)) _ -> Nothing tryCurrying' _ _ = Nothing traverseRight ctx (EApp e1 e2) = traverseRight (ECAppR e1 ctx) e2 traverseRight ctx e = (ctx, e) untraverse ECDot = EBuiltin BId untraverse (ECAppR e1 ECDot) = e1 untraverse (ECAppR e1 ctx) = EApp (EApp (EVarOp FR 9 ".") (untraverse ctx)) e1 tryCurrying'' [] e = Just e tryCurrying'' ((v,t):vts) e = tryCurrying'' vts (ThForall v t e) tryCurrying p = applySimplifierTheorem tryCurrying p theoremSimplify :: Theorem -> Theorem theoremSimplify = iterateUntilFixpoint (foldEquality . iterateUntilFixpoint peephole . tryCurrying . iterateUntilFixpoint peephole ) where iterateUntilFixpoint s t = findFixpoint (iterate s t) peephole t = findFixpoint (iterate peepholeSimplifyTheorem t) findFixpoint (x1:xs@(x2:_)) | x1 == x2 = x2 | otherwise = findFixpoint xs theoremSubst :: Var -> Expr -> Theorem -> Theorem theoremSubst v e (ThForall f t p) = ThForall f t (theoremSubst v e p) theoremSubst v e (ThImplies p1 p2) = ThImplies (theoremSubst v e p1) (theoremSubst v e p2) theoremSubst v e (ThEqual e1 e2) = ThEqual (exprSubst v e e1) (exprSubst v e e2) theoremSubst v e (ThAnd p1 p2) = ThAnd (theoremSubst v e p1) (theoremSubst v e p2) -- vim: ts=4:sts=4:expandtab:ai
zeekay/lambdabot
Plugin/Free/Theorem.hs
Haskell
mit
6,106
{- (c) The University of Glasgow 2006 (c) The AQUA Project, Glasgow University, 1993-1998 This is useful, general stuff for the Native Code Generator. Provide trees (of instructions), so that lists of instructions can be appended in linear time. -} module OrdList ( OrdList, nilOL, isNilOL, unitOL, appOL, consOL, snocOL, concatOL, lastOL, mapOL, fromOL, toOL, foldrOL, foldlOL ) where import GhcPrelude import Outputable import Data.Semigroup ( Semigroup ) import qualified Data.Semigroup as Semigroup infixl 5 `appOL` infixl 5 `snocOL` infixr 5 `consOL` data OrdList a = None | One a | Many [a] -- Invariant: non-empty | Cons a (OrdList a) | Snoc (OrdList a) a | Two (OrdList a) -- Invariant: non-empty (OrdList a) -- Invariant: non-empty instance Outputable a => Outputable (OrdList a) where ppr ol = ppr (fromOL ol) -- Convert to list and print that instance Semigroup (OrdList a) where (<>) = appOL instance Monoid (OrdList a) where mempty = nilOL mappend = (Semigroup.<>) mconcat = concatOL instance Functor OrdList where fmap = mapOL instance Foldable OrdList where foldr = foldrOL instance Traversable OrdList where traverse f xs = toOL <$> traverse f (fromOL xs) nilOL :: OrdList a isNilOL :: OrdList a -> Bool unitOL :: a -> OrdList a snocOL :: OrdList a -> a -> OrdList a consOL :: a -> OrdList a -> OrdList a appOL :: OrdList a -> OrdList a -> OrdList a concatOL :: [OrdList a] -> OrdList a lastOL :: OrdList a -> a nilOL = None unitOL as = One as snocOL as b = Snoc as b consOL a bs = Cons a bs concatOL aas = foldr appOL None aas lastOL None = panic "lastOL" lastOL (One a) = a lastOL (Many as) = last as lastOL (Cons _ as) = lastOL as lastOL (Snoc _ a) = a lastOL (Two _ as) = lastOL as isNilOL None = True isNilOL _ = False None `appOL` b = b a `appOL` None = a One a `appOL` b = Cons a b a `appOL` One b = Snoc a b a `appOL` b = Two a b fromOL :: OrdList a -> [a] fromOL a = go a [] where go None acc = acc go (One a) acc = a : acc go (Cons a b) acc = a : go b acc go (Snoc a b) acc = go a (b:acc) go (Two a b) acc = go a (go b acc) go (Many xs) acc = xs ++ acc mapOL :: (a -> b) -> OrdList a -> OrdList b mapOL _ None = None mapOL f (One x) = One (f x) mapOL f (Cons x xs) = Cons (f x) (mapOL f xs) mapOL f (Snoc xs x) = Snoc (mapOL f xs) (f x) mapOL f (Two x y) = Two (mapOL f x) (mapOL f y) mapOL f (Many xs) = Many (map f xs) foldrOL :: (a->b->b) -> b -> OrdList a -> b foldrOL _ z None = z foldrOL k z (One x) = k x z foldrOL k z (Cons x xs) = k x (foldrOL k z xs) foldrOL k z (Snoc xs x) = foldrOL k (k x z) xs foldrOL k z (Two b1 b2) = foldrOL k (foldrOL k z b2) b1 foldrOL k z (Many xs) = foldr k z xs foldlOL :: (b->a->b) -> b -> OrdList a -> b foldlOL _ z None = z foldlOL k z (One x) = k z x foldlOL k z (Cons x xs) = foldlOL k (k z x) xs foldlOL k z (Snoc xs x) = k (foldlOL k z xs) x foldlOL k z (Two b1 b2) = foldlOL k (foldlOL k z b1) b2 foldlOL k z (Many xs) = foldl k z xs toOL :: [a] -> OrdList a toOL [] = None toOL xs = Many xs
shlevy/ghc
compiler/utils/OrdList.hs
Haskell
bsd-3-clause
3,258
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE helpset PUBLIC "-//Sun Microsystems Inc.//DTD JavaHelp HelpSet Version 2.0//EN" "http://java.sun.com/products/javahelp/helpset_2_0.dtd"> <helpset version="2.0" xml:lang="fa-IR"> <title>Alert Filters | ZAP Extension</title> <maps> <homeID>top</homeID> <mapref location="map.jhm"/> </maps> <view> <name>TOC</name> <label>Contents</label> <type>org.zaproxy.zap.extension.help.ZapTocView</type> <data>toc.xml</data> </view> <view> <name>Index</name> <label>Index</label> <type>javax.help.IndexView</type> <data>index.xml</data> </view> <view> <name>Search</name> <label>Search</label> <type>javax.help.SearchView</type> <data engine="com.sun.java.help.search.DefaultSearchEngine"> JavaHelpSearch </data> </view> <view> <name>Favorites</name> <label>Favorites</label> <type>javax.help.FavoritesView</type> </view> </helpset>
thc202/zap-extensions
addOns/alertFilters/src/main/javahelp/org/zaproxy/zap/extension/alertFilters/resources/help_fa_IR/helpset_fa_IR.hs
Haskell
apache-2.0
974