module Args where
import HakeTypes
+import TreeDB
data Args = Args {
- buildFunction :: [String] -> String -> Args -> HRule,
+ buildFunction :: TreeDB -> String -> Args -> HRule,
target :: String,
cFiles :: [String],
generatedCFiles :: [String],
allFlounderBackends
= [ "lmp", "ump", "ump_ipi", "loopback", "rpcclient", "msgbuf", "multihop", "ahci" ]
-defaultBuildFn :: [String] -> String -> Args -> HRule
+defaultBuildFn :: TreeDB -> String -> Args -> HRule
defaultBuildFn _ f _ =
Error ("Bad use of default Args in " ++ f)
import GHC.Paths (libdir)
import Control.Monad.Ghc
import DynFlags (defaultFatalMessager, defaultFlushOut,
- xopt_set, ExtensionFlag (Opt_DeriveDataTypeable))
+ xopt_set, ExtensionFlag(Opt_DeriveDataTypeable,
+ Opt_StandaloneDeriving))
-- We parse and pretty-print Hakefiles.
import Language.Haskell.Exts
import HakeTypes
import qualified Args
import qualified Config
+import TreeDB
data HakeError = HakeError String Int
instance Error HakeError
-- Walk the source tree and build a complete list of pathnames, loading any
-- Hakefiles.
listFiles :: FilePath -> IO ([FilePath], [(FilePath, String)])
-listFiles root = do
- isdir <- doesDirectoryExist root
- if isdir then do
- children <- getDirectoryContents root
- walkchildren children
- else
- return ([], [])
+listFiles root
+ | ignore (takeFileName root) = return ([], [])
+ | otherwise = do
+ isdir <- doesDirectoryExist root
+ if isdir then do
+ children <- getDirectoryContents root
+ walkchildren children
+ else do
+ hake <- maybeHake root
+ return ([root], hake)
where
+ -- Walk the child directories in parallel. This speeds things up
+ -- dramatically over NFS, with its high latency.
walkchildren :: [FilePath] -> IO ([FilePath], [(FilePath, String)])
walkchildren children = do
- -- Walk the child directories in parallel. This speeds things up
- -- dramatically over NFS, with its high latency.
children_async <- mapM (async.walkchild) children
results <- mapM wait children_async
return $ joinResults results
- where
- joinResults :: [([a],[b])] -> ([a],[b])
- joinResults [] = ([],[])
- joinResults ((as,bs):xs) =
- let (as',bs') = joinResults xs in
- (as ++ as', bs ++ bs')
+
+ joinResults :: [([a],[b])] -> ([a],[b])
+ joinResults [] = ([],[])
+ joinResults ((as,bs):xs) =
+ let (as',bs') = joinResults xs in
+ (as ++ as', bs ++ bs')
walkchild :: FilePath -> IO ([FilePath], [(FilePath, String)])
- walkchild child = do
- if ignore child
- then return ([], [])
- else do
- (allfiles, hakefiles) <- listFiles (root </> child)
- hake <- maybeHake child
- return $ ((root </> child) : allfiles,
- hake ++ hakefiles)
- where
- -- Load Hakfiles eagerly. This amounts to <1MB for
- -- Barrelfish (2015).
- maybeHake "Hakefile" = do
- contents <- readFile (root </> child)
- return [(root </> child, contents)]
- maybeHake _ = return []
+ walkchild child = listFiles (root </> child)
+
+    -- Load Hakefiles eagerly. This amounts to <1MB for Barrelfish (2015).
+ maybeHake path
+ | takeFileName path == "Hakefile" = do
+ contents <- readFile path
+ return [(path, contents)]
+ | otherwise = return []
-- Don't descend into revision-control or build directories.
ignore :: FilePath -> Bool
-- We invoke GHC to parse the Hakefiles in a preconfigured environment,
-- to implement the Hake DSL.
-evalHakeFiles :: Handle -> Opts -> [FilePath] -> [(FilePath, String)] ->
+evalHakeFiles :: Handle -> Opts -> TreeDB -> [(FilePath, String)] ->
IO (S.Set FilePath)
-evalHakeFiles makefile o allfiles hakefiles =
+evalHakeFiles makefile o srcDB hakefiles =
defaultErrorHandler defaultFatalMessager defaultFlushOut $
runGhc (Just libdir) $
- driveGhc makefile o allfiles hakefiles
+ driveGhc makefile o srcDB hakefiles
-- This is the code that executes in the GHC monad.
-driveGhc :: Handle -> Opts -> [FilePath] -> [(FilePath, String)] ->
+driveGhc :: Handle -> Opts -> TreeDB -> [(FilePath, String)] ->
Ghc (S.Set FilePath)
-driveGhc makefile o allfiles hakefiles = do
+driveGhc makefile o srcDB hakefiles = do
-- Set the RTS flags
dflags <- getSessionDynFlags
- let dflags' = foldl xopt_set dflags [ Opt_DeriveDataTypeable ]
+ let dflags' = foldl xopt_set dflags [ Opt_DeriveDataTypeable,
+ Opt_StandaloneDeriving ]
_ <- setSessionDynFlags dflags'{
importPaths = module_paths,
hiDir = Just "./hake",
where
module_paths = [ (opt_installdir o) </> "hake", ".",
(opt_bfsourcedir o) </> "hake" ]
- source_modules = [ "HakeTypes", "RuleDefs", "Args", "Config" ]
+ source_modules = [ "HakeTypes", "RuleDefs", "Args", "Config",
+ "TreeDB" ]
modules = [ "Prelude", "System.FilePath", "HakeTypes", "RuleDefs",
- "Args" ]
+ "Args", "TreeDB" ]
qualified_modules = [ "Config", "Data.List" ]
-- Evaluate one Hakefile, and emit its Makefile section. We collect
wrapHake hakepath hake_expr
-- Evaluate in GHC
- val <- dynCompileExpr $ hake_wrapped ++ " :: [String] -> HRule"
+ val <- dynCompileExpr $ hake_wrapped ++
+ " :: TreeDB -> HRule"
let rule = fromDyn val (\_ -> Error "failed")
-- Path resolution
let resolved_rule =
resolvePaths o (takeDirectory hakepath)
- (rule allfiles)
+ (rule srcDB)
return resolved_rule
Right hake_error -> do
return $ Error "failed"
wrapHake :: FilePath -> Exp -> Exp
wrapHake hakefile hake_exp =
Paren (
- Lambda dummy_loc [PVar (Ident "allfiles")] (
+ Lambda dummy_loc [PVar (Ident "sourceDB")] (
Let (BDecls
[FunBind [Match -- This is 'find'
dummy_loc
Nothing
(UnGuardedRhs
(Paren (App (App (App (Var (UnQual (Ident "fn")))
- (Var (UnQual (Ident "allfiles"))))
+ (Var (UnQual (Ident "sourceDB"))))
(Lit (String hakefile)))
(Var (UnQual (Ident "arg"))))))
(BDecls [])],
(UnGuardedRhs
(App (App (App (Paren (App (Var (UnQual (Ident "buildFunction")))
(Var (UnQual (Ident "a")))))
- (Var (UnQual (Ident "allfiles"))))
+ (Var (UnQual (Ident "sourceDB"))))
(Lit (String hakefile)))
(Var (UnQual (Ident "a")))))
(BDecls [])]
liftIO $ putStrLn "Reading directory tree..."
(allfiles, hakefiles) <- liftIO $ listFiles (opt_sourcedir opts)
let relfiles = map (makeRelative $ opt_sourcedir opts') allfiles
+ let srcDB = tdbBuild relfiles
-- Open the Makefile and write the preamble
liftIO $ putStrLn $ "Opening " ++ (opt_makefilename opts)
-- Evaluate Hakefiles
liftIO $ putStrLn $ "Evaluating " ++ show (length hakefiles) ++
" Hakefiles..."
- dirs <- liftIO $ evalHakeFiles makefile opts relfiles hakefiles
+ dirs <- liftIO $ evalHakeFiles makefile opts srcDB hakefiles
-- Emit directory rules
liftIO $ putStrLn $ "Generating build directory dependencies..."
module RuleDefs where
import Data.List (intersect, isSuffixOf, union, (\\), nub, sortBy, elemIndex)
+import Data.Maybe (fromMaybe)
import System.FilePath
import qualified X86_64
import qualified K1om
import HakeTypes
import qualified Args
import qualified Config
+import TreeDB
import Debug.Trace
-- enable debug spew
--
-- Look for a set of files: this is called using the "find" combinator
--
-withSuffix :: [String] -> String -> String -> [String]
-withSuffix af tf arg =
- [ takeFileName f | f <- af,
- takeDirectory f == takeDirectory tf,
- takeExtension f == arg ]
+withSuffix :: TreeDB -> String -> String -> [String]
+withSuffix srcDB hakepath extension =
+ fromMaybe [] $ tdbByDirExt (takeDirectory hakepath) extension srcDB
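+-- For example (a sketch with hypothetical entries; results come back
+-- qualified with the Hakefile's directory, as built by tdbByDirExt):
+--   withSuffix srcDB "usr/foo/Hakefile" ".c" == ["usr/foo/main.c"]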
-withSuffices :: [String] -> String -> [String] -> [String]
-withSuffices af tf args =
- concat [ withSuffix af tf arg | arg <- args ]
+withSuffices :: TreeDB -> String -> [String] -> [String]
+withSuffices srcDB hakepath extensions =
+ concat [ withSuffix srcDB hakepath ext | ext <- extensions ]
--
-- Find files with a given suffix in a given dir
--
-inDir :: [String] -> String -> String -> String -> [String]
-inDir af tf dir suffix =
- -- Dummy is here so that we can find files in the same dir :-/
- let subdir = (if head dir == '/' then absdir else reldir) </> "dummy"
- absdir = if head tf == '/' then dir else '.':dir
- reldir = (takeDirectory tf) </> dir
- files = withSuffix af subdir suffix
- in
- [ dir </> f | f <- files ]
+inDir :: TreeDB -> String -> String -> String -> [String]
+inDir srcDB hakepath dir extension =
+ fromMaybe [] $ tdbByDirExt (takeDirectory hakepath </> dir) extension srcDB
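+-- For example (hypothetical entries only):
+--   inDir srcDB "usr/foo/Hakefile" "src" ".c" == ["usr/foo/src/bar.c"]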
+
+cInDir :: TreeDB -> String -> String -> [String]
+cInDir tdb tf dir = inDir tdb tf dir ".c"
+
+cxxInDir :: TreeDB -> String -> String -> [String]
+cxxInDir tdb tf dir = (inDir tdb tf dir ".cpp") ++ (inDir tdb tf dir ".cc")
-cInDir :: [String] -> String -> String -> [String]
-cInDir af tf dir = inDir af tf dir ".c"
-cxxInDir :: [String] -> String -> String -> [String]
-cxxInDir af tf dir = (inDir af tf dir ".cpp") ++ (inDir af tf dir ".cc")
-sInDir :: [String] -> String -> String -> [String]
-sInDir af tf dir = inDir af tf dir ".S"
+sInDir :: TreeDB -> String -> String -> [String]
+sInDir tdb tf dir = inDir tdb tf dir ".S"
-------------------------------------------------------------------------
--
application :: Args.Args
application = Args.defaultArgs { Args.buildFunction = applicationBuildFn }
-applicationBuildFn :: [String] -> String -> Args.Args -> HRule
-applicationBuildFn af tf args
+applicationBuildFn :: TreeDB -> String -> Args.Args -> HRule
+applicationBuildFn tdb tf args
| debugFlag && trace (Args.showArgs (tf ++ " Application ") args) False
= undefined
-applicationBuildFn af tf args =
- Rules [ appBuildArch af tf args arch | arch <- Args.architectures args ]
+applicationBuildFn tdb tf args =
+ Rules [ appBuildArch tdb tf args arch | arch <- Args.architectures args ]
appGetOptionsForArch arch args =
(options arch) { extraIncludes =
s <- Args.addGeneratedDependencies args]
}
-appBuildArch af tf args arch =
+appBuildArch tdb tf args arch =
let -- Fiddle the options
opts = appGetOptionsForArch arch args
csrcs = Args.cFiles args
arrakisapplication :: Args.Args
arrakisapplication = Args.defaultArgs { Args.buildFunction = arrakisApplicationBuildFn }
-arrakisApplicationBuildFn :: [String] -> String -> Args.Args -> HRule
-arrakisApplicationBuildFn af tf args
+arrakisApplicationBuildFn :: TreeDB -> String -> Args.Args -> HRule
+arrakisApplicationBuildFn tdb tf args
| debugFlag && trace (Args.showArgs (tf ++ " Arrakis Application ") args) False
= undefined
-arrakisApplicationBuildFn af tf args =
- Rules [ arrakisAppBuildArch af tf args arch | arch <- Args.architectures args ]
+arrakisApplicationBuildFn tdb tf args =
+ Rules [ arrakisAppBuildArch tdb tf args arch | arch <- Args.architectures args ]
arrakisAppGetOptionsForArch arch args =
(options arch) { extraIncludes =
[Dep BuildTree arch s | s <- Args.addGeneratedDependencies args]
}
-arrakisAppBuildArch af tf args arch =
+arrakisAppBuildArch tdb tf args arch =
let -- Fiddle the options
opts = arrakisAppGetOptionsForArch arch args
csrcs = Args.cFiles args
library :: Args.Args
library = Args.defaultArgs { Args.buildFunction = libraryBuildFn }
-libraryBuildFn :: [String] -> String -> Args.Args -> HRule
-libraryBuildFn af tf args | debugFlag && trace (Args.showArgs (tf ++ " Library ") args) False = undefined
-libraryBuildFn af tf args =
- Rules [ libBuildArch af tf args arch | arch <- Args.architectures args ]
+libraryBuildFn :: TreeDB -> String -> Args.Args -> HRule
+libraryBuildFn tdb tf args | debugFlag && trace (Args.showArgs (tf ++ " Library ") args) False = undefined
+libraryBuildFn tdb tf args =
+ Rules [ libBuildArch tdb tf args arch | arch <- Args.architectures args ]
libGetOptionsForArch arch args =
(options arch) { extraIncludes =
[Dep BuildTree arch s | s <- Args.addGeneratedDependencies args]
}
-libBuildArch af tf args arch =
+libBuildArch tdb tf args arch =
let -- Fiddle the options
opts = libGetOptionsForArch arch args
csrcs = Args.cFiles args
Args.target = "cpu" }
-- CPU drivers are built differently
-cpuDriverBuildFn :: [String] -> String -> Args.Args -> HRule
-cpuDriverBuildFn af tf args = Rules []
+cpuDriverBuildFn :: TreeDB -> String -> Args.Args -> HRule
+cpuDriverBuildFn tdb tf args = Rules []
--
-- Build a platform
--- /dev/null
+module TreeDB(
+ DirList,
+ dlEmpty, dlByExt, dlAdd, dlAddByExt,
+
+ TreeDB,
+ tdbEmpty, tdbByDir, tdbAdd, tdbAddDir,
+ tdbBuild,
+
+ tdbByDirExt
+ )
+where
+
+import qualified Data.ByteString.Char8 as C
+import Data.Trie(Trie)
+import qualified Data.Trie as T
+import Data.Typeable
+
+import System.FilePath
+
+--
+-- The files in a directory, partitioned by extension.
+--
+type DirList = [(String, [String])]
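+-- For example, a directory holding foo.c, bar.c and baz.h is represented
+-- (up to ordering) as [(".c", ["foo", "bar"]), (".h", ["baz"])]: extensions
+-- keep their leading dot, and names are stored without their extension.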
+
+dlEmpty :: DirList
+dlEmpty = []
+
+-- Linear search for files by extension, in a single directory.
+dlByExt :: String -> DirList -> [String]
+dlByExt _ [] = []
+dlByExt ext ((ext', names) : dirlist)
+ | ext == ext' = names
+ | otherwise = dlByExt ext dirlist
+
+-- Insert a file, given its extension. Again linear.
+dlAdd :: FilePath -> DirList -> DirList
+dlAdd file dirList =
+ dlAddByExt (takeExtension file) (dropExtension file) dirList
+
+dlAddByExt :: String -> String -> DirList -> DirList
+dlAddByExt ext name [] = [(ext, [name])]
+dlAddByExt ext name ((ext', names):dirlist)
+ | ext == ext' = (ext', name:names):dirlist
+ | otherwise = (ext', names):(dlAddByExt ext name dirlist)
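+-- For example: dlAdd "main.c" dlEmpty == [(".c", ["main"])]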
+
+--
+-- A map from directory to contents, excluding subdirectories.
+--
+type TreeDB = Trie DirList
+
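+-- Trie has no Typeable instance of its own, so derive one standalone
+-- (presumably why Opt_StandaloneDeriving and Opt_DeriveDataTypeable are
+-- enabled in the GHC session); without it a TreeDB could not cross the
+-- dynCompileExpr boundary into the evaluated Hakefile expressions.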
+deriving instance Typeable1 Trie
+
+tdbEmpty :: TreeDB
+tdbEmpty = T.empty
+
+-- Get directory contents by directory path
+tdbByDir :: FilePath -> TreeDB -> Maybe DirList
+tdbByDir path treeDB = T.lookup (C.pack path) treeDB
+
+-- Add a file
+tdbAdd :: FilePath -> TreeDB -> TreeDB
+tdbAdd path treeDB
+ | T.member dirS treeDB =
+ T.adjust (\dirList -> dlAdd file dirList) dirS treeDB
+ | otherwise =
+ T.insert dirS (dlAdd file dlEmpty) treeDB
+ where
+ dir = takeDirectory path
+ file = takeFileName path
+ dirS = C.pack dir
+
+-- Add a directory, complete with (relative) contents
+tdbAddDir :: FilePath -> [FilePath] -> TreeDB -> TreeDB
+tdbAddDir dir files treeDB
+ | T.member dirS treeDB =
+ T.adjust (\dirList -> foldr dlAdd dirList files) dirS treeDB
+ | otherwise =
+ T.insert dirS (foldr dlAdd dlEmpty files) treeDB
+ where
+ dirS = C.pack dir
+
+tdbBuild :: [FilePath] -> TreeDB
+tdbBuild files = foldr tdbAdd tdbEmpty files
+
+--
+-- Combined queries
+--
+
+-- Find files by directory and extension
+tdbByDirExt :: FilePath -> String -> TreeDB -> Maybe [FilePath]
+tdbByDirExt path ext treeDB = do
+ dirList <- tdbByDir path treeDB
+ let basenames = dlByExt ext dirList
+ return [ path </> base <.> ext | base <- basenames ]
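+
+-- For example (hypothetical paths):
+--   tdbByDirExt "usr/foo" ".c" (tdbBuild ["usr/foo/Hakefile", "usr/foo/main.c"])
+--     == Just ["usr/foo/main.c"]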