diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
new file mode 100644
index 0000000..0c611db
--- /dev/null
+++ b/.github/workflows/build.yml
@@ -0,0 +1,24 @@
+# This workflow will build a .NET project
+# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-net
+
+name: Build and test
+
+on:
+ push:
+ branches: [ "dev" ]
+ pull_request:
+ branches: [ "dev" ]
+
+jobs:
+ build:
+ if: github.event_name != 'push' || !startsWith(github.ref, 'refs/tags/v')
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v3
+ - name: Setup .NET
+ uses: actions/setup-dotnet@v3
+ with:
+ dotnet-version: 9.0.x
+ - name: Build & test
+ run: dotnet fsi build.fsx -- -- build test
\ No newline at end of file
diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml
new file mode 100644
index 0000000..18935ca
--- /dev/null
+++ b/.github/workflows/publish.yml
@@ -0,0 +1,44 @@
+# This workflow builds, tests, and publishes the .NET package to NuGet
+# For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-net
+
+name: Publish package
+
+on:
+ push:
+ tags:
+ - 'v*'
+
+jobs:
+ publish:
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v3
+ - name: Setup .NET
+ uses: actions/setup-dotnet@v3
+ with:
+ dotnet-version: 9.0.x
+
+ - name: Extract version from tag
+ id: version
+ run: |
+ TAG_NAME="${{ github.ref_name }}"
+ VERSION="${TAG_NAME#v}" # Remove 'v' prefix
+ echo "version=${VERSION}.${{ github.run_number }}" >> $GITHUB_OUTPUT
+
+ - name: Build and test
+ run: dotnet fsi build.fsx -- -- build test
+
+ - name: Publish
+ run: dotnet fsi build.fsx -- -- pack push
+ env:
+ VERSION: ${{ steps.version.outputs.version }}
+ NUGET_KEY: ${{ secrets.NUGET_API_KEY }}
+
+ - name: Upload build log
+ uses: actions/upload-artifact@v4
+ if: always()
+ with:
+ name: build-log-${{ github.run_number }}.txt
+ path: build.log
+ retention-days: 5
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 8bbb6ff..931a514 100644
--- a/.gitignore
+++ b/.gitignore
@@ -14,6 +14,7 @@ _UpgradeReport*
packages
.nuget
.paket
+.ionide
# Ignore Visual Studio files
*.pdb
@@ -32,6 +33,8 @@ TestResult.*
.xake*
.fake
.vs/
+.vscode/
+.ionide/
samples/**/*.exe
samples/**/*.dll
samples/**/*.fsx.lock
diff --git a/.travis.yml b/.travis.yml
index d534405..32fcc2e 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,16 +1,12 @@
language: csharp
-mono: latest
-dotnet: 2.1.300
+dotnet: 7.0.202
env: VER=$(if [[ "${TRAVIS_TAG:0:1}" == "v" ]]; then echo ${TRAVIS_TAG:1}.${TRAVIS_BUILD_NUMBER}; else echo 1.0.0.${TRAVIS_BUILD_NUMBER}; fi;)
-install:
- - dotnet restore build.proj
script:
- - export FrameworkPathOverride=$(dirname $(which mono))/../lib/mono/4.5-api/
- - dotnet fake run build.fsx -- build test -ll Diag
+ - dotnet fsi build.fsx -- -- build test -ll Diag
deploy:
- provider: script
- script: dotnet fake run build.fsx -- pack push -ll Diag
+ script: dotnet fsi build.fsx -- -- pack push -ll Diag
skip_cleanup: true
on:
tags: true
- condition: "${TRAVIS_TAG:0:1} = v"
+ condition: "${TRAVIS_TAG:0:1} = v"
\ No newline at end of file
diff --git a/.vscode/settings.json b/.vscode/settings.json
deleted file mode 100644
index e2dfd84..0000000
--- a/.vscode/settings.json
+++ /dev/null
@@ -1,5 +0,0 @@
-// Place your settings in this file to overwrite default and user settings.
-{
- "editor.wordWrap": "wordWrapColumn",
- "editor.wordWrapColumn": 120
-}
\ No newline at end of file
diff --git a/LICENSE b/LICENSE
index b46cf05..07818d6 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,6 @@
The MIT License (MIT)
-Copyright (c) 2014 OlegZee
+Copyright (c) 2014-2024 OlegZee
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
diff --git a/build.cmd b/build.cmd
index 3986414..4d1719c 100644
--- a/build.cmd
+++ b/build.cmd
@@ -1,3 +1,2 @@
@echo off
-dotnet restore build.proj
-dotnet fake run build.fsx -- build
\ No newline at end of file
+dotnet fsi build.fsx -- -- build
\ No newline at end of file
diff --git a/build.fsx b/build.fsx
index 0e27744..c42980d 100644
--- a/build.fsx
+++ b/build.fsx
@@ -1,55 +1,31 @@
-#r "paket:
- nuget Xake ~> 1.1 prerelease //"
-
-#if !FAKE
-#load ".fake/build.fsx/intellisense.fsx"
-#endif
+#r "nuget: Xake, 2.2.0"
open Xake
open Xake.Tasks
-let frameworks = ["netstandard2.0"; "net46"]
+let frameworks = ["netstandard2.0" (*; "net46" *)]
let libtargets =
- [ for t in frameworks do
- for e in ["dll"; "xml"]
- -> sprintf "out/%s/Xake.%s" t e
+ [ for fwk in frameworks do
+ for ext in ["dll"; "xml"]
+ -> $"out/%s{fwk}/Xake.%s{ext}"
]
-let getVersion () = recipe {
- let! verVar = getVar("VER")
- let! verEnv = getEnv("VER")
- let ver = verVar |> Option.defaultValue (verEnv |> Option.defaultValue "0.0.1")
-
- let! verSuffix =
- getVar("SUFFIX")
- |> Recipe.map (
- function
- | None -> "-beta"
- | Some "" -> "" // this is release!
- | Some s -> "-" + s
- )
- return ver + verSuffix
-}
+let getVersion () = getEnv "VERSION" |> map (Option.defaultValue "0.0.1")
-let makePackageName = sprintf "Xake.%s.nupkg"
+let makePackageName version = $"Xake.%s{version}.nupkg"
-let dotnet arglist = recipe {
- do! shell {
+let dotnet arglist =
+ shell {
cmd "dotnet"
args arglist
failonerror
- } |> Recipe.Ignore
-}
+ } |> Ignore
do xakeScript {
- filelog "build.log" Verbosity.Diag
- // consolelog Verbosity.Normal
+ filelog "build.log" Diag
rules [
- "main" => recipe {
- do! need ["build"]
- do! need ["test"]
- }
+ "main" <<< ["build"; "test"]
"build" <== libtargets
"clean" => rm {dir "out"}
@@ -58,38 +34,33 @@ do xakeScript {
do! alwaysRerun()
let! where =
- getVar("FILTER")
- |> Recipe.map (function |Some clause -> ["--filter"; sprintf "Name~\"%s\"" clause] | None -> [])
-
- // in case of travis only run tests for standard runtime, eventually will add more
- let! limitFwk = getEnv("TRAVIS") |> Recipe.map (function | Some _ -> ["-f:netcoreapp2.0"] | _ -> [])
+ getVar "FILTER"
+ |> map (function |Some clause -> ["--filter"; $"Name~\"{clause}\""] | None -> [])
- do! dotnet <| ["test"; "src/tests"; "-c"; "Release"] @ where @ limitFwk
+ do! dotnet <| ["test"; "src/tests"; "-c"; "Release"] @ where
}
libtargets *..> recipe {
- let! allFiles
- = getFiles <| fileset {
- basedir "src/core"
- includes "Xake.fsproj"
- includes "**/*.fs"
- }
+ let! allFiles = getFiles <| fileset {
+ basedir "src/core"
+ includes "Xake.fsproj"
+ includes "**/*.fs"
+ }
do! needFiles allFiles
let! version = getVersion()
for framework in frameworks do
- do! dotnet
- [
- "build"
- "src/core"
- "/p:Version=" + version
- "--configuration"; "Release"
- "--framework"; framework
- "--output"; "../../out/" + framework
- "/p:DocumentationFile=Xake.xml"
- ]
+ do! dotnet [
+ "build"
+ "src/core"
+ "/p:Version=" + version
+ "--configuration"; "Release"
+ "--framework"; framework
+ "--output"; "./out/" + framework
+ "/p:DocumentationFile=Xake.xml"
+ ]
}
]
@@ -101,29 +72,27 @@ do xakeScript {
}
"out/Xake.(ver:*).nupkg" ..> recipe {
- let! ver = getRuleMatch("ver")
- do! dotnet
- [
- "pack"; "src/core"
- "-c"; "Release"
- "/p:Version=" + ver
- "--output"; "../../out/"
- "/p:DocumentationFile=Xake.xml"
- ]
+ let! ver = getRuleMatch "ver"
+ do! dotnet [
+ "pack"; "src/core"
+ "-c"; "Release"
+ $"/p:Version={ver}"
+ "--output"; "out/"
+ "/p:DocumentationFile=Xake.xml"
+ ]
}
// push need pack to be explicitly called in advance
"push" => recipe {
let! version = getVersion()
- let! nuget_key = getEnv("NUGET_KEY")
- do! dotnet
- [
- "nuget"; "push"
- "out" > makePackageName version
- "--source"; "https://www.nuget.org/api/v2/package"
- "--api-key"; nuget_key |> Option.defaultValue ""
- ]
+ let! nuget_key = getEnv "NUGET_KEY"
+ do! dotnet [
+ "nuget"; "push"
+ "out" > makePackageName version
+ "--source"; "https://www.nuget.org/api/v2/package"
+ "--api-key"; nuget_key |> Option.defaultValue ""
+ ]
}
]
}
diff --git a/build.fsx.lock b/build.fsx.lock
deleted file mode 100644
index 5497458..0000000
--- a/build.fsx.lock
+++ /dev/null
@@ -1,7 +0,0 @@
-STORAGE: NONE
-RESTRICTION: == netstandard2.0
-NUGET
- remote: https://api.nuget.org/v3/index.json
- FSharp.Core (4.3.4)
- Xake (1.1.0.413-alpha)
- FSharp.Core (>= 4.3.4)
diff --git a/build.proj b/build.proj
deleted file mode 100644
index 5483b72..0000000
--- a/build.proj
+++ /dev/null
@@ -1,10 +0,0 @@
-
-
-
- netstandard2.0
-
-
-
-
-
-
diff --git a/build.sh b/build.sh
index f19bb05..359a510 100755
--- a/build.sh
+++ b/build.sh
@@ -1,3 +1,2 @@
#!/bin/bash
-dotnet restore build.proj
-dotnet fake run build.fsx -- build
\ No newline at end of file
+dotnet fsi build.fsx -- --
\ No newline at end of file
diff --git a/docs/cheatsheet.md b/docs/cheatsheet.md
new file mode 100644
index 0000000..1fed66a
--- /dev/null
+++ b/docs/cheatsheet.md
@@ -0,0 +1,9 @@
+## Operators ##
+
+### `<<<` - depends on targets (run sequentially)
+
+> `"main" <<< ["restore"; "build-debug"; "unit-test"]`
+
+### `<==` - depends on targets that are allowed to run in parallel
+> `"main" <== ["build-debug"; "build-release"]`
+
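+A minimal sketch combining both operators (the target names are illustrative and the remaining phony rules are assumed to be defined elsewhere in the script; the shape mirrors this repository's own `build.fsx`):
+
+```fsharp
+#r "nuget: Xake, 2.2.0"
+open Xake
+
+do xakeScript {
+    rules [
+        // "main" requests its dependencies strictly one after another
+        "main" <<< ["restore"; "build-debug"; "unit-test"]
+        // "ci" lets its dependencies build in parallel
+        "ci" <== ["build-debug"; "build-release"]
+    ]
+}
+```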
diff --git a/docs/overview.md b/docs/overview.md
index 9abff59..339cdce 100644
--- a/docs/overview.md
+++ b/docs/overview.md
@@ -37,7 +37,7 @@ Xake script is just an F# script with some flavors.
The most simple, but structured script looks as follows:
```fsharp
-#r @".tools/Xake.Core.dll" // (1)
+#r "nuget: Xake, 1.1.4.427-beta" // (1)
open Xake // (2)
@@ -154,6 +154,7 @@ There're several forms of rules including:
* `rule ( => )` - creates a phony rule (the rule that does not create a file)
* `rule ( <== [targets])` - creates a phony rule which demands specified targets
+* `rule ( <<< [targets])` - the same as above, but the targets are requested one by one (non-parallel execution)
* `rule ( ..> )` - rule for single file or group of files matching the specified wildcards pattern. The file and an optional matching groups can be accessed via getTargetFile and getRuleMatch methods
* `rule ( ..?> )` - allows to use function instead of file name or wildcards
diff --git a/docs/todo.md b/docs/todo.md
index 68def05..f1cb4f5 100644
--- a/docs/todo.md
+++ b/docs/todo.md
@@ -2,13 +2,10 @@
* change the first page to a tutorial with script and usage examples
- * switch development to mono under windows
* idea: xake script as a task. Override/inherit variables. How to change variable on the fly is the original question. (we have got it out of the box, need more info)
- * accept filemasks in 'need' parameters (WHY I added it here?, the use case is very unclear)
* detect changes in build script (internal changes), e.g. new target added that was not in .xake database
* dependencies tracking mode: automatically rebuild when dependency is changed, execute triggers allowing to start/stop the processes which lock/hold artifacts
* in-memory artifact (string or stream). Say in Gulp file is processed in-memory
- * can the rules be abstract over artifacts
### Refactorings
@@ -25,10 +22,12 @@
* `rule "Viewer" -> fun folder -> action {need [folder <\\> "bin" <\\> folder <.> "exe"]...}`
* Filelist is not handy as it requires to cast all the time
* FileInfo is not good for the same reason: poorly composable and does not cover Directory well
-* wildcards phony actions
## Done (top is recent)
+ * wildcards phony actions
+ * support tasks in line with recipes and asyncs
+
* rules should accept #seq not just the list
* <<< for running tasks one by one. Current one runs in parallel only.
* complete copyFiles method
diff --git a/global.json b/global.json
new file mode 100644
index 0000000..a7d2245
--- /dev/null
+++ b/global.json
@@ -0,0 +1,6 @@
+{
+ "sdk": {
+ "rollForward": "major",
+ "version": "9.0.0"
+ }
+}
\ No newline at end of file
diff --git a/readme.md b/readme.md
index be9cbd1..0a222a5 100644
--- a/readme.md
+++ b/readme.md
@@ -1,16 +1,13 @@
Xake is a build utility that uses the full power of the F# programming language. Xake is inspired by [shake](https://github.com/ndmitchell/shake) build tool.
-[](https://travis-ci.org/xakebuild/Xake)
+[](https://github.com/OlegZee/Xake/actions/workflows/build.yml)
## Sample script
The simple script looks like:
```fsharp
-#r "paket:
- nuget Xake ~> 1.1 prerelease
- nuget Xake.Dotnet ~> 1.1 prerelease //"
-
+#r "nuget: Xake"
open Xake
open Xake.Dotnet
@@ -25,7 +22,9 @@ do xakeScript {
This script compiles helloworld assembly from helloworld.cs file.
-To run this script:
+## Getting started
+
+Make sure the .NET SDK 9.0 or later is installed.
1. Clone the project:
@@ -36,10 +35,13 @@ To run this script:
```
cd samples
- dotnet restore dotnet-fake.csproj
- dotnet fake run gettingstarted.fsx
+ dotnet fsi gettingstarted.fsx
```
+
+   ... then run the feature showcase script:
+
+   ```
+   dotnet fsi features.fsx
+   ```
+
## Further reading
* See [the features.fsx](https://github.com/xakebuild/Xake/blob/dev/samples/features.fsx) script for various samples.
@@ -51,12 +53,28 @@ To run this script:
Once you cloned the repository you are ready to compile and test the binaries:
```
-dotnet restore build.proj
-dotnet fake run build.fsx -- build test
+dotnet fsi build.fsx -- -- build test
```
... or use `build.cmd` (`build.sh`) in the root folder
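+
+Individual targets can be run the same way. For example, the CI scripts in this repository pass the `-ll Diag` switch to get diagnostic-level logging:
+
+```
+dotnet fsi build.fsx -- -- build test -ll Diag
+```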
+## Getting started for Mono on Linux/OSX
+
+> This is untested; these days Mono is poorly explored territory for me.
+
+Make sure mono with F# is installed and root certificates are imported:
+
+```
+sudo apt-get install mono-complete
+sudo mozroots --import --sync
+```
+
+TBD
+
+## Documentation
+
+See [documentation](docs/overview.md) for more details.
+
## References
* [documentation](https://github.com/xakebuild/Xake/wiki)
diff --git a/samples/book/intro.fsx b/samples/book/intro.fsx
index 6bd4156..8c06c72 100644
--- a/samples/book/intro.fsx
+++ b/samples/book/intro.fsx
@@ -1,6 +1,6 @@
-#r "paket:
- nuget Xake ~> 1.1 prerelease
- nuget Xake.Dotnet ~> 1.1 prerelease //" // (1)
+#r "nuget: Xake, 1.1.4.427-beta"
+#r "nuget: Xake.Dotnet, 1.1.4.7-beta" // (1)
+
open Xake // (2)
open Xake.Dotnet // (2.1)
diff --git a/samples/catch_errors.fsx b/samples/catch_errors.fsx
index 22f83ab..4f266a3 100644
--- a/samples/catch_errors.fsx
+++ b/samples/catch_errors.fsx
@@ -1,5 +1,4 @@
-#r "paket: nuget Xake ~> 1.1 prerelease //"
-
+#r "nuget: Xake, 2.0.0"
open Xake
do xakeScript {
diff --git a/samples/features.fsx b/samples/features.fsx
index b474bde..f58278e 100644
--- a/samples/features.fsx
+++ b/samples/features.fsx
@@ -1,19 +1,14 @@
-#r "paket:
- nuget Xake ~> 1.1 prerelease
- nuget Xake.Dotnet ~> 1.1 prerelease //"
-
-#if !FAKE
-#load ".fake/features.fsx/intellisense.fsx"
-#endif
+// #r "nuget: Xake, 2.0.0"
+#r "../out/netstandard2.0/Xake.dll"
+#r "nuget: Xake.Dotnet, 1.1.4.7-beta"
// This a sample Xake script to show off some features.
//
// USAGE:
-// * `fake run` or
-// * `dotnet restore && dotnet fake run`
+// * `dotnet fsi features.fsx`
//
-// Running particular target:
-// * `dotnet fake run build.fsx -- clean`
+// Running particular targets:
+// * `dotnet fsi features.fsx -- -- clean main`
open Xake
open Xake.Tasks
@@ -39,13 +34,37 @@ do xakeScript {
// this is shorter way to express the same. See also `<==` and '<<<' operators.
"main" => need ["tracetest"; "temp/a.exe"]
- // .NET build rules
- // build .net executable using full .net framework (or mono under unix)
+ // "phony" rule that produces no file but just removes the files
+    // the `rm` recipe (Xake.Tasks namespace) allows removing files and folders
+ "clean" => recipe {
+ do! rm {file "paket-files/*.*"}
+ do! rm {dir "out"}
+ do! rm {files (fileset {
+ includes "samplefile*"
+ }); verbose
+ }
+ }
+
+ "dotnet-version" => recipe {
+        // this rule runs the `dotnet --version` command and prints the result
+ do! sh "dotnet --version" {}
+
+ // you can pass arguments and set options for the command
+ do! sh "dotnet" {
+ arg "--version"
+ logprefix "sh:dotnet-version"
+ }
- // define a "phony rule", which has goal to produce a file
- "clean" => rm {file "temp/a*"}
+        // A third option is the `shellCmd` builder (it won't fail on error by default)
+ let! error_code = shellCmd "dotnet" {
+ args [ "sdk"]
+ arg "check"
+ }
+ ()
+ }
- // rule to build an a.exe executable by using c# compiler
+ // .NET build rules
+    // build a .NET executable from C# sources using the full .NET Framework (or Mono on Unix)
// notice there's no "out" parameter: csc recipe will use the target file as an output
"temp/a.exe" ..> csc {src (!!"temp/a.cs" + "temp/AssemblyInfo.cs")}
@@ -107,17 +126,6 @@ do xakeScript {
return ()
}
- // "phony" rule that produces no file but just removes the files
- // `rm` recipe (Xake.Tasks namespace) allow to remove files and folders
- "clean" => recipe {
- do! rm {file "paket-files/*.*"}
- do! rm {dir "out"}
- do! rm {files (fileset {
- includes "samplefile*"
- }); verbose
- }
- }
-
"libs" => recipe {
// this command will copy all dlls to `lib` (flat files)
do! cp {file "packages/mylib/net46/*.dll"; todir "lib"}
@@ -161,12 +169,12 @@ do xakeScript {
do! log "Fizz" // use let!, do! to call any recipe
try
- let j = ref 3
- while !j < 5 do
- do! log (sprintf "j=%i" !j)
- j := !j + 1
+ let mutable j = 3
+ while j < 5 do
+ do! log (sprintf "j=%i" j)
+ j <- j + 1
with _ ->
- do! trace Error "Exception occured!"
+ do! trace Error "Exception occurred!"
}
// working with filesets and dependencies
@@ -177,6 +185,9 @@ do xakeScript {
// `let! files...` above records the dependency of `fileset` target from the set of files matching `src/*.cs` pattern. Whenever file is added or removed the dependency will be triggered
// `do! needFiles` records that `fileset` depends on *contents* of each file matching the mask. It will trigger if file size or timestamp is changed
+
+ // shorter way to express the same
+ do! dependsOn !! "src/*.cs"
}
diff --git a/samples/gettingstarted.fsx b/samples/gettingstarted.fsx
index 4a25411..6ac0bd5 100644
--- a/samples/gettingstarted.fsx
+++ b/samples/gettingstarted.fsx
@@ -1,6 +1,5 @@
-#r "paket:
- nuget Xake ~> 1.1 prerelease
- nuget Xake.Dotnet ~> 1.1 prerelease //"
+#r "nuget: Xake, 2.0.0"
+#r "nuget: Xake.Dotnet, 1.1.4.7-beta"
open Xake
open Xake.Dotnet
diff --git a/samples/rmdir.fsx b/samples/rmdir.fsx
index 938760c..439d819 100644
--- a/samples/rmdir.fsx
+++ b/samples/rmdir.fsx
@@ -1,4 +1,4 @@
-#r "paket: nuget Xake ~> 1.1 prerelease //"
+#r "nuget: Xake, 1.1.4.427-beta"
open Xake
open Xake.Tasks
diff --git a/src/core/CommonLib.fs b/src/core/CommonLib.fs
index 0796fac..ec1488e 100644
--- a/src/core/CommonLib.fs
+++ b/src/core/CommonLib.fs
@@ -1,57 +1,55 @@
-namespace Xake
+[]
+module internal Xake.CommonLib
-[]
-module internal CommonLib =
+type private CacheKey<'K> = K of 'K
- type private CacheKey<'K> = K of 'K
+///
+/// Creates a memoized function with the same signature. Performs memoization by storing run results to a cache.
+///
+///
+let memoize f =
+ let cache = ref Map.empty
+ let lck = System.Object()
+ fun x ->
+ match !cache |> Map.tryFind (K x) with
+ | Some v -> v
+ | None ->
+ lock lck (fun () ->
+ match !cache |> Map.tryFind (K x) with
+ | Some v -> v
+ | None ->
+ let res = f x
+ cache := !cache |> Map.add (K x) res
+ res)
- ///
- /// Creates a memoized function with the same signature. Performs memoization by storing run results to a cache.
- ///
- ///
- let memoize f =
- let cache = ref Map.empty
- let lck = System.Object()
- fun x ->
- match !cache |> Map.tryFind (K x) with
- | Some v -> v
- | None ->
- lock lck (fun () ->
- match !cache |> Map.tryFind (K x) with
- | Some v -> v
- | None ->
- let res = f x
- cache := !cache |> Map.add (K x) res
- res)
-
- ///**Description**
- /// Memoizes the recursive function. Memoized function is passed as first argument to f.
- ///**Parameters**
- /// * `f` - parameter of type `('a -> 'b) -> 'a -> 'b` The function to be memoized.
- ///
- ///**Output Type**
- /// * `'a -> 'b`
- ///
- ///**Exceptions**
- ///
- let memoizeRec f =
- let rec fn x = f fm x
- and fm = fn |> memoize
- in
- fm
+///**Description**
+/// Memoizes the recursive function. Memoized function is passed as first argument to f.
+///**Parameters**
+/// * `f` - parameter of type `('a -> 'b) -> 'a -> 'b` The function to be memoized.
+///
+///**Output Type**
+/// * `'a -> 'b`
+///
+///**Exceptions**
+///
+let memoizeRec f =
+ let rec fn x = f fm x
+ and fm = fn |> memoize
+ in
+ fm
- ///
- /// Takes n first elements from a list.
- ///
- ///
- let rec take cnt = function |_ when cnt <= 0 -> [] |[] -> [] |a::rest -> a :: (take (cnt-1) rest)
+///
+/// Takes n first elements from a list.
+///
+///
+let rec take cnt = function |_ when cnt <= 0 -> [] |[] -> [] |a::rest -> a :: (take (cnt-1) rest)
- ///
- /// Returns a list of unique values for a specific list.
- ///
- ///
- let distinct ls =
- ls |>
- List.fold (fun map item -> if map |> Map.containsKey item then map else map |> Map.add item 1) Map.empty
- |> Map.toList |> List.map fst
+///
+/// Returns a list of unique values for a specific list.
+///
+///
+let distinct ls =
+ ls |>
+ List.fold (fun map item -> if map |> Map.containsKey item then map else map |> Map.add item 1) Map.empty
+ |> Map.toList |> List.map fst
diff --git a/src/core/Database.fs b/src/core/Database.fs
index d36dca3..1e45c6b 100644
--- a/src/core/Database.fs
+++ b/src/core/Database.fs
@@ -59,7 +59,7 @@ module Storage =
| FileDep _ -> 1
| EnvVar _ -> 2
| Var _ -> 3
- | AlwaysRerun _ -> 4
+ | AlwaysRerun -> 4
| GetFiles _ -> 5)
[| wrap (ArtifactDep, fun (ArtifactDep f | OtherwiseFail f) -> f) target
wrap (FileDep, fun (FileDep(f, ts) | OtherwiseFail (f, ts)) -> (f, ts)) (pair file date)
diff --git a/src/core/DependencyAnalysis.fs b/src/core/DependencyAnalysis.fs
index 2f747b6..efc152e 100644
--- a/src/core/DependencyAnalysis.fs
+++ b/src/core/DependencyAnalysis.fs
@@ -25,7 +25,8 @@ let getExecTime ctx target =
|> Option.fold (fun _ r -> r.Steps |> List.sumBy (fun s -> s.OwnTime)) 0
/// Gets single dependency state and reason of a change.
-let getDepState getVar getFileList (getChangedDeps: Target -> ChangeReason list) = function
+let getDepState getVar getFileList (getChangedDeps: Target -> ChangeReason list) x =
+ match x with
| FileDep (a:File, wrtime) when not((File.exists a) && abs((File.getLastWriteTime a - wrtime).TotalMilliseconds) < TimeCompareToleranceMs) ->
let dbgInfo = File.exists a |> function
| false -> "file does not exists"
diff --git a/src/core/ExecCore.fs b/src/core/ExecCore.fs
index 2e50fe8..7c9b192 100644
--- a/src/core/ExecCore.fs
+++ b/src/core/ExecCore.fs
@@ -1,381 +1,394 @@
-namespace Xake
-
-module internal ExecCore =
+module internal Xake.ExecCore
+
+open System.Text.RegularExpressions
+open DependencyAnalysis
+
+open Storage
+open WorkerPool
+
+/// Writes the message with formatting to a log
+let traceLog (level:Logging.Level) fmt =
+ let write s = recipe {
+ let! ctx = getCtx()
+ return ctx.Logger.Log level "%s" s
+ }
+ Printf.kprintf write fmt
+
+let wildcardsRegex = Regex(@"\*\*|\*|\?", RegexOptions.Compiled)
+let patternTagRegex = Regex(@"\((?'tag'\w+?)\:[^)]+\)", RegexOptions.Compiled)
+let replace (regex:Regex) (evaluator: Match -> string) text = regex.Replace(text, evaluator)
+let ifNone x = function |Some x -> x | _ -> x
+
+let (|Dump|Dryrun|Run|) (opts:ExecOptions) =
+ match opts with
+ | _ when opts.DumpDeps -> Dump
+ | _ when opts.DryRun -> Dryrun
+ | _ -> Run
+
+let applyWildcards = function
+ | None -> id
+ | Some matches ->
+ fun pat ->
+ let mutable i = 0
+ let evaluator m =
+ i <- i + 1
+ matches |> Map.tryFind (i.ToString()) |> ifNone ""
+ let evaluatorTag (m: Match) =
+ matches |> (Map.tryFind m.Groups.["tag"].Value) |> ifNone ""
+ pat
+ |> replace wildcardsRegex evaluator
+ |> replace patternTagRegex evaluatorTag
+
+// locates the rule
+let locateRule (Rules rules) projectRoot target =
+ let matchRule rule =
+ match rule, target with
+
+ |FileConditionRule (meetCondition,_), FileTarget file when file |> File.getFullName |> meetCondition ->
+ //writeLog Level.Debug "Found conditional pattern '%s'" name
+ // TODO let condition rule extracting named groups
+ Some (rule,[],[target])
+
+ |FileRule (pattern,_), FileTarget file ->
+ file
+ |> File.getFullName
+ |> Path.matchGroups pattern projectRoot
+ |> Option.map (fun groups -> rule,groups,[target])
+
+ |MultiFileRule (patterns, _), FileTarget file ->
+ let fname = file |> File.getFullName
+ patterns
+ |> List.tryPick(fun pattern ->
+ Path.matchGroups pattern projectRoot fname
+ |> Option.map(fun groups -> groups, pattern)
+ )
+ |> Option.map (fun (groups, _) ->
+ let generateName = applyWildcards (Map.ofList groups |> Some)
+
+ let targets = patterns |> List.map (generateName >> (>) projectRoot >> File.make >> FileTarget)
+ rule, groups, targets)
+
+ |PhonyRule (pattern,_), PhonyAction phony ->
+ // printfn $"Phony rule {phony}, pattern {pattern}"
+ // Some (rule, [], [target])
+ phony
+ |> Path.matchGroups pattern ""
+ |> Option.map (fun groups -> rule,groups,[target])
+
+ | _ -> None
+
+ rules |> List.tryPick matchRule
+
+// Ordinal of the task being added to a task pool
+let refTaskOrdinal = ref 0
+
+///
+/// Creates a context for a new task
+///
+let newTaskContext targets matches ctx =
+ let ordinal = System.Threading.Interlocked.Increment(refTaskOrdinal)
+ let prefix = ordinal |> sprintf "%i> "
+ in
+ {ctx with
+ Ordinal = ordinal; Logger = PrefixLogger prefix ctx.RootLogger
+ Targets = targets
+ RuleMatches = matches
+ }
+
+// executes single artifact
+let rec execOne ctx target =
+
+ let run ruleMatches action targets =
+ let primaryTarget = targets |> List.head
+ async {
+ match ctx.NeedRebuild targets with
+ | true ->
+ let taskContext = newTaskContext targets ruleMatches ctx
+ do ctx.Logger.Log Command "Started %s as task %i" primaryTarget.ShortName taskContext.Ordinal
- open System.Text.RegularExpressions
- open DependencyAnalysis
+ do Progress.TaskStart primaryTarget |> ctx.Progress.Post
- /// Default options
- []
- let XakeOptions = ExecOptions.Default
+ let startResult = {BuildLog.makeResult targets with Steps = [Step.start "all"]}
+ let! (result,_) = action (startResult, taskContext)
+ let result = Step.updateTotalDuration result
- open WorkerPool
- open Storage
+ Store result |> ctx.Db.Post
- /// Writes the message with formatting to a log
- let traceLog (level:Logging.Level) fmt =
- let write s = action {
- let! ctx = getCtx()
- return ctx.Logger.Log level "%s" s
+ do Progress.TaskComplete primaryTarget |> ctx.Progress.Post
+ do ctx.Logger.Log Command "Completed %s in %A ms (wait %A ms)" primaryTarget.ShortName (Step.lastStep result).OwnTime (Step.lastStep result).WaitTime
+ return Succeed
+ | false ->
+ do ctx.Logger.Log Command "Skipped %s (up to date)" primaryTarget.ShortName
+ return Skipped
}
- Printf.kprintf write fmt
-
- let wildcardsRegex = Regex(@"\*\*|\*|\?", RegexOptions.Compiled)
- let patternTagRegex = Regex(@"\((?'tag'\w+?)\:[^)]+\)", RegexOptions.Compiled)
- let replace (regex:Regex) (evaluator: Match -> string) text = regex.Replace(text, evaluator)
- let ifNone x = function |Some x -> x | _ -> x
-
- let (|Dump|Dryrun|Run|) (opts:ExecOptions) =
- match opts with
- | _ when opts.DumpDeps -> Dump
- | _ when opts.DryRun -> Dryrun
- | _ -> Run
-
- let applyWildcards = function
- | None -> id
- | Some matches ->
- fun pat ->
- let mutable i = 0
- let evaluator m =
- i <- i + 1
- matches |> Map.tryFind (i.ToString()) |> ifNone ""
- let evaluatorTag (m: Match) =
- matches |> (Map.tryFind m.Groups.["tag"].Value) |> ifNone ""
- pat
- |> replace wildcardsRegex evaluator
- |> replace patternTagRegex evaluatorTag
-
- // locates the rule
- let locateRule (Rules rules) projectRoot target =
- let matchRule rule =
- match rule, target with
-
- |FileConditionRule (meetCondition,_), FileTarget file when file |> File.getFullName |> meetCondition ->
- //writeLog Level.Debug "Found conditional pattern '%s'" name
- // TODO let condition rule extracting named groups
- Some (rule,[],[target])
-
- |FileRule (pattern,_), FileTarget file ->
- file
- |> File.getFullName
- |> Path.matchGroups pattern projectRoot
- |> Option.map (fun groups -> rule,groups,[target])
-
- |MultiFileRule (patterns, _), FileTarget file ->
- let fname = file |> File.getFullName
- patterns
- |> List.tryPick(fun pattern ->
- Path.matchGroups pattern projectRoot fname
- |> Option.map(fun groups -> groups, pattern)
- )
- |> Option.map (fun (groups, pattern) ->
- let generateName = applyWildcards (Map.ofList groups |> Some)
-
- let targets = patterns |> List.map (generateName >> (>) projectRoot >> File.make >> FileTarget)
- rule, groups, targets)
-
- |PhonyRule (name,_), PhonyAction phony when phony = name ->
- // writeLog Verbose "Found phony pattern '%s'" name
- Some (rule, [], [target])
-
- | _ -> None
-
- rules |> List.tryPick matchRule
- let reportError ctx error details =
- do ctx.Logger.Log Error "Error '%s'. See build.log for details" error
- do ctx.Logger.Log Verbose "Error details are:\n%A\n\n" details
-
- let raiseError ctx error details =
- do reportError ctx error details
- raise (XakeException(sprintf "Script failed (error code: %A)\n%A" error details))
-
- // Ordinal of the task being added to a task pool
- let refTaskOrdinal = ref 0
-
- ///
- /// Creates a context for a new task
- ///
- let newTaskContext targets matches ctx =
- let ordinal = System.Threading.Interlocked.Increment(refTaskOrdinal)
- let prefix = ordinal |> sprintf "%i> "
- in
- {ctx with
- Ordinal = ordinal; Logger = PrefixLogger prefix ctx.RootLogger
- Targets = targets
- RuleMatches = matches
+ let getAction = function
+ | FileRule (_, a)
+ | FileConditionRule (_, a)
+ | MultiFileRule (_, a)
+ | PhonyRule (_, a) -> a
+
+ // result expression is...
+ match target |> locateRule ctx.Rules ctx.Options.ProjectRoot with
+ | Some(rule,groups,targets) ->
+ let groupsMap = groups |> Map.ofSeq
+ let (Recipe action) = rule |> getAction
+ async {
+ let! waitTask = (fun channel -> Run(target, targets, run groupsMap action targets, channel)) |> ctx.TaskPool.PostAndAsyncReply
+ let! status = waitTask
+ return target, status, ArtifactDep target
}
+ | None ->
+ target |> function
+ | FileTarget file when File.exists file ->
+ async.Return <| (target, ExecStatus.JustFile, FileDep (file, File.getLastWriteTime file))
+ | _ ->
+ let errorText = sprintf "Neither rule nor file is found for '%s'" target.FullName
+ do ctx.Logger.Log Error "%s" errorText
+ raise (XakeException errorText)
+
+///
+/// Executes several artifacts in parallel.
+///
+and execParallel ctx = List.map (execOne ctx) >> Seq.ofList >> Async.Parallel
+
+///
+/// Gets the status of dependency artifacts (obtained from 'need' calls).
+///
+///
+/// ExecStatus.Succeed,... in case at least one dependency was rebuilt
+///
+and execNeed ctx targets : Async =
+ async {
+ let primaryTarget = ctx.Targets |> List.head
+ primaryTarget |> (Progress.TaskSuspend >> ctx.Progress.Post)
+
+ do ctx.Throttler.Release() |> ignore
+ let! statuses = targets |> execParallel ctx
+ do! ctx.Throttler.WaitAsync(-1) |> Async.AwaitTask |> Async.Ignore
+
+ primaryTarget |> (Progress.TaskResume >> ctx.Progress.Post)
+
+ let dependencies = statuses |> Array.map (fun (_,_,x) -> x) |> List.ofArray in
+ return
+ (match statuses |> Array.exists (fun (_,x,_) -> x = ExecStatus.Succeed) with
+ |true -> Succeed
+ |false -> Skipped), dependencies
+ }
+
+/// phony actions are detected by their name, so if there is both a "clean" phony rule and a file "clean" in the `need` list, the phony rule is chosen first
+let makeTarget ctx name =
+ let (Rules rules) = ctx.Rules
+ let isPhonyRule nm = function
+ |PhonyRule (pattern,_) ->
+ nm |> Path.matchGroups pattern "" |> Option.isSome
+ | _ -> false
+ in
+ match rules |> List.exists (isPhonyRule name) with
+ | true -> PhonyAction name
+ | _ -> ctx.Options.ProjectRoot > name |> File.make |> FileTarget
+
+/// Implementation of "dry run"
+let dryRun ctx options (groups: string list list) =
+ let getDeps = getChangeReasons ctx |> memoizeRec
+
+ // getPlainDeps getDeps (getExecTime ctx)
+ do ctx.Logger.Log Command "Running (dry) targets %A" groups
+ let doneTargets = System.Collections.Hashtable()
+
+ let print f = ctx.Logger.Log Info f
+ let indent i = String.replicate i " "
+
+ let rec showDepStatus ii reasons =
+ reasons |> function
+ | Other reason ->
+ print "%sReason: %s" (indent ii) reason
+ | Depends t ->
+ print "%sDepends '%s' - changed target" (indent ii) t.ShortName
+ | DependsMissingTarget t ->
+ print "%sDepends on '%s' - missing target" (indent ii) t.ShortName
+ | FilesChanged (file:: rest) ->
+ print "%sFile is changed '%s' %s" (indent ii) file (if List.isEmpty rest then "" else sprintf " and %d more file(s)" <| List.length rest)
+ | reasons ->
+ do print "%sSome reason %A" (indent ii) reasons
+ ()
+ let rec displayNestedDeps ii =
+ function
+ | DependsMissingTarget t
+ | Depends t ->
+ showTargetStatus ii t
+ | _ -> ()
+ and showTargetStatus ii target =
+ if not <| doneTargets.ContainsKey(target) then
+ doneTargets.Add(target, 1)
+ let deps = getDeps target
+ if not <| List.isEmpty deps then
+ let execTimeEstimate = getExecTime ctx target
+ do ctx.Logger.Log Command "%sRebuild %A (~%Ams)" (indent ii) target.ShortName execTimeEstimate
+ deps |> List.iter (showDepStatus (ii+1))
+ deps |> List.iter (displayNestedDeps (ii+1))
+
+ let targetGroups = makeTarget ctx |> List.map |> List.map <| groups
+ let toSec v = float (v / 1) * 0.001
+ let endTime = Progress.estimateEndTime (getDurationDeps ctx getDeps) options.Threads targetGroups |> toSec
+
+ targetGroups |> List.collect id |> List.iter (showTargetStatus 0)
+ let alldeps = targetGroups |> List.collect id |> List.collect getDeps
+ if List.isEmpty alldeps then
+ ctx.Logger.Log Message "\n\n\tNo changed dependencies. Nothing to do.\n"
+ else
+ let parallelismMsg =
+ let endTimeTotal = Progress.estimateEndTime (getDurationDeps ctx getDeps) 1 targetGroups |> toSec
+ if options.Threads > 1 && endTimeTotal > endTime * 1.05 then
+                sprintf "\n\tTotal tasks duration is (estimate) in %As\n\tParallelism degree: %.2f" endTimeTotal (endTimeTotal / endTime)
+ else ""
+ ctx.Logger.Log Message "\n\n\tBuild will be completed (estimate) in %As%s\n" endTime parallelismMsg
+
+let rec unwindAggEx (e:System.Exception) = seq {
+ match e with
+ | :? System.AggregateException as a -> yield! a.InnerExceptions |> Seq.collect unwindAggEx
+ | a -> yield a
+ }
+
+let rec runSeq<'r> :Async<'r> list -> Async<'r list> =
+ List.fold
+ (fun rest i -> async {
+ let! tail = rest
+ let! head = i
+ return head::tail
+ })
+ (async {return []})
+
+let asyncMap f c = async.Bind(c, f >> async.Return)
+
+/// Runs the build (main function of xake)
+let runBuild ctx options groups =
+
+ let runTargets ctx options targets =
+ let getDeps = getChangeReasons ctx |> memoizeRec
+
+ let needRebuild (target: Target) =
+ getDeps >>
+ function
+ | [] -> false, ""
+ | Other reason::_ -> true, reason
+ | Depends t ::_ -> true, "Depends on target " + t.ShortName
+ | DependsMissingTarget t ::_ -> true, sprintf "Depends on target %s (missing)" t.ShortName
+ | FilesChanged (file::_) ::_ -> true, "File(s) changed " + file
+ | reasons -> true, sprintf "Some reason %A" reasons
+ >>
+ function
+ | false, _ -> false
+ | true, reason ->
+ do ctx.Logger.Log Info "Rebuild %A: %s" target.ShortName reason
+ true
+ <| target
+ // todo improve output by printing primary target
- // executes single artifact
- let rec execOne ctx target =
-
- let run ruleMatches action targets =
- let primaryTarget = targets |> List.head
- async {
- match ctx.NeedRebuild targets with
- | true ->
- let taskContext = newTaskContext targets ruleMatches ctx
- do ctx.Logger.Log Command "Started %s as task %i" primaryTarget.ShortName taskContext.Ordinal
-
- do Progress.TaskStart primaryTarget |> ctx.Progress.Post
-
- let startResult = {BuildLog.makeResult targets with Steps = [Step.start "all"]}
- let! (result,_) = action (startResult, taskContext)
- let result = Step.updateTotalDuration result
-
- Store result |> ctx.Db.Post
-
- do Progress.TaskComplete primaryTarget |> ctx.Progress.Post
- do ctx.Logger.Log Command "Completed %s in %A ms (wait %A ms)" primaryTarget.ShortName (Step.lastStep result).OwnTime (Step.lastStep result).WaitTime
- return ExecStatus.Succeed
- | false ->
- do ctx.Logger.Log Command "Skipped %s (up to date)" primaryTarget.ShortName
- return ExecStatus.Skipped
- }
-
- let getAction = function
- | FileRule (_, a)
- | FileConditionRule (_, a)
- | MultiFileRule (_, a)
- | PhonyRule (_, a) -> a
-
- // result expression is...
- match target |> locateRule ctx.Rules ctx.Options.ProjectRoot with
- | Some(rule,groups,targets) ->
- let groupsMap = groups |> Map.ofSeq
- let (Recipe action) = rule |> getAction
- async {
- let! waitTask = (fun channel -> Run(target, targets, run groupsMap action targets, channel)) |> ctx.TaskPool.PostAndAsyncReply
- let! status = waitTask
- return target, status, ArtifactDep target
- }
- | None ->
- target |> function
- | FileTarget file when File.exists file ->
- async.Return <| (target, ExecStatus.JustFile, FileDep (file, File.getLastWriteTime file))
- | _ -> raiseError ctx (sprintf "Neither rule nor file is found for '%s'" target.FullName) ""
-
- ///
- /// Executes several artifacts in parallel.
- ///
- and execParallel ctx = List.map (execOne ctx) >> Seq.ofList >> Async.Parallel
-
- ///
- /// Gets the status of dependency artifacts (obtained from 'need' calls).
- ///
- ///
- /// ExecStatus.Succeed,... in case at least one dependency was rebuilt
- ///
- and execNeed ctx targets : Async =
async {
- let primaryTarget = ctx.Targets |> List.head
- primaryTarget |> (Progress.TaskSuspend >> ctx.Progress.Post)
+ do ctx.Logger.Log Info "Build target list %A" targets
- do ctx.Throttler.Release() |> ignore
- let! statuses = targets |> execParallel ctx
- do! ctx.Throttler.WaitAsync(-1) |> Async.AwaitTask |> Async.Ignore
+ let progressSink = Progress.openProgress (getDurationDeps ctx getDeps) options.Threads targets options.Progress
+ let stepCtx = {ctx with NeedRebuild = List.exists needRebuild; Progress = progressSink}
- primaryTarget |> (Progress.TaskResume >> ctx.Progress.Post)
-
- let dependencies = statuses |> Array.map (fun (_,_,x) -> x) |> List.ofArray in
- return
- (match statuses |> Array.exists (fun (_,x,_) -> x = ExecStatus.Succeed) with
- |true -> ExecStatus.Succeed
- |false -> ExecStatus.Skipped), dependencies
+ try
+ return! targets |> execParallel stepCtx
+ finally
+ do Progress.Finish |> progressSink.Post
}
- /// phony actions are detected by their name so if there's "clean" phony and file "clean" in `need` list if will choose first
- let makeTarget ctx name =
- let (Rules rules) = ctx.Rules
- let isPhonyRule nm = function |PhonyRule (n,_) when n = nm -> true | _ -> false
- in
- match rules |> List.exists (isPhonyRule name) with
- | true -> PhonyAction name
- | _ -> ctx.Options.ProjectRoot > name |> File.make |> FileTarget
-
- /// Implementation of "dry run"
- let dryRun ctx options (groups: string list list) =
- let getDeps = getChangeReasons ctx |> memoizeRec
-
- // getPlainDeps getDeps (getExecTime ctx)
- do ctx.Logger.Log Command "Running (dry) targets %A" groups
- let doneTargets = System.Collections.Hashtable()
-
- let print f = ctx.Logger.Log Info f
- let indent i = String.replicate i " "
-
- let rec showDepStatus ii reasons =
- reasons |> function
- | ChangeReason.Other reason ->
- print "%sReason: %s" (indent ii) reason
- | ChangeReason.Depends t ->
- print "%sDepends '%s' - changed target" (indent ii) t.ShortName
- | ChangeReason.DependsMissingTarget t ->
- print "%sDepends on '%s' - missing target" (indent ii) t.ShortName
- | ChangeReason.FilesChanged (file:: rest) ->
- print "%sFile is changed '%s' %s" (indent ii) file (if List.isEmpty rest then "" else sprintf " and %d more file(s)" <| List.length rest)
- | reasons ->
- do print "%sSome reason %A" (indent ii) reasons
- ()
- let rec displayNestedDeps ii =
- function
- | ChangeReason.DependsMissingTarget t
- | ChangeReason.Depends t ->
- showTargetStatus ii t
- | _ -> ()
- and showTargetStatus ii target =
- if not <| doneTargets.ContainsKey(target) then
- doneTargets.Add(target, 1)
- let deps = getDeps target
- if not <| List.isEmpty deps then
- let execTimeEstimate = getExecTime ctx target
- do ctx.Logger.Log Command "%sRebuild %A (~%Ams)" (indent ii) target.ShortName execTimeEstimate
- deps |> List.iter (showDepStatus (ii+1))
- deps |> List.iter (displayNestedDeps (ii+1))
-
- let targetGroups = makeTarget ctx |> List.map |> List.map <| groups in
- let toSec v = float (v / 1) * 0.001
- let endTime = Progress.estimateEndTime (getDurationDeps ctx getDeps) options.Threads targetGroups |> toSec
-
- targetGroups |> List.collect id |> List.iter (showTargetStatus 0)
- let alldeps = targetGroups |> List.collect id |> List.collect getDeps
- if List.isEmpty alldeps then
- ctx.Logger.Log Message "\n\n\tNo changed dependencies. Nothing to do.\n"
- else
- let parallelismMsg =
- let endTimeTotal = Progress.estimateEndTime (getDurationDeps ctx getDeps) 1 targetGroups |> toSec
- if options.Threads > 1 && endTimeTotal > endTime * 1.05 then
- sprintf "\n\tTotal tasks duration is (estimate) in %As\n\tParallelist degree: %.2f" endTimeTotal (endTimeTotal / endTime)
- else ""
- ctx.Logger.Log Message "\n\n\tBuild will be completed (estimate) in %As%s\n" endTime parallelismMsg
-
- let rec unwindAggEx (e:System.Exception) = seq {
- match e with
- | :? System.AggregateException as a -> yield! a.InnerExceptions |> Seq.collect unwindAggEx
- | a -> yield a
+ groups |> List.map
+ (List.map (makeTarget ctx) >> (runTargets ctx options))
+ |> runSeq
+ |> asyncMap (Array.concat >> List.ofArray)
+
+/// Executes the build script
+let runScript options rules =
+ let logger = CombineLogger (ConsoleLogger options.ConLogLevel) options.CustomLogger
+ let logger =
+ match options.FileLog, options.FileLogLevel with
+ | null,_ | "",_
+ | _, Silent -> logger
+ | logFileName,level -> CombineLogger logger (FileLogger logFileName level)
+
+ let (throttler, pool) = WorkerPool.create logger options.Threads
+ let db = Storage.openDb (options.ProjectRoot > options.DbFileName) logger
+
+ let finalize () =
+ db.PostAndReply Storage.CloseWait
+ FlushLogs()
+
+ System.Console.CancelKeyPress
+ |> Event.add (fun _ ->
+ logger.Log Error "Build interrupted by user"
+ finalize()
+ exit 1)
+
+ let ctx = {
+ Ordinal = 0
+ TaskPool = pool; Throttler = throttler
+ Options = options; Rules = rules
+ Logger = logger; RootLogger = logger; Db = db
+ Progress = Progress.emptyProgress()
+ NeedRebuild = fun _ -> false
+ Targets = []
+ RuleMatches = Map.empty
}
- let rec runSeq<'r> :Async<'r> list -> Async<'r list> =
- List.fold
- (fun rest i -> async {
- let! tail = rest
- let! head = i
- return head::tail
- })
- (async {return []})
+ logger.Log Info "Options: %A" options
- let asyncMap f c = async.Bind(c, f >> async.Return)
+    // splits the list of targets ["t1;t2"; "t3;t4"] into a list of lists.
+ let targetLists =
+ options.Targets |>
+ function
+ | [] ->
+ do logger.Log Level.Message "No target(s) specified. Defaulting to 'main'"
+ [["main"]]
+ | tt ->
+ tt |> List.map (fun (s: string) -> s.Split(';', '|') |> List.ofArray)
- /// Runs the build (main function of xake)
- let runBuild ctx options groups =
-
- let runTargets ctx options targets =
- let getDeps = getChangeReasons ctx |> memoizeRec
+ let reportError ctx error details =
+ do ctx.Logger.Log Error "Error '%s'. See build.log for details" error
+ do ctx.Logger.Log Verbose "Error details are:\n%A\n\n" details
- let needRebuild (target: Target) =
- getDeps >>
- function
- | [] -> false, ""
- | ChangeReason.Other reason::_ -> true, reason
- | ChangeReason.Depends t ::_ -> true, "Depends on target " + t.ShortName
- | ChangeReason.DependsMissingTarget t ::_ -> true, sprintf "Depends on target %s (missing)" t.ShortName
- | ChangeReason.FilesChanged (file::_) ::_ -> true, "File(s) changed " + file
- | reasons -> true, sprintf "Some reason %A" reasons
- >>
- function
- | false, _ -> false
- | true, reason ->
- do ctx.Logger.Log Info "Rebuild %A: %s" target.ShortName reason
- true
- <| target
- // todo improve output by printing primary target
-
- async {
- do ctx.Logger.Log Info "Build target list %A" targets
-
- let progressSink = Progress.openProgress (getDurationDeps ctx getDeps) options.Threads targets options.Progress
- let stepCtx = {ctx with NeedRebuild = List.exists needRebuild; Progress = progressSink}
-
- try
- return! targets |> execParallel stepCtx
- finally
- do Progress.Finish |> progressSink.Post
- }
-
- groups |> List.map
- (List.map (makeTarget ctx) >> (runTargets ctx options))
- |> runSeq
- |> asyncMap (Array.concat >> List.ofArray)
-
- /// Executes the build script
- let runScript options rules =
- let logger = CombineLogger (ConsoleLogger options.ConLogLevel) options.CustomLogger
-
- let logger =
- match options.FileLog, options.FileLogLevel with
- | null,_ | "",_
- | _,Verbosity.Silent -> logger
- | logFileName,level -> CombineLogger logger (FileLogger logFileName level)
-
- let (throttler, pool) = WorkerPool.create logger options.Threads
-
- let db = Storage.openDb (options.ProjectRoot > options.DbFileName) logger
-
- let ctx = {
- Ordinal = 0
- TaskPool = pool; Throttler = throttler
- Options = options; Rules = rules
- Logger = logger; RootLogger = logger; Db = db
- Progress = Progress.emptyProgress()
- NeedRebuild = fun _ -> false
- Targets = []
- RuleMatches = Map.empty
- }
-
- logger.Log Info "Options: %A" options
-
- // splits list of targets ["t1;t2"; "t3;t4"] into list of list.
- let targetLists =
- options.Targets |>
- function
- | [] ->
- do logger.Log Level.Message "No target(s) specified. Defaulting to 'main'"
- [["main"]]
- | tt ->
- tt |> List.map (fun (s: string) -> s.Split(';', '|') |> List.ofArray)
-
- try
- match options with
- | Dump ->
- do logger.Log Level.Command "Dumping dependencies for targets %A" targetLists
- targetLists |> List.iter (List.map (makeTarget ctx) >> (dumpDeps ctx))
- | Dryrun ->
- targetLists |> (dryRun ctx options)
- | _ ->
- let start = System.DateTime.Now
- try
- targetLists |> (runBuild ctx options) |> Async.RunSynchronously |> ignore
- ctx.Logger.Log Message "\n\n Build completed in %A\n" (System.DateTime.Now - start)
- with | exn ->
- let th = if options.FailOnError then raiseError else reportError
- let errors = exn |> unwindAggEx |> Seq.map (fun e -> e.Message) in
- th ctx (exn.Message + "\n" + (errors |> String.concat "\r\n ")) exn
- ctx.Logger.Log Message "\n\n\tBuild failed after running for %A\n" (System.DateTime.Now - start)
+ try
+ match options with
+ | Dump ->
+ do logger.Log Level.Command "Dumping dependencies for targets %A" targetLists
+ targetLists |> List.iter (List.map (makeTarget ctx) >> (dumpDeps ctx))
+ | Dryrun ->
+ targetLists |> (dryRun ctx options)
+ | _ ->
+ let start = System.DateTime.Now
+ try
+ targetLists |> (runBuild ctx options) |> Async.RunSynchronously |> ignore
+ ctx.Logger.Log Message "\n\n Build completed in %A\n" (System.DateTime.Now - start)
+ with | exn ->
+ let exceptions = exn |> unwindAggEx
+ let errors = exceptions |> Seq.map (fun e -> e.Message) in
+ let details = exceptions |> Seq.last |> fun e -> e.ToString()
+ let errorText = errors |> String.concat "\r\n"
+
+ do reportError ctx errorText details
+ ctx.Logger.Log Message "\n\n\tBuild failed after running for %A\n" (System.DateTime.Now - start)
+
+ if options.ThrowOnError then
+ raise (XakeException "Script failure. See log file for details.")
+ else
+ finalize()
exit 2
- finally
- db.PostAndReply Storage.CloseWait
- Logging.FlushLogs()
-
- /// "need" implementation
- let need targets =
- action {
- let startTime = System.DateTime.Now
-
- let! ctx = getCtx()
- let! _,deps = targets |> execNeed ctx
-
- let totalDuration = int (System.DateTime.Now - startTime).TotalMilliseconds * 1
- let! result = getResult()
- let result' = {result with Depends = result.Depends @ deps} |> (Step.updateWaitTime totalDuration)
- do! setResult result'
- }
+ finally
+ finalize()
+
+/// "need" implementation
+let need targets = recipe {
+ let startTime = System.DateTime.Now
+
+ let! ctx = getCtx()
+ let! _,deps = targets |> execNeed ctx
+
+ let totalDuration = int (System.DateTime.Now - startTime).TotalMilliseconds * 1
+ let! result = getResult()
+ let result' = {result with Depends = result.Depends @ deps} |> (Step.updateWaitTime totalDuration)
+ do! setResult result'
+}
diff --git a/src/core/ExecTypes.fs b/src/core/ExecTypes.fs
index 5f87557..2f27eba 100644
--- a/src/core/ExecTypes.fs
+++ b/src/core/ExecTypes.fs
@@ -1,6 +1,7 @@
namespace Xake
open System.Threading
+open Xake.WorkerPool
/// Script execution options
type ExecOptions = {
@@ -24,8 +25,9 @@ type ExecOptions = {
/// Global script variables
Vars: (string * string) list
- /// Defines whether `run` should throw exception if script fails
- FailOnError: bool
+    /// Defines whether `run` should throw an exception if the script fails.
+    /// Default is false, which means the process exits with a non-zero code.
+ ThrowOnError: bool
/// Ignores command line swithes
IgnoreCommandLine: bool
@@ -44,30 +46,28 @@ type ExecOptions = {
/// Dump dependencies only
Progress: bool
-} with
-static member Default =
- {
- ProjectRoot = System.IO.Directory.GetCurrentDirectory()
- Threads = System.Environment.ProcessorCount
- ConLogLevel = Normal
-
- CustomLogger = CustomLogger (fun _ -> false) ignore
- FileLog = "build.log"
- FileLogLevel = Chatty
- Targets = []
- FailOnError = false
- Vars = List.Empty
- IgnoreCommandLine = false
- Nologo = false
- DbFileName = ".xake"
- DryRun = false
- DumpDeps = false
- Progress = true
+} with static member Default = {
+ ProjectRoot = System.IO.Directory.GetCurrentDirectory()
+ Threads = System.Environment.ProcessorCount
+ ConLogLevel = Normal
+
+ CustomLogger = CustomLogger (fun _ -> false) ignore
+ FileLog = "build.log"
+ FileLogLevel = Chatty
+ Targets = []
+ ThrowOnError = false
+ Vars = List.Empty
+ IgnoreCommandLine = false
+ Nologo = false
+ DbFileName = ".xake"
+ DryRun = false
+ DumpDeps = false
+ Progress = true
}
end
-type internal ExecStatus = | Succeed | Skipped | JustFile
-type private TaskPool = Agent>
+type ExecStatus = | Succeed | Skipped | JustFile
+type TaskPool = Agent>
/// Script execution context
type ExecContext = {
diff --git a/src/core/File.fs b/src/core/File.fs
index 4dd6e0a..5d340e8 100644
--- a/src/core/File.fs
+++ b/src/core/File.fs
@@ -3,8 +3,8 @@
module private impl =
let compareNames : string -> string -> int =
- let isUnix = Env.isUnix
- fun a b -> System.String.Compare(a, b, isUnix)
+ let ignoreCase = Env.isUnix
+ fun a b -> System.String.Compare(a, b, ignoreCase)
let getFileHash : string -> int =
if Env.isUnix then
diff --git a/src/core/Fileset.fs b/src/core/Fileset.fs
index 9d481ba..9b5bb2e 100644
--- a/src/core/Fileset.fs
+++ b/src/core/Fileset.fs
@@ -1,297 +1,295 @@
-namespace Xake
+[]
+module Xake.Fileset
-[]
-module Fileset =
+open System.IO
+open Xake
- open System.IO
- open Xake
+///
+/// Defines interface to a file system
+///
+type FileSystemType = {
+ GetDisk: string -> string
+ GetDirRoot: string -> string
+ GetParent: string -> string
+ AllDirs: string -> string seq
+ ScanDirs: string -> string -> string seq // mask -> dir -> dirs
+ ScanFiles: string -> string -> string seq // mask -> dir -> files
+}
- ///
- /// Defines interface to a file system
- ///
- type FileSystemType = {
- GetDisk: string -> string
- GetDirRoot: string -> string
- GetParent: string -> string
- AllDirs: string -> string seq
- ScanDirs: string -> string -> string seq // mask -> dir -> dirs
- ScanFiles: string -> string -> string seq // mask -> dir -> files
- }
+type FilePattern = string
- type FilePattern = string
-
- /// Filesystem pattern
- type FilesetElement = | Includes of Path.PathMask | Excludes of Path.PathMask
-
- type FilesetOptions = {FailOnEmpty:bool; BaseDir:string option}
-
- // Fileset is either set of rules or list of files (materialized)
- type Fileset = Fileset of FilesetOptions * FilesetElement list
- type Filelist = Filelist of File list
-
- /// Default fileset options
- let DefaultOptions = {FilesetOptions.BaseDir = None; FailOnEmpty = false}
-
- let Empty = Fileset (DefaultOptions,[])
- let EmptyList = Filelist []
-
- /// Implementation module
- module private Impl =
-
- open Path
-
- let fullname (f:DirectoryInfo) = f.FullName
-
- let FileSystem = {
- GetDisk = fun d -> d + Path.DirectorySeparatorChar.ToString()
- GetDirRoot = fun x -> Directory.GetDirectoryRoot x
- GetParent = Directory.GetParent >> fullname
- AllDirs = fun dir -> Directory.EnumerateDirectories(dir, "*", SearchOption.AllDirectories)
- ScanDirs = fun mask dir -> Directory.EnumerateDirectories(dir, mask, SearchOption.TopDirectoryOnly)
- ScanFiles = fun mask dir -> Directory.EnumerateFiles(dir, mask)
- }
-
- ///
- /// Changes current directory
- ///
- /// File system implementation
- /// Starting path
- /// target path
- let cd (fs:FileSystemType) startIn (Path.PathMask path) =
- // TODO check path exists after each step
- let applyPart (path:string) = function
- | CurrentDir -> path
- | Disk d -> fs.GetDisk d
- | FsRoot -> path |> fs.GetDirRoot
- | Parent -> path |> fs.GetParent
- | Directory d -> Path.Combine(path, d)
- | _ -> failwith "ChDir could only contain disk or directory names"
- in
- (startIn, path) ||> List.fold applyPart
-
- let listFiles (fs:FileSystemType) startIn (Path.PathMask pat) =
-
- // The pattern without mask become "explicit" file reference which is always included in resulting file list, regardless file presence. See impl notes for details.
- let isExplicitRule = pat |> List.exists (function | DirectoryMask _ | FileMask _ | Recurse -> true | _ -> false) |> not
- let filterDir = if isExplicitRule then id else Seq.filter Directory.Exists
- let filterFile = if isExplicitRule then id else Seq.filter File.Exists
-
- // Recursively applies the pattern rules to every item is start list
- let applyPart (paths: seq) :_ -> seq = function
- | Disk d -> fs.GetDisk d |> Seq.singleton
- | FsRoot -> paths |> Seq.map fs.GetDirRoot
- | CurrentDir -> paths
- | Parent -> paths |> Seq.map fs.GetParent
- | Recurse -> paths |> Seq.collect fs.AllDirs |> Seq.append paths
- | DirectoryMask mask -> paths |> Seq.collect (fs.ScanDirs mask)
- | Directory d -> paths |> Seq.map (fun dir -> Path.Combine(dir, d)) |> filterDir
- | FileMask mask -> paths |> Seq.collect (fs.ScanFiles mask)
- | FileName f -> paths |> Seq.map (fun dir -> Path.Combine(dir, f)) |> filterFile
- in
- (startIn, pat) ||> List.fold applyPart
-
- let ifNone = Option.fold (fun _ -> id)
-
- /// Implementation of fileset execute
- /// "Materializes" fileset to a filelist
- let scan fileSystem root (Fileset (options,filesetItems)) =
-
- let startDirPat = options.BaseDir |> ifNone root |> Path.parseDir
- let startDir = startDirPat |> cd fileSystem "."
-
- // TODO check performance, build function
- let includes src = [startDir] |> (listFiles fileSystem) >> Seq.append src
- let excludes src pat =
- let matchFile = Path.join startDirPat pat |> Path.matchesPattern in
- src |> Seq.filter (matchFile >> not)
-
- let folditem i = function
- | Includes pat -> includes i pat
- | Excludes pat -> excludes i pat
-
- filesetItems |> Seq.ofList |> Seq.fold folditem Seq.empty |> Seq.map File.make |> List.ofSeq |> Filelist
-
- // combines two fileset options
- let combineOptions (o1:FilesetOptions) (o2:FilesetOptions) =
- {DefaultOptions with
- BaseDir =
- match o1.BaseDir,o2.BaseDir with
- | Some _, Some _ -> failwith "Cannot combine filesets with basedirs defined in both (not implemented)"
- | Some _, None -> o1.BaseDir
- | _ -> o2.BaseDir
- FailOnEmpty = o1.FailOnEmpty || o2.FailOnEmpty}
-
- // combines two filesets
- let combineWith (Fileset (o2, set2)) (Fileset (o1,set1)) = Fileset(combineOptions o1 o2, set1 @ set2)
-
- // Combines result of reading file to a fileset
- let combineWithFile map (file:FileInfo) (Fileset (opts,fs)) =
- let elements = File.ReadAllLines file.FullName |> Array.toList |> List.map map in
- Fileset (opts, fs @ elements)
- // TODO filter comments, empty lines? |> Array.filter
-
- let changeBasedir dir (Fileset (opts,ps)) = Fileset ({opts with BaseDir = Some dir}, ps)
- let changeFailonEmpty f (Fileset (opts,ps)) = Fileset ({opts with FailOnEmpty = f}, ps)
-
- /// Fileset persistance implementation
- module private PicklerImpl =
-
- open Pickler
-
- let filesetoptions =
- wrap(
- (fun(foe,bdir) -> {FilesetOptions.FailOnEmpty = foe; BaseDir = bdir}),
- fun o -> (o.FailOnEmpty, o.BaseDir))
- (pair bool (option str))
-
- let filesetElement =
- alt
- (function | Includes _ -> 0 | Excludes _ -> 1)
- [|
- wrap (Includes, fun (Includes p | OtherwiseFail p) -> p) Path.pickler
- wrap (Excludes, fun (Excludes p | OtherwiseFail p) -> p) Path.pickler
- |]
-
- let fileinfo = wrap(File.make, File.getFullName) str
-
- let fileset = wrap(Fileset, fun (Fileset (o,l)) -> o,l) (pair filesetoptions (list filesetElement))
- let filelist = wrap(Filelist, fun (Filelist l) -> l) (list fileinfo)
-
- open Impl
-
- /// Gets the pickler for fileset type
- let filesetPickler = PicklerImpl.fileset
- let filelistPickler = PicklerImpl.filelist
+/// Fileset element: an include or exclude filesystem pattern
+type FilesetElement = | Includes of Path.PathMask | Excludes of Path.PathMask
- ///
- /// Creates a new fileset with default options.
- ///
- ///
- let ls (filePattern:FilePattern) =
- // TODO Path.parse is expected to handle trailing slash character
- let parse = match filePattern.EndsWith ("/") || filePattern.EndsWith ("\\") with | true -> Path.parseDir | _-> Path.parse
- Fileset (DefaultOptions, [filePattern |> parse |> Includes])
+type FilesetOptions = {FailOnEmpty:bool; BaseDir:string option}
- ///
- /// Create a file set for specific file mask. The same as "ls"
- ///
- let (!!) = ls
+// A fileset is either a set of rules or a materialized list of files
+type Fileset = Fileset of FilesetOptions * FilesetElement list
+type Filelist = Filelist of File list
- ///
- /// Defines the empty fileset with a specified base dir.
- ///
- ///
- let (~+) dir =
- Fileset ({DefaultOptions with BaseDir = Some dir}, [])
+/// Default fileset options
+let DefaultOptions = {FilesetOptions.BaseDir = None; FailOnEmpty = false}
- []
- let parseFileMask = Path.parse
+let Empty = Fileset (DefaultOptions,[])
+let EmptyList = Filelist []
- []
- let parseDirMask = Path.parseDir
+/// Implementation module
+module private Impl =
- // let matches filePattern projectRoot
- []
- let matches = Path.matches
+ open Path
- let FileSystem = Impl.FileSystem
+ let fullname (f:DirectoryInfo) = f.FullName
- ///
- /// "Materializes" fileset to a filelist
- ///
- let toFileList = Impl.scan Impl.FileSystem
-
- ///
- /// "Materializes" file mask to a list of files/paths
- ///
- let listByMask (root:string) = Impl.listFiles Impl.FileSystem [root]
+ let FileSystem = {
+ GetDisk = fun d -> d + Path.DirectorySeparatorChar.ToString()
+ GetDirRoot = fun x -> Directory.GetDirectoryRoot x
+ GetParent = Directory.GetParent >> fullname
+ AllDirs = fun dir -> Directory.EnumerateDirectories(dir, "*", SearchOption.AllDirectories)
+ ScanDirs = fun mask dir -> Directory.EnumerateDirectories(dir, mask, SearchOption.TopDirectoryOnly)
+ ScanFiles = fun mask dir -> Directory.EnumerateFiles(dir, mask)
+ }
///
- /// The same as toFileList but allows to provide file system adapter
+ /// Changes current directory
///
- let toFileList1 = Impl.scan
+ /// File system implementation
+ /// Starting path
+ /// target path
+ let cd (fs:FileSystemType) startIn (Path.PathMask path) =
+ // TODO check path exists after each step
+ let applyPart (path:string) = function
+ | CurrentDir -> path
+ | Disk d -> fs.GetDisk d
+ | FsRoot -> path |> fs.GetDirRoot
+ | Parent -> path |> fs.GetParent
+ | Directory d -> Path.Combine(path, d)
+ | _ -> failwith "ChDir can only contain disk or directory names"
+ in
+ (startIn, path) ||> List.fold applyPart
+
+ let listFiles (fs:FileSystemType) startIn (Path.PathMask pat) =
+
+ // A pattern without a mask becomes an "explicit" file reference, which is always included in the resulting file list regardless of whether the file exists. See impl notes for details.
+ let isExplicitRule = pat |> List.exists (function | DirectoryMask _ | FileMask _ | Recurse -> true | _ -> false) |> not
+ let filterDir = if isExplicitRule then id else Seq.filter Directory.Exists
+ let filterFile = if isExplicitRule then id else Seq.filter File.Exists
+
+ // Applies the pattern rules, part by part, to every path in the start list
+ let applyPart (paths: string seq) : _ -> string seq = function
+ | Disk d -> fs.GetDisk d |> Seq.singleton
+ | FsRoot -> paths |> Seq.map fs.GetDirRoot
+ | CurrentDir -> paths
+ | Parent -> paths |> Seq.map fs.GetParent
+ | Recurse -> paths |> Seq.collect fs.AllDirs |> Seq.append paths
+ | DirectoryMask mask -> paths |> Seq.collect (fs.ScanDirs mask)
+ | Directory d -> paths |> Seq.map (fun dir -> Path.Combine(dir, d)) |> filterDir
+ | FileMask mask -> paths |> Seq.collect (fs.ScanFiles mask)
+ | FileName f -> paths |> Seq.map (fun dir -> Path.Combine(dir, f)) |> filterFile
+ in
+ (startIn, pat) ||> List.fold applyPart
+
+ let ifNone = Option.fold (fun _ -> id)
+
+ /// Implementation of fileset execution
+ /// "Materializes" fileset to a filelist
+ let scan fileSystem root (Fileset (options,filesetItems)) =
+
+ let startDirPat = options.BaseDir |> ifNone root |> Path.parseDir
+ let startDir = startDirPat |> cd fileSystem "."
+
+ // TODO check performance, build function
+ let includes src = [startDir] |> (listFiles fileSystem) >> Seq.append src
+ let excludes src pat =
+ let matchFile = Path.join startDirPat pat |> Path.matchesPattern in
+ src |> Seq.filter (matchFile >> not)
+
+ let folditem i = function
+ | Includes pat -> includes i pat
+ | Excludes pat -> excludes i pat
+
+ filesetItems |> Seq.ofList |> Seq.fold folditem Seq.empty |> Seq.map File.make |> List.ofSeq |> Filelist
+
+ // combines two fileset options
+ let combineOptions (o1:FilesetOptions) (o2:FilesetOptions) =
+ {DefaultOptions with
+ BaseDir =
+ match o1.BaseDir,o2.BaseDir with
+ | Some _, Some _ -> failwith "Cannot combine filesets with basedirs defined in both (not implemented)"
+ | Some _, None -> o1.BaseDir
+ | _ -> o2.BaseDir
+ FailOnEmpty = o1.FailOnEmpty || o2.FailOnEmpty}
+
+ // combines two filesets
+ let combineWith (Fileset (o2, set2)) (Fileset (o1,set1)) = Fileset(combineOptions o1 o2, set1 @ set2)
+
+ // Combines the result of reading a file into a fileset
+ let combineWithFile map (file:FileInfo) (Fileset (opts,fs)) =
+ let elements = File.ReadAllLines file.FullName |> Array.toList |> List.map map in
+ Fileset (opts, fs @ elements)
+ // TODO filter comments, empty lines? |> Array.filter
+
+ let changeBasedir dir (Fileset (opts,ps)) = Fileset ({opts with BaseDir = Some dir}, ps)
+ let changeFailonEmpty f (Fileset (opts,ps)) = Fileset ({opts with FailOnEmpty = f}, ps)
+
+/// Fileset persistence implementation
+module private PicklerImpl =
+
+ open Pickler
+
+ let filesetoptions =
+ wrap(
+ (fun(foe,bdir) -> {FilesetOptions.FailOnEmpty = foe; BaseDir = bdir}),
+ fun o -> (o.FailOnEmpty, o.BaseDir))
+ (pair bool (option str))
+
+ let filesetElement =
+ alt
+ (function | Includes _ -> 0 | Excludes _ -> 1)
+ [|
+ wrap (Includes, fun (Includes p | OtherwiseFail p) -> p) Path.pickler
+ wrap (Excludes, fun (Excludes p | OtherwiseFail p) -> p) Path.pickler
+ |]
+
+ let fileinfo = wrap(File.make, File.getFullName) str
+
+ let fileset = wrap(Fileset, fun (Fileset (o,l)) -> o,l) (pair filesetoptions (list filesetElement))
+ let filelist = wrap(Filelist, fun (Filelist l) -> l) (list fileinfo)
+
+open Impl
+
+/// Gets the pickler for fileset type
+let filesetPickler = PicklerImpl.fileset
+let filelistPickler = PicklerImpl.filelist
+
+///
+/// Creates a new fileset with default options.
+///
+///
+let ls (filePattern:FilePattern) =
+ // TODO Path.parse is expected to handle trailing slash character
+ let parse = match filePattern.EndsWith ("/") || filePattern.EndsWith ("\\") with | true -> Path.parseDir | _ -> Path.parse
+ Fileset (DefaultOptions, [filePattern |> parse |> Includes])
+
+///
+/// Create a file set for specific file mask. The same as "ls"
+///
+let (!!) = ls
- type ListDiffType<'a> = | Added of 'a | Removed of 'a
+///
+/// Defines the empty fileset with a specified base dir.
+///
+///
+let (~+) dir =
+ Fileset ({DefaultOptions with BaseDir = Some dir}, [])
- ///
- /// Compares two file lists and returns differences list.
- ///
- ///
- ///
- let compareFileList (Filelist list1) (Filelist list2) =
+[]
+let parseFileMask = Path.parse
- let setOfNames = List.map File.getFullName >> Set.ofList
+[]
+let parseDirMask = Path.parseDir
- let set1, set2 = setOfNames list1, setOfNames list2
+// let matches filePattern projectRoot
+[]
+let matches = Path.matches
- let removed = Set.difference set1 set2 |> List.ofSeq |> List.map (Removed)
- let added = Set.difference set2 set1 |> List.ofSeq |> List.map (Added)
+let FileSystem = Impl.FileSystem
- removed @ added
+///
+/// "Materializes" fileset to a filelist
+///
+let toFileList = Impl.scan Impl.FileSystem
- ///
- /// Defines various operations on Fieset type.
- ///
- type Fileset with
- static member (+) (fs1, fs2: Fileset) :Fileset = fs1 |> combineWith fs2
- static member (+) (fs1: Fileset, pat) = fs1 ++ pat
- static member (-) (fs1: Fileset, pat) = fs1 -- pat
- static member (@@) (fs1, basedir) = fs1 |> Impl.changeBasedir basedir
- static member (@@) (Fileset (_,lst), options) = Fileset (options,lst)
+///
+/// "Materializes" file mask to a list of files/paths
+///
+let listByMask (root:string) = Impl.listFiles Impl.FileSystem [root]
+
+///
+/// The same as toFileList but allows providing a file system adapter
+///
+let toFileList1 = Impl.scan
+
+type ListDiffType<'a> = | Added of 'a | Removed of 'a
+
+///
+/// Compares two file lists and returns the list of differences.
+///
+///
+///
+let compareFileList (Filelist list1) (Filelist list2) =
+
+ let setOfNames = List.map File.getFullName >> Set.ofList
+
+ let set1, set2 = setOfNames list1, setOfNames list2
+
+ let removed = Set.difference set1 set2 |> List.ofSeq |> List.map (Removed)
+ let added = Set.difference set2 set1 |> List.ofSeq |> List.map (Added)
+
+ removed @ added
+
+///
+/// Defines various operations on the Fileset type.
+///
+type Fileset with
+ static member (+) (fs1, fs2: Fileset) :Fileset = fs1 |> combineWith fs2
+ static member (+) (fs1: Fileset, pat) = fs1 ++ pat
+ static member (-) (fs1: Fileset, pat) = fs1 -- pat
+ static member (@@) (fs1, basedir) = fs1 |> Impl.changeBasedir basedir
+ static member (@@) (Fileset (_,lst), options) = Fileset (options,lst)
- /// Conditional include/exclude operator
- static member (+?) (fs1: Fileset, (condition:bool,pat: FilePattern)) = if condition then fs1 ++ pat else fs1
- static member (+?) (fs1: Fileset, (condition:bool,fs2: Fileset)) :Fileset = if condition then fs1 |> combineWith fs2 else fs1
- static member (-?) (fs1: Fileset, (condition:bool,pat: FilePattern)) = if condition then fs1 -- pat else fs1
+ /// Conditional include/exclude operator
+ static member (+?) (fs1: Fileset, (condition:bool,pat: FilePattern)) = if condition then fs1 ++ pat else fs1
+ static member (+?) (fs1: Fileset, (condition:bool,fs2: Fileset)) :Fileset = if condition then fs1 |> combineWith fs2 else fs1
+ static member (-?) (fs1: Fileset, (condition:bool,pat: FilePattern)) = if condition then fs1 -- pat else fs1
- /// Adds includes pattern to a fileset.
- static member (++) ((Fileset (opts,pts)), includes) :Fileset =
- Fileset (opts, pts @ [includes |> Path.parse |> Includes])
+ /// Adds includes pattern to a fileset.
+ static member (++) ((Fileset (opts,pts)), includes) :Fileset =
+ Fileset (opts, pts @ [includes |> Path.parse |> Includes])
- /// Adds excludes pattern to a fileset.
- static member (--) (Fileset (opts,pts), excludes) =
- Fileset (opts, pts @ [excludes |> Path.parse |> Excludes])
- end
+ /// Adds excludes pattern to a fileset.
+ static member (--) (Fileset (opts,pts), excludes): Fileset =
+ Fileset (opts, pts @ [excludes |> Path.parse |> Excludes])
+end
- (******** builder ********)
- type FilesetBuilder() =
+(******** builder ********)
+type FilesetBuilder() =
- []
- member __.FailOnEmpty(fs,f) = fs |> changeFailonEmpty f
+ []
+ member __.FailOnEmpty(fs,f) = fs |> changeFailonEmpty f
- []
- member __.Basedir(fs,dir) = fs |> changeBasedir dir
+ []
+ member __.Basedir(fs,dir) = fs |> changeBasedir dir
- []
- member __.Includes(fs:Fileset,pattern) = fs ++ pattern
+ []
+ member __.Includes(fs:Fileset,pattern) = fs ++ pattern
- []
- member __.IncludesIf(fs:Fileset,condition, pattern:FilePattern) = fs +? (condition,pattern)
+ []
+ member __.IncludesIf(fs:Fileset,condition, pattern:FilePattern) = fs +? (condition,pattern)
- []
- member __.JoinFileset(fs1, fs2) = fs1 |> Impl.combineWith fs2
+ []
+ member __.JoinFileset(fs1, fs2) = fs1 |> Impl.combineWith fs2
- []
- member __.Excludes(fs:Fileset, pattern) = fs -- pattern
+ []
+ member __.Excludes(fs:Fileset, pattern) = fs -- pattern
- []
- member __.ExcludesIf(fs:Fileset, pattern) = fs -? pattern
+ []
+ member __.ExcludesIf(fs:Fileset, pattern) = fs -? pattern
- []
- member __.IncludeFile(fs, file) = (fs,file) ||> combineWithFile (Path.parse >> Includes)
+ []
+ member __.IncludeFile(fs, file) = (fs,file) ||> combineWithFile (Path.parse >> Includes)
- []
- member __.ExcludeFile(fs,file) = (fs,file) ||> combineWithFile (Path.parse >> Excludes)
+ []
+ member __.ExcludeFile(fs,file) = (fs,file) ||> combineWithFile (Path.parse >> Excludes)
- member __.Yield(()) = Empty
- member __.Return(pattern:FilePattern) = Empty ++ pattern
+ member __.Yield(()) = Empty
+ member __.Return(pattern:FilePattern) = Empty ++ pattern
- member __.Combine(fs1, fs2) = fs1 |> Impl.combineWith fs2
- member __.Delay(f) = f()
- member this.Zero() = this.Yield ( () )
+ member __.Combine(fs1, fs2) = fs1 |> Impl.combineWith fs2
+ member __.Delay(f) = f()
+ member this.Zero() = this.Yield ( () )
- member __.Bind(fs1:Fileset, f) = let fs2 = f() in fs1 |> Impl.combineWith fs2
- member x.For(fs, f) = x.Bind(fs, f)
- member x.Return(a) = x.Yield(a)
+ member __.Bind(fs1:Fileset, f) = let fs2 = f() in fs1 |> Impl.combineWith fs2
+ member x.For(fs, f) = x.Bind(fs, f)
+ member x.Return(a) = x.Yield(a)
- let fileset = FilesetBuilder()
+let fileset = FilesetBuilder()
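
Editor's note: a minimal usage sketch of the fileset API introduced above, assuming the defining module is opened. The patterns, paths, and the `includeShellScripts` flag are illustrative, not taken from the repository.

// Build a rule-based fileset with the combinators defined above.
let sources =
    ls "src/**/*.fs"
    ++ "docs/*.md"        // add an include pattern
    -- "src/**/obj/**"    // add an exclude pattern

// (+) merges two filesets; (+?) includes a pattern only when the condition holds.
let includeShellScripts = true
let all = sources + (!! "tests/**/*Tests.fs") +? (includeShellScripts, "scripts/*.sh")

// "Materialize" the rules into a concrete file list rooted at the current directory,
// then diff it against another materialized list.
let (Filelist files) = all |> toFileList "."
let changes = compareFileList (toFileList "." sources) (toFileList "." all)
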
diff --git a/src/core/Logging.fs b/src/core/Logging.fs
index a8124df..8783867 100644
--- a/src/core/Logging.fs
+++ b/src/core/Logging.fs
@@ -124,9 +124,10 @@ module private ConsoleSink =
let rec loop (progressMessage) =
let wipeProgressMessage () =
let len = progressMessage |> Option.fold (fun _ -> String.length) 0
- // printfn "cleft: %A len: %d" Console.CursorLeft len
- match len - Console.CursorLeft with
- | e when e > 0 -> System.Console.Write (String.replicate e " ")
+ Console.Out.Flush()
+ let cursorLeft = Console.CursorLeft
+ len - cursorLeft |> function
+ | e when e > 0 -> Console.Write (String.replicate e " ")
| _ -> ()
let renderProgress = function
| Some (outputString: string) ->
@@ -141,9 +142,9 @@ module private ConsoleSink =
Console.Write (sprintf "\r[%s] " level)
Console.ForegroundColor <- textColor
- System.Console.Write txt
+ Console.Write txt
wipeProgressMessage()
- System.Console.WriteLine()
+ Console.WriteLine()
async {
let! msg = mbox.Receive()
@@ -152,16 +153,9 @@ module private ConsoleSink =
match level |> levelToColor with
| Some colors ->
// in case of CRLF in the string make sure we washed out the progress message
- let rec writeLines = function
- | [] -> fun _ -> ()
- | (txt: string)::tail ->
- function
- | true ->
- renderLineWithInfo colors (LevelToString level) txt
- do writeLines tail false
- | false -> System.Console.WriteLine txt; do writeLines tail false
-
- writeLines (text.Split('\n') |> List.ofArray) true
+ text.Split('\n') |> Seq.iteri (function
+ | 0 -> renderLineWithInfo colors (LevelToString level)
+ | _ -> System.Console.WriteLine)
renderProgress progressMessage
| _ -> ()
@@ -180,8 +174,12 @@ module private ConsoleSink =
return! loop outputString
| Flush ch ->
+ Console.Write "\r"
wipeProgressMessage()
Console.Write "\r"
+
+ do! Console.Out.FlushAsync() |> Async.AwaitTask
+
ch.Reply ()
return! loop None
@@ -206,9 +204,7 @@ let private ConsoleLoggerBase (write: Level -> string -> unit) maxLevel =
/// Simplistic console logger.
let DumbConsoleLogger =
- ConsoleLoggerBase (
- fun level -> (LevelToString level) |> sprintf "[%s] %s" >> System.Console.WriteLine
- )
+ ConsoleLoggerBase (fun l -> l |> LevelToString |> sprintf "[%s] %s" >> System.Console.WriteLine)
/// Console logger with colors highlighting
let ConsoleLogger =
@@ -217,7 +213,7 @@ let ConsoleLogger =
/// Ensures all logs finished pending output.
let FlushLogs () =
try
- ConsoleSink.po.PostAndReply (ConsoleSink.Flush, 200) |> ignore
+ ConsoleSink.po.PostAndTryAsyncReply (ConsoleSink.Flush, 200) |> Async.RunSynchronously |> ignore
with _ -> ()
/// Draws a progress bar to console log.
@@ -233,9 +229,7 @@ let WriteConsoleProgress =
let CombineLogger (log1 : ILogger) (log2 : ILogger) =
{ new ILogger with
member __.Log level (fmt : Printf.StringFormat<'a, unit>) : 'a =
- let write s =
- log1.Log level "%s" s
- log2.Log level "%s" s
+ let write s = log1.Log level "%s" s; log2.Log level "%s" s
Printf.kprintf write fmt }
///
@@ -254,11 +248,11 @@ let PrefixLogger (prefix:string) (log : ILogger) =
///
///
let parseVerbosity = function
- | "Silent" -> Verbosity.Silent
- | "Quiet" -> Verbosity.Quiet
- | "Normal" -> Verbosity.Normal
- | "Loud" -> Verbosity.Loud
- | "Chatty" -> Verbosity.Chatty
- | "Diag" -> Verbosity.Diag
+ | "Silent" -> Silent
+ | "Quiet" -> Quiet
+ | "Normal" -> Normal
+ | "Loud" -> Loud
+ | "Chatty" -> Chatty
+ | "Diag" -> Diag
| s ->
failwithf "invalid verbosity: %s. Expected one of %s" s "Silent | Quiet | Normal | Loud | Chatty | Diag"
diff --git a/src/core/Path.fs b/src/core/Path.fs
index f85ef95..2ebd4a8 100644
--- a/src/core/Path.fs
+++ b/src/core/Path.fs
@@ -1,258 +1,240 @@
-namespace Xake
+module Xake.Path
open System.IO
open System.Text.RegularExpressions
-module Path =
-
- type Part =
- | FsRoot
- | Parent
- | CurrentDir
- | Disk of string
- | DirectoryMask of string
- | Directory of string
- | Recurse
- | FileMask of string
- | FileName of string
-
- type PathMask = PathMask of Part list
-
- type MatchResult =
- | Matches of (string*string) list
- | Nope
-
- module private impl =
-
- let notNullOrEmpty = System.String.IsNullOrEmpty >> not
-
- let driveRegex = Regex(@"^[A-Za-z]:$", RegexOptions.Compiled)
- let isMask (a:string) = a.IndexOfAny([|'*';'?'|]) >= 0
- let iif fn b c a = match fn a with | true -> b a | _ -> c a
-
- let isRoot = function | FsRoot::_ | Disk _::_ -> true | _ -> false
-
- ///
- /// Normalizes the pattern by resolving parent references and removing \.\
- ///
- let rec normalize = function
- | [] -> []
- | [x] -> [x]
- | x::tail ->
- match x::(normalize tail) with
- | Directory _::Parent::t -> t
- | CurrentDir::t -> t
- | rest -> rest
-
- ///
- /// Maps part of file path to a path part.
- ///
- ///
- let mapPart isLast = function
- | "**" -> Recurse
- | "." -> CurrentDir
- | ".." -> Parent (* works well now with Path.Combine() *)
- | a when a.EndsWith(":") && driveRegex.IsMatch(a) -> Disk(a)
- | a when not isLast -> a |> iif isMask DirectoryMask Directory
- | a -> a |> iif isMask FileMask FileName
-
- let parse isLastPart pattern =
-
- if notNullOrEmpty pattern then
- let parts = pattern.Split([|'\\'; '/'|], System.StringSplitOptions.RemoveEmptyEntries)
- let fsroot = pattern.[0] |> function | '\\' | '/' -> [FsRoot] | _ -> []
- in
- let isLast = isLastPart parts
- fsroot @ (parts |> Array.mapi (isLast >> mapPart) |> List.ofArray)
- |> normalize |> PathMask
- else
- PathMask []
-
- ///
- /// supplementary function for parsing directory
- ///
- let isLastPartForDir _ _ = false
- ///
- /// supplementary function for parsing file
- ///
- let isLastPartForFile (parts:_ array) = (=) (parts.Length-1)
-
- let dirSeparator = string Path.DirectorySeparatorChar
- let partToString =
- function
- | Directory s
- | FileName s
- | DirectoryMask s
- | FileMask s
- -> s
- | Parent -> ".."
- | Part.CurrentDir -> "."
- | Part.Disk d -> d + dirSeparator
- | Part.Recurse -> "**"
- | Part.FsRoot -> dirSeparator
-
-
- module private PicklerImpl =
-
- open Pickler
-
- let patternpart=
- alt(function
- | FsRoot -> 0
- | Parent -> 1
- | Disk _ -> 2
- | DirectoryMask _ -> 3
- | Directory _ -> 4
- | Recurse -> 5
- | FileMask _ -> 6
- | FileName _ -> 7
- | CurrentDir -> 8
- )
- [|
- wrap0 FsRoot
- wrap0 Parent
- wrap (Disk, fun (Disk d | OtherwiseFail d) -> d) str
- wrap (DirectoryMask, fun (DirectoryMask d | OtherwiseFail d) -> d) str
- wrap (Directory, fun (Directory d | OtherwiseFail d) -> d) str
- wrap0 Recurse
- wrap (FileMask, fun (FileMask m | OtherwiseFail m) -> m) str
- wrap (FileName, fun (FileName m | OtherwiseFail m) -> m) str
- wrap0 CurrentDir
- |]
-
- let pattern = wrap(PathMask, fun(PathMask pp) -> pp) (list patternpart)
-
- module internal matchImpl =
-
- let eq s1 s2 = System.StringComparer.OrdinalIgnoreCase.Equals(s1, s2)
-
- let wildcard2regexMap =
- ["**", "(.*)"
- "*", """([^/\\]*)"""
- "?", "([^/\\\\])"
- ".", "\\."; "$", "\\$"; "^", "\\^"; "[", "\\["; "]", "\\]"
- "+", "\\+"; "!", "\\!"; "=", "\\="; "{", "\\{"; "}", "\\}"
- ] |> dict
-
- let wildcardToRegex (m:Match) =
- match m.Groups.Item("tag") with
- | t when not t.Success ->
- match wildcard2regexMap.TryGetValue(m.Value) with
- | true, v -> v
- | _ -> m.Value
- | t -> "(?<" + t.Value + ">"
-
- let normalizeSlashes (pat: string) =
- pat.Replace('\\', '/')
-
- let maskToRegex (pattern:string) =
- let pat = Regex.Replace(pattern |> normalizeSlashes, @"\((?'tag'\w+?)\:|\*\*|([*.?$^+!={}])", wildcardToRegex)
- // TODO mask with sq brackets
- let ignoreCase = if Env.isUnix then RegexOptions.None else RegexOptions.IgnoreCase
- in
- Regex(@"^" + pat + "$", RegexOptions.Compiled + ignoreCase)
-
- let matchPart (mask:Part) (path:Part) =
- let matchByMask (rx:Regex) value = rx.Match(value).Success
- match mask,path with
- | (FsRoot, FsRoot) -> true
- | (Disk mask, Disk d) | (Directory mask, Directory d) | FileName mask, FileName d when eq mask d -> true
-
- | DirectoryMask mask, Directory d | FileMask mask, FileName d ->
- matchByMask (maskToRegex mask) d
+type Part =
+ | FsRoot
+ | Parent
+ | CurrentDir
+ | Disk of string
+ | DirectoryMask of string
+ | Directory of string
+ | Recurse
+ | FileMask of string
+ | FileName of string
- | _ -> false
+type PathMask = PathMask of Part list
- let rec matchPaths (mask:Part list) (p:Part list) =
- match mask,p with
- | [], [] -> true
- | [], _ | _, [] -> false
+type MatchResult =
+ | Matches of (string*string) list
+ | Nope
- | Directory _::Recurse::Parent::ms, _ -> matchPaths (Recurse::ms) p
- | Recurse::Parent::ms, _ -> matchPaths (Recurse::ms) p // ignore parent ref
+module private impl =
- | Recurse::ms, (FileName _)::_ -> matchPaths ms p
- | Recurse::ms, Directory _::xs -> (matchPaths mask xs) || (matchPaths ms p)
- | m::ms, x::xs ->
- matchPart m x && matchPaths ms xs
+ let notNullOrEmpty = System.String.IsNullOrEmpty >> not
- // API
- let pickler = PicklerImpl.pattern
+ let driveRegex = Regex(@"^[A-Za-z]:$", RegexOptions.Compiled)
+ let isMask (a:string) = a.IndexOfAny([|'*';'?'|]) >= 0
+ let iif fn b c a = match fn a with | true -> b a | _ -> c a
+
+ let isRoot = function | FsRoot::_ | Disk _::_ -> true | _ -> false
///
- /// Converts path to string representation (platform specific).
+ /// Normalizes the pattern by resolving parent references and removing \.\
///
- let toString =
- List.map impl.partToString
- >> List.fold (fun s ps -> Path.Combine (s, ps)) ""
+ let rec normalize = function
+ | [] -> []
+ | [x] -> [x]
+ | x::tail ->
+ match x::(normalize tail) with
+ | Directory _::Parent::t -> t
+ | CurrentDir::t -> t
+ | rest -> rest
///
- /// Joins two patterns.
+ /// Maps part of file path to a path part.
///
- ///
- ///
- let join (PathMask p1) (PathMask p2) =
- match impl.isRoot p2 with
- | true -> PathMask p2
- | _ -> p1 @ p2 |> impl.normalize |> PathMask
+ ///
+ let mapPart isLast = function
+ | "**" -> Recurse
+ | "." -> CurrentDir
+ | ".." -> Parent (* works well now with Path.Combine() *)
+ | a when a.EndsWith(":") && driveRegex.IsMatch(a) -> Disk(a)
+ | a when not isLast -> a |> iif isMask DirectoryMask Directory
+ | a -> a |> iif isMask FileMask FileName
+
+ let parse isLastPart pattern =
+
+ if notNullOrEmpty pattern then
+ let parts = pattern.Split([|'\\'; '/'|], System.StringSplitOptions.RemoveEmptyEntries)
+ let fsroot = pattern.[0] |> function | '\\' | '/' -> [FsRoot] | _ -> []
+ in
+ let isLast = isLastPart parts
+ fsroot @ (parts |> Array.mapi (isLast >> mapPart) |> List.ofArray)
+ |> normalize |> PathMask
+ else
+ PathMask []
///
- /// Converts Ant-style file pattern to a list of parts. Assumes the path specified
+ /// supplementary function for parsing directory
///
- let parseDir = impl.parse impl.isLastPartForDir
-
+ let isLastPartForDir _ _ = false
///
- /// Converts Ant-style file pattern to a PathMask.
+ /// supplementary function for parsing file
///
- let parse = impl.parse impl.isLastPartForFile
+ let isLastPartForFile (parts:_ array) = (=) (parts.Length-1)
+
+ let dirSeparator = string Path.DirectorySeparatorChar
+ let partToString =
+ function
+ | Directory s
+ | FileName s
+ | DirectoryMask s
+ | FileMask s
+ -> s
+ | Parent -> ".."
+ | Part.CurrentDir -> "."
+ | Part.Disk d -> d + dirSeparator
+ | Part.Recurse -> "**"
+ | Part.FsRoot -> dirSeparator
+
+
+module private PicklerImpl =
+
+ open Pickler
+
+ let patternpart=
+ alt(function
+ | FsRoot -> 0
+ | Parent -> 1
+ | Disk _ -> 2
+ | DirectoryMask _ -> 3
+ | Directory _ -> 4
+ | Recurse -> 5
+ | FileMask _ -> 6
+ | FileName _ -> 7
+ | CurrentDir -> 8
+ )
+ [|
+ wrap0 FsRoot
+ wrap0 Parent
+ wrap (Disk, fun (Disk d | OtherwiseFail d) -> d) str
+ wrap (DirectoryMask, fun (DirectoryMask d | OtherwiseFail d) -> d) str
+ wrap (Directory, fun (Directory d | OtherwiseFail d) -> d) str
+ wrap0 Recurse
+ wrap (FileMask, fun (FileMask m | OtherwiseFail m) -> m) str
+ wrap (FileName, fun (FileName m | OtherwiseFail m) -> m) str
+ wrap0 CurrentDir
+ |]
+
+ let pattern = wrap(PathMask, fun(PathMask pp) -> pp) (list patternpart)
+
+module internal matchImpl =
+
+ let eq s1 s2 = System.StringComparer.OrdinalIgnoreCase.Equals(s1, s2)
+
+ let wildcard2regexMap =
+ ["**", "(.*)"
+ "*", """([^/\\]*)"""
+ "?", "([^/\\\\])"
+ ".", "\\."; "$", "\\$"; "^", "\\^"; "[", "\\["; "]", "\\]"
+ "+", "\\+"; "!", "\\!"; "=", "\\="; "{", "\\{"; "}", "\\}"
+ ] |> dict
+
+ let wildcardToRegex (m:Match) =
+ match m.Groups.Item("tag") with
+ | t when not t.Success ->
+ match wildcard2regexMap.TryGetValue(m.Value) with
+ | true, v -> v
+ | _ -> m.Value
+ | t -> "(?<" + t.Value + ">"
+
+ let normalizeSlashes (pat: string) =
+ pat.Replace('\\', '/')
+
+ let maskToRegex (pattern:string) =
+ let pat = Regex.Replace(pattern |> normalizeSlashes, @"\((?'tag'\w+?)\:|\*\*|([*.?$^+!={}])", wildcardToRegex)
+ // TODO mask with sq brackets
+ let ignoreCase = if Env.isUnix then RegexOptions.None else RegexOptions.IgnoreCase
+ in
+ Regex(@"^" + pat + "$", RegexOptions.Compiled + ignoreCase)
+
+ let matchPart (mask:Part) (path:Part) =
+ let matchByMask (rx:Regex) value = rx.Match(value).Success
+ match mask,path with
+ | (FsRoot, FsRoot) -> true
+ | (Disk mask, Disk d) | (Directory mask, Directory d) | FileName mask, FileName d when eq mask d -> true
+
+ | DirectoryMask mask, Directory d | FileMask mask, FileName d ->
+ matchByMask (maskToRegex mask) d
+
+ | _ -> false
+
+ let rec matchPaths (mask:Part list) (p:Part list) =
+ match mask,p with
+ | [], [] -> true
+ | [], _ | _, [] -> false
+
+ | Directory _::Recurse::Parent::ms, _ -> matchPaths (Recurse::ms) p
+ | Recurse::Parent::ms, _ -> matchPaths (Recurse::ms) p // ignore parent ref
+
+ | Recurse::ms, (FileName _)::_ -> matchPaths ms p
+ | Recurse::ms, Directory _::xs -> (matchPaths mask xs) || (matchPaths ms p)
+ | m::ms, x::xs ->
+ matchPart m x && matchPaths ms xs
+
+// API
+let pickler = PicklerImpl.pattern
+
+///
+/// Converts path to string representation (platform specific).
+///
+let toString =
+ List.map impl.partToString
+ >> List.fold (fun s ps -> Path.Combine (s, ps)) ""
+
+///
+/// Joins two patterns.
+///
+///
+///
+let join (PathMask p1) (PathMask p2) =
+ match impl.isRoot p2 with
+ | true -> PathMask p2
+ | _ -> p1 @ p2 |> impl.normalize |> PathMask
+
+///
+/// Converts an Ant-style file pattern to a list of parts. Assumes the specified path is a directory.
+///
+let parseDir = impl.parse impl.isLastPartForDir
+
+///
+/// Converts Ant-style file pattern to a PathMask.
+///
+let parse = impl.parse impl.isLastPartForFile
(*
- ///
- /// Returns true if a file name (parsed to p) matches specific file mask.
- ///
- ///
- ///
- let matchesPattern (pattern:string) =
-
- let regex = matchImpl.maskToRegex pattern
- fun file -> regex.Match(matchImpl.normalizeSlashes file).Success
+///
+/// Returns true if a file name (parsed to p) matches specific file mask.
+///
+///
+///
+let matchesPattern (pattern:string) =
+
+ let regex = matchImpl.maskToRegex pattern
+ fun file -> regex.Match(matchImpl.normalizeSlashes file).Success
*)
- let matchesPattern (PathMask mask) file =
- let (PathMask fileParts) = file |> impl.parse impl.isLastPartForFile in
- matchImpl.matchPaths mask fileParts
-
- let matches filePattern rootPath =
- // IDEA: make relative path then match to pattern?
- // matches "src/**/*.cs" "c:\!\src\a\b\c.cs" -> true
-
- matchesPattern <| join (parseDir rootPath) (parse filePattern)
-
- /// file name match implementation for rules
- let matchGroups (pattern:string) rootPath =
-
- let regex = Path.Combine(rootPath, pattern) |> matchImpl.maskToRegex
- fun file ->
- let m = regex.Match(matchImpl.normalizeSlashes file)
- if m.Success then
- [for groupName in regex.GetGroupNames() do
- let group = m.Groups.[groupName]
- yield groupName, group.Value] |> Some
- else
- None
-
-[]
-module PathExt =
- ///
- /// Changes or appends file extension.
- ///
- let (-.) path ext = Path.ChangeExtension(path, ext)
+let matchesPattern (PathMask mask) file =
+ let (PathMask fileParts) = file |> impl.parse impl.isLastPartForFile in
+ matchImpl.matchPaths mask fileParts
- ///
- /// Combines two paths.
- ///
- let (>) path1 path2 = Path.Combine(path1, path2)
+let matches filePattern rootPath =
+ // IDEA: make relative path then match to pattern?
+ // matches "src/**/*.cs" "c:\!\src\a\b\c.cs" -> true
+
+ matchesPattern <| join (parseDir rootPath) (parse filePattern)
+
+/// file name match implementation for rules
+let matchGroups (pattern:string) rootPath =
+
+ let regex = Path.Combine(rootPath, pattern) |> matchImpl.maskToRegex
+ fun file ->
+ let m = regex.Match(matchImpl.normalizeSlashes file)
+ if m.Success then
+ [for groupName in regex.GetGroupNames() do
+ let group = m.Groups.[groupName]
+ yield groupName, group.Value] |> Some
+ else
+ None
- ///
- /// Appends the file extension.
- ///
- let (<.>) path ext = if System.String.IsNullOrWhiteSpace(ext) then path else path + "." + ext
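
Editor's note: a small sketch of the matching API defined in Path.fs above, assuming `open Xake`. The paths and patterns are illustrative and use Unix-style separators; "(cfg:*)" uses the named-group syntax handled by maskToRegex.

// matches builds a predicate from an Ant-style mask and a root path.
let isSource = Path.matches "src/**/*.fs" "/home/user/proj"
let a = isSource "/home/user/proj/src/core/Path.fs"    // true
let b = isSource "/home/user/proj/docs/readme.md"      // false

// matchGroups also extracts groups declared as "(name:mask)" in the pattern.
let groups = Path.matchGroups "out/(cfg:*)/*.dll" "/home/user/proj" "/home/user/proj/out/Release/App.dll"
// Some [ ...; ("cfg", "Release"); ... ]
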
diff --git a/src/core/PathExt.fs b/src/core/PathExt.fs
new file mode 100644
index 0000000..62d64e6
--- /dev/null
+++ b/src/core/PathExt.fs
@@ -0,0 +1,19 @@
+[]
+module Xake.PathExt
+
+open System.IO
+
+///
+/// Changes or appends file extension.
+///
+let (-.) path ext = Path.ChangeExtension(path, ext)
+
+///
+/// Combines two paths.
+///
+let (</>) path1 path2 = Path.Combine(path1, path2)
+
+///
+/// Appends the file extension.
+///
+let (<.>) path ext = if System.String.IsNullOrWhiteSpace(ext) then path else path + "." + ext
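
Editor's note: a minimal sketch of the path helpers above (assuming the combine operator is `</>`, as reconstructed from the stripped angle brackets); the paths are illustrative.

let out = "bin" </> "Release" </> "App.dll"   // combines segments via Path.Combine
let pdb = out -. "pdb"                        // changes the extension to .pdb
let readme = "README" <.> "md"                // appends the .md extension
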
diff --git a/src/core/Pickler.fs b/src/core/Pickler.fs
index c6560d8..092c33b 100644
--- a/src/core/Pickler.fs
+++ b/src/core/Pickler.fs
@@ -1,91 +1,89 @@
-namespace Xake
+module Xake.Pickler
open System
/// Pickler Combinators implementation
-module Pickler =
+type OutState = IO.BinaryWriter
+type InState = IO.BinaryReader
- type OutState = System.IO.BinaryWriter
- type InState = System.IO.BinaryReader
+///
+/// Main pickler type.
+///
+type 'a PU = { pickle: 'a -> OutState -> unit; unpickle: InState -> 'a }
- ///
- /// Main pickler type.
- ///
- type 'a PU = { pickle: 'a -> OutState -> unit; unpickle: InState -> 'a }
+///
+/// Unit pickler, does nothing.
+///
+let unit = {pickle = (fun () _ -> ()); unpickle = ignore}
- ///
- /// Unit pickler, does nothing.
- ///
- let unit = {pickle = (fun () _ -> ()); unpickle = ignore}
+///
+/// Converts a pickler of one type into a pickler of another type
+///
+let wrap (d:'a -> 'b, r: 'b -> 'a) (pu: PU<'a>) = {pickle = r >> pu.pickle; unpickle = pu.unpickle >> d}
- ///
- /// Translates pickler of one type into another's
- ///
- let wrap (d:'a -> 'b, r: 'b -> 'a) (pu: PU<'a>) = {pickle = r >> pu.pickle; unpickle = pu.unpickle >> d}
+///
+/// 'wrap' helper for argumentless variants
+///
+let wrap0 r = wrap ((fun () -> r), ignore) unit
- ///
- /// 'wrap' helper for argumentless variants
- ///
- let wrap0 r = wrap ((fun () -> r), ignore) unit
+let byte = {pickle = (fun (b:byte) st -> st.Write(b)); unpickle = fun st -> st.ReadByte()}
+let int = {pickle = (fun (i:Int32) st -> st.Write(i)); unpickle = fun st -> st.ReadInt32()}
+let int64 = {pickle = (fun (i:Int64) st -> st.Write(i)); unpickle = fun st -> st.ReadInt64()}
+let str = {pickle = (fun (s:string) st -> st.Write(s)); unpickle = fun st -> st.ReadString()}
+let float = {pickle = (fun (s:single) st -> st.Write(s)); unpickle = fun st -> st.ReadSingle()}
+let double = {pickle = (fun (s:float) st -> st.Write(s)); unpickle = fun st -> st.ReadDouble()}
- let byte = {pickle = (fun (b:byte) st -> st.Write(b)); unpickle = fun st -> st.ReadByte()}
- let int = {pickle = (fun (i:Int32) st -> st.Write(i)); unpickle = fun st -> st.ReadInt32()}
- let int64 = {pickle = (fun (i:Int64) st -> st.Write(i)); unpickle = fun st -> st.ReadInt64()}
- let str = {pickle = (fun (s:string) st -> st.Write(s)); unpickle = fun st -> st.ReadString()}
- let float = {pickle = (fun (s:single) st -> st.Write(s)); unpickle = fun st -> st.ReadSingle()}
- let double = {pickle = (fun (s:float) st -> st.Write(s)); unpickle = fun st -> st.ReadDouble()}
+let date = wrap (DateTime.FromBinary, fun (d:DateTime) -> d.Ticks) int64
+let bool = wrap ((<>) 0uy, function | true -> 1uy |false -> 0uy) byte
- let date = wrap (DateTime.FromBinary, fun (d:DateTime) -> d.Ticks) int64
- let bool = wrap ((<>) 0uy, function | true -> 1uy |false -> 0uy) byte
+/// Tuple picklers
+let pair pu1 pu2 = {
+ pickle = (fun (a,b) st -> (pu1.pickle a st : unit); (pu2.pickle b st))
+ unpickle = fun st -> pu1.unpickle st, pu2.unpickle st}
+let triple pu1 pu2 pu3 = {
+ pickle = (fun (a,b,c) st -> (pu1.pickle a st : unit); (pu2.pickle b st : unit); (pu3.pickle c st))
+ unpickle = fun st -> pu1.unpickle st, pu2.unpickle st, pu3.unpickle st}
- /// Tuple picklers
- let pair pu1 pu2 = {
- pickle = (fun (a,b) st -> (pu1.pickle a st : unit); (pu2.pickle b st))
- unpickle = fun st -> pu1.unpickle st, pu2.unpickle st}
- let triple pu1 pu2 pu3 = {
- pickle = (fun (a,b,c) st -> (pu1.pickle a st : unit); (pu2.pickle b st : unit); (pu3.pickle c st))
- unpickle = fun st -> pu1.unpickle st, pu2.unpickle st, pu3.unpickle st}
+let quad pu1 pu2 pu3 pu4 =
+ wrap ((fun ((a,b),(c,d)) -> (a,b,c,d)), fun(a,b,c,d) -> (a,b),(c,d)) <| pair (pair pu1 pu2) (pair pu3 pu4)
- let quad pu1 pu2 pu3 pu4 =
- wrap ((fun ((a,b),(c,d)) -> (a,b,c,d)), fun(a,b,c,d) -> (a,b),(c,d)) <| pair (pair pu1 pu2) (pair pu3 pu4)
+let private mux3 (a,b,c) x = (a x : unit); (b x : unit); (c x : unit)
+let private mux2 (a,b) x = (a x : unit); (b x : unit)
- let private mux3 (a,b,c) x = (a x : unit); (b x : unit); (c x : unit)
- let private mux2 (a,b) x = (a x : unit); (b x : unit)
+///
+/// List pickler.
+///
+///
+let list pu =
+ let rec listP f = function | [] -> byte.pickle 0uy | h :: t -> mux3 (byte.pickle 1uy, f h, listP f t)
+ let rec listUim f acc st = match byte.unpickle st with | 0uy -> List.rev acc | 1uy -> listUim f (f st :: acc) st | n -> failwithf "listU: found number %d" n
+ {
+ pickle = listP pu.pickle
+ unpickle = listUim pu.unpickle []
+ }
- ///
- /// List pickler.
- ///
- ///
- let list pu =
- let rec listP f = function | [] -> byte.pickle 0uy | h :: t -> mux3 (byte.pickle 1uy, f h, listP f t)
- let rec listUim f acc st = match byte.unpickle st with | 0uy -> List.rev acc | 1uy -> listUim f (f st :: acc) st | n -> failwithf "listU: found number %d" n
- {
- pickle = listP pu.pickle
- unpickle = listUim pu.unpickle []
- }
+///
+/// Variant (discriminated union) pickler.
+///
+/// Maps type to index in array of picklers.
+/// Array of picklers for each type.
+let alt<'a> (ftag: 'a -> Core.int) (puu: PU<'a> array): PU<'a> =
+ {
+ pickle = fun (a:'a) ->
+ let tag = ftag a in
+ mux2 (tag |> Convert.ToByte |> byte.pickle, puu.[tag].pickle a)
+ unpickle = fun st ->
+ let tag = st |> byte.unpickle |> Convert.ToInt32 in
+ (puu.[tag].unpickle st)
+ }
- ///
- /// Variant (discriminated union) pickler.
- ///
- /// Maps type to index in array of picklers.
- /// Array of picklers for each type.
- let alt<'a> (ftag: 'a -> Core.int) (puu: PU<'a> array): PU<'a> =
- {
- pickle = fun (a:'a) ->
- let tag = ftag a in
- mux2 (tag |> Convert.ToByte |> byte.pickle, puu.[tag].pickle a)
- unpickle = fun st ->
- let tag = st |> byte.unpickle |> Convert.ToInt32 in
- (puu.[tag].unpickle st)
- }
-
- ///
- /// Option type pickler.
- ///
- let option pu =
- alt
- (function | None _ -> 0 | Some _ -> 1)
- [|
- wrap ((fun () -> None), ignore) unit
- wrap (Some, Option.get) pu
- |]
+///
+/// Option type pickler.
+///
+let option pu =
+ alt
+ (function | None -> 0 | Some _ -> 1)
+ [|
+ wrap ((fun () -> None), ignore) unit
+ wrap (Some, Option.get) pu
+ |]
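
Editor's note: a round-trip sketch using only the combinators defined above; the record type, stream handling, and `open Xake` are illustrative assumptions, not part of the library.

open System.IO

type Item = { Name: string; Count: int }

// Map the (string * int) pair pickler onto the record via wrap.
let itemPU =
    Pickler.wrap ((fun (n, c) -> { Name = n; Count = c }), fun i -> i.Name, i.Count)
        (Pickler.pair Pickler.str Pickler.int)

let itemsPU = Pickler.list itemPU

let roundTrip (items: Item list) =
    use ms = new MemoryStream()
    let writer = new BinaryWriter(ms)
    itemsPU.pickle items writer
    writer.Flush()
    ms.Position <- 0L
    itemsPU.unpickle (new BinaryReader(ms))

// roundTrip [ { Name = "a"; Count = 1 } ] returns the same list after serialization.
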
diff --git a/src/core/ProcessExec.fs b/src/core/ProcessExec.fs
index 5e53c39..0a53dd4 100644
--- a/src/core/ProcessExec.fs
+++ b/src/core/ProcessExec.fs
@@ -1,42 +1,41 @@
// common tasks
-namespace Xake
-
-module internal ProcessExec =
- open System.Diagnostics
-
- // internal implementation
- let pexec handleStd handleErr cmd args (envvars:(string * string) list) workDir =
- let pinfo =
- ProcessStartInfo
- (cmd, args,
- UseShellExecute = false, WindowStyle = ProcessWindowStyle.Hidden,
- RedirectStandardError = true, RedirectStandardOutput = true)
-
- for name,value in envvars do
- pinfo.EnvironmentVariables.[name] <- value
-
- match workDir with
- | Some path -> pinfo.WorkingDirectory <- path
- | _ -> ()
-
- let proc = new Process(StartInfo = pinfo)
-
- proc.ErrorDataReceived.Add(fun e -> if e.Data <> null then handleErr e.Data)
- proc.OutputDataReceived.Add(fun e -> if e.Data <> null then handleStd e.Data)
-
- do proc.Start() |> ignore
-
- do proc.BeginOutputReadLine()
- do proc.BeginErrorReadLine()
-
- // task might be completed by that time
- Async.RunSynchronously <|
- async {
- do! Async.Sleep 50
- if proc.HasExited then
- return proc.ExitCode
- else
- proc.EnableRaisingEvents <- true
- do! Async.AwaitEvent proc.Exited |> Async.Ignore
- return proc.ExitCode
- }
+module internal Xake.ProcessExec
+
+open System.Diagnostics
+
+// internal implementation
+let pexec handleStd handleErr cmd args (envvars:(string * string) list) workDir =
+ let pinfo =
+ ProcessStartInfo
+ (cmd, args,
+ UseShellExecute = false, WindowStyle = ProcessWindowStyle.Hidden,
+ RedirectStandardError = true, RedirectStandardOutput = true)
+
+ for name,value in envvars do
+ pinfo.EnvironmentVariables.[name] <- value
+
+ match workDir with
+ | Some path -> pinfo.WorkingDirectory <- path
+ | _ -> ()
+
+ let proc = new Process(StartInfo = pinfo)
+
+ proc.ErrorDataReceived.Add(fun e -> if e.Data <> null then handleErr e.Data)
+ proc.OutputDataReceived.Add(fun e -> if e.Data <> null then handleStd e.Data)
+
+ do proc.Start() |> ignore
+
+ do proc.BeginOutputReadLine()
+ do proc.BeginErrorReadLine()
+
+ // task might be completed by that time
+ Async.RunSynchronously <|
+ async {
+ do! Async.Sleep 50
+ if proc.HasExited then
+ return proc.ExitCode
+ else
+ proc.EnableRaisingEvents <- true
+ do! Async.AwaitEvent proc.Exited |> Async.Ignore
+ return proc.ExitCode
+ }
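
Editor's note: the module above is internal, so this is only an in-assembly sketch of the signature — the handlers receive stdout/stderr lines and the call returns the exit code; the command and arguments are illustrative.

let exitCode =
    ProcessExec.pexec
        (printfn "out: %s")     // handleStd
        (eprintfn "err: %s")    // handleErr
        "dotnet"                // cmd
        "--info"                // args
        []                      // extra environment variables
        None                    // working directory (None = current)
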
diff --git a/src/core/Program.fs b/src/core/Program.fs
index 887d0bc..a9ebf89 100644
--- a/src/core/Program.fs
+++ b/src/core/Program.fs
@@ -21,7 +21,7 @@ module internal ParseArgs = begin
| "-h" | "/h" | "--help" | "/help" | "/?" | "-?" ->
printf """
Usage:
- fsi