diff --git a/project/Docs.scala b/project/Docs.scala index aa54e40c4..ff2a42605 100644 --- a/project/Docs.scala +++ b/project/Docs.scala @@ -20,7 +20,8 @@ object Docs { // - src/nanoc/nanoc.yaml // - src/reference/template.properties lazy val targetSbtBinaryVersion = "1.x" - lazy val targetSbtFullVersion = "1.0.4" + lazy val targetSbtFullVersion = "1.1.0" + lazy val siteEmail = settingKey[String]("") val isGenerateSiteMap = settingKey[Boolean]("generates site map or not") @@ -247,6 +248,7 @@ object Docs { val syncLocalImpl = Def.task { // sync the generated site val repo = ghpagesUpdatedRepository.value + val versioned = repo / targetSbtBinaryVersion val git = GitKeys.gitRunner.value val s = streams.value diff --git a/project/build.properties b/project/build.properties index 394cb75cf..8b697bbb9 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.0.4 +sbt.version=1.1.0 diff --git a/src/nanoc/nanoc.yaml b/src/nanoc/nanoc.yaml index 5049a56f2..47b2b357f 100644 --- a/src/nanoc/nanoc.yaml +++ b/src/nanoc/nanoc.yaml @@ -1,7 +1,6 @@ -sbtVersion: 1.0.4 - +sbtVersion: 1.1.0 # Windows build version tends to be different from the rest -windowsBuild: 1.0.4 +windowsBuild: 1.1.0 sbtVersionForScalaDoc: 1.0.4 diff --git a/src/reference/00-Getting-Started/07A-Scopes.md b/src/reference/00-Getting-Started/07A-Scopes.md index f2aa33476..4e9e1e8ac 100644 --- a/src/reference/00-Getting-Started/07A-Scopes.md +++ b/src/reference/00-Getting-Started/07A-Scopes.md @@ -69,15 +69,17 @@ Similarly, a full scope in sbt is formed by a **tuple** of a subproject, a configuration, and a task value: ```scala -scalacOptions in (projA, Compile, console) +projA / Compile / console / scalacOptions ``` -To be more precise, it actually looks like this: +Which is the slash syntax, introduced in sbt 1.1, for: ```scala -scalacOptions in (Select(projA: Reference), - Select(Compile: ConfigKey), - Select(console.key)) +scalacOptions in ( + Select(projA: Reference), + 
Select(Compile: ConfigKey), + Select(console.key) +) ``` #### Scoping by the subproject axis @@ -134,21 +136,21 @@ The various tasks that build a package (`packageSrc`, `packageBin`, and `packageOptions`. Those keys can have distinct values for each packaging task. -#### Global scope component +#### Zero scope component -Each scope axis can be filled in with an instance of the axis type (for -example the task axis can be filled in with a task), or the axis can be -filled in with the special value `Global`, which is also written as `*`. So we can think of `Global` as `None`. +Each scope axis can be filled in with an instance of the axis type (analogous to `Some(_)`), +or the axis can be filled in with the special value `Zero`. +So we can think of `Zero` as `None`. -`*` is a universal fallback for all scope axes, +`Zero` is a universal fallback for all scope axes, but its direct use should be reserved to sbt and plugin authors in most cases. -To make the matter confusing, `someKey in Global` appearing in build definition implicitly converts to `someKey in (Global, Global, Global)`. +`Global` is a scope that sets `Zero` to all axes: `Zero / Zero / Zero`. In other words, `Global / someKey` is a shorthand for `Zero / Zero / Zero / someKey`. ### Referring to scopes in a build definition If you create a setting in `build.sbt` with a bare key, it will be scoped -to (current subproject, configuration `Global`, task `Global`): +to (current subproject / configuration `Zero` / task `Zero`): ```scala lazy val root = (project in file(".")) @@ -158,56 +160,42 @@ lazy val root = (project in file(".")) ``` Run sbt and `inspect name` to see that it's provided by -`{file:/home/hp/checkout/hello/}default-aea33a/*:name`, that is, the -project is `{file:/home/hp/checkout/hello/}default-aea33a`, the -configuration is `*` (means `Global`), and the task is not shown (which -also means `Global`). 
+`ProjectRef(uri("file:/private/tmp/hello/"), "root") / name`, that is, the +project is `ProjectRef(uri("file:/Users/xxx/hello/"), "root")`, and +neither configuration nor task scope are shown (which means `Zero`). A bare key on the right hand side is also scoped to -(current subproject, configuration `Global`, task `Global`): +(current subproject / configuration `Zero` / task `Zero`): -``` -organization := name.value -``` +@@snip [build.sbt]($root$/src/sbt-test/ref/scopes/build.sbt) { #unscoped } -Keys have an overloaded method called `.in` that is used to set the scope. -The argument to `.in(...)` can be an instance of any of the scope axes. So for -example, though there's no real reason to do this, you could set the -`name` scoped to the `Compile` configuration: +The types of any of the scope axes have been method enriched to have a `/` operator. +The argument to `/` can be a key or another scope axis. So for +example, though there's no good reason to do this, you could have an instance of the +`name` key scoped to the `Compile` configuration: -```scala -name in Compile := "hello" -``` +@@snip [build.sbt]($root$/src/sbt-test/ref/scopes/build.sbt) { #confScoped } or you could set the name scoped to the `packageBin` task (pointless! 
just an example): -```scala -name in packageBin := "hello" -``` +@@snip [build.sbt]($root$/src/sbt-test/ref/scopes/build.sbt) { #taskScoped } or you could set the `name` with multiple scope axes, for example in the `packageBin` task in the `Compile` configuration: -```scala -name in (Compile, packageBin) := "hello" -``` +@@snip [build.sbt]($root$/src/sbt-test/ref/scopes/build.sbt) { #confAndTaskScoped } -or you could use `Global` for all axes: +or you could use `Global`: -```scala -// same as concurrentRestrictions in (Global, Global, Global) -concurrentRestrictions in Global := Seq( - Tags.limitAll(1) -) -``` +@@snip [build.sbt]($root$/src/sbt-test/ref/scopes/build.sbt) { #global } -(`concurrentRestrictions in Global` implicitly converts to -`concurrentRestrictions in (Global, Global, Global)`, setting -all axes to `Global` scope component; the task and configuration are already -`Global` by default, so here the effect is to make the project `Global`, -that is, define `*/*:concurrentRestrictions` rather than -`{file:/home/hp/checkout/hello/}default-aea33a/*:concurrentRestrictions`) +(`Global / concurrentRestrictions` implicitly converts to +`Zero / Zero / Zero / concurrentRestrictions`, setting +all axes to `Zero` scope component; the task and configuration are already +`Zero` by default, so here the effect is to make the project `Zero`, +that is, define `Zero / Zero / Zero / concurrentRestrictions` rather than +`ProjectRef(uri("file:/tmp/hello/"), "root") / Zero / Zero / concurrentRestrictions`) ### Referring to scoped keys from the sbt shell @@ -215,16 +203,15 @@ On the command line and in the sbt shell, sbt displays (and parses) scoped keys like this: ``` -{}/config:intask::key +ref / Config / intask / key ``` -- `{}` identifies the subproject axis. The - `` part will be missing if the subproject axis has "entire build" scope. -- `config` identifies the configuration axis. +- `ref` identifies the subproject axis. 
It could be `<project-id>`, `ProjectRef(uri("file:..."), "id")`, or `ThisBuild` that denotes the "entire build" scope. +- `Config` identifies the configuration axis using the capitalized Scala identifier. - `intask` identifies the task axis. - `key` identifies the key being scoped. -`*` can appear for each axis, referring to the `Global` scope. +`Zero` can appear for each axis. If you omit part of the scoped key, it will be inferred as follows: @@ -239,26 +226,23 @@ For more details, see [Interacting with the Configuration System][Inspecting-Set - `fullClasspath` specifies just a key, so the default scopes are used: current project, a key-dependent configuration, and global task scope. -- `test:fullClasspath` specifies the configuration, so this is - `fullClasspath` in the `test` configuration, with defaults for the other +- `Test / fullClasspath` specifies the configuration, so this is + `fullClasspath` in the `Test` configuration, with defaults for the other two scope axes. -- `*:fullClasspath` specifies `Global` for the configuration, rather than - the default configuration. -- `doc::fullClasspath` specifies the `fullClasspath` key scoped to the `doc` +- `root / fullClasspath` specifies the project `root`, where the project is + identified with the project id. +- `root / Zero / fullClasspath` specifies the project `root`, and + specifies `Zero` for the configuration, rather than the default configuration. +- `doc / fullClasspath` specifies the `fullClasspath` key scoped to the `doc` task, with the defaults for the project and configuration axes. -- `{file:/home/hp/checkout/hello/}default-aea33a/test:fullClasspath` - specifies a project, `{file:/home/hp/checkout/hello/}default-aea33a`, - where the project is identified with the build - `{file:/home/hp/checkout/hello/}` and then a project id inside that - build `default-aea33a`. Also specifies configuration `test`, but leaves - the default task axis. 
-- `{file:/home/hp/checkout/hello/}/test:fullClasspath` sets the project - axis to "entire build" where the build is - `{file:/home/hp/checkout/hello/}`. -- `{.}/test:fullClasspath` sets the project axis to "entire build" where - the build is `{.}`. `{.}` can be written `ThisBuild` in Scala code. -- `{file:/home/hp/checkout/hello/}/compile:doc::fullClasspath` sets all - three scope axes. +- `ProjectRef(uri("file:/tmp/hello/"), "root") / Test / fullClasspath` + specifies a project `ProjectRef(uri("file:/tmp/hello/"), "root")`. + Also specifies configuration Test, leaves the default task axis. +- `ThisBuild / version` sets the subproject axis to "entire build" where + the build is `ThisBuild`, with the default configuration. +- `Zero / fullClasspath` sets the subproject axis to `Zero`, + with the default configuration. +- `root / Compile / doc / fullClasspath` sets all three scope axes. ### Inspecting scopes @@ -267,38 +251,36 @@ keys and their scopes. Try `inspect test:fullClasspath`: ``` \$ sbt -> inspect test:fullClasspath -[info] Task: scala.collection.Seq[sbt.Attributed[java.io.File]] +sbt:Hello> inspect Test / fullClasspath +[info] Task: scala.collection.Seq[sbt.internal.util.Attributed[java.io.File]] [info] Description: [info] The exported classpath, consisting of build products and unmanaged and managed, internal and external dependencies. 
[info] Provided by: -[info] {file:/home/hp/checkout/hello/}default-aea33a/test:fullClasspath +[info] ProjectRef(uri("file:/tmp/hello/"), "root") / Test / fullClasspath +[info] Defined at: +[info] (sbt.Classpaths.classpaths) Defaults.scala:1639 [info] Dependencies: -[info] test:exportedProducts -[info] test:dependencyClasspath +[info] Test / dependencyClasspath +[info] Test / exportedProducts +[info] Test / fullClasspath / streams [info] Reverse dependencies: -[info] test:runMain -[info] test:run -[info] test:testLoader -[info] test:console +[info] Test / testLoader [info] Delegates: -[info] test:fullClasspath -[info] runtime:fullClasspath -[info] compile:fullClasspath -[info] *:fullClasspath -[info] {.}/test:fullClasspath -[info] {.}/runtime:fullClasspath -[info] {.}/compile:fullClasspath -[info] {.}/*:fullClasspath -[info] */test:fullClasspath -[info] */runtime:fullClasspath -[info] */compile:fullClasspath -[info] */*:fullClasspath +[info] Test / fullClasspath +[info] Runtime / fullClasspath +[info] Compile / fullClasspath +[info] fullClasspath +[info] ThisBuild / Test / fullClasspath +[info] ThisBuild / Runtime / fullClasspath +[info] ThisBuild / Compile / fullClasspath +[info] ThisBuild / fullClasspath +[info] Zero / Test / fullClasspath +[info] Zero / Runtime / fullClasspath +[info] Zero / Compile / fullClasspath +[info] Global / fullClasspath [info] Related: -[info] compile:fullClasspath -[info] compile:fullClasspath(for doc) -[info] test:fullClasspath(for doc) -[info] runtime:fullClasspath +[info] Compile / fullClasspath +[info] Runtime / fullClasspath ``` On the first line, you can see this is a task (as opposed to a setting, @@ -308,22 +290,22 @@ resulting from the task will have type "Provided by" points you to the scoped key that defines the value, in this case -`{file:/home/hp/checkout/hello/}default-aea33a/test:fullClasspath` (which -is the `fullClasspath` key scoped to the `test` configuration and the -`{file:/home/hp/checkout/hello/}default-aea33a` 
project). +`ProjectRef(uri("file:/tmp/hello/"), "root") / Test / fullClasspath` (which +is the `fullClasspath` key scoped to the `Test` configuration and the +`ProjectRef(uri("file:/tmp/hello/"), "root")` project). "Dependencies" was discussed in detail in the [previous page][Task-Graph]. We'll discuss "Delegates" later. Try `inspect fullClasspath` (as opposed to the above example, -inspect `test:fullClasspath`) to get a sense of the difference. Because -the configuration is omitted, it is autodetected as `compile`. -`inspect compile:fullClasspath` should therefore look the same as +inspect `Test / fullClasspath`) to get a sense of the difference. Because +the configuration is omitted, it is autodetected as `Compile`. +`inspect Compile / fullClasspath` should therefore look the same as `inspect fullClasspath`. -Try `inspect *:fullClasspath` for another contrast. `fullClasspath` is not -defined in the `Global` scope by default. +Try `inspect This / Zero / fullClasspath` for another contrast. `fullClasspath` is not +defined in the `Zero` configuration scope by default. Again, for more details, see [Interacting with the Configuration System][Inspecting-Settings]. @@ -334,7 +316,7 @@ For example, the `compile` task, by default, is scoped to `Compile` and `Test` configurations, and does not exist outside of those scopes. To change the value associated with the `compile` key, you need to write -`compile in Compile` or `compile in Test`. Using plain `compile` would define +`Compile / compile` or `Test / compile`. Using plain `compile` would define a new compile task scoped to the current project, rather than overriding the standard compile tasks which are scoped to a configuration. @@ -342,12 +324,12 @@ If you get an error like *"Reference to undefined setting"*, often you've failed to specify a scope, or you've specified the wrong scope. The key you're using may be defined in some other scope. 
sbt will try to suggest what you meant as part of the error message; look for "Did you -mean compile:compile?" +mean Compile / compile?" One way to think of it is that a name is only *part* of a key. In reality, all keys consist of both a name, and a scope (where the scope has three axes). The entire expression -`packageOptions in (Compile, packageBin)` is a key name, in other words. +`Compile / packageBin / packageOptions` is a key name, in other words. Simply `packageOptions` is also a key name, but a different one (for keys with no in, a scope is implicitly assumed: current project, global config, global task). @@ -371,7 +353,7 @@ lazy val root = (project in file(".")) .settings( inThisBuild(List( // Same as: - // organization in ThisBuild := "com.example" + // ThisBuild / organization := "com.example" organization := "com.example", scalaVersion := "$example_scala_version$", version := "0.1.0-SNAPSHOT" diff --git a/src/reference/00-Getting-Started/07B-Appending-Values.md b/src/reference/00-Getting-Started/07B-Appending-Values.md index 56e48f7b3..e5f5e6ab9 100644 --- a/src/reference/00-Getting-Started/07B-Appending-Values.md +++ b/src/reference/00-Getting-Started/07B-Appending-Values.md @@ -17,19 +17,19 @@ replacing it. - `+=` will append a single element to the sequence. - `++=` will concatenate another sequence. -For example, the key `sourceDirectories in Compile` has a `Seq[File]` as its +For example, the key `Compile / sourceDirectories` has a `Seq[File]` as its value. By default this key's value would include `src/main/scala`. 
If you wanted to also compile source code in a directory called source (since you just have to be nonstandard), you could add that directory: ```scala -sourceDirectories in Compile += new File("source") +Compile / sourceDirectories += new File("source") ``` Or, using the `file()` function from the sbt package for convenience: ```scala -sourceDirectories in Compile += file("source") +Compile / sourceDirectories += file("source") ``` (`file()` just creates a new `File`.) @@ -37,7 +37,7 @@ sourceDirectories in Compile += file("source") You could use `++=` to add more than one directory at a time: ```scala -sourceDirectories in Compile ++= Seq(file("sources1"), file("sources2")) +Compile / sourceDirectories ++= Seq(file("sources1"), file("sources2")) ``` Where `Seq(a, b, c, ...)` is standard Scala syntax to construct a @@ -47,7 +47,7 @@ To replace the default source directories entirely, you use `:=` of course: ```scala -sourceDirectories in Compile := Seq(file("sources1"), file("sources2")) +Compile / sourceDirectories := Seq(file("sources1"), file("sources2")) ``` #### When settings are undefined @@ -68,8 +68,8 @@ You can compute values of some tasks or settings to define or append a value for As a first example, consider appending a source generator using the project base directory and compilation classpath. ```scala -sourceGenerators in Compile += Def.task { - myGenerator(baseDirectory.value, (managedClasspath in Compile).value) +Compile / sourceGenerators += Def.task { + myGenerator(baseDirectory.value, (Compile / managedClasspath).value) } ``` diff --git a/src/reference/00-Getting-Started/07C-Scope-Delegation.md b/src/reference/00-Getting-Started/07C-Scope-Delegation.md index 1c98deafb..30d32b3eb 100644 --- a/src/reference/00-Getting-Started/07C-Scope-Delegation.md +++ b/src/reference/00-Getting-Started/07C-Scope-Delegation.md @@ -14,37 +14,22 @@ previous pages, [build definition][Basic-Def] and [scopes][Scopes]. 
Now that we've covered all the details of scoping, we can explain the `.value` lookup in detail. It's ok to skip this section if this is your first time reading this page. -Because the term `Global` is used for both a scope component `*`, -and as shorthand for the scope `(Global, Global, Global)`, -in this page we will use the symbol `*` when we mean it as the scope component. - To summarize what we've learned so far: - A scope is a tuple of components in three axes: the subproject axis, the configuration axis, and the task axis. -- There's a special scope component `*` (also called `Global`) for any of the scope axes. -- There's a special scope component `ThisBuild` (written as `{.}` in shell) for **the subprojects axis** only. +- There's a special scope component `Zero` for any of the scope axes. +- There's a special scope component `ThisBuild` for **the subprojects axis** only. - `Test` extends `Runtime`, and `Runtime` extends `Compile` configuration. -- A key placed in build.sbt is scoped to `(\${current subproject}, *, *)` by default. -- A key can be further scoped using `.in(...)` method. +- A key placed in build.sbt is scoped to `\${current subproject} / Zero / Zero` by default. +- A key can be scoped using the `/` operator. Now let's suppose we have the following build definition: -```scala -lazy val foo = settingKey[Int]("") -lazy val bar = settingKey[Int]("") - -lazy val projX = (project in file("x")) - .settings( - foo := { - (bar in Test).value + 1 - }, - bar in Compile := 1 - ) -``` +@@snip [build.sbt]($root$/src/sbt-test/ref/scope-delegation/x/build.sbt) { #fig1 } -Inside of `foo`'s setting body a dependency on the scoped key `(bar in Test)` is declared. 
+However, despite `Test / bar` being undefined in `projX`, +sbt is still able to resolve `Test / bar` to another scoped key, resulting in `foo` initialized as `2`. sbt has a well-defined fallback search path called *scope delegation*. @@ -57,11 +42,11 @@ Here are the rules for scope delegation: - Rule 1: Scope axes have the following precedence: the subproject axis, the configuration axis, and then the task axis. - Rule 2: Given a scope, delegate scopes are searched by substituting the task axis in the following order: - the given task scoping, and then `*` (`Global`), which is non-task scoped version of the scope. + the given task scoping, and then `Zero`, which is non-task scoped version of the scope. - Rule 3: Given a scope, delegate scopes are searched by substituting the configuration axis in the following order: - the given configuration, its parents, their parents and so on, and then `*` (`Global`, same as unscoped configuration axis). + the given configuration, its parents, their parents and so on, and then `Zero` (same as unscoped configuration axis). - Rule 4: Given a scope, delegate scopes are searched by substituting the subproject axis in the following order: - the given subproject, `ThisBuild`, and then `*` (`Global`). + the given subproject, `ThisBuild`, and then `Zero`. - Rule 5: A delegated scoped key and its dependent settings/tasks are evaluated without carrying the original context. We will look at each rule in the rest of this page. @@ -70,7 +55,7 @@ We will look at each rule in the rest of this page. - Rule 1: Scope axes have the following precedence: the subproject axis, the configuration axis, and then the task axis. -In other words, given two scopes candidates, if one has more specific value on the subproject axis, +In other words, given two scope candidates, if one has more specific value on the subproject axis, it will always win regardless of the configuration or the task scoping. 
Similarly, if subprojects are the same, one with more specific configuration value will always win regardless of the task scoping. We will see more rules to define *more specific*. @@ -78,10 +63,10 @@ of the task scoping. We will see more rules to define *more specific*. ### Rule 2: The task axis delegation - Rule 2: Given a scope, delegate scopes are searched by **substituting** the task axis in the following order: - the given task scoping, and then `*` (`Global`), which is non-task scoped version of the scope. + the given task scoping, and then `Zero`, which is non-task scoped version of the scope. Here we have a concrete rule for how sbt will generate delegate scopes given a key. -Remember, we are trying to show the search path given an arbitrary `(xxx in yyy).value`. +Remember, we are trying to show the search path given an arbitrary `(xxx / yyy).value`. **Exercise A**: Given the following build definition: @@ -89,61 +74,50 @@ Remember, we are trying to show the search path given an arbitrary `(xxx in yyy) lazy val projA = (project in file("a")) .settings( name := { - "foo-" + (scalaVersion in packageBin).value + "foo-" + (packageBin / scalaVersion).value }, scalaVersion := "2.11.11" ) ``` -What is the value of `name in projA` (`projA/name` in sbt shell)? +What is the value of `projA / name`? 1. `"foo-2.11.11"` 2. `"foo-$example_scala_version$"` 3. something else? The answer is `"foo-2.11.11"`. -Inside of `.settings(...)`, `scalaVersion` is automatically scoped to `(projA, *, *)`, -so `scalaVersion in packageBin` becomes `scalaVersion in (projA, *, packageBin)`. +Inside of `.settings(...)`, `scalaVersion` is automatically scoped to `projA / Zero / Zero`, +so `packageBin / scalaVersion` becomes `projA / Zero / packageBin / scalaVersion`. That particular scoped key is undefined. -By using Rule 2, sbt will substitute the task axis to `*` as `(projA, *, *)` (or `proj/scalaVersion` in shell). 
+By using Rule 2, sbt will substitute the task axis to `Zero` as `projA / Zero / Zero` (or `projA / scalaVersion`). That scoped key is defined to be `"2.11.11"`. ### Rule 3: The configuration axis search path - Rule 3: Given a scope, delegate scopes are searched by substituting the configuration axis in the following order: - the given configuration, its parents, their parents and so on, and then `*` (`Global`, same as unscoped configuration axis). + the given configuration, its parents, their parents and so on, and then `Zero` (same as unscoped configuration axis). The example for that is `projX` that we saw earlier: -```scala -lazy val foo = settingKey[Int]("") -lazy val bar = settingKey[Int]("") +@@snip [build.sbt]($root$/src/sbt-test/ref/scope-delegation/x/build.sbt) { #fig1 } -lazy val projX = (project in file("x")) - .settings( - foo := { - (bar in Test).value + 1 - }, - bar in Compile := 1 - ) -``` - -If we write out the full scope again, it's `(projX, Test, *)`. +If we write out the full scope again, it's `projX / Test / Zero`. Also recall that `Test` extends `Runtime`, and `Runtime` extends `Compile`. -`(bar in Test)` is undefined, but due to Rule 3 sbt will look for -`bar` scoped in `(projX, Test, *)`, `(projX, Runtime, *)`, and then -`(projX, Compile, *)`. The last one is found, which is `bar in Compile`. +`Test / bar` is undefined, but due to Rule 3 sbt will look for +`bar` scoped in `projX / Test / Zero`, `projX / Runtime / Zero`, and then +`projX / Compile / Zero`. The last one is found, which is `Compile / bar`. ### Rule 4: The subproject axis search path - Rule 4: Given a scope, delegate scopes are searched by substituting the subproject axis in the following order: - the given subproject, `ThisBuild`, and then `*` (`Global`). + the given subproject, `ThisBuild`, and then `Zero`. 
**Exercise B**: Given the following build definition: ```scala -organization in ThisBuild := "com.example" +ThisBuild / organization := "com.example" lazy val projB = (project in file("b")) .settings( @@ -152,59 +126,38 @@ lazy val projB = (project in file("b")) ) ``` -What is the value of `name in projB` (`projB/name` in shell)? +What is the value of `projB / name`? 1. `"abc-com.example"` 2. `"abc-org.tempuri"` 3. something else? The answer is `abc-org.tempuri`. -So based on Rule 4, the first search path is `organization` scoped to `(projB, *, *)`, +So based on Rule 4, the first search path is `organization` scoped to `projB / Zero / Zero`, which is defined in `projB` as `"org.tempuri"`. -This has higher precedence than the build-level setting `organization in ThisBuild`. +This has higher precedence than the build-level setting `ThisBuild / organization`. #### Scope axis precedence, again **Exercise C**: Given the following build definition: -```scala -scalaVersion in (ThisBuild, packageBin) := "2.12.2" - -lazy val projC = (project in file("c")) - .settings( - name := { - "foo-" + (scalaVersion in packageBin).value - }, - scalaVersion := "2.11.11" - ) -``` +@@snip [build.sbt]($root$/src/sbt-test/ref/scope-delegation/x/build.sbt) { #fig_c } -What is value of `name in projC`? +What is the value of `projC / name`? 1. `"foo-2.12.2"` 2. `"foo-2.11.11"` 3. something else? The answer is `foo-2.11.11`. -`scalaVersion` scoped to `(projC, *, packageBin)` is undefined. -Rule 2 finds `(projC, *, *)`. Rule 4 finds `(ThisBuild, *, packageBin)`. +`scalaVersion` scoped to `projC / Zero / packageBin` is undefined. +Rule 2 finds `projC / Zero / Zero`. Rule 4 finds `ThisBuild / Zero / packageBin`. In this case Rule 1 dictates that more specific value on the subproject axis wins, -which is `(projC, *, *)` that is defined to `"2.11.11"`. 
**Exercise D**: Given the following build definition: -```scala -scalacOptions in ThisBuild += "-Ywarn-unused-import" - -lazy val projD = (project in file("d")) - .settings( - test := { - println((scalacOptions in (Compile, console)).value) - }, - scalacOptions in console -= "-Ywarn-unused-import", - scalacOptions in Compile := scalacOptions.value // added by sbt - ) -``` +@@snip [build.sbt]($root$/src/sbt-test/ref/scope-delegation/x/build.sbt) { #fig_d } What would you see if you ran `projD/test`? @@ -213,16 +166,16 @@ What would you see if you ran `projD/test`? 3. something else? The answer is `List(-Ywarn-unused-import)`. -Rule 2 finds `(projD, Compile, *)`, -Rule 3 finds `(projD, *, console)`, -and Rule 4 finds `(ThisBuild, *, *)`. -Rule 1 selects `(projD, Compile, *)` +Rule 2 finds `projD / Compile / Zero`, +Rule 3 finds `projD / Zero / console`, +and Rule 4 finds `ThisBuild / Zero / Zero`. +Rule 1 selects `projD / Compile / Zero` because it has the subproject axis `projD`, and the configuration axis has higher precedence over the task axis. -Next, `scalacOptions in Compile` refers to `scalacOptions.value`, -we next need to find a delegate for `(projD, *, *)`. -Rule 4 finds `(ThisBuild, *, *)` and thus it resolves to `List(-Ywarn-unused-import)`. +Next, `Compile / scalacOptions` refers to `scalacOptions.value`, +we next need to find a delegate for `projD / Zero / Zero`. +Rule 4 finds `ThisBuild / Zero / Zero` and thus it resolves to `List(-Ywarn-unused-import)`. ### Inspect command lists the delegates @@ -230,43 +183,42 @@ You might want to look up quickly what is going on. This is where `inspect` can be used. ``` -Hello> inspect projD/compile:console::scalacOptions +sbt:projd> inspect projD / Compile / console / scalacOptions [info] Task: scala.collection.Seq[java.lang.String] [info] Description: [info] Options for the Scala compiler. 
[info] Provided by: -[info] {file:/Users/xxxx/}projD/compile:scalacOptions +[info] ProjectRef(uri("file:/tmp/projd/"), "projD") / Compile / scalacOptions [info] Defined at: -[info] /Users/xxxx/build.sbt:47 +[info] /tmp/projd/build.sbt:9 [info] Reverse dependencies: -[info] projD/compile:console -[info] projD/*:test +[info] projD / test +[info] projD / Compile / console [info] Delegates: -[info] projD/compile:console::scalacOptions -[info] projD/compile:scalacOptions -[info] projD/*:console::scalacOptions -[info] projD/*:scalacOptions -[info] {.}/compile:console::scalacOptions -[info] {.}/compile:scalacOptions -[info] {.}/*:console::scalacOptions -[info] {.}/*:scalacOptions -[info] */compile:console::scalacOptions -[info] */compile:scalacOptions -[info] */*:console::scalacOptions -[info] */*:scalacOptions -.... +[info] projD / Compile / console / scalacOptions +[info] projD / Compile / scalacOptions +[info] projD / console / scalacOptions +[info] projD / scalacOptions +[info] ThisBuild / Compile / console / scalacOptions +[info] ThisBuild / Compile / scalacOptions +[info] ThisBuild / console / scalacOptions +[info] ThisBuild / scalacOptions +[info] Zero / Compile / console / scalacOptions +[info] Zero / Compile / scalacOptions +[info] Zero / console / scalacOptions +[info] Global / scalacOptions ``` -Note how "Provided by" shows that `projD/compile:console::scalacOptions` -is provided by `projD/compile:scalacOptions`. +Note how "Provided by" shows that `projD / Compile / console / scalacOptions` +is provided by `projD / Compile / scalacOptions`. Also under "Delegates", *all* of the possible delegate candidates listed in the order of precedence! - All the scopes with `projD` scoping on the subproject axis are listed first, - then `ThisBuild` (`{.}`), and `*`. + then `ThisBuild`, and `Zero`. - Within a subproject, scopes with `Compile` scoping on the configuration axis - are listed first, then falls back to `*`. 
-- Finally, the task axis scoping lists the given task scoping `console::` and the one without. + are listed first, then falls back to `Zero`. +- Finally, the task axis scoping lists the given task scoping `console /` and the one without. ### .value lookup vs dynamic dispatch @@ -300,35 +252,35 @@ lazy val projE = (project in file("e")) ) ``` -What will `projE/version` return? +What will `projE / version` return? 1. `"2.12.2_0.1.0"` 2. `"2.11.11_0.1.0"` 3. something else? The answer is `2.12.2_0.1.0`. -`projD/version` delegates to `version in ThisBuild`, -which depends on `scalaVersion in ThisBuild`. +`projE / version` delegates to `ThisBuild / version`, +which depends on `ThisBuild / scalaVersion`. Because of this reason, build level setting should be limited mostly to simple value assignments. **Exercise F**: Given the following build definition: ```scala -scalacOptions in ThisBuild += "-D0" +ThisBuild / scalacOptions += "-D0" scalacOptions += "-D1" lazy val projF = (project in file("f")) .settings( - scalacOptions in compile += "-D2", - scalacOptions in Compile += "-D3", - scalacOptions in (Compile, compile) += "-D4", + compile / scalacOptions += "-D2", + Compile / scalacOptions += "-D3", + Compile / compile / scalacOptions += "-D4", test := { - println("bippy" + (scalacOptions in (Compile, compile)).value.mkString) + println("bippy" + (Compile / compile / scalacOptions).value.mkString) } ) ``` -What will `projF/test` show? +What will `projF / test` show? 1. `"bippy-D4"` 2. `"bippy-D2-D4"` @@ -352,37 +304,37 @@ it will go to another scoped key. 
Let's get rid of `+=` first, and annotate the delegates for old values: ```scala -scalacOptions in ThisBuild := { - // scalacOptions in Global <- Rule 4 - val old = (scalacOptions in ThisBuild).value +ThisBuild / scalacOptions := { + // Global / scalacOptions <- Rule 4 + val old = (ThisBuild / scalacOptions).value old :+ "-D0" } scalacOptions := { - // scalacOptions in ThisBuild <- Rule 4 + // ThisBuild / scalacOptions <- Rule 4 val old = scalacOptions.value old :+ "-D1" } lazy val projF = (project in file("f")) .settings( - scalacOptions in compile := { - // scalacOptions in ThisBuild <- Rules 2 and 4 - val old = (scalacOptions in compile).value + compile / scalacOptions := { + // ThisBuild / scalacOptions <- Rules 2 and 4 + val old = (compile / scalacOptions).value old :+ "-D2" }, - scalacOptions in Compile := { - // scalacOptions in ThisBuild <- Rules 3 and 4 - val old = (scalacOptions in Compile).value + Compile / scalacOptions := { + // ThisBuild / scalacOptions <- Rules 3 and 4 + val old = (Compile / scalacOptions).value old :+ "-D3" }, - scalacOptions in (Compile, compile) := { - // scalacOptions in (projF, Compile) <- Rules 1 and 2 - val old = (scalacOptions in (Compile, compile)).value + Compile / compile / scalacOptions := { + // projF / Compile / scalacOptions <- Rules 1 and 2 + val old = (Compile / compile / scalacOptions).value old :+ "-D4" }, test := { - println("bippy" + (scalacOptions in (Compile, compile)).value.mkString) + println("bippy" + (Compile / compile / scalacOptions).value.mkString) } ) ``` @@ -390,7 +342,7 @@ lazy val projF = (project in file("f")) This becomes: ```scala -scalacOptions in ThisBuild := { +ThisBuild / scalacOptions := { Nil :+ "-D0" } @@ -400,11 +352,11 @@ scalacOptions := { lazy val projF = (project in file("f")) .settings( - scalacOptions in compile := List("-D0") :+ "-D2", - scalacOptions in Compile := List("-D0") :+ "-D3", - scalacOptions in (Compile, compile) := List("-D0", "-D3") :+ "-D4", + compile / scalacOptions 
:= List("-D0") :+ "-D2", + Compile / scalacOptions := List("-D0") :+ "-D3", + Compile / compile / scalacOptions := List("-D0", "-D3") :+ "-D4", test := { - println("bippy" + (scalacOptions in (Compile, compile)).value.mkString) + println("bippy" + (Compile / compile / scalacOptions).value.mkString) } ) ``` diff --git a/src/reference/00-Getting-Started/08-Library-Dependencies.md b/src/reference/00-Getting-Started/08-Library-Dependencies.md index 4987bb3c3..7c9113a87 100644 --- a/src/reference/00-Getting-Started/08-Library-Dependencies.md +++ b/src/reference/00-Getting-Started/08-Library-Dependencies.md @@ -62,7 +62,7 @@ something else complex, you might need to replace the whole `Compile` configuration regardless of the files in `lib` directory: ```scala -unmanagedJars in Compile := Seq.empty[sbt.Attributed[java.io.File]] +Compile / unmanagedJars := Seq.empty[sbt.Attributed[java.io.File]] ``` ### Managed Dependencies diff --git a/src/reference/00-Getting-Started/09-Multi-Project.md b/src/reference/00-Getting-Started/09-Multi-Project.md index 80f54a3b9..97848e9a4 100644 --- a/src/reference/00-Getting-Started/09-Multi-Project.md +++ b/src/reference/00-Getting-Started/09-Multi-Project.md @@ -112,13 +112,13 @@ the `update` task: lazy val root = (project in file(".")) .aggregate(util, core) .settings( - aggregate in update := false + update / aggregate := false ) [...] ``` -`aggregate in update` is the aggregate key scoped to the `update` task. (See +`update / aggregate` is the aggregate key scoped to the `update` task. (See [scopes][Scopes].) 
Note: aggregation will run the aggregated tasks in parallel and with no
diff --git a/src/reference/01-General-Info/90-Changes/45-Migrating-from-sbt-0.13.x.md b/src/reference/01-General-Info/90-Changes/45-Migrating-from-sbt-0.13.x.md
index eaa88cd18..c71faf7ee 100644
--- a/src/reference/01-General-Info/90-Changes/45-Migrating-from-sbt-0.13.x.md
+++ b/src/reference/01-General-Info/90-Changes/45-Migrating-from-sbt-0.13.x.md
@@ -36,6 +36,59 @@ object PluginCompat {
Now `subMissingOk(...)` function can be implemented in sbt version specific way.

+### Migrating to slash syntax
+
+In sbt 0.13 keys were scoped with 2 different syntaxes: one for sbt's shell and one for in code.
+
+* sbt 0.13 shell: `<project-id>/config:intask::key`
+* sbt 0.13 code: `key in (<project-id>, Config, intask)`
+
+Starting sbt 1.1.0, the syntax for scoping keys has been unified for both the shell and the build definitions to
+the **slash syntax** as follows:
+
+* `<project-id> / Config / intask / key`
+
+Here are some examples:
+
+```scala
+lazy val root = (project in file("."))
+  .settings(
+    name := "hello",
+    version in ThisBuild := "1.0.0-SNAPSHOT",
+    scalacOptions in Compile += "-Xlint",
+    scalacOptions in (Compile, console) --= Seq("-Ywarn-unused", "-Ywarn-unused-import"),
+    fork in Test := true
+  )
+```
+
+They are now written as:
+
+```scala
+lazy val root = (project in file("."))
+  .settings(
+    name := "hello",
+    ThisBuild / version := "1.0.0-SNAPSHOT",
+    Compile / scalacOptions += "-Xlint",
+    Compile / console / scalacOptions --= Seq("-Ywarn-unused", "-Ywarn-unused-import"),
+    Test / fork := true
+  )
+```
+
+And now the same syntax in sbt's shell:
+
+```
+sbt:hello> name
+[info] hello
+sbt:hello> ThisBuild / version
+[info] 1.0.0-SNAPSHOT
+sbt:hello> show Compile / scalacOptions
+[info] * -Xlint
+sbt:hello> show Compile / console / scalacOptions
+[info] * -Xlint
+sbt:hello> Test / fork
+[info] true
+```
+
+### Migrating from sbt 0.12 style
+
+Before sbt 0.13 (sbt 0.9 to 0.12) it was very common to see in builds the usage of
three aspects of sbt:
@@ -147,16 +200,16 @@ Where you previous would define things as:
sourceGenerators in Compile <+= buildInfo
```

-for sbt 0.13.15+, you define them as:
+for sbt 1, you define them as:

```scala
-sourceGenerators in Compile += buildInfo
+Compile / sourceGenerators += buildInfo
```

or in general,

```scala
-sourceGenerators in Compile += Def.task { List(file1, file2) }
+Compile / sourceGenerators += Def.task { List(file1, file2) }
```

#### Migrating with `InputKey`
diff --git a/src/reference/02-DetailTopics/01-Using-sbt/07-sbt-server.md b/src/reference/02-DetailTopics/01-Using-sbt/07-sbt-server.md
new file mode 100644
index 000000000..638bbcb83
--- /dev/null
+++ b/src/reference/02-DetailTopics/01-Using-sbt/07-sbt-server.md
@@ -0,0 +1,245 @@
+---
+out: sbt-server.html
+---
+
+sbt Server
+----------
+
+sbt server is a feature that is newly introduced in sbt 1.x, and it's still a work in progress.
+You might at first imagine server to be something that runs on remote servers, and does great things, but for now sbt server is not that.
+
+Actually, sbt server just adds network access to sbt's shell command so,
+in addition to accepting input from the terminal, the server also accepts input from the network.
+This allows multiple clients to connect to a _single session_ of sbt.
+The primary use case we have in mind for the client is tooling integration such as editors and IDEs.
+As a proof of concept, we created a Visual Studio Code extension called [Scala (sbt)][vscode-sbt-scala].
+
+### Language Server Protocol 3.0
+
+The wire protocol we use is [Language Server Protocol 3.0][lsp] (LSP), which in turn is based on [JSON-RPC][jsonrpc].
+
+The base protocol consists of a header and a content part (comparable to HTTP). The header and content part are separated by a `\r\n`.
+
+Currently the following header fields are supported:
+
+- `Content-Length`: The length of the content part in bytes. 
If you don't provide this header, we'll read until the end of the line. +- `Content-Type`: Must be set to `application/vscode-jsonrpc; charset=utf-8` or omit it. + +Here is an example: + +``` +Content-Type: application/vscode-jsonrpc; charset=utf-8\r\n +Content-Length: ...\r\n +\r\n +{ + "jsonrpc": "2.0", + "id": 1, + "method": "textDocument/didSave", + "params": { + ... + } +} +``` + +A JSON-RPC request consists of an `id` number, a `method` name, and an optional `params` object. +So all LSP requests are pairs of method name and `params` JSON. + +An example response to the JSON-RPC request is: + +``` +Content-Type: application/vscode-jsonrpc; charset=utf-8\r\n +Content-Length: ...\r\n +\r\n +{ + "jsonrpc": "2.0", + "id": 1, + "result": { + ... + } +} +``` + +Or the server might return an error response: + +``` +Content-Type: application/vscode-jsonrpc; charset=utf-8\r\n +Content-Length: ...\r\n +\r\n +{ + "jsonrpc": "2.0", + "id": 1, + "error": { + "code": -32602, + "message": "some error message" + } +} +``` + +In addition to the responses, the server might also send events ("notifications" in LSP terminology). + +``` +Content-Type: application/vscode-jsonrpc; charset=utf-8\r\n +Content-Length: ...\r\n +\r\n +{ + "jsonrpc": "2.0", + "method": "textDocument/publishDiagnostics", + "params": { + ... + } +} +``` + +### Server discovery and authentication + +To discover a running server and to prevent unauthorized access to the sbt server, we use a *port file* and a *token file*. + +By default, sbt server will be running when a sbt shell session is active. When the server is up, it will create two files called the *port file* and the *token file*. 
The port file is located at `./project/target/active.json` relative to a build and contains something like: + +```json +{ + "uri":"tcp://127.0.0.1:5010", + "tokenfilePath":"/Users/xxx/.sbt/1.0/server/0845deda85cb41abdb9f/token.json", + "tokenfileUri":"file:/Users/xxx/.sbt/1.0/server/0845deda85cb41abdb9f/token.json" +} +``` + +This gives us three pieces of information: + +1. That the server is (likely) running. +2. That the server is running on port 5010. +3. The location of the token file. + +The location of the token file uses a SHA-1 hash of the build path, so it will not change between the runs. +The token file should contain JSON like the following: + +```json +{ + "uri":"tcp://127.0.0.1:5010", + "token":"12345678901234567890123456789012345678" +} +``` + +The `uri` field is the same, and the `token` field contains a 128-bits non-negative integer. + +### Initialize request + +To initiate communication with sbt server, the client (such as a tool like VS Code) must first send an [`initialize` request][lsp_initialize]. This means that the client must send a request with method set to "initialize" and the `InitializeParams` datatype as the `params` field. + +To authenticate yourself, you must pass in the token in `initializationOptions` as follows: + +``` +type InitializationOptionsParams { + token: String! +} +``` + +On telnet it would look as follows: + +``` +\$ telnet 127.0.0.1 5010 +Content-Type: application/vscode-jsonrpc; charset=utf-8 +Content-Length: 149 + +{ "jsonrpc": "2.0", "id": 1, "method": "initialize", "params": { "initializationOptions": { "token": "84046191245433876643612047032303751629" } } } +``` + +After sbt receives the request, it will send an [`initialized` event][lsp_initialized]. + +### `textDocument/publishDiagnostics` event + +The compiler warnings and errors are sent to the client using the `textDocument/publishDiagnostics` event. 
+ +- method: `textDocument/publishDiagnostics` +- params: [`PublishDiagnosticsParams`][lsp_publishdiagnosticsparams] + +Here's an example output (with JSON-RPC headers omitted): + +``` +{ + "jsonrpc": "2.0", + "method": "textDocument/publishDiagnostics", + "params": { + "uri": "file:/Users/xxx/work/hellotest/Hello.scala", + "diagnostics": [ + { + "range": { + "start": { + "line": 2, + "character": 0 + }, + "end": { + "line": 2, + "character": 1 + } + }, + "severity": 1, + "source": "sbt", + "message": "')' expected but '}' found." + } + ] + } +} +``` + +### `textDocument/didSave` event + +As of sbt 1.1.0-M1, sbt will execute the `compile` task upon receiving a `textDocument/didSave` notification. +This behavior is subject to change. + +### `sbt/exec` request + +A `sbt/exec` request emulates the user typing into the shell. + +- method: `sbt/exec` +- params: + +``` +type SbtExecParams { + commandLine: String! +} +``` + +On telnet it would look as follows: + +``` +Content-Length: 91 + +{ "jsonrpc": "2.0", "id": 2, "method": "sbt/exec", "params": { "commandLine": "clean" } } +``` + +Note that there might be other commands running on the build, so in that case the request will be queued up. + +### `sbt/setting` request + +A `sbt/setting` request can be used to query settings. + +- method: `sbt/setting` +- params: + +``` +type SettingQuery { + setting: String! +} +``` + +On telnet it would look as follows: + +``` +Content-Length: 102 + +{ "jsonrpc": "2.0", "id": 3, "method": "sbt/setting", "params": { "setting": "root/scalaVersion" } } +Content-Length: 87 +Content-Type: application/vscode-jsonrpc; charset=utf-8 + +{"jsonrpc":"2.0","id":"3","result":{"value":"2.12.2","contentType":"java.lang.String"}} +``` + +Unlike the command execution, this will respond immediately. 
+ + [lsp]: https://github.com/Microsoft/language-server-protocol/blob/master/protocol.md + [jsonrpc]: http://www.jsonrpc.org/specification + [vscode-sbt-scala]: https://marketplace.visualstudio.com/items?itemName=lightbend.vscode-sbt-scala + [lsp_initialize]: https://github.com/Microsoft/language-server-protocol/blob/master/protocol.md#initialize + [lsp_initialized]: https://github.com/Microsoft/language-server-protocol/blob/master/protocol.md#initialized + [lsp_publishdiagnosticsparams]: https://github.com/Microsoft/language-server-protocol/blob/master/protocol.md#publishdiagnostics-notification diff --git a/src/reference/02-DetailTopics/01-Using-sbt/07-Incremental-Recompilation.md b/src/reference/02-DetailTopics/01-Using-sbt/50-Incremental-Recompilation.md similarity index 100% rename from src/reference/02-DetailTopics/01-Using-sbt/07-Incremental-Recompilation.md rename to src/reference/02-DetailTopics/01-Using-sbt/50-Incremental-Recompilation.md diff --git a/src/reference/02-DetailTopics/02-Configuration/03-Configuring-Scala.md b/src/reference/02-DetailTopics/02-Configuration/03-Configuring-Scala.md index 879956f0c..c8268b411 100644 --- a/src/reference/02-DetailTopics/02-Configuration/03-Configuring-Scala.md +++ b/src/reference/02-DetailTopics/02-Configuration/03-Configuring-Scala.md @@ -115,10 +115,8 @@ For example, ```scala managedScalaInstance := false - scalaInstance := ... - -unmanagedJars in Compile += scalaInstance.value.libraryJar +Compile / unmanagedJars += scalaInstance.value.libraryJar ``` #### Switching to a local Scala version @@ -174,7 +172,7 @@ Scala distribution. 
For example, to add all jars in the Scala home

```scala
scalaHome := Some(file("/home/user/scala-2.10/"))

-unmanagedJars in Compile ++= scalaInstance.value.jars
+Compile / unmanagedJars ++= scalaInstance.value.jars
```

To add only some jars, filter the jars from `scalaInstance` before
diff --git a/src/reference/02-DetailTopics/02-Configuration/04-Forking.md b/src/reference/02-DetailTopics/02-Configuration/04-Forking.md
index 668d01e7e..7285e099e 100644
--- a/src/reference/02-DetailTopics/02-Configuration/04-Forking.md
+++ b/src/reference/02-DetailTopics/02-Configuration/04-Forking.md
@@ -25,34 +25,31 @@ The `fork` setting controls whether forking is enabled (true) or not
in the `test` scope to only fork `test` commands.

To fork all test tasks (`test`, `testOnly`, and `testQuick`) and run
-tasks (`run`, `runMain`, `test:run`, and `test:runMain`),
+tasks (`run`, `runMain`, `Test / run`, and `Test / runMain`),

```scala
fork := true
```

-To enable forking `run` tasks only, set `fork` to `true` in the `run`
-scope.
+To enable forking `run` tasks only, set `Compile / run / fork` to `true`.

```scala
-fork in run := true
+Compile / run / fork := true
```

-To only fork `test:run` and `test:runMain`:
+To only fork `Test / run` and `Test / runMain`:

```scala
-fork in (Test, run) := true
+Test / run / fork := true
```

-Similarly, set `fork in (Compile,run) := true` to only fork the main
-`run` tasks. `run` and `runMain` share the same configuration and cannot
-be configured separately.
+ `run` and `runMain` share the same configuration and cannot be configured separately. 
To enable forking all `test` tasks only, set `fork` to `true` in the -`test` scope: +`Test` scope: ```scala -fork in test := true +Test / fork := true ``` See [Testing][Testing] for more control over how tests are assigned to JVMs and @@ -65,16 +62,16 @@ or `baseDirectory in test`: ```scala // sets the working directory for all `run`-like tasks -baseDirectory in run := file("/path/to/working/directory/") +run / baseDirectory := file("/path/to/working/directory/") // sets the working directory for `run` and `runMain` only -baseDirectory in (Compile,run) := file("/path/to/working/directory/") +Compile / run / baseDirectory := file("/path/to/working/directory/") -// sets the working directory for `test:run` and `test:runMain` only -baseDirectory in (Test,run) := file("/path/to/working/directory/") +// sets the working directory for `Test / run` and `Test / runMain` only +Test / run / baseDirectory := file("/path/to/working/directory/") // sets the working directory for `test`, `testQuick`, and `testOnly` -baseDirectory in test := file("/path/to/working/directory/") +Test / baseDirectory := file("/path/to/working/directory/") ``` ### Forked JVM options @@ -82,20 +79,20 @@ baseDirectory in test := file("/path/to/working/directory/") To specify options to be provided to the forked JVM, set `javaOptions`: ```scala -javaOptions in run += "-Xmx8G" +run / javaOptions += "-Xmx8G" ``` or specify the configuration to affect only the main or test `run` tasks: ```scala -javaOptions in (Test,run) += "-Xmx8G" +Test / run / javaOptions += "-Xmx8G" ``` or only affect the `test` tasks: ```scala -javaOptions in test += "-Xmx8G" +Test / javaOptions += "-Xmx8G" ``` ### Java Home @@ -111,7 +108,7 @@ used to compile Java sources. 
You can restrict it to running only by setting it in the `run` scope: ```scala -javaHome in run := Some(file("/path/to/jre/")) +run / javaHome := Some(file("/path/to/jre/")) ``` As with the other settings, you can specify the configuration to affect @@ -148,7 +145,7 @@ the forked process. To enable this, configure the `connectInput` setting: ```scala -connectInput in run := true +run / connectInput := true ``` ### Direct Usage diff --git a/src/reference/02-DetailTopics/02-Configuration/06-Java-Sources.md b/src/reference/02-DetailTopics/02-Configuration/06-Java-Sources.md index 7d87de8d6..d6c8e6349 100644 --- a/src/reference/02-DetailTopics/02-Configuration/06-Java-Sources.md +++ b/src/reference/02-DetailTopics/02-Configuration/06-Java-Sources.md @@ -48,10 +48,10 @@ by configuration: ```scala // Java then Scala for main sources -compileOrder in Compile := CompileOrder.JavaThenScala +Compile / compileOrder := CompileOrder.JavaThenScala // allow circular dependencies for test sources -compileOrder in Test := CompileOrder.Mixed +Test / compileOrder := CompileOrder.Mixed ``` Note that in an incremental compilation setting, it is not practical to diff --git a/src/reference/02-DetailTopics/02-Configuration/10-Paths.md b/src/reference/02-DetailTopics/02-Configuration/10-Paths.md index a8b9a960f..8ab7b0283 100644 --- a/src/reference/02-DetailTopics/02-Configuration/10-Paths.md +++ b/src/reference/02-DetailTopics/02-Configuration/10-Paths.md @@ -66,7 +66,7 @@ directory of the build, irrespective of the project the setting is defined in: ```scala -historyPath := Some( (baseDirectory in ThisBuild).value / ".history"), +historyPath := Some( (ThisBuild / baseDirectory).value / ".history"), ``` ### Path Finders diff --git a/src/reference/02-DetailTopics/02-Configuration/11-Parallel-Execution.md b/src/reference/02-DetailTopics/02-Configuration/11-Parallel-Execution.md index 1ccdbf3ee..a2cbbc46e 100644 --- a/src/reference/02-DetailTopics/02-Configuration/11-Parallel-Execution.md 
+++ b/src/reference/02-DetailTopics/02-Configuration/11-Parallel-Execution.md @@ -56,7 +56,7 @@ Prior to sbt 0.12, user control over this process was restricted to: 1. Enabling or disabling all parallel execution (parallelExecution := false, for example). 2. Enabling or disabling mapping tests to their own tasks - (parallelExecution in Test := false, for example). + (Test / parallelExecution := false, for example). (Although never exposed as a setting, the maximum number of tasks running at a given time was internally configurable as well.) @@ -116,10 +116,10 @@ download := downloadImpl.value Once tasks are tagged, the `concurrentRestrictions` setting sets restrictions on the tasks that may be concurrently executed based on the weighted tags of those tasks. This is necessarily a global set of rules, -so it must be scoped `in Global`. For example, +so it must be scoped `Global /`. For example, ```scala -concurrentRestrictions in Global := Seq( +Global / concurrentRestrictions := Seq( Tags.limit(Tags.CPU, 2), Tags.limit(Tags.Network, 10), Tags.limit(Tags.Test, 1), @@ -230,7 +230,7 @@ tags to each child task created for each test class. The default rules provide the same behavior as previous versions of sbt: ```scala -concurrentRestrictions in Global := { +Global / concurrentRestrictions := { val max = Runtime.getRuntime.availableProcessors Tags.limitAll(if(parallelExecution.value) max else 1) :: Nil } @@ -241,7 +241,7 @@ to separate tasks. To restrict the number of concurrently executing tests in all projects, use: ```scala -concurrentRestrictions in Global += Tags.limit(Tags.Test, 1) +Global / concurrentRestrictions += Tags.limit(Tags.Test, 1) ``` #### Custom Tags @@ -258,9 +258,9 @@ Then, use this tag as any other tag. For example: ```scala def aImpl = Def.task { ... 
} tag(Custom) -aCustomTask := aImpl.value +aCustomTask := aImpl.value -concurrentRestrictions in Global += +Global / concurrentRestrictions += Tags.limit(Custom, 1) ``` @@ -280,7 +280,7 @@ def myCompileTask = Def.task { ... } tag(Tags.CPU, Tags.Compile) compile := myCompileTask.value -compile := { +compile := { val result = compile.value ... do some post processing ... } diff --git a/src/reference/02-DetailTopics/02-Configuration/14-Testing.md b/src/reference/02-DetailTopics/02-Configuration/14-Testing.md index 240771acf..e500cf799 100644 --- a/src/reference/02-DetailTopics/02-Configuration/14-Testing.md +++ b/src/reference/02-DetailTopics/02-Configuration/14-Testing.md @@ -87,14 +87,14 @@ still be manually written out and run using `testOnly`. #### Other tasks Tasks that are available for main sources are generally available for -test sources, but are prefixed with `test:` on the command line and are -referenced in Scala code with `in Test`. These tasks include: +test sources, but are prefixed with `Test /` on the command line and are +referenced in Scala code with `Test /` as well. These tasks include: -- `test:compile` -- `test:console` -- `test:consoleQuick` -- `test:run` -- `test:runMain` +- `Test / compile` +- `Test / console` +- `Test / consoleQuick` +- `Test / run` +- `Test / runMain` See [Running][Running] for details on these tasks. @@ -105,7 +105,7 @@ tests for that file complete. This can be disabled by setting `logBuffered`: ```scala -logBuffered in Test := false +Test / logBuffered := false ``` #### Test Reports @@ -115,7 +115,7 @@ the build, located in the `target/test-reports` directory for a project. 
This can be disabled by disabling the `JUnitXmlReportPlugin` ```scala -val myProject = (project in file(".")).disablePlugins(plugins.JUnitXmlReportPlugin) +val myProject = (project in file(".")).disablePlugins(plugins.JUnitXmlReportPlugin) ``` ### Options @@ -133,13 +133,13 @@ To specify test framework arguments as part of the build, add options constructed by `Tests.Argument`: ```scala -testOptions in Test += Tests.Argument("-verbosity", "1") +Test / testOptions += Tests.Argument("-verbosity", "1") ``` To specify them for a specific test framework only: ```scala -testOptions in Test += Tests.Argument(TestFrameworks.ScalaCheck, "-verbosity", "1") +Test / testOptions += Tests.Argument(TestFrameworks.ScalaCheck, "-verbosity", "1") ``` #### Setup and Cleanup @@ -158,13 +158,10 @@ framework classes. Examples: ```scala -testOptions in Test += Tests.Setup( () => println("Setup") ) - -testOptions in Test += Tests.Cleanup( () => println("Cleanup") ) - -testOptions in Test += Tests.Setup( loader => ... ) - -testOptions in Test += Tests.Cleanup( loader => ... ) +Test / testOptions += Tests.Setup( () => println("Setup") ) +Test / testOptions += Tests.Cleanup( () => println("Cleanup") ) +Test / testOptions += Tests.Setup( loader => ... ) +Test / testOptions += Tests.Cleanup( loader => ... 
) ``` #### Disable Parallel Execution of Tests @@ -174,7 +171,7 @@ Because each test is mapped to a task, tests are also run in parallel by default To make tests within a given project execute serially: : ```scala -parallelExecution in Test := false +Test / parallelExecution := false ``` `Test` can be replaced with `IntegrationTest` to only execute @@ -187,7 +184,7 @@ If you want to only run test classes whose name ends with "Test", use `Tests.Filter`: ```scala -testOptions in Test := Seq(Tests.Filter(s => s.endsWith("Test"))) +Test / testOptions := Seq(Tests.Filter(s => s.endsWith("Test"))) ``` #### Forking tests @@ -195,7 +192,7 @@ testOptions in Test := Seq(Tests.Filter(s => s.endsWith("Test"))) The setting: ```scala -fork in Test := true +Test / fork := true ``` specifies that all tests will be executed in a single external JVM. See @@ -229,7 +226,7 @@ In addition, forked tests can optionally be run in parallel within the forked JVM(s), using the following setting: ```scala -testForkedParallel in Test := true +Test / testForkedParallel := true ``` @@ -293,7 +290,7 @@ The standard testing tasks are available, but must be prefixed with `it:`. For example, ``` -> it:testOnly org.example.AnIntegrationTest +> IntegrationTest / testOnly org.example.AnIntegrationTest ``` Similarly the standard settings may be configured for the @@ -302,7 +299,7 @@ Similarly the standard settings may be configured for the example, if test options are specified as: ```scala -testOptions in Test += ... +Test / testOptions += ... ``` then these will be picked up by the `Test` configuration and in turn by @@ -311,14 +308,14 @@ for integration tests by putting them in the `IntegrationTest` configuration: ```scala -testOptions in IntegrationTest += ... +IntegrationTest / testOptions += ... ``` Or, use `:=` to overwrite any existing options, declaring these to be the definitive integration test options: ```scala -testOptions in IntegrationTest := Seq(...) 
+IntegrationTest / testOptions := Seq(...) ``` #### Custom test configuration @@ -368,13 +365,13 @@ The comments in the integration test section hold, except with `FunTest`: ```scala -testOptions in FunTest += ... +FunTest / testOptions += ... ``` Test tasks are run by prefixing them with `fun:` ``` -> fun:test +> FunTest / test ``` #### Additional test configurations with shared sources @@ -414,18 +411,18 @@ The key differences are: packaging tasks and settings. - We filter the tests to be run for each configuration. -To run standard unit tests, run `test` (or equivalently, `test:test`): +To run standard unit tests, run `test` (or equivalently, `Test / test`): ``` > test ``` -To run tests for the added configuration (here, `"fun"`), prefix it with +To run tests for the added configuration (here, `"FunTest"`), prefix it with the configuration name as before: ``` -> fun:test -> fun:testOnly org.example.AFunTest +> FunTest / test +> FunTest / testOnly org.example.AFunTest ``` ##### Application to parallel execution diff --git a/src/reference/02-DetailTopics/03-Dependency-Management/03-Library-Management.md b/src/reference/02-DetailTopics/03-Dependency-Management/03-Library-Management.md index e2a80fc3b..5c9e30e2c 100644 --- a/src/reference/02-DetailTopics/03-Dependency-Management/03-Library-Management.md +++ b/src/reference/02-DetailTopics/03-Dependency-Management/03-Library-Management.md @@ -56,14 +56,14 @@ task, which ultimately provides the manual dependencies to sbt. 
The default implementation is roughly: ```scala -unmanagedJars in Compile := (baseDirectory.value ** "*.jar").classpath +Compile / unmanagedJars := (baseDirectory.value ** "*.jar").classpath ``` If you want to add jars from multiple directories in addition to the default directory, you can do: ```scala -unmanagedJars in Compile ++= { +Compile / unmanagedJars ++= { val base = baseDirectory.value val baseDirectories = (base / "libA") +++ (base / "b" / "lib") +++ (base / "libC") val customJars = (baseDirectories ** "*.jar") +++ (base / "d" / "my.jar") @@ -388,15 +388,15 @@ the *checksums* setting. To disable checksum checking during update: ```scala -checksums in update := Nil +update / checksums := Nil ``` To disable checksum creation during artifact publishing: ```scala -checksums in publishLocal := Nil +publishLocal / checksums := Nil -checksums in publish := Nil +publish / checksums := Nil ``` The default value is: @@ -592,7 +592,7 @@ ivyConfigurations += JS libraryDependencies += "jquery" % "jquery" % "3.2.1" % "js->default" from "https://code.jquery.com/jquery-3.2.1.min.js" -resources in Compile ++= update.value.select(configurationFilter("js")) +Compile / resources ++= update.value.select(configurationFilter("js")) ``` The `config` method defines a new configuration with name `"js"` and @@ -652,7 +652,7 @@ classpaths. 
For example, to specify that the `Compile` classpath should use the 'default' configuration: ```scala -classpathConfiguration in Compile := config("default") +Compile / classpathConfiguration := config("default") ``` ##### Maven pom (dependencies only) @@ -673,14 +673,10 @@ For example, a `build.sbt` using external Ivy files might look like: ```scala externalIvySettings() - externalIvyFile(Def.setting(baseDirectory.value / "ivyA.xml")) - -classpathConfiguration in Compile := Compile - -classpathConfiguration in Test := Test - -classpathConfiguration in Runtime := Runtime +Compile / classpathConfiguration := Compile +Test / classpathConfiguration := Test +Runtime / classpathConfiguration := Runtime ``` ##### Forcing a revision (Not recommended) diff --git a/src/reference/template.properties b/src/reference/template.properties index 2a1df3de1..a53425621 100644 --- a/src/reference/template.properties +++ b/src/reference/template.properties @@ -1,5 +1,5 @@ -app_version=1.0.4 -windows_app_version=1.0.4 +app_version=1.1.0 +windows_app_version=1.1.0 color_scheme=github example_scala_version=2.12.4 diff --git a/src/sbt-test/ref/scope-delegation/build.sbt b/src/sbt-test/ref/scope-delegation/build.sbt new file mode 100644 index 000000000..599f2778c --- /dev/null +++ b/src/sbt-test/ref/scope-delegation/build.sbt @@ -0,0 +1 @@ +lazy val x = project diff --git a/src/sbt-test/ref/scope-delegation/pending b/src/sbt-test/ref/scope-delegation/pending new file mode 100644 index 000000000..51a2c7606 --- /dev/null +++ b/src/sbt-test/ref/scope-delegation/pending @@ -0,0 +1 @@ +> x/foo diff --git a/src/sbt-test/ref/scope-delegation/x/build.sbt b/src/sbt-test/ref/scope-delegation/x/build.sbt new file mode 100644 index 000000000..3bd850092 --- /dev/null +++ b/src/sbt-test/ref/scope-delegation/x/build.sbt @@ -0,0 +1,37 @@ +// #fig1 +lazy val foo = settingKey[Int]("") +lazy val bar = settingKey[Int]("") + +lazy val projX = (project in file("x")) + .settings( + foo := { + (Test / bar).value 
+ 1 + }, + Compile / bar := 1 + ) +// #fig1 + +// #fig_c +ThisBuild / packageBin / scalaVersion := "2.12.2" + +lazy val projC = (project in file("c")) + .settings( + name := { + "foo-" + (packageBin / scalaVersion).value + }, + scalaVersion := "2.11.11" + ) +// #fig_c + +// #fig_d +ThisBuild / scalacOptions += "-Ywarn-unused-import" + +lazy val projD = (project in file("d")) + .settings( + test := { + println((Compile / console / scalacOptions).value) + }, + console / scalacOptions -= "-Ywarn-unused-import", + Compile / scalacOptions := scalacOptions.value // added by sbt + ) +// #fig_d diff --git a/src/sbt-test/ref/scopes/build.sbt b/src/sbt-test/ref/scopes/build.sbt new file mode 100644 index 000000000..e535b521c --- /dev/null +++ b/src/sbt-test/ref/scopes/build.sbt @@ -0,0 +1,22 @@ +// #unscoped +organization := name.value +// #unscoped + +// #confScoped +Compile / name := "hello" +// #confScoped + +// #taskScoped +packageBin / name := "hello" +// #taskScoped + +// #confAndTaskScoped +Compile / packageBin / name := "hello" +// #confAndTaskScoped + +// #global +// same as Zero / Zero / Zero / concurrentRestrictions +Global / concurrentRestrictions := Seq( + Tags.limitAll(1) +) +// #global diff --git a/src/sbt-test/ref/scopes/test b/src/sbt-test/ref/scopes/test new file mode 100644 index 000000000..477407e68 --- /dev/null +++ b/src/sbt-test/ref/scopes/test @@ -0,0 +1 @@ +> name