diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 6365a165dd..0de14e1892 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -31,7 +31,7 @@ jobs:
- name: Checkout
uses: actions/checkout@v3
with:
- submodules: true
+ fetch-depth: 2 # to be able to check for changes in subfolder res_syntax later
- name: Use OCaml ${{matrix.ocaml_compiler}}
uses: ocaml/setup-ocaml@v2
@@ -40,10 +40,35 @@ jobs:
opam-pin: false
opam-depext: false
- - name: Use Node.js
- uses: actions/setup-node@v3
- with:
- node-version: 16
+ - name: "Check if syntax subfolder has changed"
+ id: syntax-diff
+ shell: bash
+ run: |
+ if git diff --name-only --exit-code HEAD^ HEAD -- res_syntax; then
+ echo "syntax_status=unchanged" >> $GITHUB_ENV
+ else
+ echo "syntax_status=changed" >> $GITHUB_ENV
+ fi
+
+ - name: "Syntax: install OPAM dependencies"
+ if: env.syntax_status == 'changed'
+ run: opam install . --deps-only
+ working-directory: res_syntax
+
+ - name: "Syntax: build CLI"
+ if: env.syntax_status == 'changed'
+ run: opam exec -- dune build
+ working-directory: res_syntax
+
+ - name: "Syntax: Run roundtrip tests"
+ if: ${{ env.syntax_status == 'changed' && runner.os != 'Windows' }}
+ run: opam exec -- make roundtrip-test
+ working-directory: res_syntax
+
+ - name: "Syntax: Run tests (Windows)"
+ if: ${{ env.syntax_status == 'changed' && runner.os == 'Windows' }}
+ run: opam exec -- make test
+ working-directory: res_syntax
# Required for ninja build
- name: "Windows: Use MSVC"
@@ -55,6 +80,11 @@ jobs:
- name: Build ninja
run: node scripts/buildNinjaBinary.js
+ - name: Use Node.js
+ uses: actions/setup-node@v3
+ with:
+ node-version: 16
+
- name: NPM install
run: opam exec -- npm ci
env:
diff --git a/.gitignore b/.gitignore
index 0ac6194ee4..ab537df08a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -55,7 +55,6 @@ coverage
ppx_tools/
vendor/ocaml/stdlib/target_camlheader
vendor/ocaml/stdlib/target_camlheaderd
-reason/
man/
lib/ocaml
*.tgz
diff --git a/.gitmodules b/.gitmodules
deleted file mode 100644
index b370037fae..0000000000
--- a/.gitmodules
+++ /dev/null
@@ -1,8 +0,0 @@
-[submodule "ocaml"]
- path = ocaml
- url = https://github.com/rescript-lang/ocaml
- ignore = untracked
-[submodule "syntax"]
- path = syntax
- url = https://github.com/rescript-lang/syntax
- branch = master
diff --git a/.prettierignore b/.prettierignore
index 2cba134fbd..4230180348 100644
--- a/.prettierignore
+++ b/.prettierignore
@@ -2,7 +2,7 @@ jscomp/
lib/
vendor/
ninja/
-syntax/
+res_syntax/
_opam
CHANGELOG.md
README.md
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index bf6be2b822..fd4d01c4ba 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -4,7 +4,7 @@ Welcome to the ReScript compiler project!
This document will give you guidance on how to get up and running to work on the ReScript compiler and toolchain.
-(If you want to contribute to the documentation website, check out [rescript-association/rescript-lang.org](https://github.com/reason-association/rescript-lang.org). For contributions to the ReScript syntax, please visit the [rescript-lang/syntax](https://github.com/rescript-lang/syntax) project.)
+(If you want to contribute to the documentation website, check out [rescript-association/rescript-lang.org](https://github.com/reason-association/rescript-lang.org).)
We tried to keep the installation process as simple as possible. In case you are having issues or get stuck in the process, please let us know in the issue tracker.
diff --git a/dune b/dune
index e4f11d99bc..23e70d65c8 100644
--- a/dune
+++ b/dune
@@ -1 +1 @@
-(dirs jscomp)
+(dirs jscomp res_syntax)
diff --git a/res_syntax/.gitignore b/res_syntax/.gitignore
new file mode 100644
index 0000000000..ee92b33d4b
--- /dev/null
+++ b/res_syntax/.gitignore
@@ -0,0 +1,19 @@
+.DS_STORE
+*.cm*
+*.o
+*.s
+lib/*
+!lib/README.md
+.vscode/settings.json
+
+# Dune
+_build
+
+# Merlin
+.merlin
+
+# Local opam switch
+_opam/
+
+# Output dir for bootstrap script
+bootstrap/
diff --git a/res_syntax/.ocamlformat b/res_syntax/.ocamlformat
new file mode 100644
index 0000000000..3bef5a7b46
--- /dev/null
+++ b/res_syntax/.ocamlformat
@@ -0,0 +1,11 @@
+profile = default
+version = 0.22.4
+
+field-space = tight-decl
+break-cases = toplevel
+module-item-spacing = preserve
+cases-exp-indent = 2
+space-around-arrays = false
+space-around-lists = false
+space-around-records = false
+space-around-variants = false
diff --git a/res_syntax/.ocamlformat-ignore b/res_syntax/.ocamlformat-ignore
new file mode 100644
index 0000000000..0c4403d0c7
--- /dev/null
+++ b/res_syntax/.ocamlformat-ignore
@@ -0,0 +1 @@
+compiler-libs-406/*
diff --git a/res_syntax/CHANGELOG.md b/res_syntax/CHANGELOG.md
new file mode 100644
index 0000000000..e12a288505
--- /dev/null
+++ b/res_syntax/CHANGELOG.md
@@ -0,0 +1,98 @@
+## Master
+
+> **Tags:**
+>
+> - :boom: [Breaking Change]
+> - :eyeglasses: [Spec Compliance]
+> - :rocket: [New Feature]
+> - :bug: [Bug Fix]
+> - :memo: [Documentation]
+> - :house: [Internal]
+> - :nail_care: [Polish]
+
+#### :boom: Breaking Change
+
+- Emit an error when a `@string` or `@int` attribute is used in a V4 component https://github.com/rescript-lang/rescript-compiler/issues/5724
+
+#### :rocket: New Feature
+
+- Add surface syntax for `async`/`await` https://github.com/rescript-lang/syntax/pull/600
+
+- Initial support for JSX V4, still work in progress.
+
+ - :boom: when V4 is activated, at most one component is allowed for each module.
+
+- Add support for empty record literal `{}` for records with only optional fields, and type definition of empty record (e.g. `type empty = {}`) https://github.com/rescript-lang/syntax/pull/632
+
+- Support the use of spread anywhere in list creation (e.g. `list{...x, 1, ...y, ...z}`). https://github.com/rescript-lang/syntax/pull/692
+
+- Add support for the argument of `@react.component` to set a props type from the outside. https://github.com/rescript-lang/syntax/pull/699
+
+#### :bug: Bug Fix
+
+- Fix issue in formatting JSX spread props https://github.com/rescript-lang/syntax/pull/644
+- Fix pretty printer where it would print doc comments on the same line as other attributes https://github.com/rescript-lang/syntax/pull/642
+- Fix location issue in error messages with JSX V4 where the body of the component is an application https://github.com/rescript-lang/syntax/pull/633
+- Fix issue where the printer would omit attributes for `->` and `|>` https://github.com/rescript-lang/syntax/pull/629
+- Fix printing of optional fields in records https://github.com/rescript-lang/rescript-compiler/issues/5654
+- Fix printing of comments inside empty blocks https://github.com/rescript-lang/syntax/pull/647
+- Fix location issue in error messages with JSX V4 where the multiple props types are defined https://github.com/rescript-lang/syntax/pull/655
+- Fix location issue in make function in JSX V4 that breaks dead code elimination https://github.com/rescript-lang/syntax/pull/660
+- Fix parsing (hence pretty printing) of expressions with underscore `_` and comments.
+- Fix printing of comments inside JSX tag https://github.com/rescript-lang/syntax/pull/664
+- Fix issue where formatter erases tail comments inside JSX tag https://github.com/rescript-lang/syntax/issues/663
+- Fix issue where the JSX prop has type annotation of the first class module https://github.com/rescript-lang/syntax/pull/666
+- Fix issue where a spread `...x` in non-last position would not be reported as syntax error https://github.com/rescript-lang/syntax/pull/673/
+- Fix issue where the formatter would delete `async` in a function with labelled arguments.
+- Fix several printing issues with `async` including an infinite loop https://github.com/rescript-lang/syntax/pull/680
+- Fix issue where certain JSX expressions would be formatted differently in compiler 10.1.0-rc.1 https://github.com/rescript-lang/syntax/issues/675
+- Fix issue where printing nested pipe discards await https://github.com/rescript-lang/syntax/issues/687
+- Fix issue where the JSX key type is not an optional string https://github.com/rescript-lang/syntax/pull/693
+- Fix issue where the JSX fragment without children build error https://github.com/rescript-lang/syntax/pull/704
+- Fix issue where async as an id cannot be used with application and labelled arguments https://github.com/rescript-lang/syntax/issues/707
+- Treat await as almost-unary operator weaker than pipe so `await foo->bar` means `await (foo->bar)` https://github.com/rescript-lang/syntax/pull/711
+
+#### :eyeglasses: Spec Compliance
+
+- Functions with consecutive dots now print as multiple arrow functions like in JavaScript.
+
+#### :nail_care: Polish
+
+- Change the internal representation of props for the lowercase components to record. https://github.com/rescript-lang/syntax/pull/665
+- Change the payload of Pconst_char for type safety. https://github.com/rescript-lang/rescript-compiler/pull/5759
+- Specialize the printing of the rhs of a record field assignment for optional values `{x: ? e}` https://github.com/rescript-lang/syntax/issues/714
+
+## ReScript 10.0
+
+- Fix printing for inline nullary functor types [#477](https://github.com/rescript-lang/syntax/pull/477)
+- Fix stripping of quotes for empty poly variants [#474](https://github.com/rescript-lang/syntax/pull/474)
+- Implement syntax for arity zero vs arity one in uncurried application in [#139](https://github.com/rescript-lang/syntax/pull/139)
+- Fix parsing of first class module exprs as part of binary/ternary expr in [#256](https://github.com/rescript-lang/syntax/pull/256)
+- Fix formatter hanging on deeply nested function calls [#261](https://github.com/rescript-lang/syntax/issues/261)
+- Remove parsing of "import" and "export" which was never officially supported.
+
+## ReScript 9.0.0
+
+- Fix parsing of poly-var typexpr consisting of one tag-spec-first in [#254](https://github.com/rescript-lang/syntax/pull/254)
+- Implement new syntax for guards on pattern match cases in [#248](https://github.com/rescript-lang/syntax/pull/248)
+- Implement intelligent breaking for poly-var type expressions in [#246](https://github.com/rescript-lang/syntax/pull/246)
+- Improve indentation of fast pipe chain in let binding in [#244](https://github.com/rescript-lang/syntax/pull/244)
+- Improve printing of non-callback arguments in call expressions with callback in [#241](https://github.com/rescript-lang/syntax/pull/241/files)
+- Fix printing of constrained expressions in rhs of js objects [#240](https://github.com/rescript-lang/syntax/pull/240)
+- Improve printing of trailing comments under lhs of "pipe" expression in [#239](https://github.com/rescript-lang/syntax/pull/239/files)
+- Improve printing of jsx children and props with leading line comment in [#236](https://github.com/rescript-lang/syntax/pull/236)
+- Improve conversion of quoted strings from Reason in [#238](https://github.com/rescript-lang/syntax/pull/238)
+- Print attributes/extension without bs prefix where possible in [#230](https://github.com/rescript-lang/syntax/pull/230)
+- Cleanup gentype attribute printing [fe05e1051aa94b16f6993ddc5ba9651f89e86907](https://github.com/rescript-lang/syntax/commit/fe05e1051aa94b16f6993ddc5ba9651f89e86907)
+- Implement light weight syntax for poly-variants [f84c5760b3f743f65e934195c87fc06bf88bff75](https://github.com/rescript-lang/syntax/commit/f84c5760b3f743f65e934195c87fc06bf88bff75)
+- Fix bug in fast pipe conversion from Reason. [3d5f2daba5418b821c577ba03e2de1afb0dd66de](https://github.com/rescript-lang/syntax/commit/3d5f2daba5418b821c577ba03e2de1afb0dd66de)
+- Improve parsed AST when tilde is missing in arrow expr parameters. [e52a0c89ac39b578a2062ef15fae2be625962e1f](https://github.com/rescript-lang/syntax/commit/e52a0c89ac39b578a2062ef15fae2be625962e1f)
+- Improve parser diagnostics for missing tilde in labeled parameters. [a0d7689d5d2bfc31dc251e966ac33a3001200171](https://github.com/rescript-lang/syntax/commit/a0d7689d5d2bfc31dc251e966ac33a3001200171)
+- Improve printing of uncurried application with a huggable expression in [c8767215186982e171fe9f9101d518150a65f0d7](https://github.com/rescript-lang/syntax/commit/c8767215186982e171fe9f9101d518150a65f0d7)
+- Improve printing of uncurried arrow typexpr outcome printer in [4d953b668cf47358deccb8b730566f24de25b9ee](https://github.com/rescript-lang/syntax/commit/4d953b668cf47358deccb8b730566f24de25b9ee)
+- Remove support for nativeint syntax in [72d9b7034fc28f317672c94994b322bee520acca](https://github.com/rescript-lang/syntax/commit/72d9b7034fc28f317672c94994b322bee520acca)
+- Improve printing of poly variant typexprs with attributes in [bf6561b](https://github.com/rescript-lang/syntax/commit/bf6561bb5d84557b8b6cbbcd40078c39526af4af)
+
+## ReScript 8.4.2 (December 11, 2020)
+
+Released in https://github.com/rescript-lang/syntax/releases/tag/v8.4.2 as part of https://github.com/rescript-lang/rescript-compiler/releases/tag/8.4.2
diff --git a/res_syntax/LICENSE b/res_syntax/LICENSE
new file mode 100644
index 0000000000..a1b2c17b66
--- /dev/null
+++ b/res_syntax/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2020 ReScript
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/res_syntax/Makefile b/res_syntax/Makefile
new file mode 100644
index 0000000000..8579ab747c
--- /dev/null
+++ b/res_syntax/Makefile
@@ -0,0 +1,36 @@
+SHELL = /bin/bash
+
+build:
+ dune build
+
+bootstrap: build
+ dune exec -- bash ./scripts/bootstrap.sh
+
+bench: build
+ dune exec -- bench
+
+test: build
+ dune exec -- testrunner
+ dune exec -- bash ./scripts/test.sh
+ make reanalyze
+ bash ./scripts/testok.sh
+
+roundtrip-test: build
+ dune exec -- testrunner
+ ROUNDTRIP_TEST=1 dune exec -- bash ./scripts/test.sh
+ make reanalyze
+ bash ./scripts/testok.sh
+
+reanalyze: build
+ reanalyze.exe -set-exit-code -all-cmt _build/default -suppress testrunner,compiler-libs-406 -exclude-paths compiler-libs-406
+
+format:
+ dune build @fmt --auto-promote
+
+checkformat:
+ dune build @fmt
+
+clean:
+ dune clean
+
+.PHONY: bench clean test roundtrip-test reanalyze bootstrap build-native
diff --git a/res_syntax/README.md b/res_syntax/README.md
new file mode 100644
index 0000000000..a1c7d02f7c
--- /dev/null
+++ b/res_syntax/README.md
@@ -0,0 +1,125 @@
+# ReScript Syntax 
+
+Documentation: https://rescript-lang.org/docs/manual/latest/overview
+
+This repo is the source of truth for the ReScript parser & printer. Issues go here.
+
+**You don't need this repo to use the ReScript syntax**. This comes with ReScript >=8.1. This repo is for syntax developers.
+
+## Contribute
+
+### Why
+
+A detailed discussion by Jonathan Blow and Casey Muratori on why you would hand-roll a parser for a production quality programming language
+[Discussion: Making Programming Language Parsers, etc](https://youtu.be/MnctEW1oL-E)
+
+"One reason why I switched off these parser tools is that the promises didn't really materialize.
+The amount of work that I had to do change a yacc script from one language to a variant of that language
+was more than if I hand wrote the code myself.
+"
+J. Blow.
+
+### Setup & Usage (For Repo Devs Only)
+
+Required:
+
+- OCaml 4.10 or later
+- Dune
+- Reanalyze
+- OS: macOS, Linux or Windows
+
+```sh
+git clone https://github.com/rescript-lang/syntax.git
+cd syntax
+opam install . --deps-only --with-test
+make # or "dune build"
+```
+
+This will produce the three binaries `rescript`, `tests` and `bench` (with `.exe` extension on Windows).
+
+We only build production binaries, even in dev mode. No need for a separate dev binary when the build is fast enough. Plus, this encourages proper benchmarking of the (production) binary each diff.
+
+After you make a change:
+
+```sh
+make
+```
+
+Run the core tests:
+
+```sh
+make test
+```
+
+Run the extended tests (not fully working on Windows yet):
+
+```sh
+make roundtrip-test
+```
+
+Those will tell you whether you've got a test output difference. If it's intentional, check them in.
+
+Debug a file:
+
+```sh
+# write code in test.res
+dune exec -- rescript test.res # test printer
+dune exec -- rescript -print ast test.res # print ast
+dune exec -- rescript -print comments test.res # print comment table
+dune exec -- rescript -print ml test.res # show ocaml code
+dune exec -- rescript -print res -width 80 test.res # test printer and change default print width
+```
+
+Benchmark:
+
+```sh
+make bench
+```
+
+Enable stack trace:
+
+```sh
+# Before you run the binary
+export OCAMLRUNPARAM="b"
+```
+
+Tip: add the above line to your shell rc file so that OCaml stack traces are enabled in every new shell session.
+
+### Development Docs
+
+#### Folder Structure
+
+- `src` contains all the parser/printer source code. Don't change folder structure without notice; The [rescript-compiler](https://github.com/rescript-lang/rescript-compiler) repo uses this repo as a submodule and assumes `src`.
+- `benchmarks`, `cli` and `tests` contain the source code for the executables used for testing/benchmarking. These are not used by the [rescript-compiler](https://github.com/rescript-lang/rescript-compiler) repo.
+
+#### Error Reporting Logic
+
+Right now, ReScript's compiler's error reporting mechanism, for architectural reasons, is independent from this syntax repo's error reporting mechanism. However, we do want a unified look when they report the errors in the terminal. This is currently achieved by (carefully...) duplicating the error report logic from the compiler repo to here (or vice-versa; either way, just keep them in sync). The files to sync are the compiler repo's [super_location.ml](https://github.com/rescript-lang/rescript-compiler/blob/fcb21790dfb0592f609818df7790192061360631/jscomp/super_errors/super_location.ml) and [super_code_frame.ml](https://github.com/rescript-lang/rescript-compiler/blob/fcb21790dfb0592f609818df7790192061360631/jscomp/super_errors/super_code_frame.ml), into this repo's [res_diagnostics_printing_utils.ml](https://github.com/rescript-lang/syntax/blob/ec5cefb23b659b0a7be170ae0ad26f3fe8a05456/src/res_diagnostics_printing_utils.ml). A few notes:
+
+- Some lines are lightly changed to fit this repo's needs; they're documented in the latter file.
+- Please keep these files lightweight and as dependency-less as possible, for easier syncing.
+- The syntax logic currently doesn't have warnings, only errors, and potentially more than one.
+- In the future, ideally, the error reporting logic would also be unified with GenType and Reanalyze's. It'd be painful to copy paste around all this reporting logic.
+- The errors are reported by the parser [here](https://github.com/rescript-lang/syntax/blob/ec5cefb23b659b0a7be170ae0ad26f3fe8a05456/src/res_diagnostics.ml#L146).
+- Our editor plugin parses the error report from the compiler and from the syntax [here](https://github.com/rescript-lang/rescript-vscode/blob/0dbf2eb9cdb0bd6d95be1aee88b73830feecb5cc/server/src/utils.ts#L129-L329).
+
+### Example API usage
+
+```ocaml
+let filename = "foo.res"
+let src = FS.readFile filename
+
+let p =
+ (* intended for ocaml compiler *)
+ let mode = Res_parser.ParseForTypeChecker in
+ (* if you want to target the printer use: let mode = Res_parser.Default in*)
+ Res_parser.make ~mode src filename
+
+let structure = Res_core.parseImplementation p
+let signature = Res_core.parseSpecification p
+
+let () = match p.diagnostics with
+| [] -> () (* no problems *)
+| diagnostics -> (* parser contains problems *)
+ Res_diagnostics.printReport diagnostics src
+```
diff --git a/res_syntax/benchmarks/Benchmark.ml b/res_syntax/benchmarks/Benchmark.ml
new file mode 100644
index 0000000000..f4ea6b5fe0
--- /dev/null
+++ b/res_syntax/benchmarks/Benchmark.ml
@@ -0,0 +1,252 @@
+module ResParser = Res_core
+module Doc = Res_doc
+module CommentTable = Res_comments_table
+module Parser = Res_parser
+module Printer = Res_printer
+
+module IO : sig
+ val readFile : string -> string
+end = struct
+ (* random chunk size: 2^15, TODO: why do we guess randomly? *)
+ let chunkSize = 32768
+
+ let readFile filename =
+ let chan = open_in filename in
+ let buffer = Buffer.create chunkSize in
+ let chunk = (Bytes.create [@doesNotRaise]) chunkSize in
+ let rec loop () =
+ let len =
+ try input chan chunk 0 chunkSize with Invalid_argument _ -> 0
+ in
+ if len == 0 then (
+ close_in_noerr chan;
+ Buffer.contents buffer)
+ else (
+ Buffer.add_subbytes buffer chunk 0 len;
+ loop ())
+ in
+ loop ()
+end
+
+module Time : sig
+ type t
+
+ val now : unit -> t
+
+ val toUint64 : t -> int64 [@@live]
+
+ (* let of_uint64_ns ns = ns *)
+
+ val nanosecond : t [@@live]
+ val microsecond : t [@@live]
+ val millisecond : t [@@live]
+ val second : t [@@live]
+ val minute : t [@@live]
+ val hour : t [@@live]
+
+ val zero : t
+
+ val diff : t -> t -> t
+ val add : t -> t -> t
+ val print : t -> float
+end = struct
+ (* nanoseconds *)
+ type t = int64
+
+ let zero = 0L
+
+ let toUint64 s = s
+
+ let nanosecond = 1L
+ let microsecond = Int64.mul 1000L nanosecond
+ let millisecond = Int64.mul 1000L microsecond
+ let second = Int64.mul 1000L millisecond
+ let minute = Int64.mul 60L second
+ let hour = Int64.mul 60L minute
+
+ (* TODO: we could do this inside caml_absolute_time *)
+ external init : unit -> unit = "caml_mach_initialize"
+ let () = init ()
+ external now : unit -> t = "caml_mach_absolute_time"
+
+ let diff t1 t2 = Int64.sub t2 t1
+ let add t1 t2 = Int64.add t1 t2
+ let print t = Int64.to_float t *. 1e-6
+end
+
+module Benchmark : sig
+ type t
+
+ val make : name:string -> f:(t -> unit) -> unit -> t
+ val launch : t -> unit
+ val report : t -> unit
+end = struct
+ type t = {
+ name: string;
+ mutable start: Time.t;
+ mutable n: int; (* current iterations count *)
+ mutable duration: Time.t;
+ benchFunc: t -> unit;
+ mutable timerOn: bool;
+ (* mutable result: benchmarkResult; *)
+ (* The initial states *)
+ mutable startAllocs: float;
+ mutable startBytes: float;
+ (* The net total of this test after being run. *)
+ mutable netAllocs: float;
+ mutable netBytes: float;
+ }
+
+ let report b =
+ print_endline (Format.sprintf "Benchmark: %s" b.name);
+ print_endline (Format.sprintf "Nbr of iterations: %d" b.n);
+ print_endline
+ (Format.sprintf "Benchmark ran during: %fms" (Time.print b.duration));
+ print_endline
+ (Format.sprintf "Avg time/op: %fms"
+ (Time.print b.duration /. float_of_int b.n));
+ print_endline
+ (Format.sprintf "Allocs/op: %d"
+ (int_of_float (b.netAllocs /. float_of_int b.n)));
+ print_endline
+ (Format.sprintf "B/op: %d"
+ (int_of_float (b.netBytes /. float_of_int b.n)));
+
+ (* return (float64(r.Bytes) * float64(r.N) / 1e6) / r.T.Seconds() *)
+ print_newline ();
+ ()
+
+ let make ~name ~f () =
+ {
+ name;
+ start = Time.zero;
+ n = 0;
+ benchFunc = f;
+ duration = Time.zero;
+ timerOn = false;
+ startAllocs = 0.;
+ startBytes = 0.;
+ netAllocs = 0.;
+ netBytes = 0.;
+ }
+
+ (* total amount of memory allocated by the program since it started in words *)
+ let mallocs () =
+ let stats = Gc.quick_stat () in
+ stats.minor_words +. stats.major_words -. stats.promoted_words
+
+ let startTimer b =
+ if not b.timerOn then (
+ let allocatedWords = mallocs () in
+ b.startAllocs <- allocatedWords;
+ b.startBytes <- allocatedWords *. 8.;
+ b.start <- Time.now ();
+ b.timerOn <- true)
+
+ let stopTimer b =
+ if b.timerOn then (
+ let allocatedWords = mallocs () in
+ let diff = Time.diff b.start (Time.now ()) in
+ b.duration <- Time.add b.duration diff;
+ b.netAllocs <- b.netAllocs +. (allocatedWords -. b.startAllocs);
+ b.netBytes <- b.netBytes +. ((allocatedWords *. 8.) -. b.startBytes);
+ b.timerOn <- false)
+
+ let resetTimer b =
+ if b.timerOn then (
+ let allocatedWords = mallocs () in
+ b.startAllocs <- allocatedWords;
+ b.netAllocs <- allocatedWords *. 8.;
+ b.start <- Time.now ());
+ b.netAllocs <- 0.;
+ b.netBytes <- 0.
+
+ let runIteration b n =
+ Gc.full_major ();
+ b.n <- n;
+ resetTimer b;
+ startTimer b;
+ b.benchFunc b;
+ stopTimer b
+
+ let launch b =
+ (* 150 runs * all the benchmarks means around 1m of benchmark time *)
+ for n = 1 to 150 do
+ runIteration b n
+ done
+end
+
+module Benchmarks : sig
+ val run : unit -> unit
+end = struct
+ type action = Parse | Print
+ let string_of_action action =
+ match action with
+ | Parse -> "parser"
+ | Print -> "printer"
+
+  (* TODO: we could add Reason here *)
+ type lang = Ocaml | Rescript
+ let string_of_lang lang =
+ match lang with
+ | Ocaml -> "ocaml"
+ | Rescript -> "rescript"
+
+ let parseOcaml src filename =
+ let lexbuf = Lexing.from_string src in
+ Location.init lexbuf filename;
+ Parse.implementation lexbuf
+
+ let parseRescript src filename =
+ let p = Parser.make src filename in
+ let structure = ResParser.parseImplementation p in
+ assert (p.diagnostics == []);
+ structure
+
+ let benchmark filename lang action =
+ let src = IO.readFile filename in
+ let name =
+ filename ^ " " ^ string_of_lang lang ^ " " ^ string_of_action action
+ in
+ let benchmarkFn =
+ match (lang, action) with
+ | Rescript, Parse ->
+ fun _ ->
+ let _ = Sys.opaque_identity (parseRescript src filename) in
+ ()
+ | Ocaml, Parse ->
+ fun _ ->
+ let _ = Sys.opaque_identity (parseOcaml src filename) in
+ ()
+ | Rescript, Print ->
+ let p = Parser.make src filename in
+ let ast = ResParser.parseImplementation p in
+ fun _ ->
+ let _ =
+ Sys.opaque_identity
+ (let cmtTbl = CommentTable.make () in
+ let comments = List.rev p.Parser.comments in
+ let () = CommentTable.walkStructure ast cmtTbl comments in
+ Doc.toString ~width:80 (Printer.printStructure ast cmtTbl))
+ in
+ ()
+ | _ -> fun _ -> ()
+ in
+ let b = Benchmark.make ~name ~f:benchmarkFn () in
+ Benchmark.launch b;
+ Benchmark.report b
+
+ let run () =
+ benchmark "./benchmarks/data/RedBlackTree.res" Rescript Parse;
+ benchmark "./benchmarks/data/RedBlackTree.ml" Ocaml Parse;
+ benchmark "./benchmarks/data/RedBlackTree.res" Rescript Print;
+ benchmark "./benchmarks/data/RedBlackTreeNoComments.res" Rescript Print;
+ benchmark "./benchmarks/data/Napkinscript.res" Rescript Parse;
+ benchmark "./benchmarks/data/Napkinscript.ml" Ocaml Parse;
+ benchmark "./benchmarks/data/Napkinscript.res" Rescript Print;
+ benchmark "./benchmarks/data/HeroGraphic.res" Rescript Parse;
+ benchmark "./benchmarks/data/HeroGraphic.ml" Ocaml Parse;
+ benchmark "./benchmarks/data/HeroGraphic.res" Rescript Print
+end
+
+let () = Benchmarks.run ()
diff --git a/res_syntax/benchmarks/data/HeroGraphic.ml b/res_syntax/benchmarks/data/HeroGraphic.ml
new file mode 100644
index 0000000000..08feb30e3d
--- /dev/null
+++ b/res_syntax/benchmarks/data/HeroGraphic.ml
@@ -0,0 +1,8654 @@
+;;[%bs.raw {|require('./HeroGraphic.css')|}]
+let make ?(width= "760") ?(height= "380") =
+ ((svg ~width:((width)) ~height:((height))
+ ~viewBox:(("0 0 758 381")) ~fill:(("none")
+ ) ~xmlns:(("http://www.w3.org/2000/svg")
+ )
+ ~children:[((path
+ ~d:(("M78.8374 255.364H664.923C664.923 255.364 677.451 256.743 677.451 270.971C677.451 285.2 667.673 288.178 667.673 288.178H579.485C579.485 288.178 592.014 290.163 592.014 304.888C592.014 319.612 582.785 321.101 582.785 321.101H524.544C524.544 321.101 507.676 322.59 508.776 333.896C509.876 345.201 520.204 346.194 520.204 346.194H626.849C626.849 346.194 644.266 347.683 643.166 363.897C642.066 380.11 632.288 380.11 632.288 380.11H186.032C186.032 380.11 166.964 379.118 167.514 364.393C168.064 349.668 186.582 350.661 186.582 350.661H121.801C121.801 350.661 104.628 351.598 104.628 338.252C104.628 320.715 121.862 322.149 121.862 322.149H142.457C142.457 322.149 159.264 323.362 159.264 306.101C159.264 293.748 144.657 292.7 144.657 292.7H77.6151C77.6151 292.7 56.4084 290.439 56.4695 275.769C56.5918 260.879 66.3089 255.364 78.8374 255.364Z")
+ ) ~fill:(("#0B1627"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~opacity:(("0.57"))
+ ~d:(("M393.453 244.555C393.453 244.555 398.709 220.841 391.437 205.344C384.103 189.848 369.68 187.145 369.68 187.145C366.991 161.722 351.59 142.916 317.794 145.288C283.998 147.714 270.614 166.134 270.614 166.134C270.614 166.134 274.342 158.358 267.925 150.251C261.508 142.144 247.757 143.799 247.757 143.799C247.757 143.799 253.502 135.14 241.646 128.578C229.79 122.015 220.378 126.868 220.378 126.868C220.378 126.868 226.55 103.595 203.572 99.1834C180.593 94.7163 163.542 110.379 163.542 110.379C163.542 110.379 153.641 104.753 138.852 109.441C124.001 114.184 122.717 134.203 122.717 134.203C122.717 134.203 90.5713 118.541 55.675 137.457C31.5349 150.527 27.2569 178.377 26.9514 194.425C32.9406 207.495 40.152 215.547 40.152 215.547C31.5349 242.349 51.886 248.14 51.886 248.14H90.0824H358.496L393.453 244.555Z")
+ ) ~fill:(("#0D1522"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~className:(("HeroGraphic-cloudLeft")
+ ) ~opacity:(("0.7"))
+ ~d:(("M0.4885 96.7017H97.599C97.599 96.7017 99.0047 89.1463 88.6152 87.8779C88.6152 87.8779 87.393 74.4216 72.6644 69.6237C57.9359 64.8258 51.2133 72.2157 51.2133 72.2157C51.2133 72.2157 46.6909 67.3075 40.8239 68.0244C34.8958 68.7413 32.4513 75.5246 32.4513 75.5246C32.4513 75.5246 28.9677 73.3187 25.3009 75.1386C21.634 76.9585 21.8174 79.385 21.8174 79.385C21.8174 79.385 17.6005 78.4475 14.6059 78.282C11.6113 78.0615 -2.75056 78.999 0.4885 96.7017Z")
+ ) ~fill:(("white"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M326.044 228.066H250.141V241.246H326.044V228.066Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~opacity:(("0.6"))
+ ~d:(("M331.606 228.066H291.759V240.198H331.606V228.066Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M432.811 224.15H375.303V240.915H432.811V224.15Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M382.514 226.466H378.419V227.128H382.514V226.466Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M389.053 226.466H384.959V227.128H389.053V226.466Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M395.531 226.466H391.437V227.128H395.531V226.466Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M402.009 226.466H397.915V227.128H402.009V226.466Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M408.488 226.466H404.393V227.128H408.488V226.466Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M414.966 226.466H410.871V227.128H414.966V226.466Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M421.444 226.466H417.349V227.128H421.444V226.466Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M427.983 226.466H423.888V227.128H427.983V226.466Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M382.514 228.727H378.419V229.389H382.514V228.727Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M389.053 228.727H384.959V229.389H389.053V228.727Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M395.531 228.727H391.437V229.389H395.531V228.727Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M402.009 228.727H397.915V229.389H402.009V228.727Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M408.488 228.727H404.393V229.389H408.488V228.727Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M414.966 228.727H410.871V229.389H414.966V228.727Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M421.444 228.727H417.349V229.389H421.444V228.727Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M427.983 228.727H423.888V229.389H427.983V228.727Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M382.514 230.988H378.419V231.65H382.514V230.988Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M389.053 230.988H384.959V231.65H389.053V230.988Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M395.531 230.988H391.437V231.65H395.531V230.988Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M402.009 230.988H397.915V231.65H402.009V230.988Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M408.488 230.988H404.393V231.65H408.488V230.988Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M414.966 230.988H410.871V231.65H414.966V230.988Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M421.444 230.988H417.349V231.65H421.444V230.988Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M427.983 230.988H423.888V231.65H427.983V230.988Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M382.514 233.25H378.419V233.911H382.514V233.25Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M389.053 233.25H384.959V233.911H389.053V233.25Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M395.531 233.25H391.437V233.911H395.531V233.25Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M402.009 233.25H397.915V233.911H402.009V233.25Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M408.488 233.25H404.393V233.911H408.488V233.25Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M414.966 233.25H410.871V233.911H414.966V233.25Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M421.444 233.25H417.349V233.911H421.444V233.25Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M427.983 233.25H423.888V233.911H427.983V233.25Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M382.514 235.511H378.419V236.172H382.514V235.511Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M389.053 235.511H384.959V236.172H389.053V235.511Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M395.531 235.511H391.437V236.172H395.531V235.511Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M402.009 235.511H397.915V236.172H402.009V235.511Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M408.488 235.511H404.393V236.172H408.488V235.511Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M414.966 235.511H410.871V236.172H414.966V235.511Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M421.444 235.511H417.349V236.172H421.444V235.511Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M427.983 235.511H423.888V236.172H427.983V235.511Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M378.969 224.15H375.303V240.915H378.969V224.15Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M178.087 221.999H171.181V225.032H178.087V221.999Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M186.826 221.999H179.92V225.032H186.826V221.999Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M195.566 221.999H188.66V225.032H195.566V221.999Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M178.087 226.411H171.181V229.444H178.087V226.411Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M186.826 226.411H179.92V229.444H186.826V226.411Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M195.566 226.411H188.66V229.444H195.566V226.411Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M160.914 230.933H112.206V244.555H160.914V230.933Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M117.4 220.234H115.689V221.779H117.4V220.234Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M122.412 220.234H120.701V221.779H122.412V220.234Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M127.362 220.234H125.651V221.779H127.362V220.234Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M132.312 220.234H130.601V221.779H132.312V220.234Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M137.263 220.234H135.551V221.779H137.263V220.234Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.274 220.234H140.563V221.779H142.274V220.234Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M147.224 220.234H145.513V221.779H147.224V220.234Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M152.174 220.234H150.463V221.779H152.174V220.234Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M157.125 220.234H155.413V221.779H157.125V220.234Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M117.4 223.819H115.689V225.363H117.4V223.819Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M122.412 223.819H120.701V225.363H122.412V223.819Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M127.362 223.819H125.651V225.363H127.362V223.819Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M132.312 223.819H130.601V225.363H132.312V223.819Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M137.263 223.819H135.551V225.363H137.263V223.819Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.274 223.819H140.563V225.363H142.274V223.819Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M147.224 223.819H145.513V225.363H147.224V223.819Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M152.174 223.819H150.463V225.363H152.174V223.819Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M157.125 223.819H155.413V225.363H157.125V223.819Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M117.4 227.459H115.689V229.003H117.4V227.459Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M122.412 227.459H120.701V229.003H122.412V227.459Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M127.362 227.459H125.651V229.003H127.362V227.459Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M132.312 227.459H130.601V229.003H132.312V227.459Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M137.263 227.459H135.551V229.003H137.263V227.459Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.274 227.459H140.563V229.003H142.274V227.459Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M147.224 227.459H145.513V229.003H147.224V227.459Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M152.174 227.459H150.463V229.003H152.174V227.459Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M157.125 227.459H155.413V229.003H157.125V227.459Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~className:(("HeroGraphic-wave"))
+ ~d:(("M81.5264 248.14H667.612C667.612 248.14 680.14 249.518 680.14 263.747C680.14 277.975 670.362 280.953 670.362 280.953H582.174C582.174 280.953 594.703 282.939 594.703 297.663C594.703 312.388 585.474 313.877 585.474 313.877H527.233C527.233 313.877 510.365 315.366 511.465 326.671C512.565 337.977 522.893 338.969 522.893 338.969H629.538C629.538 338.969 646.955 340.458 645.855 356.672C644.755 372.886 634.977 372.886 634.977 372.886H188.721C188.721 372.886 169.653 371.893 170.203 357.168C170.753 342.444 189.271 343.436 189.271 343.436H124.49C124.49 343.436 107.317 344.374 107.317 331.028C107.317 313.491 124.551 314.925 124.551 314.925H145.146C145.146 314.925 161.953 316.138 161.953 298.876C161.953 286.523 147.346 285.475 147.346 285.475H80.3041C80.3041 285.475 59.0975 283.214 59.1586 268.545C59.2808 253.655 68.998 248.14 81.5264 248.14Z")
+ ) ~fill:(("url(#paint0_linear)")
+ ) ~fillOpacity:(("0.7")
+ ) ~children:[] ())
+ [@JSX ]);
+ ((path ~opacity:(("0.66"))
+ ~d:(("M112.206 248.14V285.365H147.408C147.408 285.365 151.869 285.696 155.841 288.067H160.914V274.39L169.592 269.923V268.655H181.998H199.477V264.464H201.066V273.949L205.527 274.28V284.979V285.641H203.571V287.35H228.751H252.096V285.641H250.141V284.979V277.424L265.419 278.527H274.831V264.298H279.048V273.674H291.393V286.799H291.82V298.38V299.704H297.26V303.013H300.804V312.002H301.904V303.013H303.31V312.002H304.41V303.013H318.222V312.002H319.322V303.013H320.727V312.002H321.828V303.013H326.228V299.704H331.667V298.38V286.799H341.812V273.839H354.707V273.949H354.89V287.24H390.948V282.497H396.265V290.935H401.948V306.763L405.798 306.983L406.349 307.038L412.582 307.424H412.704L414.966 307.59L415.821 307.645L417.349 307.755L418.205 307.81L419.733 307.921L420.588 307.976L425.233 308.252V304.998H432.933V278.747H439.472L445.034 282.938V293.692H475.285V276.155H508.47V287.295H544.528V282.552H550.639V294.575V295.126H559.806V295.568V297.222H564.757V304.722H566.529V297.167H569.707V295.512V295.071H571.418V308.362H592.319C593.725 305.935 594.703 302.516 594.703 297.608C594.703 282.883 582.174 280.898 582.174 280.898H635.894V248.085H112.206V248.14Z")
+ ) ~fill:(("#0D1522"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~opacity:(("0.5"))
+ ~d:(("M125.712 326.34V326.782H126.14V326.34H129.073L130.54 339.19V339.025L131.885 326.34H134.879L136.346 339.19L137.69 326.34H140.379V326.782H140.929V326.34H141.235V326.23H140.929V321.984H144.169V314.87H124.673C124.673 314.87 123.817 314.814 122.412 314.87V321.929H125.773V326.175V326.34H125.712ZM138.118 321.929H140.379V326.175H137.69L138.118 321.929ZM132.312 321.929H134.329L134.818 326.175H131.823L132.312 321.929ZM126.079 321.929H128.523L129.012 326.175H126.14V321.929H126.079Z")
+ ) ~fill:(("#0D1522"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M552.717 270.806C552.717 272.57 551.128 274.06 549.111 274.06H480.358C478.402 274.06 476.752 272.626 476.752 270.806C476.752 269.041 478.341 267.552 480.358 267.552H549.173C551.128 267.552 552.717 269.041 552.717 270.806Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M165.008 261.1C165.008 262.313 163.97 263.25 162.625 263.25H153.458C152.113 263.25 151.074 262.313 151.074 261.1C151.074 259.886 152.113 258.949 153.458 258.949H162.625C163.97 258.949 165.008 259.941 165.008 261.1Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M226.673 263.25C226.673 264.464 225.634 265.401 224.289 265.401H192.449C191.104 265.401 190.065 264.464 190.065 263.25C190.065 262.037 191.104 261.1 192.449 261.1H224.289C225.573 261.1 226.673 262.092 226.673 263.25Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M376.036 297.553C376.036 298.27 375.425 298.821 374.63 298.821H361.613C360.819 298.821 360.207 298.27 360.207 297.553C360.207 296.836 360.819 296.284 361.613 296.284H374.63C375.425 296.284 376.036 296.836 376.036 297.553Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~className:(("HeroGraphic-wave"))
+ ~d:(("M391.681 296.836C391.681 295.733 392.659 294.85 393.881 294.85H479.747C480.969 294.85 481.947 295.733 481.947 296.836C481.947 297.939 480.969 298.821 479.747 298.821H393.881C392.659 298.766 391.681 297.884 391.681 296.836Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M208.4 296.174C208.4 296.891 207.727 297.498 206.933 297.498H194.343C193.549 297.498 192.877 296.891 192.877 296.174C192.877 295.457 193.549 294.85 194.343 294.85H206.872C207.727 294.85 208.4 295.457 208.4 296.174Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~className:(("HeroGraphic-wave"))
+ ~d:(("M309.788 324.631C309.788 326.009 308.566 327.168 306.977 327.168H236.634C235.106 327.168 233.823 326.065 233.823 324.631C233.823 323.252 235.045 322.094 236.634 322.094H307.038C308.505 322.094 309.788 323.252 309.788 324.631Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M372.186 328.932C372.186 329.925 371.269 330.752 370.169 330.752H355.074C353.974 330.752 353.057 329.925 353.057 328.932C353.057 327.94 353.974 327.112 355.074 327.112H370.169C371.33 327.112 372.186 327.94 372.186 328.932Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M537.255 360.863C537.255 360.477 537.622 360.146 538.05 360.146H551.984C552.412 360.146 552.778 360.477 552.778 360.863C552.778 361.249 552.412 361.58 551.984 361.58H538.05C537.561 361.58 537.255 361.249 537.255 360.863Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M224.656 359.816C224.656 358.823 223.739 357.996 222.639 357.996H212.678C211.578 357.996 210.661 358.823 210.661 359.816C210.661 360.808 211.578 361.635 212.678 361.635H222.639C223.8 361.58 224.656 360.753 224.656 359.816Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M151.136 332.517C151.136 331.524 150.219 330.697 149.119 330.697H141.968C140.868 330.697 139.952 331.524 139.952 332.517C139.952 333.51 140.868 334.337 141.968 334.337H149.119C150.219 334.337 151.136 333.51 151.136 332.517Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~opacity:(("0.6"))
+ ~d:(("M250.202 228.066H205.649V241.246H250.202V228.066Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M630.149 117.052H611.326V120.802H630.149V117.052Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M630.149 119.588H611.326V120.857H630.149V119.588Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~opacity:(("0.6"))
+ ~d:(("M199.538 230.933H160.914V240.529H199.538V230.933Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M339.795 222.606H337.473V224.702H339.795V222.606Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M345.234 222.606H342.912V224.702H345.234V222.606Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M350.673 222.606H348.351V224.702H350.673V222.606Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M356.052 222.606H353.79V224.702H356.052V222.606Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M361.552 222.606H359.229V224.702H361.552V222.606Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M366.991 222.606H364.669V224.702H366.991V222.606Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M372.43 222.606H370.108V224.702H372.43V222.606Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M480.175 221.007H478.769V222.661H480.175V221.007Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M483.536 221.007H482.13V222.661H483.536V221.007Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M486.836 221.007H485.43V222.661H486.836V221.007Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M490.197 221.007H488.792V222.661H490.197V221.007Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M493.497 221.007H492.092V222.661H493.497V221.007Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M496.859 221.007H495.453V222.661H496.859V221.007Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M500.22 221.007H498.814V222.661H500.22V221.007Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M503.337 221.007H501.931V222.661H503.337V221.007Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M507.493 221.007H506.087V222.661H507.493V221.007Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M480.175 224.26H478.769V225.915H480.175V224.26Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M483.536 224.26H482.13V225.915H483.536V224.26Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M486.836 224.26H485.43V225.915H486.836V224.26Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M490.197 224.26H488.792V225.915H490.197V224.26Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M493.497 224.26H492.092V225.915H493.497V224.26Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M496.859 224.26H495.453V225.915H496.859V224.26Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M500.22 224.26H498.814V225.915H500.22V224.26Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M503.337 224.26H501.931V225.915H503.337V224.26Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M507.493 224.26H506.087V225.915H507.493V224.26Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M480.175 227.569H478.769V229.224H480.175V227.569Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M483.536 227.569H482.13V229.224H483.536V227.569Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M486.836 227.569H485.43V229.224H486.836V227.569Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M490.197 227.569H488.792V229.224H490.197V227.569Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M493.497 227.569H492.092V229.224H493.497V227.569Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M496.859 227.569H495.453V229.224H496.859V227.569Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M500.22 227.569H498.814V229.224H500.22V227.569Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M503.337 227.569H501.931V229.224H503.337V227.569Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M507.493 227.569H506.087V229.224H507.493V227.569Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M480.175 230.878H478.769V232.533H480.175V230.878Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M483.536 230.878H482.13V232.533H483.536V230.878Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M486.836 230.878H485.43V232.533H486.836V230.878Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M490.197 230.878H488.792V232.533H490.197V230.878Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M493.497 230.878H492.092V232.533H493.497V230.878Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M496.859 230.878H495.453V232.533H496.859V230.878Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M500.22 230.878H498.814V232.533H500.22V230.878Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M503.337 230.878H501.931V232.533H503.337V230.878Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M507.493 230.878H506.087V232.533H507.493V230.878Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M480.175 234.132H478.769V235.786H480.175V234.132Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M483.536 234.132H482.13V235.786H483.536V234.132Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M486.836 234.132H485.43V235.786H486.836V234.132Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M490.197 234.132H488.792V235.786H490.197V234.132Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M493.497 234.132H492.092V235.786H493.497V234.132Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M496.859 234.132H495.453V235.786H496.859V234.132Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M500.22 234.132H498.814V235.786H500.22V234.132Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M503.337 234.132H501.931V235.786H503.337V234.132Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M507.493 234.132H506.087V235.786H507.493V234.132Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M339.795 228.01H337.473V230.106H339.795V228.01Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M345.234 228.01H342.912V230.106H345.234V228.01Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M350.673 228.01H348.351V230.106H350.673V228.01Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M356.052 228.01H353.79V230.106H356.052V228.01Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M361.552 228.01H359.229V230.106H361.552V228.01Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M366.991 228.01H364.669V230.106H366.991V228.01Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M372.43 228.01H370.108V230.106H372.43V228.01Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M267.925 236.228H266.519V237.496H267.925V236.228Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M271.286 236.228H269.88V237.496H271.286V236.228Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M274.586 236.228H273.181V237.496H274.586V236.228Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M277.886 236.228H276.481V237.496H277.886V236.228Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M281.187 236.228H279.781V237.496H281.187V236.228Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M284.487 236.228H283.081V237.496H284.487V236.228Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M287.787 236.228H286.381V237.496H287.787V236.228Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~opacity:(("0.6"))
+ ~d:(("M375.303 228.066H334.173V240.253H375.303V228.066Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~opacity:(("0.3"))
+ ~d:(("M432.811 228.066H375.303V240.253H432.811V228.066Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M444.362 220.4H434.767V223.819H444.362V220.4Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M454.69 220.4H445.095V223.819H454.69V220.4Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M464.957 220.4H455.362V223.819H464.957V220.4Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M475.224 220.4H465.629V223.819H475.224V220.4Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M444.362 224.536H434.767V227.955H444.362V224.536Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M454.69 224.536H445.095V227.955H454.69V224.536Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M464.957 224.536H455.362V227.955H464.957V224.536Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M475.224 224.536H465.629V227.955H475.224V224.536Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M444.362 228.672H434.767V232.091H444.362V228.672Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M454.69 228.672H445.095V232.091H454.69V228.672Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M464.957 228.672H455.362V232.091H464.957V228.672Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M475.224 228.672H465.629V232.091H475.224V228.672Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M444.362 231.54H434.767V234.959H444.362V231.54Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M454.69 231.54H445.095V234.959H454.69V231.54Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M464.957 231.54H455.362V234.959H464.957V231.54Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M475.224 231.54H465.629V234.959H475.224V231.54Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~opacity:(("0.3"))
+ ~d:(("M476.141 228.01H432.628V241.081H476.141V228.01Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~opacity:(("0.6"))
+ ~d:(("M509.998 230.713H476.08V240.474H509.998V230.713Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~opacity:(("0.4"))
+ ~d:(("M548.867 230.713H514.949V240.474H548.867V230.713Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~opacity:(("0.7"))
+ ~d:(("M603.809 230.713H578.569V240.474H603.809V230.713Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M128.401 75.3592L130.479 36.0934L132.374 75.3592H128.401Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M130.477 36.4794V36.0934L128.461 75.3592H128.95C129.744 75.1386 130.539 74.8628 130.539 74.8628V36.4794H130.477Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M134.207 75.3592L136.285 36.0934L138.179 75.3592H134.207Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M136.283 36.4794V36.0934L134.266 75.3592H134.755C135.55 75.1386 136.344 74.8628 136.344 74.8628V36.4794H136.283Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M126.14 62.8405H125.712V73.7599H126.14V62.8405Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M140.868 62.8405H140.318V73.7599H140.868V62.8405Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M141.174 63.778H125.712V64.0538H141.174V63.778Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M144.107 73.3187H122.351V105.801H144.107V73.3187Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M125.284 75.3592H124.001V76.5173H125.284V75.3592Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M128.157 75.3592H126.873V76.5173H128.157V75.3592Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M130.968 75.3592H129.684V76.5173H130.968V75.3592Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M133.84 75.3592H132.557V76.5173H133.84V75.3592Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M136.713 75.3592H135.429V76.5173H136.713V75.3592Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M139.585 75.3592H138.302V76.5173H139.585V75.3592Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.457 75.3592H141.174V76.5173H142.457V75.3592Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M125.284 77.2343H124.001V78.3924H125.284V77.2343Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M128.157 77.2343H126.873V78.3924H128.157V77.2343Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M130.968 77.2343H129.684V78.3924H130.968V77.2343Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M133.84 77.2343H132.557V78.3924H133.84V77.2343Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M136.713 77.2343H135.429V78.3924H136.713V77.2343Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M139.585 77.2343H138.302V78.3924H139.585V77.2343Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.457 77.2343H141.174V78.3924H142.457V77.2343Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M124.612 79.2747H124.001V79.8262H124.612V79.2747Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M127.118 79.2747H126.506V79.8262H127.118V79.2747Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M129.684 79.2747H129.073V79.8262H129.684V79.2747Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M132.251 79.2747H131.64V79.8262H132.251V79.2747Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M134.757 79.2747H134.146V79.8262H134.757V79.2747Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M137.324 79.2747H136.712V79.8262H137.324V79.2747Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M139.891 79.2747H139.279V79.8262H139.891V79.2747Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.396 79.2747H141.785V79.8262H142.396V79.2747Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M124.612 81.7564H124.001V82.3079H124.612V81.7564Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M127.118 81.7564H126.506V82.3079H127.118V81.7564Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M129.684 81.7564H129.073V82.3079H129.684V81.7564Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M132.251 81.7564H131.64V82.3079H132.251V81.7564Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M134.757 81.7564H134.146V82.3079H134.757V81.7564Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M137.324 81.7564H136.712V82.3079H137.324V81.7564Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M139.891 81.7564H139.279V82.3079H139.891V81.7564Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.396 81.7564H141.785V82.3079H142.396V81.7564Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M124.612 84.2381H124.001V84.7896H124.612V84.2381Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M127.118 84.2381H126.506V84.7896H127.118V84.2381Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M129.684 84.2381H129.073V84.7896H129.684V84.2381Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M132.251 84.2381H131.64V84.7896H132.251V84.2381Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M134.757 84.2381H134.146V84.7896H134.757V84.2381Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M137.324 84.2381H136.712V84.7896H137.324V84.2381Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M139.891 84.2381H139.279V84.7896H139.891V84.2381Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.396 84.2381H141.785V84.7896H142.396V84.2381Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M124.612 86.6647H124.001V87.2162H124.612V86.6647Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M127.118 86.6647H126.506V87.2162H127.118V86.6647Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M129.684 86.6647H129.073V87.2162H129.684V86.6647Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M132.251 86.6647H131.64V87.2162H132.251V86.6647Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M134.757 86.6647H134.146V87.2162H134.757V86.6647Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M137.324 86.6647H136.712V87.2162H137.324V86.6647Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M139.891 86.6647H139.279V87.2162H139.891V86.6647Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.396 86.6647H141.785V87.2162H142.396V86.6647Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M124.612 89.1464H124.001V89.6978H124.612V89.1464Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M127.118 89.1464H126.506V89.6978H127.118V89.1464Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M129.684 89.1464H129.073V89.6978H129.684V89.1464Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M132.251 89.1464H131.64V89.6978H132.251V89.1464Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M134.757 89.1464H134.146V89.6978H134.757V89.1464Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M137.324 89.1464H136.712V89.6978H137.324V89.1464Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M139.891 89.1464H139.279V89.6978H139.891V89.1464Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.396 89.1464H141.785V89.6978H142.396V89.1464Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M124.612 91.628H124.001V92.1795H124.612V91.628Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M127.118 91.628H126.506V92.1795H127.118V91.628Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M129.684 91.628H129.073V92.1795H129.684V91.628Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M132.251 91.628H131.64V92.1795H132.251V91.628Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M134.757 91.628H134.146V92.1795H134.757V91.628Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M137.324 91.628H136.712V92.1795H137.324V91.628Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M139.891 91.628H139.279V92.1795H139.891V91.628Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.396 91.628H141.785V92.1795H142.396V91.628Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M124.612 94.1097H124.001V94.6612H124.612V94.1097Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M127.118 94.1097H126.506V94.6612H127.118V94.1097Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M129.684 94.1097H129.073V94.6612H129.684V94.1097Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M132.251 94.1097H131.64V94.6612H132.251V94.1097Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M134.757 94.1097H134.146V94.6612H134.757V94.1097Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M137.324 94.1097H136.712V94.6612H137.324V94.1097Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M139.891 94.1097H139.279V94.6612H139.891V94.1097Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.396 94.1097H141.785V94.6612H142.396V94.1097Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M124.612 96.5914H124.001V97.1429H124.612V96.5914Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M127.118 96.5914H126.506V97.1429H127.118V96.5914Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M129.684 96.5914H129.073V97.1429H129.684V96.5914Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M132.251 96.5914H131.64V97.1429H132.251V96.5914Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M134.757 96.5914H134.146V97.1429H134.757V96.5914Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M137.324 96.5914H136.712V97.1429H137.324V96.5914Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M139.891 96.5914H139.279V97.1429H139.891V96.5914Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.396 96.5914H141.785V97.1429H142.396V96.5914Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M124.612 99.0731H124.001V99.6246H124.612V99.0731Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M127.118 99.0731H126.506V99.6246H127.118V99.0731Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M129.684 99.0731H129.073V99.6246H129.684V99.0731Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M132.251 99.0731H131.64V99.6246H132.251V99.0731Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M134.757 99.0731H134.146V99.6246H134.757V99.0731Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M137.324 99.0731H136.712V99.6246H137.324V99.0731Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M139.891 99.0731H139.279V99.6246H139.891V99.0731Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.396 99.0731H141.785V99.6246H142.396V99.0731Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M124.612 101.279H124.001V101.831H124.612V101.279Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M127.118 101.279H126.506V101.831H127.118V101.279Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M129.684 101.279H129.073V101.831H129.684V101.279Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M132.251 101.279H131.64V101.831H132.251V101.279Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M134.757 101.279H134.146V101.831H134.757V101.279Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M137.324 101.279H136.712V101.831H137.324V101.279Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M139.891 101.279H139.279V101.831H139.891V101.279Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.396 101.279H141.785V101.831H142.396V101.279Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M144.107 99.1833H122.351V105.746H144.107V99.1833Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M301.904 94.7715H300.804V115.618H301.904V94.7715Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M304.349 94.7715H303.249V115.618H304.349V94.7715Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M319.261 94.7715H318.161V115.618H319.261V94.7715Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M321.766 94.7715H320.666V115.618H321.766V94.7715Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M326.167 114.184H297.26V121.353H326.167V114.184Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M323.722 119.588H299.521V120.305H323.722V119.588Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M139.707 197.348H201.066V176.833L265.419 167.016H274.831V197.789H278.987V177.494H291.332V149.148H341.751V177.053H354.829V148.1H390.887V158.358H400.787L408.671 169.718V178.818H417.777V166.575H458.54V240.143H133.535L139.707 197.348Z")
+ ) ~fill:(("#21477C"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M293.287 197.348H354.646V176.833L418.999 167.016H428.411V197.789H432.567V177.494L444.912 157.586V134.203H475.163V172.035H508.409V148.1H544.467V158.358H554.306L562.251 169.718V178.818H571.357V102.492H612.976V240.143H287.115L293.287 197.348Z")
+ ) ~fill:(("#1B3B68"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M425.172 241.522H401.887V106.077L425.172 102.878V241.522Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M401.887 140.269H396.204V241.522H401.887V140.269Z")
+ ) ~fill:(("#1073AA"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M432.872 109.937H425.172V241.522H432.872V109.937Z")
+ ) ~fill:(("#1073AA"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.226 105.47L405.371 105.581V121.298H406.226V105.47Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M410.993 104.809L410.138 104.919V121.298H410.993V104.809Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M408.61 105.139L407.754 105.25V121.298H408.61V105.139Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.377 104.478L412.46 104.588V121.298H413.377V104.478Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M415.76 104.147L414.844 104.257V121.298H415.76V104.147Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M419.61 103.595V121.298H420.527V103.485L419.61 103.595Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M418.144 103.816L417.227 103.926V121.298H418.144V103.816Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 128.081H403.415V129.129H406.043V128.081Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 128.081H406.96V129.129H409.588V128.081Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 128.081H410.504V129.129H413.132V128.081Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 128.081H414.049V129.129H416.677V128.081Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 128.081H417.594V129.129H420.222V128.081Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 128.081H421.138V129.129H423.766V128.081Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 130.453H403.415V131.501H406.043V130.453Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 130.453H406.96V131.501H409.588V130.453Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 130.453H410.504V131.501H413.132V130.453Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 130.453H414.049V131.501H416.677V130.453Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 130.453H417.594V131.501H420.222V130.453Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 130.453H421.138V131.501H423.766V130.453Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 132.824H403.415V133.872H406.043V132.824Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 132.824H406.96V133.872H409.588V132.824Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 132.824H410.504V133.872H413.132V132.824Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 132.824H414.049V133.872H416.677V132.824Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 132.824H417.594V133.872H420.222V132.824Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 132.824H421.138V133.872H423.766V132.824Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 135.14H403.415V136.188H406.043V135.14Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M409.588 135.14H406.96V136.188H409.588V135.14Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 135.14H410.504V136.188H413.132V135.14Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 135.14H414.049V136.188H416.677V135.14Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 135.14H417.594V136.188H420.222V135.14Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 135.14H421.138V136.188H423.766V135.14Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 137.512H403.415V138.56H406.043V137.512Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 137.512H406.96V138.56H409.588V137.512Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 137.512H410.504V138.56H413.132V137.512Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 137.512H414.049V138.56H416.677V137.512Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 137.512H417.594V138.56H420.222V137.512Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 137.512H421.138V138.56H423.766V137.512Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 139.828H403.415V140.876H406.043V139.828Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 139.828H406.96V140.876H409.588V139.828Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 139.828H410.504V140.876H413.132V139.828Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 139.828H414.049V140.876H416.677V139.828Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 139.828H417.594V140.876H420.222V139.828Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 139.828H421.138V140.876H423.766V139.828Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 142.199H403.415V143.247H406.043V142.199Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 142.199H406.96V143.247H409.588V142.199Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 142.199H410.504V143.247H413.132V142.199Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 142.199H414.049V143.247H416.677V142.199Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 142.199H417.594V143.247H420.222V142.199Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 142.199H421.138V143.247H423.766V142.199Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 144.571H403.415V145.619H406.043V144.571Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 144.571H406.96V145.619H409.588V144.571Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 144.571H410.504V145.619H413.132V144.571Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 144.571H414.049V145.619H416.677V144.571Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 144.571H417.594V145.619H420.222V144.571Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 144.571H421.138V145.619H423.766V144.571Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 146.887H403.415V147.935H406.043V146.887Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 146.887H406.96V147.935H409.588V146.887Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 146.887H410.504V147.935H413.132V146.887Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 146.887H414.049V147.935H416.677V146.887Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 146.887H417.594V147.935H420.222V146.887Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 146.887H421.138V147.935H423.766V146.887Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 149.258H403.415V150.306H406.043V149.258Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 149.258H406.96V150.306H409.588V149.258Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 149.258H410.504V150.306H413.132V149.258Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 149.258H414.049V150.306H416.677V149.258Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 149.258H417.594V150.306H420.222V149.258Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 149.258H421.138V150.306H423.766V149.258Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 151.575H403.415V152.622H406.043V151.575Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 151.575H406.96V152.622H409.588V151.575Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 151.575H410.504V152.622H413.132V151.575Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 151.575H414.049V152.622H416.677V151.575Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 151.575H417.594V152.622H420.222V151.575Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 151.575H421.138V152.622H423.766V151.575Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 153.946H403.415V154.994H406.043V153.946Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 153.946H406.96V154.994H409.588V153.946Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 153.946H410.504V154.994H413.132V153.946Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 153.946H414.049V154.994H416.677V153.946Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 153.946H417.594V154.994H420.222V153.946Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 153.946H421.138V154.994H423.766V153.946Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 163.321H403.415V164.369H406.043V163.321Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 163.321H406.96V164.369H409.588V163.321Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 163.321H410.504V164.369H413.132V163.321Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 163.321H414.049V164.369H416.677V163.321Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 163.321H417.594V164.369H420.222V163.321Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 163.321H421.138V164.369H423.766V163.321Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 165.693H403.415V166.74H406.043V165.693Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 165.693H406.96V166.74H409.588V165.693Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 165.693H410.504V166.74H413.132V165.693Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 165.693H414.049V166.74H416.677V165.693Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 165.693H417.594V166.74H420.222V165.693Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 165.693H421.138V166.74H423.766V165.693Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 168.064H403.415V169.112H406.043V168.064Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 168.064H406.96V169.112H409.588V168.064Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 168.064H410.504V169.112H413.132V168.064Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 168.064H414.049V169.112H416.677V168.064Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 168.064H417.594V169.112H420.222V168.064Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 168.064H421.138V169.112H423.766V168.064Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 170.38H403.415V171.428H406.043V170.38Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M409.588 170.38H406.96V171.428H409.588V170.38Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 170.38H410.504V171.428H413.132V170.38Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 170.38H414.049V171.428H416.677V170.38Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 170.38H417.594V171.428H420.222V170.38Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 170.38H421.138V171.428H423.766V170.38Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 172.752H403.415V173.799H406.043V172.752Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 172.752H406.96V173.799H409.588V172.752Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 172.752H410.504V173.799H413.132V172.752Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 172.752H414.049V173.799H416.677V172.752Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 172.752H417.594V173.799H420.222V172.752Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 172.752H421.138V173.799H423.766V172.752Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 175.068H403.415V176.116H406.043V175.068Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 175.068H406.96V176.116H409.588V175.068Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 175.068H410.504V176.116H413.132V175.068Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 175.068H414.049V176.116H416.677V175.068Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 175.068H417.594V176.116H420.222V175.068Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 175.068H421.138V176.116H423.766V175.068Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 177.439H403.415V178.487H406.043V177.439Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 177.439H406.96V178.487H409.588V177.439Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 177.439H410.504V178.487H413.132V177.439Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 177.439H414.049V178.487H416.677V177.439Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 177.439H417.594V178.487H420.222V177.439Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 177.439H421.138V178.487H423.766V177.439Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 179.811H403.415V180.858H406.043V179.811Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 179.811H406.96V180.858H409.588V179.811Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 179.811H410.504V180.858H413.132V179.811Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 179.811H414.049V180.858H416.677V179.811Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 179.811H417.594V180.858H420.222V179.811Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 179.811H421.138V180.858H423.766V179.811Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 182.127H403.415V183.175H406.043V182.127Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 182.127H406.96V183.175H409.588V182.127Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 182.127H410.504V183.175H413.132V182.127Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 182.127H414.049V183.175H416.677V182.127Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 182.127H417.594V183.175H420.222V182.127Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 182.127H421.138V183.175H423.766V182.127Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 184.498H403.415V185.546H406.043V184.498Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 184.498H406.96V185.546H409.588V184.498Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 184.498H410.504V185.546H413.132V184.498Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 184.498H414.049V185.546H416.677V184.498Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 184.498H417.594V185.546H420.222V184.498Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 184.498H421.138V185.546H423.766V184.498Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 186.87H403.415V187.917H406.043V186.87Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M409.588 186.87H406.96V187.917H409.588V186.87Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 186.87H410.504V187.917H413.132V186.87Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 186.87H414.049V187.917H416.677V186.87Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 186.87H417.594V187.917H420.222V186.87Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 186.87H421.138V187.917H423.766V186.87Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 189.186H403.415V190.234H406.043V189.186Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 189.186H406.96V190.234H409.588V189.186Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 189.186H410.504V190.234H413.132V189.186Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 189.186H414.049V190.234H416.677V189.186Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 189.186H417.594V190.234H420.222V189.186Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 189.186H421.138V190.234H423.766V189.186Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 191.557H403.415V192.605H406.043V191.557Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 191.557H406.96V192.605H409.588V191.557Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 191.557H410.504V192.605H413.132V191.557Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 191.557H414.049V192.605H416.677V191.557Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 191.557H417.594V192.605H420.222V191.557Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 191.557H421.138V192.605H423.766V191.557Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 193.874H403.415V194.921H406.043V193.874Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 193.874H406.96V194.921H409.588V193.874Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 193.874H410.504V194.921H413.132V193.874Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 193.874H414.049V194.921H416.677V193.874Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 193.874H417.594V194.921H420.222V193.874Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 193.874H421.138V194.921H423.766V193.874Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 203.304H403.415V204.352H406.043V203.304Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 203.304H406.96V204.352H409.588V203.304Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 203.304H410.504V204.352H413.132V203.304Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 203.304H414.049V204.352H416.677V203.304Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 203.304H417.594V204.352H420.222V203.304Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 203.304H421.138V204.352H423.766V203.304Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 205.62H403.415V206.668H406.043V205.62Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M409.588 205.62H406.96V206.668H409.588V205.62Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 205.62H410.504V206.668H413.132V205.62Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 205.62H414.049V206.668H416.677V205.62Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 205.62H417.594V206.668H420.222V205.62Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 205.62H421.138V206.668H423.766V205.62Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 207.992H403.415V209.039H406.043V207.992Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 207.992H406.96V209.039H409.588V207.992Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 207.992H410.504V209.039H413.132V207.992Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 207.992H414.049V209.039H416.677V207.992Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 207.992H417.594V209.039H420.222V207.992Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 207.992H421.138V209.039H423.766V207.992Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 210.363H403.415V211.411H406.043V210.363Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 210.363H406.96V211.411H409.588V210.363Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 210.363H410.504V211.411H413.132V210.363Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 210.363H414.049V211.411H416.677V210.363Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 210.363H417.594V211.411H420.222V210.363Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 210.363H421.138V211.411H423.766V210.363Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 212.679H403.415V213.727H406.043V212.679Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 212.679H406.96V213.727H409.588V212.679Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 212.679H410.504V213.727H413.132V212.679Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 212.679H414.049V213.727H416.677V212.679Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 212.679H417.594V213.727H420.222V212.679Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 212.679H421.138V213.727H423.766V212.679Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 215.051H403.415V216.098H406.043V215.051Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 215.051H406.96V216.098H409.588V215.051Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 215.051H410.504V216.098H413.132V215.051Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 215.051H414.049V216.098H416.677V215.051Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 215.051H417.594V216.098H420.222V215.051Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 215.051H421.138V216.098H423.766V215.051Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 217.367H403.415V218.415H406.043V217.367Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 217.367H406.96V218.415H409.588V217.367Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 217.367H410.504V218.415H413.132V217.367Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 217.367H414.049V218.415H416.677V217.367Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 217.367H417.594V218.415H420.222V217.367Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 217.367H421.138V218.415H423.766V217.367Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 219.738H403.415V220.786H406.043V219.738Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 219.738H406.96V220.786H409.588V219.738Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 219.738H410.504V220.786H413.132V219.738Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 219.738H414.049V220.786H416.677V219.738Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 219.738H417.594V220.786H420.222V219.738Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 219.738H421.138V220.786H423.766V219.738Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 222.11H403.415V223.157H406.043V222.11Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M409.588 222.11H406.96V223.157H409.588V222.11Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 222.11H410.504V223.157H413.132V222.11Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 222.11H414.049V223.157H416.677V222.11Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 222.11H417.594V223.157H420.222V222.11Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 222.11H421.138V223.157H423.766V222.11Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 224.426H403.415V225.474H406.043V224.426Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 224.426H406.96V225.474H409.588V224.426Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 224.426H410.504V225.474H413.132V224.426Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 224.426H414.049V225.474H416.677V224.426Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 224.426H417.594V225.474H420.222V224.426Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 224.426H421.138V225.474H423.766V224.426Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 226.797H403.415V227.845H406.043V226.797Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 226.797H406.96V227.845H409.588V226.797Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 226.797H410.504V227.845H413.132V226.797Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 226.797H414.049V227.845H416.677V226.797Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 226.797H417.594V227.845H420.222V226.797Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 226.797H421.138V227.845H423.766V226.797Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 229.113H403.415V230.161H406.043V229.113Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 229.113H406.96V230.161H409.588V229.113Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 229.113H410.504V230.161H413.132V229.113Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 229.113H414.049V230.161H416.677V229.113Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 229.113H417.594V230.161H420.222V229.113Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 229.113H421.138V230.161H423.766V229.113Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 231.485H403.415V232.533H406.043V231.485Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 231.485H406.96V232.533H409.588V231.485Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 231.485H410.504V232.533H413.132V231.485Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 231.485H414.049V232.533H416.677V231.485Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 231.485H417.594V232.533H420.222V231.485Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 231.485H421.138V232.533H423.766V231.485Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M406.043 233.856H403.415V234.904H406.043V233.856Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M409.588 233.856H406.96V234.904H409.588V233.856Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M413.132 233.856H410.504V234.904H413.132V233.856Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M416.677 233.856H414.049V234.904H416.677V233.856Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M420.222 233.856H417.594V234.904H420.222V233.856Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M423.766 233.856H421.138V234.904H423.766V233.856Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~opacity:(("0.4"))
+ ~d:(("M425.172 227.955H401.887V240.474H425.172V227.955Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M326.044 212.128H250.141V241.246H326.044V212.128Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M263.83 213.286H252.157V214.72H263.83V213.286Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M276.42 213.286H264.747V214.72H276.42V213.286Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M288.826 213.286H277.153V214.72H288.826V213.286Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M263.83 215.878H252.157V217.312H263.83V215.878Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M276.42 215.878H264.747V217.312H276.42V215.878Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M288.826 215.878H277.153V217.312H288.826V215.878Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M263.83 218.47H252.157V219.904H263.83V218.47Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M276.42 218.47H264.747V219.904H276.42V218.47Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M288.826 218.47H277.153V219.904H288.826V218.47Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M263.83 221.062H252.157V222.496H263.83V221.062Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M276.42 221.062H264.747V222.496H276.42V221.062Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M288.826 221.062H277.153V222.496H288.826V221.062Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M263.83 223.654H252.157V225.088H263.83V223.654Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M276.42 223.654H264.747V225.088H276.42V223.654Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M288.826 223.654H277.153V225.088H288.826V223.654Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M263.83 226.191H252.157V227.624H263.83V226.191Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M276.42 226.191H264.747V227.624H276.42V226.191Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M288.826 226.191H277.153V227.624H288.826V226.191Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M263.83 228.783H252.157V230.216H263.83V228.783Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M276.42 228.783H264.747V230.216H276.42V228.783Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M288.826 228.783H277.153V230.216H288.826V228.783Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M263.83 231.375H252.157V232.808H263.83V231.375Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M276.42 231.375H264.747V232.808H276.42V231.375Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M288.826 231.375H277.153V232.808H288.826V231.375Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M360.818 199.664H358.191V209.701H360.818V199.664Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M360.818 199.664H358.191V205.014H360.818V199.664Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M368.274 209.701H334.173V215.216H368.274V209.701Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M375.303 215.216H334.173V240.915H375.303V215.216Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M331.606 121.298H291.759V240.253H331.606V121.298Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M299.582 126.592H294.143V127.254H299.582V126.592Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M305.449 126.592H300.01V127.254H305.449V126.592Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M311.194 126.592H305.754V127.254H311.194V126.592Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M316.938 126.592H311.499V127.254H316.938V126.592Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M322.866 126.592H317.427V127.254H322.866V126.592Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M328.611 126.592H323.172V127.254H328.611V126.592Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M299.582 129.46H294.143V130.122H299.582V129.46Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M305.449 129.46H300.01V130.122H305.449V129.46Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M311.194 129.46H305.755V130.122H311.194V129.46Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M316.938 129.46H311.499V130.122H316.938V129.46Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M322.866 129.46H317.427V130.122H322.866V129.46Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M328.611 129.46H323.172V130.122H328.611V129.46Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M299.582 132.383H294.143V133.045H299.582V132.383Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M305.449 132.383H300.01V133.045H305.449V132.383Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M311.194 132.383H305.755V133.045H311.194V132.383Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M316.938 132.383H311.499V133.045H316.938V132.383Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M322.866 132.383H317.427V133.045H322.866V132.383Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M328.611 132.383H323.172V133.045H328.611V132.383Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M299.582 135.251H294.143V135.912H299.582V135.251Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M305.449 135.251H300.01V135.912H305.449V135.251Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M311.194 135.251H305.755V135.912H311.194V135.251Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M316.938 135.251H311.499V135.912H316.938V135.251Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M322.866 135.251H317.427V135.912H322.866V135.251Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M328.611 135.251H323.172V135.912H328.611V135.251Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M299.582 138.173H294.143V138.835H299.582V138.173Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M305.449 138.173H300.01V138.835H305.449V138.173Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M311.194 138.173H305.755V138.835H311.194V138.173Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M316.938 138.173H311.499V138.835H316.938V138.173Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M322.866 138.173H317.427V138.835H322.866V138.173Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M328.611 138.173H323.172V138.835H328.611V138.173Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M299.582 141.041H294.143V141.703H299.582V141.041Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M305.449 141.041H300.01V141.703H305.449V141.041Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M311.194 141.041H305.755V141.703H311.194V141.041Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M316.938 141.041H311.499V141.703H316.938V141.041Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M322.866 141.041H317.427V141.703H322.866V141.041Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M328.611 141.041H323.172V141.703H328.611V141.041Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M298.237 144.626H294.143V145.288H298.237V144.626Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M308.016 144.626H303.921V145.288H308.016V144.626Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M317.794 144.626H313.699V145.288H317.794V144.626Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M327.572 144.626H323.478V145.288H327.572V144.626Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M303.127 148.045H299.032V148.707H303.127V148.045Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M312.905 148.045H308.81V148.707H312.905V148.045Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M322.683 148.045H318.589V148.707H322.683V148.045Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M298.237 151.519H294.143V152.181H298.237V151.519Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M308.016 151.519H303.921V152.181H308.016V151.519Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M317.794 151.519H313.699V152.181H317.794V151.519Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M327.572 151.519H323.478V152.181H327.572V151.519Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M303.127 154.883H299.032V155.545H303.127V154.883Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M312.905 154.883H308.81V155.545H312.905V154.883Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M322.683 154.883H318.589V155.545H322.683V154.883Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M298.237 158.413H294.143V159.075H298.237V158.413Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M308.016 158.413H303.921V159.075H308.016V158.413Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M317.794 158.413H313.699V159.075H317.794V158.413Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M327.572 158.413H323.478V159.075H327.572V158.413Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M303.127 161.777H299.032V162.439H303.127V161.777Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M312.905 161.777H308.81V162.439H312.905V161.777Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M322.683 161.777H318.589V162.439H322.683V161.777Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M298.237 165.307H294.143V165.968H298.237V165.307Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M308.016 165.307H303.921V165.968H308.016V165.307Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M317.794 165.307H313.699V165.968H317.794V165.307Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M327.572 165.307H323.478V165.968H327.572V165.307Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M303.127 168.671H299.032V169.332H303.127V168.671Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M312.905 168.671H308.81V169.332H312.905V168.671Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M322.683 168.671H318.589V169.332H322.683V168.671Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M298.237 172.2H294.143V172.862H298.237V172.2Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M308.016 172.2H303.921V172.862H308.016V172.2Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M317.794 172.2H313.699V172.862H317.794V172.2Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M327.572 172.2H323.478V172.862H327.572V172.2Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M303.127 175.564H299.032V176.226H303.127V175.564Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M312.905 175.564H308.81V176.226H312.905V175.564Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M322.683 175.564H318.589V176.226H322.683V175.564Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M298.237 179.094H294.143V179.755H298.237V179.094Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M308.016 179.094H303.921V179.755H308.016V179.094Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M317.794 179.094H313.699V179.755H317.794V179.094Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M327.572 179.094H323.478V179.755H327.572V179.094Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M303.127 182.458H299.032V183.12H303.127V182.458Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M312.905 182.458H308.81V183.12H312.905V182.458Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M322.683 182.458H318.589V183.12H322.683V182.458Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M298.237 185.932H294.143V186.594H298.237V185.932Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M308.016 185.932H303.921V186.594H308.016V185.932Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M317.794 185.932H313.699V186.594H317.794V185.932Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M327.572 185.932H323.478V186.594H327.572V185.932Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M303.127 189.351H299.032V190.013H303.127V189.351Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M312.905 189.351H308.81V190.013H312.905V189.351Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M322.683 189.351H318.589V190.013H322.683V189.351Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M298.237 192.826H294.143V193.487H298.237V192.826Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M308.016 192.826H303.921V193.487H308.016V192.826Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M317.794 192.826H313.699V193.487H317.794V192.826Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M327.572 192.826H323.478V193.487H327.572V192.826Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M303.127 196.245H299.032V196.907H303.127V196.245Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M312.905 196.245H308.81V196.907H312.905V196.245Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M322.683 196.245H318.589V196.907H322.683V196.245Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M298.237 199.719H294.143V200.381H298.237V199.719Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M308.016 199.719H303.921V200.381H308.016V199.719Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M317.794 199.719H313.699V200.381H317.794V199.719Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M327.572 199.719H323.478V200.381H327.572V199.719Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M303.127 203.138H299.032V203.8H303.127V203.138Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M312.905 203.138H308.81V203.8H312.905V203.138Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M322.683 203.138H318.589V203.8H322.683V203.138Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M298.237 206.613H294.143V207.275H298.237V206.613Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M308.016 206.613H303.921V207.275H308.016V206.613Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M317.794 206.613H313.699V207.275H317.794V206.613Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M327.572 206.613H323.478V207.275H327.572V206.613Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M303.127 209.977H299.032V210.639H303.127V209.977Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M312.905 209.977H308.81V210.639H312.905V209.977Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M322.683 209.977H318.589V210.639H322.683V209.977Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M298.237 213.506H294.143V214.168H298.237V213.506Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M308.016 213.506H303.921V214.168H308.016V213.506Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M317.794 213.506H313.699V214.168H317.794V213.506Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M327.572 213.506H323.478V214.168H327.572V213.506Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M303.127 216.87H299.032V217.532H303.127V216.87Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M312.905 216.87H308.81V217.532H312.905V216.87Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M322.683 216.87H318.589V217.532H322.683V216.87Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M298.237 220.4H294.143V221.062H298.237V220.4Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M308.016 220.4H303.921V221.062H308.016V220.4Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M317.794 220.4H313.699V221.062H317.794V220.4Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M327.572 220.4H323.478V221.062H327.572V220.4Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M303.127 223.764H299.032V224.426H303.127V223.764Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M312.905 223.764H308.81V224.426H312.905V223.764Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M322.683 223.764H318.589V224.426H322.683V223.764Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M298.237 227.294H294.143V227.955H298.237V227.294Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M308.016 227.294H303.921V227.955H308.016V227.294Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M317.794 227.294H313.699V227.955H317.794V227.294Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M327.572 227.294H323.478V227.955H327.572V227.294Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M303.127 230.658H299.032V231.319H303.127V230.658Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M312.905 230.658H308.81V231.319H312.905V230.658Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M322.683 230.658H318.589V231.319H322.683V230.658Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M199.538 188.303H169.531V240.695H199.538V188.303Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M178.087 191.226H171.181V194.26H178.087V191.226Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M186.826 191.226H179.92V194.26H186.826V191.226Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M195.566 191.226H188.66V194.26H195.566V191.226Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M178.087 195.638H171.181V198.671H178.087V195.638Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M186.826 195.638H179.92V198.671H186.826V195.638Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M195.566 195.638H188.66V198.671H195.566V195.638Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M178.087 200.05H171.181V203.083H178.087V200.05Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M186.826 200.05H179.92V203.083H186.826V200.05Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M195.566 200.05H188.66V203.083H195.566V200.05Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M178.087 204.407H171.181V207.44H178.087V204.407Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M186.826 204.407H179.92V207.44H186.826V204.407Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M195.566 204.407H188.66V207.44H195.566V204.407Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M178.087 208.819H171.181V211.852H178.087V208.819Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M186.826 208.819H179.92V211.852H186.826V208.819Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M195.566 208.819H188.66V211.852H195.566V208.819Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M178.087 213.231H171.181V216.264H178.087V213.231Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M186.826 213.231H179.92V216.264H186.826V213.231Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M195.566 213.231H188.66V216.264H195.566V213.231Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M178.087 217.587H171.181V220.621H178.087V217.587Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M186.826 217.587H179.92V220.621H186.826V217.587Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M195.566 217.587H188.66V220.621H195.566V217.587Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M160.914 175.84L169.653 185.546V244.555H160.914V175.84Z")
+ ) ~fill:(("#1474AA"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~opacity:(("0.6"))
+ ~d:(("M163.542 242.57V178.763L160.914 175.895V241.963C162.014 242.349 163.542 242.57 163.542 242.57Z")
+ ) ~fill:(("#1474AA"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M154.191 108.448H144.046V146.335H154.191V108.448Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M151.991 108.448H144.046V146.335H151.991V108.448Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M160.914 146.335H112.206V244.555H160.914V146.335Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M147.835 105.746H117.462V146.335H147.835V105.746Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M120.639 115.066H119.539V116.059H120.639V115.066Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M123.756 115.066H122.656V116.059H123.756V115.066Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M126.873 115.066H125.773V116.059H126.873V115.066Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M130.051 115.066H128.951V116.059H130.051V115.066Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M133.168 115.066H132.068V116.059H133.168V115.066Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M136.285 115.066H135.185V116.059H136.285V115.066Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M139.463 115.066H138.363V116.059H139.463V115.066Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.58 115.066H141.479V116.059H142.58V115.066Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M145.696 115.066H144.596V116.059H145.696V115.066Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M120.639 118.871H119.539V119.864H120.639V118.871Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M123.756 118.871H122.656V119.864H123.756V118.871Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M126.873 118.871H125.773V119.864H126.873V118.871Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M130.051 118.871H128.951V119.864H130.051V118.871Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M133.168 118.871H132.068V119.864H133.168V118.871Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M136.285 118.871H135.185V119.864H136.285V118.871Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M139.463 118.871H138.363V119.864H139.463V118.871Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.58 118.871H141.479V119.864H142.58V118.871Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M145.696 118.871H144.596V119.864H145.696V118.871Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M120.639 122.732H119.539V123.724H120.639V122.732Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M123.756 122.732H122.656V123.724H123.756V122.732Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M126.873 122.732H125.773V123.724H126.873V122.732Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M130.051 122.732H128.951V123.724H130.051V122.732Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M133.168 122.732H132.068V123.724H133.168V122.732Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M136.285 122.732H135.185V123.724H136.285V122.732Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M139.463 122.732H138.363V123.724H139.463V122.732Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.58 122.732H141.479V123.724H142.58V122.732Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M145.696 122.732H144.596V123.724H145.696V122.732Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M120.639 126.592H119.539V127.585H120.639V126.592Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M123.756 126.592H122.656V127.585H123.756V126.592Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M126.873 126.592H125.773V127.585H126.873V126.592Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M130.051 126.592H128.951V127.585H130.051V126.592Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M133.168 126.592H132.068V127.585H133.168V126.592Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M136.285 126.592H135.185V127.585H136.285V126.592Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M139.463 126.592H138.363V127.585H139.463V126.592Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.58 126.592H141.479V127.585H142.58V126.592Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M145.696 126.592H144.596V127.585H145.696V126.592Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M120.639 130.397H119.539V131.39H120.639V130.397Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M123.756 130.397H122.656V131.39H123.756V130.397Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M126.873 130.397H125.773V131.39H126.873V130.397Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M130.051 130.397H128.951V131.39H130.051V130.397Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M133.168 130.397H132.068V131.39H133.168V130.397Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M136.285 130.397H135.185V131.39H136.285V130.397Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M139.463 130.397H138.363V131.39H139.463V130.397Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M142.58 130.397H141.479V131.39H142.58V130.397Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M145.696 130.397H144.596V131.39H145.696V130.397Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M120.639 134.258H119.539V135.251H120.639V134.258Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M123.756 134.258H122.656V135.251H123.756V134.258Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M126.873 134.258H125.773V135.251H126.873V134.258Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M130.051 134.258H128.951V135.251H130.051V134.258Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M133.168 134.258H132.068V135.251H133.168V134.258Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M136.285 134.258H135.185V135.251H136.285V134.258Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M139.463 134.258H138.363V135.251H139.463V134.258Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.58 134.258H141.479V135.251H142.58V134.258Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M145.696 134.258H144.596V135.251H145.696V134.258Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M120.639 138.063H119.539V139.056H120.639V138.063Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M123.756 138.063H122.656V139.056H123.756V138.063Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M126.873 138.063H125.773V139.056H126.873V138.063Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M130.051 138.063H128.951V139.056H130.051V138.063Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M133.168 138.063H132.068V139.056H133.168V138.063Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M136.285 138.063H135.185V139.056H136.285V138.063Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M139.463 138.063H138.363V139.056H139.463V138.063Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.58 138.063H141.479V139.056H142.58V138.063Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M145.696 138.063H144.596V139.056H145.696V138.063Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M120.639 141.924H119.539V142.916H120.639V141.924Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M123.756 141.924H122.656V142.916H123.756V141.924Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M126.873 141.924H125.773V142.916H126.873V141.924Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M130.051 141.924H128.951V142.916H130.051V141.924Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M133.168 141.924H132.068V142.916H133.168V141.924Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M136.285 141.924H135.185V142.916H136.285V141.924Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M139.463 141.924H138.363V142.916H139.463V141.924Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.58 141.924H141.479V142.916H142.58V141.924Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M145.696 141.924H144.596V142.916H145.696V141.924Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M121.556 109.496H119.539V111.316H121.556V109.496Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M123.94 109.496H121.923V111.316H123.94V109.496Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M126.384 109.496H124.367V111.316H126.384V109.496Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M128.768 109.496H126.751V111.316H128.768V109.496Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M131.212 109.496H129.195V111.316H131.212V109.496Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M133.596 109.496H131.579V111.316H133.596V109.496Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M136.04 109.496H134.023V111.316H136.04V109.496Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M138.424 109.496H136.407V111.316H138.424V109.496Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M140.868 109.496H138.851V111.316H140.868V109.496Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M143.252 109.496H141.235V111.316H143.252V109.496Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M145.696 109.496H143.68V111.316H145.696V109.496Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M121.556 111.647H119.539V113.467H121.556V111.647Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M123.94 111.647H121.923V113.467H123.94V111.647Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M126.384 111.647H124.367V113.467H126.384V111.647Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M128.768 111.647H126.751V113.467H128.768V111.647Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M131.212 111.647H129.195V113.467H131.212V111.647Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M133.596 111.647H131.579V113.467H133.596V111.647Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M136.04 111.647H134.023V113.467H136.04V111.647Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M138.424 111.647H136.407V113.467H138.424V111.647Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M140.868 111.647H138.851V113.467H140.868V111.647Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M143.252 111.647H141.235V113.467H143.252V111.647Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M145.696 111.647H143.68V113.467H145.696V111.647Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M117.462 141.317V146.335H144.046H147.835H154.191V141.317H117.462Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M118.928 150.968H115.689V153.891H118.928V150.968Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M122.778 150.968H119.539V153.891H122.778V150.968Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M126.568 150.968H123.328V153.891H126.568V150.968Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M130.418 150.968H127.179V153.891H130.418V150.968Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M134.268 150.968H131.029V153.891H134.268V150.968Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M138.057 150.968H134.818V153.891H138.057V150.968Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M141.907 150.968H138.668V153.891H141.907V150.968Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M145.696 150.968H142.457V153.891H145.696V150.968Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M149.546 150.968H146.307V153.891H149.546V150.968Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M153.336 150.968H150.097V153.891H153.336V150.968Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M157.186 150.968H153.947V153.891H157.186V150.968Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M118.928 154.442H115.689V157.365H118.928V154.442Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M117.4 158.634H115.689V160.178H117.4V158.634Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M122.412 158.634H120.701V160.178H122.412V158.634Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M127.362 158.634H125.651V160.178H127.362V158.634Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M132.312 158.634H130.601V160.178H132.312V158.634Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M137.263 158.634H135.551V160.178H137.263V158.634Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.274 158.634H140.563V160.178H142.274V158.634Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M147.224 158.634H145.513V160.178H147.224V158.634Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M152.174 158.634H150.463V160.178H152.174V158.634Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M157.125 158.634H155.414V160.178H157.125V158.634Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M117.4 162.273H115.689V163.818H117.4V162.273Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M122.412 162.273H120.701V163.818H122.412V162.273Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M127.362 162.273H125.651V163.818H127.362V162.273Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M132.312 162.273H130.601V163.818H132.312V162.273Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M137.263 162.273H135.551V163.818H137.263V162.273Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.274 162.273H140.563V163.818H142.274V162.273Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M147.224 162.273H145.513V163.818H147.224V162.273Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M152.174 162.273H150.463V163.818H152.174V162.273Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M157.125 162.273H155.414V163.818H157.125V162.273Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M117.4 165.913H115.689V167.457H117.4V165.913Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M122.412 165.913H120.701V167.457H122.412V165.913Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M127.362 165.913H125.651V167.457H127.362V165.913Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M132.312 165.913H130.601V167.457H132.312V165.913Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M137.263 165.913H135.551V167.457H137.263V165.913Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.274 165.913H140.563V167.457H142.274V165.913Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M147.224 165.913H145.513V167.457H147.224V165.913Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M152.174 165.913H150.463V167.457H152.174V165.913Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M157.125 165.913H155.414V167.457H157.125V165.913Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M117.4 169.498H115.689V171.042H117.4V169.498Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M122.412 169.498H120.701V171.042H122.412V169.498Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M127.362 169.498H125.651V171.042H127.362V169.498Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M132.312 169.498H130.601V171.042H132.312V169.498Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M137.263 169.498H135.551V171.042H137.263V169.498Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.274 169.498H140.563V171.042H142.274V169.498Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M147.224 169.498H145.513V171.042H147.224V169.498Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M152.174 169.498H150.463V171.042H152.174V169.498Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M157.125 169.498H155.414V171.042H157.125V169.498Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M117.4 173.138H115.689V174.682H117.4V173.138Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M122.412 173.138H120.701V174.682H122.412V173.138Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M127.362 173.138H125.651V174.682H127.362V173.138Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M132.312 173.138H130.601V174.682H132.312V173.138Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M137.263 173.138H135.551V174.682H137.263V173.138Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.274 173.138H140.563V174.682H142.274V173.138Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M147.224 173.138H145.513V174.682H147.224V173.138Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M152.174 173.138H150.463V174.682H152.174V173.138Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M157.125 173.138H155.414V174.682H157.125V173.138Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M117.4 176.777H115.689V178.322H117.4V176.777Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M122.412 176.777H120.701V178.322H122.412V176.777Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M127.362 176.777H125.651V178.322H127.362V176.777Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M132.312 176.777H130.601V178.322H132.312V176.777Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M137.263 176.777H135.551V178.322H137.263V176.777Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.274 176.777H140.563V178.322H142.274V176.777Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M147.224 176.777H145.513V178.322H147.224V176.777Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M152.174 176.777H150.463V178.322H152.174V176.777Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M157.125 176.777H155.414V178.322H157.125V176.777Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M117.4 180.362H115.689V181.906H117.4V180.362Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M122.412 180.362H120.701V181.906H122.412V180.362Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M127.362 180.362H125.651V181.906H127.362V180.362Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M132.312 180.362H130.601V181.906H132.312V180.362Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M137.263 180.362H135.551V181.906H137.263V180.362Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.274 180.362H140.563V181.906H142.274V180.362Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M147.224 180.362H145.513V181.906H147.224V180.362Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M152.174 180.362H150.463V181.906H152.174V180.362Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M157.125 180.362H155.414V181.906H157.125V180.362Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M117.4 184.002H115.689V185.546H117.4V184.002Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M122.412 184.002H120.701V185.546H122.412V184.002Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M127.362 184.002H125.651V185.546H127.362V184.002Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M132.312 184.002H130.601V185.546H132.312V184.002Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M137.263 184.002H135.551V185.546H137.263V184.002Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.274 184.002H140.563V185.546H142.274V184.002Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M147.224 184.002H145.513V185.546H147.224V184.002Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M152.174 184.002H150.463V185.546H152.174V184.002Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M157.125 184.002H155.414V185.546H157.125V184.002Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M117.4 187.642H115.689V189.186H117.4V187.642Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M122.412 187.642H120.701V189.186H122.412V187.642Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M127.362 187.642H125.651V189.186H127.362V187.642Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M132.312 187.642H130.601V189.186H132.312V187.642Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M137.263 187.642H135.551V189.186H137.263V187.642Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.274 187.642H140.563V189.186H142.274V187.642Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M147.224 187.642H145.513V189.186H147.224V187.642Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M152.174 187.642H150.463V189.186H152.174V187.642Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M157.125 187.642H155.414V189.186H157.125V187.642Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M117.4 191.226H115.689V192.771H117.4V191.226Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M122.412 191.226H120.701V192.771H122.412V191.226Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M127.362 191.226H125.651V192.771H127.362V191.226Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M132.312 191.226H130.601V192.771H132.312V191.226Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M137.263 191.226H135.551V192.771H137.263V191.226Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.274 191.226H140.563V192.771H142.274V191.226Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M147.224 191.226H145.513V192.771H147.224V191.226Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M152.174 191.226H150.463V192.771H152.174V191.226Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M157.125 191.226H155.414V192.771H157.125V191.226Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M117.4 194.866H115.689V196.41H117.4V194.866Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M122.412 194.866H120.701V196.41H122.412V194.866Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M127.362 194.866H125.651V196.41H127.362V194.866Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M132.312 194.866H130.601V196.41H132.312V194.866Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M137.263 194.866H135.551V196.41H137.263V194.866Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.274 194.866H140.563V196.41H142.274V194.866Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M147.224 194.866H145.513V196.41H147.224V194.866Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M152.174 194.866H150.463V196.41H152.174V194.866Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M157.125 194.866H155.414V196.41H157.125V194.866Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M117.4 198.506H115.689V200.05H117.4V198.506Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M122.412 198.506H120.701V200.05H122.412V198.506Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M127.362 198.506H125.651V200.05H127.362V198.506Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M132.312 198.506H130.601V200.05H132.312V198.506Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M137.263 198.506H135.551V200.05H137.263V198.506Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.274 198.506H140.563V200.05H142.274V198.506Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M147.224 198.506H145.513V200.05H147.224V198.506Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M152.174 198.506H150.463V200.05H152.174V198.506Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M157.125 198.506H155.414V200.05H157.125V198.506Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M117.4 202.091H115.689V203.635H117.4V202.091Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M122.412 202.091H120.701V203.635H122.412V202.091Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M127.362 202.091H125.651V203.635H127.362V202.091Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M132.312 202.091H130.601V203.635H132.312V202.091Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M137.263 202.091H135.551V203.635H137.263V202.091Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.274 202.091H140.563V203.635H142.274V202.091Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M147.224 202.091H145.513V203.635H147.224V202.091Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M152.174 202.091H150.463V203.635H152.174V202.091Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M157.125 202.091H155.414V203.635H157.125V202.091Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M117.4 205.73H115.689V207.275H117.4V205.73Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M122.412 205.73H120.701V207.275H122.412V205.73Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M127.362 205.73H125.651V207.275H127.362V205.73Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M132.312 205.73H130.601V207.275H132.312V205.73Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M137.263 205.73H135.551V207.275H137.263V205.73Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.274 205.73H140.563V207.275H142.274V205.73Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M147.224 205.73H145.513V207.275H147.224V205.73Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M152.174 205.73H150.463V207.275H152.174V205.73Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M157.125 205.73H155.414V207.275H157.125V205.73Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M117.4 209.37H115.689V210.914H117.4V209.37Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M122.412 209.37H120.701V210.914H122.412V209.37Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M127.362 209.37H125.651V210.914H127.362V209.37Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M132.312 209.37H130.601V210.914H132.312V209.37Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M137.263 209.37H135.551V210.914H137.263V209.37Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.274 209.37H140.563V210.914H142.274V209.37Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M147.224 209.37H145.513V210.914H147.224V209.37Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M152.174 209.37H150.463V210.914H152.174V209.37Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M157.125 209.37H155.414V210.914H157.125V209.37Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M117.4 212.955H115.689V214.499H117.4V212.955Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M122.412 212.955H120.701V214.499H122.412V212.955Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M127.362 212.955H125.651V214.499H127.362V212.955Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M132.312 212.955H130.601V214.499H132.312V212.955Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M137.263 212.955H135.551V214.499H137.263V212.955Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.274 212.955H140.563V214.499H142.274V212.955Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M147.224 212.955H145.513V214.499H147.224V212.955Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M152.174 212.955H150.463V214.499H152.174V212.955Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M157.125 212.955H155.414V214.499H157.125V212.955Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M117.4 216.595H115.689V218.139H117.4V216.595Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M122.412 216.595H120.701V218.139H122.412V216.595Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M127.362 216.595H125.651V218.139H127.362V216.595Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M132.312 216.595H130.601V218.139H132.312V216.595Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M137.263 216.595H135.551V218.139H137.263V216.595Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M142.274 216.595H140.563V218.139H142.274V216.595Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M147.224 216.595H145.513V218.139H147.224V216.595Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M152.174 216.595H150.463V218.139H152.174V216.595Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M157.125 216.595H155.414V218.139H157.125V216.595Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M122.778 154.442H119.539V157.365H122.778V154.442Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M126.568 154.442H123.328V157.365H126.568V154.442Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M130.418 154.442H127.179V157.365H130.418V154.442Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M134.268 154.442H131.029V157.365H134.268V154.442Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M138.057 154.442H134.818V157.365H138.057V154.442Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M141.907 154.442H138.668V157.365H141.907V154.442Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M145.696 154.442H142.457V157.365H145.696V154.442Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M149.546 154.442H146.307V157.365H149.546V154.442Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M153.336 154.442H150.097V157.365H153.336V154.442Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M157.186 154.442H153.947V157.365H157.186V154.442Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M509.998 178.597H476.08V240.474H509.998V178.597Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M250.141 150.968H205.588V241.246H250.141V150.968Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M213.594 157.475H209.377V159.351H213.594V157.475Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M220.378 160.288H216.161V162.163H220.378V160.288Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M237.368 160.288H233.151V162.163H237.368V160.288Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M228.751 157.475H224.534V159.351H228.751V157.475Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M245.863 157.475H241.646V159.351H245.863V157.475Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M213.594 164.204H209.377V166.079H213.594V164.204Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M220.378 167.016H216.161V168.891H220.378V167.016Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M237.368 167.016H233.151V168.891H237.368V167.016Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M228.751 164.204H224.534V166.079H228.751V164.204Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M245.863 164.204H241.646V166.079H245.863V164.204Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M213.594 170.932H209.377V172.807H213.594V170.932Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M220.378 173.689H216.161V175.564H220.378V173.689Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M237.368 173.689H233.151V175.564H237.368V173.689Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M228.751 170.932H224.534V172.807H228.751V170.932Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M245.863 170.932H241.646V172.807H245.863V170.932Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M213.594 177.605H209.377V179.48H213.594V177.605Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M220.378 180.417H216.161V182.292H220.378V180.417Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M237.368 180.417H233.151V182.292H237.368V180.417Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M228.751 177.605H224.534V179.48H228.751V177.605Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M245.863 177.605H241.646V179.48H245.863V177.605Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M213.594 184.333H209.377V186.208H213.594V184.333Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M220.378 187.145H216.161V189.02H220.378V187.145Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M237.368 187.145H233.151V189.02H237.368V187.145Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M228.751 184.333H224.534V186.208H228.751V184.333Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M245.863 184.333H241.646V186.208H245.863V184.333Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M213.594 191.061H209.377V192.936H213.594V191.061Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M220.378 193.874H216.161V195.749H220.378V193.874Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M237.368 193.874H233.151V195.749H237.368V193.874Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M228.751 191.061H224.534V192.936H228.751V191.061Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M245.863 191.061H241.646V192.936H245.863V191.061Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M213.594 197.789H209.377V199.664H213.594V197.789Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M220.378 200.602H216.161V202.477H220.378V200.602Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M237.368 200.602H233.151V202.477H237.368V200.602Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M228.751 197.789H224.534V199.664H228.751V197.789Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M245.863 197.789H241.646V199.664H245.863V197.789Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M213.594 204.517H209.377V206.392H213.594V204.517Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M220.378 207.33H216.161V209.205H220.378V207.33Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M237.368 207.33H233.151V209.205H237.368V207.33Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M228.751 204.517H224.534V206.392H228.751V204.517Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M245.863 204.517H241.646V206.392H245.863V204.517Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M213.594 211.245H209.377V213.12H213.594V211.245Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M220.378 214.003H216.161V215.878H220.378V214.003Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M237.368 214.003H233.151V215.878H237.368V214.003Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M228.751 211.245H224.534V213.12H228.751V211.245Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M245.863 211.245H241.646V213.12H245.863V211.245Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M213.594 217.918H209.377V219.793H213.594V217.918Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M220.378 220.731H216.161V222.606H220.378V220.731Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M237.368 220.731H233.151V222.606H237.368V220.731Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M228.751 217.918H224.534V219.793H228.751V217.918Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M245.863 217.918H241.646V219.793H245.863V217.918Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M213.594 224.646H209.377V226.521H213.594V224.646Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M220.378 227.459H216.161V229.334H220.378V227.459Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M237.368 227.459H233.151V229.334H237.368V227.459Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M228.751 224.646H224.534V226.521H228.751V224.646Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M245.863 224.646H241.646V226.521H245.863V224.646Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M476.141 192.55H432.628V241.081H476.141V192.55Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M465.996 185.27H441.306V192.55H465.996V185.27Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M465.996 184.057H441.306V185.27H465.996V184.057Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M465.996 178.873H441.306V180.086H465.996V178.873Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M464.163 180.086H442.406V184.057H464.163V180.086Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M464.163 180.086H442.406V180.858H464.163V180.086Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M442.406 178.873L448.884 165.527H458.051L464.957 178.873H442.406Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M531.877 158.964L514.765 181.741V240.364H531.877H549.05V181.741L531.877 158.964Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M549.05 189.241H514.765V189.958H549.05V189.241Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M549.05 190.785H514.765V191.502H549.05V190.785Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M549.05 192.385H514.765V193.101H549.05V192.385Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M549.05 193.984H514.765V194.701H549.05V193.984Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M549.05 195.528H514.765V196.245H549.05V195.528Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M549.05 197.127H514.765V197.844H549.05V197.127Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M549.05 198.671H514.765V199.388H549.05V198.671Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M549.05 200.271H514.765V200.988H549.05V200.271Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M549.05 201.87H514.765V202.587H549.05V201.87Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M549.05 203.414H514.765V204.131H549.05V203.414Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M549.05 205.014H514.765V205.73H549.05V205.014Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M549.05 206.613H514.765V207.33H549.05V206.613Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M549.05 208.157H514.765V208.874H549.05V208.157Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M549.05 209.756H514.765V210.473H549.05V209.756Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M549.05 211.3H514.765V212.017H549.05V211.3Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M549.05 212.9H514.765V213.617H549.05V212.9Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M549.05 214.499H514.765V215.216H549.05V214.499Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M549.05 216.043H514.765V216.76H549.05V216.043Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M549.05 217.643H514.765V218.36H549.05V217.643Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M549.05 219.187H514.765V219.904H549.05V219.187Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M549.05 220.786H514.765V221.503H549.05V220.786Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M549.05 222.385H514.765V223.102H549.05V222.385Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M549.05 223.93H514.765V224.646H549.05V223.93Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M549.05 225.529H514.765V226.246H549.05V225.529Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M549.05 227.128H514.765V227.845H549.05V227.128Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M549.05 228.672H514.765V229.389H549.05V228.672Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M549.05 230.272H514.765V230.989H549.05V230.272Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M549.05 231.816H514.765V232.533H549.05V231.816Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M549.05 233.415H514.765V234.132H549.05V233.415Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M549.05 235.014H514.765V235.731H549.05V235.014Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M549.05 236.559H514.765V237.275H549.05V236.559Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M549.05 181.741L531.877 158.964L514.765 181.741L530.594 206.282L549.05 181.741Z")
+ ) ~fill:(("#1073AA"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M578.568 240.75V162.218L590.975 141.593L603.747 160.84V240.75H578.568Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M587.552 161.832H594.825L591.036 143.026L587.552 161.832Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M635.894 120.802H605.581V241.081H635.894V120.802Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~opacity:(("0.6"))
+ ~d:(("M182.059 188.303H169.531V240.695H182.059V188.303Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M250.141 150.968H205.588V153.064H250.141V150.968Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M252.096 147.99H203.572V151.74H252.096V147.99Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~opacity:(("0.4"))
+ ~d:(("M228.751 241.246V147.99H203.572V151.685H205.588V241.246H228.751Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M331.606 121.298H291.759V124.166H331.606V121.298Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M336.801 199.664H334.173V209.701H336.801V199.664Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M336.801 199.664H334.173V205.014H336.801V199.664Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M338.94 211.962H337.106V213.672H338.94V211.962Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M343.34 211.962H341.506V213.672H343.34V211.962Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M347.801 211.962H345.968V213.672H347.801V211.962Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M352.201 211.962H350.368V213.672H352.201V211.962Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M356.602 211.962H354.768V213.672H356.602V211.962Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M361.002 211.962H359.168V213.672H361.002V211.962Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M365.402 211.962H363.569V213.672H365.402V211.962Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M339.795 217.146H337.473V219.242H339.795V217.146Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M345.234 217.146H342.912V219.242H345.234V217.146Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M350.673 217.146H348.351V219.242H350.673V217.146Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M356.052 217.146H353.79V219.242H356.052V217.146Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M361.552 217.146H359.229V219.242H361.552V217.146Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M366.991 217.146H364.669V219.242H366.991V217.146Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M372.43 217.146H370.108V219.242H372.43V217.146Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M480.175 181.52H478.769V183.175H480.175V181.52Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M483.536 181.52H482.13V183.175H483.536V181.52Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M486.836 181.52H485.43V183.175H486.836V181.52Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M490.197 181.52H488.792V183.175H490.197V181.52Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M493.498 181.52H492.092V183.175H493.498V181.52Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M496.859 181.52H495.453V183.175H496.859V181.52Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M500.22 181.52H498.814V183.175H500.22V181.52Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M503.337 181.52H501.931V183.175H503.337V181.52Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M507.493 181.52H506.087V183.175H507.493V181.52Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M480.175 184.774H478.769V186.428H480.175V184.774Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M483.536 184.774H482.13V186.428H483.536V184.774Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M486.836 184.774H485.43V186.428H486.836V184.774Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M490.197 184.774H488.792V186.428H490.197V184.774Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M493.498 184.774H492.092V186.428H493.498V184.774Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M496.859 184.774H495.453V186.428H496.859V184.774Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M500.22 184.774H498.814V186.428H500.22V184.774Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M503.337 184.774H501.931V186.428H503.337V184.774Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M507.493 184.774H506.087V186.428H507.493V184.774Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M480.175 188.083H478.769V189.737H480.175V188.083Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M483.536 188.083H482.13V189.737H483.536V188.083Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M486.836 188.083H485.43V189.737H486.836V188.083Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M490.197 188.083H488.792V189.737H490.197V188.083Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M493.498 188.083H492.092V189.737H493.498V188.083Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M496.859 188.083H495.453V189.737H496.859V188.083Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M500.22 188.083H498.814V189.737H500.22V188.083Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M503.337 188.083H501.931V189.737H503.337V188.083Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M507.493 188.083H506.087V189.737H507.493V188.083Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M480.175 191.392H478.769V193.046H480.175V191.392Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M483.536 191.392H482.13V193.046H483.536V191.392Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M486.836 191.392H485.43V193.046H486.836V191.392Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M490.197 191.392H488.792V193.046H490.197V191.392Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M493.498 191.392H492.092V193.046H493.498V191.392Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M496.859 191.392H495.453V193.046H496.859V191.392Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M500.22 191.392H498.814V193.046H500.22V191.392Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M503.337 191.392H501.931V193.046H503.337V191.392Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M507.493 191.392H506.087V193.046H507.493V191.392Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M480.175 194.646H478.769V196.3H480.175V194.646Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M483.536 194.646H482.13V196.3H483.536V194.646Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M486.836 194.646H485.43V196.3H486.836V194.646Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M490.197 194.646H488.792V196.3H490.197V194.646Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M493.498 194.646H492.092V196.3H493.498V194.646Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M496.859 194.646H495.453V196.3H496.859V194.646Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M500.22 194.646H498.814V196.3H500.22V194.646Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M503.337 194.646H501.931V196.3H503.337V194.646Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M507.493 194.646H506.087V196.3H507.493V194.646Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M480.175 197.954H478.769V199.609H480.175V197.954Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M483.536 197.954H482.13V199.609H483.536V197.954Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M486.836 197.954H485.43V199.609H486.836V197.954Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M490.197 197.954H488.792V199.609H490.197V197.954Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M493.498 197.954H492.092V199.609H493.498V197.954Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M496.859 197.954H495.453V199.609H496.859V197.954Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M500.22 197.954H498.814V199.609H500.22V197.954Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M503.337 197.954H501.931V199.609H503.337V197.954Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M507.493 197.954H506.087V199.609H507.493V197.954Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M480.175 201.263H478.769V202.918H480.175V201.263Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M483.536 201.263H482.13V202.918H483.536V201.263Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M486.836 201.263H485.43V202.918H486.836V201.263Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M490.197 201.263H488.792V202.918H490.197V201.263Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M493.498 201.263H492.092V202.918H493.498V201.263Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M496.859 201.263H495.453V202.918H496.859V201.263Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M500.22 201.263H498.814V202.918H500.22V201.263Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M503.337 201.263H501.931V202.918H503.337V201.263Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M507.493 201.263H506.087V202.918H507.493V201.263Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M480.175 204.517H478.769V206.172H480.175V204.517Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M483.536 204.517H482.13V206.172H483.536V204.517Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M486.836 204.517H485.43V206.172H486.836V204.517Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M490.197 204.517H488.792V206.172H490.197V204.517Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M493.498 204.517H492.092V206.172H493.498V204.517Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M496.859 204.517H495.453V206.172H496.859V204.517Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M500.22 204.517H498.814V206.172H500.22V204.517Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M503.337 204.517H501.931V206.172H503.337V204.517Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M507.493 204.517H506.087V206.172H507.493V204.517Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M480.175 207.826H478.769V209.481H480.175V207.826Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M483.536 207.826H482.13V209.481H483.536V207.826Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M486.836 207.826H485.43V209.481H486.836V207.826Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M490.197 207.826H488.792V209.481H490.197V207.826Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M493.498 207.826H492.092V209.481H493.498V207.826Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M496.859 207.826H495.453V209.481H496.859V207.826Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M500.22 207.826H498.814V209.481H500.22V207.826Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M503.337 207.826H501.931V209.481H503.337V207.826Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M507.493 207.826H506.087V209.481H507.493V207.826Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M480.175 211.135H478.769V212.789H480.175V211.135Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M483.536 211.135H482.13V212.789H483.536V211.135Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M486.836 211.135H485.43V212.789H486.836V211.135Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M490.197 211.135H488.792V212.789H490.197V211.135Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M493.498 211.135H492.092V212.789H493.498V211.135Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M496.859 211.135H495.453V212.789H496.859V211.135Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M500.22 211.135H498.814V212.789H500.22V211.135Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M503.337 211.135H501.931V212.789H503.337V211.135Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M507.493 211.135H506.087V212.789H507.493V211.135Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M480.175 214.389H478.769V216.043H480.175V214.389Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M483.536 214.389H482.13V216.043H483.536V214.389Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M486.836 214.389H485.43V216.043H486.836V214.389Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M490.197 214.389H488.792V216.043H490.197V214.389Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M493.498 214.389H492.092V216.043H493.498V214.389Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M496.859 214.389H495.453V216.043H496.859V214.389Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M500.22 214.389H498.814V216.043H500.22V214.389Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M503.337 214.389H501.931V216.043H503.337V214.389Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M507.493 214.389H506.087V216.043H507.493V214.389Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M480.175 217.698H478.769V219.352H480.175V217.698Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M483.536 217.698H482.13V219.352H483.536V217.698Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M486.836 217.698H485.43V219.352H486.836V217.698Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M490.197 217.698H488.792V219.352H490.197V217.698Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M493.498 217.698H492.092V219.352H493.498V217.698Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M496.859 217.698H495.453V219.352H496.859V217.698Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M500.22 217.698H498.814V219.352H500.22V217.698Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M503.337 217.698H501.931V219.352H503.337V217.698Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M507.493 217.698H506.087V219.352H507.493V217.698Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M360.88 197.844H334.173V203.911H360.88V197.844Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M337.84 199.333H336.312V200.712H337.84V199.333Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M341.384 199.333H339.856V200.712H341.384V199.333Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M344.929 199.333H343.401V200.712H344.929V199.333Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M348.473 199.333H346.946V200.712H348.473V199.333Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M352.079 199.333H350.551V200.712H352.079V199.333Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M355.624 199.333H354.096V200.712H355.624V199.333Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M359.168 199.333H357.641V200.712H359.168V199.333Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~opacity:(("0.5"))
+ ~d:(("M368.274 212.459H334.173V215.216H368.274V212.459Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M446.317 186.594H443.811V188.028H446.317V186.594Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M449.617 186.594H447.112V188.028H449.617V186.594Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M452.918 186.594H450.412V188.028H452.918V186.594Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M456.157 186.594H453.651V188.028H456.157V186.594Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M459.457 186.594H456.951V188.028H459.457V186.594Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M462.757 186.594H460.251V188.028H462.757V186.594Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M446.317 188.524H443.811V189.958H446.317V188.524Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M449.617 188.524H447.112V189.958H449.617V188.524Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M452.918 188.524H450.412V189.958H452.918V188.524Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M456.157 188.524H453.651V189.958H456.157V188.524Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M459.457 188.524H456.951V189.958H459.457V188.524Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M462.757 188.524H460.251V189.958H462.757V188.524Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M446.317 190.454H443.811V191.888H446.317V190.454Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M449.617 190.454H447.112V191.888H449.617V190.454Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M452.918 190.454H450.412V191.888H452.918V190.454Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M456.157 190.454H453.651V191.888H456.157V190.454Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M459.457 190.454H456.951V191.888H459.457V190.454Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M462.757 190.454H460.251V191.888H462.757V190.454Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~opacity:(("0.5"))
+ ~d:(("M465.996 190.454H441.306V192.55H465.996V190.454Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M444.362 195.473H434.767V198.892H444.362V195.473Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M454.69 195.473H445.095V198.892H454.69V195.473Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M464.957 195.473H455.362V198.892H464.957V195.473Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M475.224 195.473H465.629V198.892H475.224V195.473Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M444.362 199.609H434.767V203.028H444.362V199.609Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M454.69 199.609H445.095V203.028H454.69V199.609Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M464.957 199.609H455.362V203.028H464.957V199.609Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M475.224 199.609H465.629V203.028H475.224V199.609Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M444.362 203.8H434.767V207.219H444.362V203.8Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M454.69 203.8H445.095V207.219H454.69V203.8Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M464.957 203.8H455.362V207.219H464.957V203.8Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M475.224 203.8H465.629V207.219H475.224V203.8Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M444.362 207.936H434.767V211.356H444.362V207.936Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M454.69 207.936H445.095V211.356H454.69V207.936Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M464.957 207.936H455.362V211.356H464.957V207.936Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M475.224 207.936H465.629V211.356H475.224V207.936Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M444.362 212.073H434.767V215.492H444.362V212.073Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M454.69 212.073H445.095V215.492H454.69V212.073Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M464.957 212.073H455.362V215.492H464.957V212.073Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M475.224 212.073H465.629V215.492H475.224V212.073Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M444.362 216.209H434.767V219.628H444.362V216.209Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M454.69 216.209H445.095V219.628H454.69V216.209Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M464.957 216.209H455.362V219.628H464.957V216.209Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M475.224 216.209H465.629V219.628H475.224V216.209Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~opacity:(("0.6"))
+ ~d:(("M476.141 192.55H467.402V240.474H476.141V192.55Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M569.646 126.647H559.745V134.313H569.646V126.647Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M569.646 130.287H559.745V133.431H569.646V130.287Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M566.468 110.379H564.695V128.853H566.468V110.379Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M573.863 132.383H550.578V240.474H573.863V132.383Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M573.863 131.17H550.578V239.261H573.863V131.17Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M553.023 140.765H551.678V142.034H553.023V140.765Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M556.262 140.765H554.917V142.034H556.262V140.765Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M559.501 140.765H558.156V142.034H559.501V140.765Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M562.74 140.765H561.396V142.034H562.74V140.765Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M565.979 140.765H564.634V142.034H565.979V140.765Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M569.157 140.765H567.812V142.034H569.157V140.765Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M572.396 140.765H571.052V142.034H572.396V140.765Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M553.023 143.743H551.678V145.012H553.023V143.743Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M556.262 143.743H554.917V145.012H556.262V143.743Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M559.501 143.743H558.156V145.012H559.501V143.743Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M562.74 143.743H561.396V145.012H562.74V143.743Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M565.979 143.743H564.634V145.012H565.979V143.743Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M569.157 143.743H567.812V145.012H569.157V143.743Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M572.396 143.743H571.052V145.012H572.396V143.743Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M553.023 149.865H551.678V151.133H553.023V149.865Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M556.262 149.865H554.917V151.133H556.262V149.865Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M559.501 149.865H558.156V151.133H559.501V149.865Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M562.74 149.865H561.396V151.133H562.74V149.865Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M565.979 149.865H564.634V151.133H565.979V149.865Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M569.157 149.865H567.812V151.133H569.157V149.865Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M572.396 149.865H571.052V151.133H572.396V149.865Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M553.023 152.843H551.678V154.111H553.023V152.843Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M556.262 152.843H554.917V154.111H556.262V152.843Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M559.501 152.843H558.156V154.111H559.501V152.843Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M562.74 152.843H561.396V154.111H562.74V152.843Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M565.979 152.843H564.634V154.111H565.979V152.843Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M569.157 152.843H567.812V154.111H569.157V152.843Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M572.396 152.843H571.052V154.111H572.396V152.843Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M553.023 158.964H551.678V160.233H553.023V158.964Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M556.262 158.964H554.917V160.233H556.262V158.964Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M559.501 158.964H558.156V160.233H559.501V158.964Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M562.74 158.964H561.396V160.233H562.74V158.964Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M565.979 158.964H564.634V160.233H565.979V158.964Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M569.157 158.964H567.812V160.233H569.157V158.964Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M572.396 158.964H571.052V160.233H572.396V158.964Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M553.023 161.942H551.678V163.211H553.023V161.942Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M556.262 161.942H554.917V163.211H556.262V161.942Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M559.501 161.942H558.156V163.211H559.501V161.942Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M562.74 161.942H561.396V163.211H562.74V161.942Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M565.979 161.942H564.634V163.211H565.979V161.942Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M569.157 161.942H567.812V163.211H569.157V161.942Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M572.396 161.942H571.052V163.211H572.396V161.942Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M553.023 168.064H551.678V169.332H553.023V168.064Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M556.262 168.064H554.917V169.332H556.262V168.064Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M559.501 168.064H558.156V169.332H559.501V168.064Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M562.74 168.064H561.396V169.332H562.74V168.064Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M565.979 168.064H564.634V169.332H565.979V168.064Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M569.157 168.064H567.812V169.332H569.157V168.064Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M572.396 168.064H571.052V169.332H572.396V168.064Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M553.023 171.042H551.678V172.31H553.023V171.042Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M556.262 171.042H554.917V172.31H556.262V171.042Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M559.501 171.042H558.156V172.31H559.501V171.042Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M562.74 171.042H561.396V172.31H562.74V171.042Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M565.979 171.042H564.634V172.31H565.979V171.042Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M569.157 171.042H567.812V172.31H569.157V171.042Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M572.396 171.042H571.052V172.31H572.396V171.042Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M553.023 177.163H551.678V178.432H553.023V177.163Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M556.262 177.163H554.917V178.432H556.262V177.163Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M559.501 177.163H558.156V178.432H559.501V177.163Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M562.74 177.163H561.396V178.432H562.74V177.163Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M565.979 177.163H564.634V178.432H565.979V177.163Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M569.157 177.163H567.812V178.432H569.157V177.163Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M572.396 177.163H571.052V178.432H572.396V177.163Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M553.023 180.141H551.678V181.41H553.023V180.141Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M556.262 180.141H554.917V181.41H556.262V180.141Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M559.501 180.141H558.156V181.41H559.501V180.141Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M562.74 180.141H561.396V181.41H562.74V180.141Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M565.979 180.141H564.634V181.41H565.979V180.141Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M569.157 180.141H567.812V181.41H569.157V180.141Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M572.396 180.141H571.052V181.41H572.396V180.141Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M553.023 186.263H551.678V187.531H553.023V186.263Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M556.262 186.263H554.917V187.531H556.262V186.263Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M559.501 186.263H558.156V187.531H559.501V186.263Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M562.74 186.263H561.396V187.531H562.74V186.263Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M565.979 186.263H564.634V187.531H565.979V186.263Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M569.157 186.263H567.812V187.531H569.157V186.263Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M572.396 186.263H571.052V187.531H572.396V186.263Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M553.023 189.241H551.678V190.509H553.023V189.241Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M556.262 189.241H554.917V190.509H556.262V189.241Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M559.501 189.241H558.156V190.509H559.501V189.241Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M562.74 189.241H561.396V190.509H562.74V189.241Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M565.979 189.241H564.634V190.509H565.979V189.241Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M569.157 189.241H567.812V190.509H569.157V189.241Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M572.396 189.241H571.052V190.509H572.396V189.241Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M553.023 195.307H551.678V196.576H553.023V195.307Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M556.262 195.307H554.917V196.576H556.262V195.307Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M559.501 195.307H558.156V196.576H559.501V195.307Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M562.74 195.307H561.396V196.576H562.74V195.307Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M565.979 195.307H564.634V196.576H565.979V195.307Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M569.157 195.307H567.812V196.576H569.157V195.307Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M572.396 195.307H571.052V196.576H572.396V195.307Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M553.023 198.341H551.678V199.609H553.023V198.341Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M556.262 198.341H554.917V199.609H556.262V198.341Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M559.501 198.341H558.156V199.609H559.501V198.341Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M562.74 198.341H561.396V199.609H562.74V198.341Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M565.979 198.341H564.634V199.609H565.979V198.341Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M569.157 198.341H567.812V199.609H569.157V198.341Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M572.396 198.341H571.052V199.609H572.396V198.341Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M553.023 204.407H551.678V205.675H553.023V204.407Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M556.262 204.407H554.917V205.675H556.262V204.407Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M559.501 204.407H558.156V205.675H559.501V204.407Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M562.74 204.407H561.396V205.675H562.74V204.407Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M565.979 204.407H564.634V205.675H565.979V204.407Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M569.157 204.407H567.812V205.675H569.157V204.407Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M572.396 204.407H571.052V205.675H572.396V204.407Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M553.023 207.44H551.678V208.708H553.023V207.44Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M556.262 207.44H554.917V208.708H556.262V207.44Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M559.501 207.44H558.156V208.708H559.501V207.44Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M562.74 207.44H561.396V208.708H562.74V207.44Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M565.979 207.44H564.634V208.708H565.979V207.44Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M569.157 207.44H567.812V208.708H569.157V207.44Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M572.396 207.44H571.052V208.708H572.396V207.44Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M553.023 213.506H551.678V214.775H553.023V213.506Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M556.262 213.506H554.917V214.775H556.262V213.506Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M559.501 213.506H558.156V214.775H559.501V213.506Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M562.74 213.506H561.396V214.775H562.74V213.506Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M565.979 213.506H564.634V214.775H565.979V213.506Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M569.157 213.506H567.812V214.775H569.157V213.506Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M572.396 213.506H571.052V214.775H572.396V213.506Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M553.023 216.484H551.678V217.753H553.023V216.484Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M556.262 216.484H554.917V217.753H556.262V216.484Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M559.501 216.484H558.156V217.753H559.501V216.484Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M562.74 216.484H561.396V217.753H562.74V216.484Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M565.979 216.484H564.634V217.753H565.979V216.484Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M569.157 216.484H567.812V217.753H569.157V216.484Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M572.396 216.484H571.052V217.753H572.396V216.484Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M553.023 222.606H551.678V223.874H553.023V222.606Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M556.262 222.606H554.917V223.874H556.262V222.606Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M559.501 222.606H558.156V223.874H559.501V222.606Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M562.74 222.606H561.396V223.874H562.74V222.606Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M565.979 222.606H564.634V223.874H565.979V222.606Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M569.157 222.606H567.812V223.874H569.157V222.606Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M572.396 222.606H571.052V223.874H572.396V222.606Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M553.023 225.584H551.678V226.852H553.023V225.584Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M556.262 225.584H554.917V226.852H556.262V225.584Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M559.501 225.584H558.156V226.852H559.501V225.584Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M562.74 225.584H561.396V226.852H562.74V225.584Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M565.979 225.584H564.634V226.852H565.979V225.584Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M569.157 225.584H567.812V226.852H569.157V225.584Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M572.396 225.584H571.052V226.852H572.396V225.584Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M553.023 231.705H551.678V232.974H553.023V231.705Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M556.262 231.705H554.917V232.974H556.262V231.705Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M559.501 231.705H558.156V232.974H559.501V231.705Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M562.74 231.705H561.396V232.974H562.74V231.705Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M565.979 231.705H564.634V232.974H565.979V231.705Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M569.157 231.705H567.812V232.974H569.157V231.705Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M572.396 231.705H571.052V232.974H572.396V231.705Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M553.023 234.683H551.678V235.952H553.023V234.683Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M556.262 234.683H554.917V235.952H556.262V234.683Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M559.501 234.683H558.156V235.952H559.501V234.683Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M562.74 234.683H561.396V235.952H562.74V234.683Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M565.979 234.683H564.634V235.952H565.979V234.683Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M569.157 234.683H567.812V235.952H569.157V234.683Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M572.396 234.683H571.052V235.952H572.396V234.683Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~opacity:(("0.3"))
+ ~d:(("M573.863 230.988H550.578V240.474H573.863V230.988Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~opacity:(("0.6"))
+ ~d:(("M562.068 131.17H550.578V240.364H562.068V131.17Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M531.877 158.964L549.05 181.741L530.594 206.282L514.765 181.741L531.877 158.964Z")
+ ) ~stroke:(("#E37056"))
+ ~strokeWidth:(("3"))
+ ~strokeMiterlimit:(("10"))
+ ~strokeLinecap:(("round"))
+ ~strokeLinejoin:(("round")) ~children:[]
+ ())
+ [@JSX ]);
+ ((path
+ ~d:(("M579.73 161.667H584.986L590.425 143.247L579.73 161.667Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M602.77 161.667H597.575L591.586 143.247L602.77 161.667Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M590.058 165.251H589.386V180.472H590.058V165.251Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M588.897 165.858H588.225V181.079H588.897V165.858Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M587.797 166.41H587.125V181.631H587.797V166.41Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M586.697 167.016H586.025V182.237H586.697V167.016Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M585.536 167.623H584.863V182.844H585.536V167.623Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M584.435 168.174H583.763V183.395H584.435V168.174Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M583.336 168.781H582.663V184.002H583.336V168.781Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M582.174 169.332H581.502V184.553H582.174V169.332Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M593.542 165.251H592.869V180.472H593.542V165.251Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M594.703 165.858H594.03V181.079H594.703V165.858Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M595.803 166.41H595.13V181.631H595.803V166.41Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M596.903 167.016H596.231V182.237H596.903V167.016Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M598.064 167.623H597.392V182.844H598.064V167.623Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M599.164 168.174H598.492V183.395H599.164V168.174Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M600.264 168.781H599.592V184.002H600.264V168.781Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M601.425 169.332H600.753V184.553H601.425V169.332Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M590.058 182.734H589.386V197.955H590.058V182.734Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M588.897 183.285H588.225V198.506H588.897V183.285Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M587.797 183.892H587.125V199.113H587.797V183.892Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M586.697 184.443H586.025V199.664H586.697V184.443Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M585.536 185.05H584.863V200.271H585.536V185.05Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M584.435 185.656H583.763V200.877H584.435V185.656Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M583.336 186.208H582.663V201.429H583.336V186.208Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M582.174 186.815H581.502V202.036H582.174V186.815Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M593.542 182.734H592.869V197.955H593.542V182.734Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M594.703 183.285H594.03V198.506H594.703V183.285Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M595.803 183.892H595.13V199.113H595.803V183.892Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M596.903 184.443H596.231V199.664H596.903V184.443Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M598.064 185.05H597.392V200.271H598.064V185.05Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M599.164 185.656H598.492V200.877H599.164V185.656Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M600.264 186.208H599.592V201.429H600.264V186.208Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M601.425 186.814H600.753V202.035H601.425V186.814Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M590.058 200.16H589.386V215.381H590.058V200.16Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M588.897 200.767H588.225V215.988H588.897V200.767Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M587.797 201.319H587.125V216.54H587.797V201.319Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M586.697 201.925H586.025V217.146H586.697V201.925Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M585.536 202.532H584.863V217.753H585.536V202.532Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M584.435 203.083H583.763V218.304H584.435V203.083Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M583.336 203.69H582.663V218.911H583.336V203.69Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M582.174 204.241H581.502V219.462H582.174V204.241Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M593.542 200.16H592.869V215.381H593.542V200.16Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M594.703 200.767H594.03V215.988H594.703V200.767Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M595.803 201.318H595.13V216.54H595.803V201.318Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M596.903 201.925H596.231V217.146H596.903V201.925Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M598.064 202.532H597.392V217.753H598.064V202.532Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M599.164 203.083H598.492V218.304H599.164V203.083Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M600.264 203.69H599.592V218.911H600.264V203.69Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M601.425 204.241H600.753V219.462H601.425V204.241Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M590.058 217.643H589.386V232.864H590.058V217.643Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M588.897 218.194H588.225V233.415H588.897V218.194Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M587.797 218.801H587.125V234.022H587.797V218.801Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M586.697 219.407H586.025V234.628H586.697V219.407Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M585.536 219.959H584.863V235.18H585.536V219.959Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M584.435 220.565H583.763V235.786H584.435V220.565Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M583.336 221.117H582.663V236.338H583.336V221.117Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M582.174 221.724H581.502V236.945H582.174V221.724Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M593.542 217.643H592.869V232.864H593.542V217.643Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M594.703 218.194H594.03V233.415H594.703V218.194Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M595.803 218.801H595.13V234.022H595.803V218.801Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M596.903 219.407H596.231V234.628H596.903V219.407Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M598.064 219.959H597.392V235.18H598.064V219.959Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M599.164 220.565H598.492V235.786H599.164V220.565Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M600.264 221.117H599.592V236.338H600.264V221.117Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M601.425 221.724H600.753V236.945H601.425V221.724Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M609.92 134.258H608.331V135.692H609.92V134.258Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M614.565 134.258H612.976V135.692H614.565V134.258Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M619.21 134.258H617.62V135.692H619.21V134.258Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M623.854 134.258H622.265V135.692H623.854V134.258Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M628.499 134.258H626.91V135.692H628.499V134.258Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M633.144 134.258H631.555V135.692H633.144V134.258Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M609.92 137.622H608.331V139.056H609.92V137.622Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M614.565 137.622H612.976V139.056H614.565V137.622Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M619.21 137.622H617.62V139.056H619.21V137.622Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M623.854 137.622H622.265V139.056H623.854V137.622Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M628.499 137.622H626.91V139.056H628.499V137.622Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M633.144 137.622H631.555V139.056H633.144V137.622Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M609.92 141.041H608.331V142.475H609.92V141.041Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M614.565 141.041H612.976V142.475H614.565V141.041Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M619.21 141.041H617.62V142.475H619.21V141.041Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M623.854 141.041H622.265V142.475H623.854V141.041Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M628.499 141.041H626.91V142.475H628.499V141.041Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M633.144 141.041H631.555V142.475H633.144V141.041Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M609.92 144.405H608.331V145.839H609.92V144.405Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M614.565 144.405H612.976V145.839H614.565V144.405Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M619.21 144.405H617.62V145.839H619.21V144.405Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M623.854 144.405H622.265V145.839H623.854V144.405Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M628.499 144.405H626.91V145.839H628.499V144.405Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M633.144 144.405H631.555V145.839H633.144V144.405Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M609.92 147.769H608.331V149.203H609.92V147.769Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M614.565 147.769H612.976V149.203H614.565V147.769Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M619.21 147.769H617.62V149.203H619.21V147.769Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M623.854 147.769H622.265V149.203H623.854V147.769Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M628.499 147.769H626.91V149.203H628.499V147.769Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M633.144 147.769H631.555V149.203H633.144V147.769Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M609.92 151.188H608.331V152.622H609.92V151.188Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M614.565 151.188H612.976V152.622H614.565V151.188Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M619.21 151.188H617.62V152.622H619.21V151.188Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M623.854 151.188H622.265V152.622H623.854V151.188Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M628.499 151.188H626.91V152.622H628.499V151.188Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M633.144 151.188H631.555V152.622H633.144V151.188Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M609.92 154.553H608.331V155.986H609.92V154.553Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M614.565 154.553H612.976V155.986H614.565V154.553Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M619.21 154.553H617.62V155.986H619.21V154.553Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M623.854 154.553H622.265V155.986H623.854V154.553Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M628.499 154.553H626.91V155.986H628.499V154.553Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M633.144 154.553H631.555V155.986H633.144V154.553Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M609.92 157.972H608.331V159.406H609.92V157.972Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M614.565 157.972H612.976V159.406H614.565V157.972Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M619.21 157.972H617.62V159.406H619.21V157.972Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M623.854 157.972H622.265V159.406H623.854V157.972Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M628.499 157.972H626.91V159.406H628.499V157.972Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M633.144 157.972H631.555V159.406H633.144V157.972Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M609.92 161.336H608.331V162.77H609.92V161.336Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M614.565 161.336H612.976V162.77H614.565V161.336Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M619.21 161.336H617.62V162.77H619.21V161.336Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M623.854 161.336H622.265V162.77H623.854V161.336Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M628.499 161.336H626.91V162.77H628.499V161.336Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M633.144 161.336H631.555V162.77H633.144V161.336Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M609.92 164.7H608.331V166.134H609.92V164.7Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M614.565 164.7H612.976V166.134H614.565V164.7Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M619.21 164.7H617.62V166.134H619.21V164.7Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M623.854 164.7H622.265V166.134H623.854V164.7Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M628.499 164.7H626.91V166.134H628.499V164.7Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M633.144 164.7H631.555V166.134H633.144V164.7Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M609.92 168.119H608.331V169.553H609.92V168.119Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M614.565 168.119H612.976V169.553H614.565V168.119Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M619.21 168.119H617.62V169.553H619.21V168.119Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M623.854 168.119H622.265V169.553H623.854V168.119Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M628.499 168.119H626.91V169.553H628.499V168.119Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M633.144 168.119H631.555V169.553H633.144V168.119Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M609.92 171.483H608.331V172.917H609.92V171.483Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M614.565 171.483H612.976V172.917H614.565V171.483Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M619.21 171.483H617.62V172.917H619.21V171.483Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M623.854 171.483H622.265V172.917H623.854V171.483Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M628.499 171.483H626.91V172.917H628.499V171.483Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M633.144 171.483H631.555V172.917H633.144V171.483Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M609.92 174.902H608.331V176.336H609.92V174.902Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M614.565 174.902H612.976V176.336H614.565V174.902Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M619.21 174.902H617.62V176.336H619.21V174.902Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M623.854 174.902H622.265V176.336H623.854V174.902Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M628.499 174.902H626.91V176.336H628.499V174.902Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M633.144 174.902H631.555V176.336H633.144V174.902Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M609.92 178.266H608.331V179.7H609.92V178.266Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M614.565 178.266H612.976V179.7H614.565V178.266Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M619.21 178.266H617.62V179.7H619.21V178.266Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M623.854 178.266H622.265V179.7H623.854V178.266Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M628.499 178.266H626.91V179.7H628.499V178.266Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M633.144 178.266H631.555V179.7H633.144V178.266Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M609.92 181.63H608.331V183.064H609.92V181.63Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M614.565 181.63H612.976V183.064H614.565V181.63Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M619.21 181.63H617.62V183.064H619.21V181.63Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M623.854 181.63H622.265V183.064H623.854V181.63Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M628.499 181.63H626.91V183.064H628.499V181.63Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M633.144 181.63H631.555V183.064H633.144V181.63Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M609.92 185.05H608.331V186.484H609.92V185.05Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M614.565 185.05H612.976V186.484H614.565V185.05Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M619.21 185.05H617.62V186.484H619.21V185.05Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M623.854 185.05H622.265V186.484H623.854V185.05Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M628.499 185.05H626.91V186.484H628.499V185.05Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M633.144 185.05H631.555V186.484H633.144V185.05Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M609.92 188.414H608.331V189.848H609.92V188.414Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M614.565 188.414H612.976V189.848H614.565V188.414Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M619.21 188.414H617.62V189.848H619.21V188.414Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M623.854 188.414H622.265V189.848H623.854V188.414Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M628.499 188.414H626.91V189.848H628.499V188.414Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M633.144 188.414H631.555V189.848H633.144V188.414Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M609.92 191.833H608.331V193.267H609.92V191.833Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M614.565 191.833H612.976V193.267H614.565V191.833Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M619.21 191.833H617.62V193.267H619.21V191.833Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M623.854 191.833H622.265V193.267H623.854V191.833Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M628.499 191.833H626.91V193.267H628.499V191.833Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M633.144 191.833H631.555V193.267H633.144V191.833Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M609.92 195.197H608.331V196.631H609.92V195.197Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M614.565 195.197H612.976V196.631H614.565V195.197Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M619.21 195.197H617.62V196.631H619.21V195.197Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M623.854 195.197H622.265V196.631H623.854V195.197Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M628.499 195.197H626.91V196.631H628.499V195.197Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M633.144 195.197H631.555V196.631H633.144V195.197Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M609.92 198.616H608.331V200.05H609.92V198.616Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M614.565 198.616H612.976V200.05H614.565V198.616Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M619.21 198.616H617.62V200.05H619.21V198.616Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M623.854 198.616H622.265V200.05H623.854V198.616Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M628.499 198.616H626.91V200.05H628.499V198.616Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M633.144 198.616H631.555V200.05H633.144V198.616Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M609.92 201.925H608.331V203.359H609.92V201.925Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M614.565 201.925H612.976V203.359H614.565V201.925Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M619.21 201.925H617.62V203.359H619.21V201.925Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M623.854 201.925H622.265V203.359H623.854V201.925Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M628.499 201.925H626.91V203.359H628.499V201.925Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M633.144 201.925H631.555V203.359H633.144V201.925Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M609.92 205.344H608.331V206.778H609.92V205.344Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M614.565 205.344H612.976V206.778H614.565V205.344Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M619.21 205.344H617.62V206.778H619.21V205.344Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M623.854 205.344H622.265V206.778H623.854V205.344Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M628.499 205.344H626.91V206.778H628.499V205.344Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M633.144 205.344H631.555V206.778H633.144V205.344Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M609.92 208.708H608.331V210.142H609.92V208.708Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M614.565 208.708H612.976V210.142H614.565V208.708Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M619.21 208.708H617.62V210.142H619.21V208.708Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M623.854 208.708H622.265V210.142H623.854V208.708Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M628.499 208.708H626.91V210.142H628.499V208.708Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M633.144 208.708H631.555V210.142H633.144V208.708Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M609.92 212.073H608.331V213.506H609.92V212.073Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M614.565 212.073H612.976V213.506H614.565V212.073Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M619.21 212.073H617.62V213.506H619.21V212.073Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M623.854 212.073H622.265V213.506H623.854V212.073Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M628.499 212.073H626.91V213.506H628.499V212.073Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M633.144 212.073H631.555V213.506H633.144V212.073Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M609.92 215.492H608.331V216.926H609.92V215.492Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M614.565 215.492H612.976V216.926H614.565V215.492Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M619.21 215.492H617.62V216.926H619.21V215.492Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M623.854 215.492H622.265V216.926H623.854V215.492Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M628.499 215.492H626.91V216.926H628.499V215.492Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M633.144 215.492H631.555V216.926H633.144V215.492Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M609.92 218.856H608.331V220.29H609.92V218.856Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M614.565 218.856H612.976V220.29H614.565V218.856Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M619.21 218.856H617.62V220.29H619.21V218.856Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M623.854 218.856H622.265V220.29H623.854V218.856Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M628.499 218.856H626.91V220.29H628.499V218.856Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M633.144 218.856H631.555V220.29H633.144V218.856Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M609.92 222.275H608.331V223.709H609.92V222.275Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M614.565 222.275H612.976V223.709H614.565V222.275Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M619.21 222.275H617.62V223.709H619.21V222.275Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M623.854 222.275H622.265V223.709H623.854V222.275Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M628.499 222.275H626.91V223.709H628.499V222.275Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M633.144 222.275H631.555V223.709H633.144V222.275Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M609.92 225.639H608.331V227.073H609.92V225.639Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M614.565 225.639H612.976V227.073H614.565V225.639Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M619.21 225.639H617.62V227.073H619.21V225.639Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M623.854 225.639H622.265V227.073H623.854V225.639Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M628.499 225.639H626.91V227.073H628.499V225.639Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M633.144 225.639H631.555V227.073H633.144V225.639Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M609.92 229.003H608.331V230.437H609.92V229.003Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M614.565 229.003H612.976V230.437H614.565V229.003Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M619.21 229.003H617.62V230.437H619.21V229.003Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M623.854 229.003H622.265V230.437H623.854V229.003Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M628.499 229.003H626.91V230.437H628.499V229.003Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M633.144 229.003H631.555V230.437H633.144V229.003Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M609.92 232.422H608.331V233.856H609.92V232.422Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M614.565 232.422H612.976V233.856H614.565V232.422Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~d:(("M619.21 232.422H617.62V233.856H619.21V232.422Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M623.854 232.422H622.265V233.856H623.854V232.422Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M628.499 232.422H626.91V233.856H628.499V232.422Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M633.144 232.422H631.555V233.856H633.144V232.422Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~opacity:(("0.7"))
+ ~d:(("M635.894 230.713H605.581V240.474H635.894V230.713Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~className:(("HeroGraphic-cloudRight")
+ ) ~opacity:(("0.7"))
+ ~d:(("M609.126 61.903H757.878C757.878 61.903 760.078 50.3218 744.127 48.3364C744.127 48.3364 742.232 27.7108 719.742 20.3761C697.252 13.0413 686.924 24.3468 686.924 24.3468C686.924 24.3468 680.018 16.8466 670.973 17.8944C661.928 18.9422 658.139 29.4205 658.139 29.4205C658.139 29.4205 652.822 26.0564 647.2 28.869C641.577 31.6815 641.883 35.3765 641.883 35.3765C641.883 35.3765 635.405 33.9426 630.821 33.6669C626.238 33.3912 604.237 34.825 609.126 61.903Z")
+ ) ~fill:(("white"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~opacity:(("0.5"))
+ ~d:(("M406.715 74.3666H472.413C472.413 74.3666 472.169 68.2451 465.079 68.2451C465.079 68.2451 463.063 64.8258 452.918 62.7302C442.773 60.6897 432.139 65.4876 432.139 65.4876C432.139 65.4876 429.45 63.8883 426.455 63.8883C423.461 63.8883 422.422 67.3075 422.422 67.3075C422.422 67.3075 406.715 64.881 406.715 74.3666Z")
+ ) ~fill:(("white"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~opacity:(("0.5"))
+ ~d:(("M233.09 126.647H283.02C283.02 126.647 277.581 121.022 267.192 117.493C256.802 113.963 250.141 117.493 250.141 117.493C250.141 117.493 247.207 114.68 242.807 117.493C238.407 120.305 239.385 121.298 239.385 121.298C239.385 121.298 231.99 120.36 233.09 126.647Z")
+ ) ~fill:(("white"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M173.992 45.193C173.992 39.0715 178.392 33.9427 184.504 32.1779C183.037 31.7367 181.509 31.5161 179.859 31.5161C171.487 31.5161 164.703 37.6376 164.703 45.193C164.703 52.7483 171.487 58.8698 179.859 58.8698C181.509 58.8698 183.037 58.6492 184.504 58.208C178.454 56.4433 173.992 51.3145 173.992 45.193Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M245.863 68.4656L246.474 69.5686L247.818 69.734L246.84 70.5613L247.085 71.7194L245.863 71.1679L244.701 71.7194L244.946 70.5613L243.968 69.734L245.313 69.5686L245.863 68.4656Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M334.173 43.2627L334.784 44.4208L336.189 44.5863L335.15 45.5238L335.395 46.7371L334.173 46.1856L332.889 46.7371L333.134 45.5238L332.156 44.5863L333.561 44.4208L334.173 43.2627Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M377.136 23.9056L377.686 24.9534L378.969 25.1188L378.053 25.8909L378.236 27.0491L377.136 26.4976L376.036 27.0491L376.219 25.8909L375.303 25.1188L376.586 24.9534L377.136 23.9056Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M498.815 134.644L499.365 135.692L500.587 135.857L499.67 136.629L499.915 137.732L498.815 137.236L497.653 137.732L497.898 136.629L496.981 135.857L498.203 135.692L498.815 134.644Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M354.096 0.90863L354.646 1.90131L355.868 2.06675L355.013 2.89398L355.196 3.99695L354.096 3.50061L352.935 3.99695L353.179 2.89398L352.263 2.06675L353.546 1.90131L354.096 0.90863Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M160.914 17.9495L161.464 18.9422L162.747 19.1076L161.831 19.9349L162.075 21.0378L160.914 20.4864L159.814 21.0378L159.997 19.9349L159.142 19.1076L160.364 18.9422L160.914 17.9495Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M40.8243 164.314L41.3743 165.362L42.6577 165.527L41.741 166.299L41.9244 167.457L40.8243 166.906L39.7243 167.457L39.9076 166.299L38.9909 165.527L40.2743 165.362L40.8243 164.314Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M380.497 53.9064L381.108 55.0093L382.453 55.1748L381.475 56.002L381.658 57.2153L380.497 56.6638L379.275 57.2153L379.519 56.002L378.542 55.1748L379.886 55.0093L380.497 53.9064Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M216.161 102.272L216.833 103.54L218.361 103.706L217.261 104.643L217.506 106.022L216.161 105.36L214.817 106.022L215.122 104.643L214.022 103.706L215.489 103.54L216.161 102.272Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M97.6606 109.993L98.1495 110.985L99.3718 111.151L98.5162 111.923L98.6995 112.971L97.6606 112.474L96.5605 112.971L96.7439 111.923L95.8883 111.151L97.1105 110.985L97.6606 109.993Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M90.9991 146.28L91.5491 147.273L92.7714 147.438L91.8547 148.21L92.0992 149.258L90.9991 148.762L89.9602 149.258L90.1435 148.21L89.2879 147.438L90.4491 147.273L90.9991 146.28Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M105.239 55.7263L105.728 56.6087L106.828 56.7741L106.033 57.4359L106.217 58.4286L105.239 57.9323L104.261 58.4286L104.444 57.4359L103.711 56.7741L104.75 56.6087L105.239 55.7263Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M115.689 19.4937L116.3 20.5415L117.584 20.7069L116.606 21.5342L116.85 22.6371L115.689 22.0856L114.589 22.6371L114.772 21.5342L113.856 20.7069L115.139 20.5415L115.689 19.4937Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M486.164 101.831L486.347 102.217L486.836 102.272L486.469 102.603L486.592 102.989L486.164 102.823L485.736 102.989L485.797 102.603L485.492 102.272L485.981 102.217L486.164 101.831Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M418.999 18.3907L419.305 18.887L419.977 18.9973L419.488 19.4385L419.61 19.99L418.999 19.7143L418.449 19.99L418.51 19.4385L418.082 18.9973L418.694 18.887L418.999 18.3907Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M558.156 67.9141L558.89 69.2377L560.418 69.4031L559.318 70.451L559.562 71.8848L558.156 71.1679L556.751 71.8848L556.995 70.451L555.834 69.4031L557.423 69.2377L558.156 67.9141Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~className:(("HeroGraphic-starCenterLeft")
+ )
+ ~d:(("M380.497 100.893L381.17 102.161L382.759 102.382L381.658 103.375L381.903 104.809L380.497 104.147L379.092 104.809L379.336 103.375L378.175 102.382L379.764 102.161L380.497 100.893Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~className:(("HeroGraphic-startRight")
+ )
+ ~d:(("M160.914 73.9805L161.647 75.2489L163.236 75.4695L162.075 76.4621L162.319 77.896L160.914 77.2342L159.508 77.896L159.814 76.4621L158.653 75.4695L160.242 75.2489L160.914 73.9805Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M348.473 94.3854L349.146 95.709L350.735 95.8744L349.635 96.9223L349.879 98.3561L348.473 97.6392L347.068 98.3561L347.312 96.9223L346.151 95.8744L347.74 95.709L348.473 94.3854Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M527.783 46.7922L528.271 47.7849L529.494 47.9504L528.638 48.7224L528.822 49.7703L527.783 49.2739L526.682 49.7703L526.866 48.7224L526.01 47.9504L527.233 47.7849L527.783 46.7922Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~className:(("HeroGraphic-starCenterLeft")
+ )
+ ~d:(("M531.877 90.9662L532.427 91.9589L533.649 92.1244L532.733 92.8964L532.977 93.9443L531.877 93.4479L530.838 93.9443L531.022 92.8964L530.166 92.1244L531.388 91.9589L531.877 90.9662Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M632.349 87.8779L632.716 88.4846L633.449 88.5949L632.899 89.0912L633.021 89.753L632.349 89.4221L631.677 89.753L631.799 89.0912L631.249 88.5949L632.043 88.4846L632.349 87.8779Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M683.196 143.357L683.991 144.846L685.824 145.067L684.48 146.225L684.785 147.824L683.196 147.052L681.607 147.824L681.913 146.225L680.629 145.067L682.402 144.846L683.196 143.357Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~className:(("HeroGraphic-starRight")
+ )
+ ~d:(("M669.873 94.3854L671.095 96.7017L673.907 97.0326L671.89 98.7973L672.379 101.334L669.873 100.121L667.368 101.334L667.856 98.7973L665.84 97.0326L668.59 96.7017L669.873 94.3854Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path ~className:(("HeroGraphic-starCenter")
+ )
+ ~d:(("M261.997 28.0417L263.28 30.3028L266.03 30.6888L264.014 32.4536L264.502 34.9904L261.997 33.7771L259.552 34.9904L259.98 32.4536L257.963 30.6888L260.774 30.3028L261.997 28.0417Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M608.331 26.0012L608.698 26.6079L609.431 26.7182L608.881 27.2145L609.003 27.8763L608.331 27.6005L607.659 27.8763L607.781 27.2145L607.231 26.7182L607.964 26.6079L608.331 26.0012Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M656.123 163.211L656.489 163.873L657.284 163.983L656.673 164.479L656.856 165.141L656.123 164.81L655.45 165.141L655.572 164.479L655.022 163.983L655.817 163.873L656.123 163.211Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M497.959 92.4553L498.326 93.117L499.12 93.2273L498.509 93.7237L498.692 94.3855L497.959 94.0546L497.287 94.3855L497.409 93.7237L496.859 93.2273L497.653 93.117L497.959 92.4553Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M506.087 78.8887L506.393 79.4953L507.187 79.6056L506.637 80.1019L506.759 80.8189L506.087 80.488L505.354 80.8189L505.476 80.1019L504.926 79.6056L505.72 79.4953L506.087 78.8887Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M546.239 110.048L546.973 111.426L548.745 111.647L547.461 112.75L547.767 114.294L546.239 113.577L544.711 114.294L544.956 112.75L543.733 111.647L545.445 111.426L546.239 110.048Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M481.947 38.52L482.436 39.4023L483.536 39.5678L482.742 40.2847L482.925 41.2222L481.947 40.7811L480.969 41.2222L481.153 40.2847L480.358 39.5678L481.458 39.4023L481.947 38.52Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M304.349 67.142L304.838 68.0244L305.938 68.1347L305.143 68.8516L305.327 69.8443L304.349 69.348L303.371 69.8443L303.554 68.8516L302.76 68.1347L303.86 68.0244L304.349 67.142Z")
+ ) ~fill:(("#E37056"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M643.105 244.279C643.105 246.43 641.15 248.195 638.766 248.195H90.0824C87.699 248.195 85.7433 246.43 85.7433 244.279C85.7433 242.129 87.699 240.364 90.0824 240.364H638.766C641.211 240.364 643.105 242.129 643.105 244.279Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M643.105 244.22C643.105 246.371 641.15 248.135 638.766 248.135H90.0824C87.699 248.135 85.7433 246.371 85.7433 244.22C85.7433 242.069 87.699 240.304 90.0824 240.304H638.766C641.211 240.304 643.105 242.069 643.105 244.22Z")
+ ) ~fill:(("#2484C6"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M124.612 231.485C124.612 231.485 123.817 225.86 127.118 223.985C130.418 222.165 131.64 222.165 131.64 222.165C131.64 222.165 130.54 218.911 133.351 217.477C136.163 216.043 137.446 217.477 137.446 217.477C137.446 217.477 137.752 212.128 141.541 210.97C145.269 209.867 146.246 210.97 146.246 210.97C146.246 210.97 147.958 203.58 153.458 202.697C158.958 201.815 158.958 203.635 158.958 203.635C158.958 203.635 159.569 202.532 162.686 202.422C165.803 202.311 165.681 204.021 165.681 204.021C165.681 204.021 167.697 203.028 170.387 203.58C173.076 204.131 173.076 206.558 173.076 206.558C173.076 206.558 174.298 204.738 179.676 208.543C184.993 212.348 184.198 218.856 184.198 218.856C184.198 218.856 188.293 219.021 188.721 220.676C189.21 222.385 188.11 223.929 188.11 223.929C188.11 223.929 192.938 223.488 193.427 226.025C193.916 228.562 192.815 229.83 192.815 229.83C192.815 229.83 194.527 230.823 194.71 232.919C194.893 235.014 194.71 236.007 194.71 236.007C194.71 236.007 196.299 236.448 196.605 239.371C196.91 242.349 195.504 244.445 195.504 244.445C195.504 244.445 195.504 247.643 191.899 248.195C188.293 248.801 119.112 248.195 119.112 248.195C119.112 248.195 117.095 245.272 118.317 243.728C119.539 242.184 120.334 242.184 120.334 242.184C120.334 242.184 120.028 240.198 120.028 239.757C120.028 239.316 122.351 238.489 122.351 238.489C122.351 238.489 120.762 237.386 121.556 236.393C122.351 235.4 123.451 234.959 123.451 234.959C123.451 234.959 120.578 232.367 124.612 231.485Z")
+ ) ~fill:(("#0D1522"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M191.654 238.599C191.654 238.599 191.776 232.919 195.443 232.367C199.11 231.816 199.416 232.367 199.416 232.367C199.416 232.367 199.721 230.327 201.433 229.996C203.144 229.61 203.572 229.996 203.572 229.996C203.572 229.996 204.61 226.852 207.85 226.08C211.089 225.308 212.739 229.389 212.739 229.389C212.739 229.389 214.694 228.562 216.283 229.113C217.872 229.665 217.261 231.209 217.261 231.209C217.261 231.209 218.545 230.823 218.85 231.761C219.156 232.698 218.85 233.139 218.85 233.139C218.85 233.139 220.5 232.422 221.417 234.132C222.334 235.897 222.028 236.889 222.028 236.889C222.028 236.889 223.556 235.897 224.045 237.441C224.534 238.985 224.534 239.537 224.534 239.537C224.534 239.537 227.773 238.709 228.017 239.978C228.262 241.246 226.062 242.459 226.062 242.459C226.062 242.459 227.467 243.452 227.467 244.665C227.467 245.879 226.734 246.044 226.734 246.32C226.734 246.596 227.223 247.588 227.162 248.36C227.039 249.077 189.271 248.36 189.271 248.36L191.654 238.599Z")
+ ) ~fill:(("#0D1522"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M247.329 238.599C247.329 238.599 247.452 232.919 251.118 232.367C254.785 231.816 255.091 232.367 255.091 232.367C255.091 232.367 255.396 230.327 257.108 229.996C258.819 229.61 259.247 229.996 259.247 229.996C259.247 229.996 260.286 226.852 263.525 226.08C266.764 225.308 268.414 229.389 268.414 229.389C268.414 229.389 270.369 228.562 271.958 229.113C273.547 229.665 272.936 231.209 272.936 231.209C272.936 231.209 274.22 230.823 274.525 231.761C274.831 232.698 274.525 233.139 274.525 233.139C274.525 233.139 276.175 232.422 277.092 234.132C278.009 235.897 277.703 236.889 277.703 236.889C277.703 236.889 279.231 235.897 279.72 237.441C280.209 238.985 280.209 239.537 280.209 239.537C280.209 239.537 283.448 238.709 283.692 239.978C283.937 241.246 281.737 242.459 281.737 242.459C281.737 242.459 283.142 243.452 283.142 244.665C283.142 245.879 282.409 246.044 282.409 246.32C282.409 246.596 282.898 247.588 282.837 248.36C282.714 249.077 244.946 248.36 244.946 248.36L247.329 238.599Z")
+ ) ~fill:(("#0D1522"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M222.089 241.687C222.089 241.687 219.828 233.967 220.989 232.202C222.089 230.437 222.089 230.437 222.089 230.437C222.089 230.437 220.989 228.617 222.089 227.955C223.189 227.294 223.189 227.294 223.189 227.294C223.189 227.294 223.189 224.702 224.839 224.095C226.489 223.433 226.978 224.095 226.978 224.095C226.978 224.095 227.101 220.124 228.69 219.959C230.34 219.793 230.523 220.676 230.523 220.676C230.523 220.676 230.34 218.029 232.54 217.091C234.801 216.154 236.512 218.47 236.512 218.47C236.512 218.47 239.14 218.58 239.568 220.29C239.996 222.054 239.568 223.047 239.568 223.047C239.568 223.047 241.707 223.654 241.401 225.915C241.096 228.176 241.096 228.176 241.096 228.176C241.096 228.176 244.946 226.687 245.985 228.176C247.024 229.665 245.985 230.547 245.985 230.547C245.985 230.547 247.696 230.382 247.696 231.926C247.696 233.47 247.696 234.297 247.696 234.297C247.696 234.297 249.713 233.856 249.957 235.566C250.141 237.331 249.041 238.489 249.041 238.489C249.041 238.489 250.08 239.867 249.957 241.081C249.835 242.294 249.102 242.901 249.102 242.901C249.102 242.901 251.057 242.625 251.546 244.445C252.035 246.265 251.057 246.926 251.057 246.926L251.73 248.526H223.8L222.089 241.687Z")
+ ) ~fill:(("#0D1522"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M488.547 236.889C488.547 236.889 487.997 233.029 490.258 231.761C492.52 230.492 493.375 230.492 493.375 230.492C493.375 230.492 492.642 228.286 494.536 227.294C496.492 226.301 497.348 227.294 497.348 227.294C497.348 227.294 497.592 223.599 500.159 222.827C502.726 222.054 503.398 222.827 503.398 222.827C503.398 222.827 504.559 217.753 508.348 217.146C512.137 216.54 512.137 217.808 512.137 217.808C512.137 217.808 512.565 217.036 514.704 216.981C516.843 216.926 516.721 218.084 516.721 218.084C516.721 218.084 518.127 217.422 519.96 217.808C521.793 218.194 521.793 219.849 521.793 219.849C521.793 219.849 522.649 218.58 526.316 221.227C529.983 223.819 529.433 228.286 529.433 228.286C529.433 228.286 532.244 228.397 532.55 229.555C532.855 230.713 532.122 231.761 532.122 231.761C532.122 231.761 535.483 231.485 535.789 233.194C536.094 234.959 535.361 235.786 535.361 235.786C535.361 235.786 536.522 236.448 536.644 237.937C536.766 239.371 536.644 240.088 536.644 240.088C536.644 240.088 537.744 240.364 537.928 242.404C538.172 244.445 537.194 245.879 537.194 245.879C537.194 245.879 537.194 248.085 534.75 248.471C532.305 248.912 484.758 248.471 484.758 248.471C484.758 248.471 483.352 246.485 484.208 245.382C485.064 244.334 485.614 244.334 485.614 244.334C485.614 244.334 485.369 242.956 485.369 242.68C485.369 242.404 486.958 241.798 486.958 241.798C486.958 241.798 485.858 241.026 486.408 240.364C486.958 239.702 487.692 239.371 487.692 239.371C487.692 239.371 485.797 237.441 488.547 236.889Z")
+ ) ~fill:(("#0D1522"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M533.038 238.599C533.038 238.599 533.161 232.919 536.827 232.367C540.494 231.816 540.8 232.367 540.8 232.367C540.8 232.367 541.105 230.327 542.817 229.996C544.528 229.61 544.956 229.996 544.956 229.996C544.956 229.996 545.995 226.852 549.234 226.08C552.473 225.308 554.123 229.389 554.123 229.389C554.123 229.389 556.078 228.562 557.667 229.113C559.318 229.665 558.645 231.209 558.645 231.209C558.645 231.209 559.929 230.823 560.234 231.761C560.54 232.698 560.234 233.139 560.234 233.139C560.234 233.139 561.884 232.422 562.801 234.132C563.718 235.897 563.412 236.889 563.412 236.889C563.412 236.889 564.94 235.897 565.429 237.441C565.918 238.985 565.918 239.537 565.918 239.537C565.918 239.537 569.157 238.709 569.401 239.978C569.585 241.246 567.446 242.459 567.446 242.459C567.446 242.459 568.851 243.452 568.851 244.665C568.851 245.879 568.118 246.044 568.118 246.32C568.118 246.596 568.607 247.588 568.546 248.36C568.424 249.077 530.655 248.36 530.655 248.36L533.038 238.599Z")
+ ) ~fill:(("#0D1522"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M588.713 238.599C588.713 238.599 588.836 232.919 592.503 232.367C596.169 231.816 596.475 232.367 596.475 232.367C596.475 232.367 596.781 230.327 598.492 229.996C600.203 229.61 600.631 229.996 600.631 229.996C600.631 229.996 601.67 226.852 604.909 226.08C608.148 225.308 609.798 229.389 609.798 229.389C609.798 229.389 611.754 228.562 613.343 229.113C614.993 229.665 614.32 231.209 614.32 231.209C614.32 231.209 615.604 230.823 615.909 231.761C616.215 232.698 615.909 233.139 615.909 233.139C615.909 233.139 617.559 232.422 618.476 234.132C619.393 235.897 619.087 236.889 619.087 236.889C619.087 236.889 620.615 235.897 621.104 237.441C621.593 238.985 621.593 239.537 621.593 239.537C621.593 239.537 624.832 238.709 625.077 239.978C625.26 241.246 623.121 242.459 623.121 242.459C623.121 242.459 624.527 243.452 624.527 244.665C624.527 245.879 623.793 246.044 623.793 246.32C623.793 246.596 624.282 247.588 624.221 248.36C624.099 249.077 586.33 248.36 586.33 248.36L588.713 238.599Z")
+ ) ~fill:(("#0D1522"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M490.808 240.805C490.808 240.805 490.747 236.338 487.875 235.897C485.003 235.456 484.758 235.897 484.758 235.897C484.758 235.897 484.514 234.297 483.169 234.022C481.825 233.746 481.519 234.022 481.519 234.022C481.519 234.022 480.725 231.54 478.158 230.933C475.591 230.327 474.308 233.525 474.308 233.525C474.308 233.525 472.78 232.864 471.496 233.305C470.213 233.746 470.702 234.959 470.702 234.959C470.702 234.959 469.724 234.684 469.48 235.4C469.235 236.117 469.48 236.503 469.48 236.503C469.48 236.503 468.196 235.952 467.463 237.276C466.729 238.654 466.974 239.426 466.974 239.426C466.974 239.426 465.752 238.654 465.385 239.868C464.957 241.081 464.957 241.522 464.957 241.522C464.957 241.522 462.39 240.86 462.268 241.908C462.085 242.901 463.796 243.838 463.796 243.838C463.796 243.838 462.696 244.61 462.696 245.548C462.696 246.485 463.246 246.651 463.246 246.816C463.246 246.982 462.818 247.809 462.94 248.416C463.001 248.967 492.581 248.416 492.581 248.416L490.808 240.805Z")
+ ) ~fill:(("#0D1522"))
+ ~children:[] ())
+ [@JSX ]);
+ ((path
+ ~d:(("M563.473 241.687C563.473 241.687 561.212 233.967 562.373 232.202C563.473 230.437 563.473 230.437 563.473 230.437C563.473 230.437 562.373 228.617 563.473 227.955C564.573 227.294 564.573 227.294 564.573 227.294C564.573 227.294 564.573 224.702 566.223 224.095C567.874 223.433 568.362 224.095 568.362 224.095C568.362 224.095 568.485 220.124 570.074 219.959C571.724 219.793 571.907 220.676 571.907 220.676C571.907 220.676 571.724 218.029 573.924 217.091C576.185 216.154 577.896 218.47 577.896 218.47C577.896 218.47 580.524 218.58 580.952 220.29C581.38 222.054 580.952 223.047 580.952 223.047C580.952 223.047 583.091 223.654 582.785 225.915C582.48 228.176 582.48 228.176 582.48 228.176C582.48 228.176 586.33 226.687 587.369 228.176C588.408 229.665 587.369 230.547 587.369 230.547C587.369 230.547 589.08 230.382 589.08 231.926C589.08 233.47 589.08 234.297 589.08 234.297C589.08 234.297 591.097 233.856 591.341 235.566C591.525 237.331 590.425 238.489 590.425 238.489C590.425 238.489 591.464 239.867 591.341 241.081C591.219 242.294 590.486 242.901 590.486 242.901C590.486 242.901 592.442 242.625 592.93 244.445C593.419 246.265 592.442 246.926 592.442 246.926L593.114 248.526H565.184L563.473 241.687Z")
+ ) ~fill:(("#0D1522"))
+ ~children:[] ())
+ [@JSX ]);
+ ((defs
+ ~children:[((linearGradient ~id:(("paint0_linear")
+ ) ~x1:(("374.5")
+ ) ~y1:(("180.64")
+ ) ~x2:(("362.765")
+ ) ~y2:(("435.722")
+ )
+ ~gradientUnits:(("userSpaceOnUse")
+ )
+ ~children:[((stop ~offset:(("0.0658436")
+ )
+ ~stopColor:(("#3A7DDD")
+ )
+ ~children:[] ())
+ [@JSX ]);
+ ((stop ~offset:(("0.4001")
+ )
+ ~stopColor:(("#265291")
+ )
+ ~children:[] ())
+ [@JSX ]);
+ ((stop ~offset:(("0.571")
+ )
+ ~stopColor:(("#1D3E6E")
+ )
+ ~children:[] ())
+ [@JSX ]);
+ ((stop ~offset:(("0.7224")
+ )
+ ~stopColor:(("#173156")
+ )
+ ~children:[] ())
+ [@JSX ]);
+ ((stop ~offset:(("0.8486")
+ )
+ ~stopColor:(("#10213A")
+ )
+ ~children:[] ())
+ [@JSX ]);
+ ((stop ~offset:(("0.9342")
+ )
+ ~stopColor:(("#091321")
+ )
+ ~children:[] ())
+ [@JSX ])] ())
+ [@JSX ])] ())
+ [@JSX ])] ())
+ [@JSX ])[@@react.component ]
\ No newline at end of file
diff --git a/res_syntax/benchmarks/data/HeroGraphic.res b/res_syntax/benchmarks/data/HeroGraphic.res
new file mode 100644
index 0000000000..53decfa8a0
--- /dev/null
+++ b/res_syntax/benchmarks/data/HeroGraphic.res
@@ -0,0 +1,2009 @@
+%bs.raw(`require('./HeroGraphic.css')`)
+
+@react.component
+let make = (~width="760", ~height="380") =>
+
diff --git a/res_syntax/benchmarks/data/Napkinscript.ml b/res_syntax/benchmarks/data/Napkinscript.ml
new file mode 100644
index 0000000000..f36dd9af5f
--- /dev/null
+++ b/res_syntax/benchmarks/data/Napkinscript.ml
@@ -0,0 +1,19541 @@
+module MiniBuffer : sig
+ type t
+ val add_char : t -> char -> unit
+ val add_string : t -> string -> unit
+ val contents : t -> string
+ val create : int -> t
+ val flush_newline : t -> unit
+ val length : t -> int
+ val unsafe_get : t -> int -> char
+end = struct
+ type t = {
+ mutable buffer : bytes;
+ mutable position : int;
+ mutable length : int;
+ }
+
+ let create n =
+ let n = if n < 1 then 1 else n in
+ let s = (Bytes.create [@doesNotRaise]) n in
+ {buffer = s; position = 0; length = n}
+
+ let contents b = Bytes.sub_string b.buffer 0 b.position
+
+ let unsafe_get b ofs =
+ Bytes.unsafe_get b.buffer ofs
+
+ let length b = b.position
+
+ (* Can't be called directly, don't add to the interface *)
+ let resize_internal b more =
+ let len = b.length in
+ let new_len = ref len in
+ while b.position + more > !new_len do new_len := 2 * !new_len done;
+ if !new_len > Sys.max_string_length then begin
+ if b.position + more <= Sys.max_string_length
+ then new_len := Sys.max_string_length
+ end;
+ let new_buffer = (Bytes.create [@doesNotRaise]) !new_len in
+ (* PR#6148: let's keep using [blit] rather than [unsafe_blit] in
+ this tricky function that is slow anyway. *)
+ Bytes.blit b.buffer 0 new_buffer 0 b.position [@doesNotRaise];
+ b.buffer <- new_buffer;
+ b.length <- !new_len
+
+ let add_char b c =
+ let pos = b.position in
+ if pos >= b.length then resize_internal b 1;
+ Bytes.unsafe_set b.buffer pos c;
+ b.position <- pos + 1
+
+ let add_string b s =
+ let len = String.length s in
+ let new_position = b.position + len in
+ if new_position > b.length then resize_internal b len;
+ Bytes.blit_string s 0 b.buffer b.position len [@doesNotRaise];
+ b.position <- new_position
+
+ (* adds newline and trims all preceding whitespace *)
+ let flush_newline b =
+ let position = ref (b.position) in
+ while (Bytes.unsafe_get b.buffer (!position - 1)) = ' ' && !position >= 0 do
+ position := !position - 1;
+ done;
+ b.position <- !position;
+ add_char b '\n'
+end
+
+module Doc = struct
+ type mode = Break | Flat
+
+ type lineStyle =
+ | Classic (* fits? -> replace with space *)
+ | Soft (* fits? -> replaced with nothing *)
+ | Hard (* always included, forces breaks in parents *)
+
+ type t =
+ | Nil
+ | Text of string
+ | Concat of t list
+ | Indent of t
+ | IfBreaks of {yes: t; no: t}
+ | LineSuffix of t
+ | LineBreak of lineStyle
+ | Group of {shouldBreak: bool; doc: t}
+ | CustomLayout of t list
+ | BreakParent
+ (* | Cursor *)
+
+ let nil = Nil
+ let line = LineBreak Classic
+ let hardLine = LineBreak Hard
+ let softLine = LineBreak Soft
+ let text s = Text s
+ let concat l = Concat l
+ let indent d = Indent d
+ let ifBreaks t f = IfBreaks {yes = t; no = f}
+ let lineSuffix d = LineSuffix d
+ let group d = Group {shouldBreak = false; doc = d}
+ let breakableGroup ~forceBreak d = Group {shouldBreak = forceBreak; doc = d}
+ let customLayout gs = CustomLayout gs
+ let breakParent = BreakParent
+ (* let cursor = Cursor *)
+
+ let space = Text " "
+ let comma = Text ","
+ let dot = Text "."
+ let dotdot = Text ".."
+ let dotdotdot = Text "..."
+ let lessThan = Text "<"
+ let greaterThan = Text ">"
+ let lbrace = Text "{"
+ let rbrace = Text "}"
+ let lparen = Text "("
+ let rparen = Text ")"
+ let lbracket = Text "["
+ let rbracket = Text "]"
+ let question = Text "?"
+ let tilde = Text "~"
+ let equal = Text "="
+ let trailingComma = IfBreaks {yes = comma; no = nil}
+ let doubleQuote = Text "\""
+
+ let propagateForcedBreaks doc =
+ let rec walk doc = match doc with
+ | Text _ | Nil | LineSuffix _ ->
+ (false, doc)
+ | BreakParent ->
+ (true, Nil)
+ | LineBreak Hard ->
+ (true, doc)
+ | LineBreak (Classic | Soft) ->
+ (false, doc)
+ | Indent children ->
+ let (childForcesBreak, newChildren) = walk children in
+ (childForcesBreak, Indent newChildren)
+ | IfBreaks {yes = trueDoc; no = falseDoc} ->
+ let (falseForceBreak, falseDoc) = walk falseDoc in
+ if falseForceBreak then
+ let (_, trueDoc) = walk trueDoc in
+ (true, trueDoc)
+ else
+ let forceBreak, trueDoc = walk trueDoc in
+ (forceBreak, IfBreaks {yes = trueDoc; no = falseDoc})
+ | Group {shouldBreak = forceBreak; doc = children} ->
+ let (childForcesBreak, newChildren) = walk children in
+ let shouldBreak = forceBreak || childForcesBreak in
+ (shouldBreak, Group {shouldBreak; doc = newChildren})
+ | Concat children ->
+ let (forceBreak, newChildren) = List.fold_left (fun (forceBreak, newChildren) child ->
+ let (childForcesBreak, newChild) = walk child in
+ (forceBreak || childForcesBreak, newChild::newChildren)
+ ) (false, []) children
+ in
+ (forceBreak, Concat (List.rev newChildren))
+ | CustomLayout children ->
+ (* When using CustomLayout, we don't want to propagate forced breaks
+ * from the children up. By definition it picks the first layout that fits
+ * otherwise it takes the last of the list.
+ * However we do want to propagate forced breaks in the sublayouts. They
+ * might need to be broken. We just don't propagate them any higher here *)
+ let children = match walk (Concat children) with
+ | (_, Concat children) -> children
+ | _ -> assert false
+ in
+ (false, CustomLayout children)
+ in
+ let (_, processedDoc) = walk doc in
+ processedDoc
+
+ let join ~sep docs =
+ let rec loop acc sep docs =
+ match docs with
+ | [] -> List.rev acc
+ | [x] -> List.rev (x::acc)
+ | x::xs -> loop (sep::x::acc) sep xs
+ in
+ Concat(loop [] sep docs)
+
+ let rec fits w doc = match doc with
+ | _ when w < 0 -> false
+ | [] -> true
+ | (_ind, _mode, Text txt)::rest -> fits (w - String.length txt) rest
+ | (ind, mode, Indent doc)::rest -> fits w ((ind + 2, mode, doc)::rest)
+ | (_ind, Flat, LineBreak break)::rest ->
+ if break = Hard then true
+ else
+ let w = if break = Classic then w - 1 else w in
+ fits w rest
+ | (_ind, _mode, Nil)::rest -> fits w rest
+ | (_ind, Break, LineBreak _break)::_rest -> true
+ | (ind, mode, Group {shouldBreak = forceBreak; doc})::rest ->
+ let mode = if forceBreak then Break else mode in
+ fits w ((ind, mode, doc)::rest)
+ | (ind, mode, IfBreaks {yes = breakDoc; no = flatDoc})::rest ->
+ if mode = Break then
+ fits w ((ind, mode, breakDoc)::rest)
+ else
+ fits w ((ind, mode, flatDoc)::rest)
+ | (ind, mode, Concat docs)::rest ->
+ let ops = List.map (fun doc -> (ind, mode, doc)) docs in
+ fits w (List.append ops rest)
+ (* | (_ind, _mode, Cursor)::rest -> fits w rest *)
+ | (_ind, _mode, LineSuffix _)::rest -> fits w rest
+ | (_ind, _mode, BreakParent)::rest -> fits w rest
+ | (ind, mode, CustomLayout (hd::_))::rest ->
+ (* TODO: if we have nested custom layouts, what we should do here? *)
+ fits w ((ind, mode, hd)::rest)
+ | (_ind, _mode, CustomLayout _)::rest ->
+ fits w rest
+
+ let toString ~width doc =
+ let doc = propagateForcedBreaks doc in
+ let buffer = MiniBuffer.create 1000 in
+
+ let rec process ~pos lineSuffices stack =
+ match stack with
+ | ((ind, mode, doc) as cmd)::rest ->
+ begin match doc with
+ | Nil | BreakParent ->
+ process ~pos lineSuffices rest
+ | Text txt ->
+ MiniBuffer.add_string buffer txt;
+ process ~pos:(String.length txt + pos) lineSuffices rest
+ | LineSuffix doc ->
+ process ~pos ((ind, mode, doc)::lineSuffices) rest
+ | Concat docs ->
+ let ops = List.map (fun doc -> (ind, mode, doc)) docs in
+ process ~pos lineSuffices (List.append ops rest)
+ | Indent doc ->
+ process ~pos lineSuffices ((ind + 2, mode, doc)::rest)
+ | IfBreaks {yes = breakDoc; no = flatDoc} ->
+ if mode = Break then
+ process ~pos lineSuffices ((ind, mode, breakDoc)::rest)
+ else
+ process ~pos lineSuffices ((ind, mode, flatDoc)::rest)
+ | LineBreak lineStyle ->
+ if mode = Break then (
+ begin match lineSuffices with
+ | [] ->
+ MiniBuffer.flush_newline buffer;
+ MiniBuffer.add_string buffer (String.make ind ' ' [@doesNotRaise]);
+ process ~pos:ind [] rest
+ | _docs ->
+ process ~pos:ind [] (List.concat [List.rev lineSuffices; cmd::rest])
+ end
+ ) else (* mode = Flat *) (
+ let pos = match lineStyle with
+ | Classic -> MiniBuffer.add_string buffer " "; pos + 1
+ | Hard -> MiniBuffer.flush_newline buffer; 0
+ | Soft -> pos
+ in
+ process ~pos lineSuffices rest
+ )
+ | Group {shouldBreak; doc} ->
+ if shouldBreak || not (fits (width - pos) ((ind, Flat, doc)::rest)) then
+ process ~pos lineSuffices ((ind, Break, doc)::rest)
+ else
+ process ~pos lineSuffices ((ind, Flat, doc)::rest)
+ | CustomLayout docs ->
+ let rec findGroupThatFits groups = match groups with
+ | [] -> Nil
+ | [lastGroup] -> lastGroup
+ | doc::docs ->
+ if (fits (width - pos) ((ind, Flat, doc)::rest)) then
+ doc
+ else
+ findGroupThatFits docs
+ in
+ let doc = findGroupThatFits docs in
+ process ~pos lineSuffices ((ind, Flat, doc)::rest)
+ end
+ | [] ->
+ begin match lineSuffices with
+ | [] -> ()
+ | suffices ->
+ process ~pos:0 [] (List.rev suffices)
+ end
+ in
+ process ~pos:0 [] [0, Flat, doc];
+
+ let len = MiniBuffer.length buffer in
+ if len > 0 && MiniBuffer.unsafe_get buffer (len - 1) != '\n' then
+ MiniBuffer.add_char buffer '\n';
+ MiniBuffer.contents buffer
+
+
+ let debug t =
+ let rec toDoc = function
+ | Nil -> text "nil"
+ | BreakParent -> text "breakparent"
+ | Text txt -> text ("text(" ^ txt ^ ")")
+ | LineSuffix doc -> group(
+ concat [
+ text "linesuffix(";
+ indent (
+ concat [line; toDoc doc]
+ );
+ line;
+ text ")"
+ ]
+ )
+ | Concat docs -> group(
+ concat [
+ text "concat(";
+ indent (
+ concat [
+ line;
+ join ~sep:(concat [text ","; line])
+ (List.map toDoc docs) ;
+ ]
+ );
+ line;
+ text ")"
+ ]
+ )
+ | CustomLayout docs -> group(
+ concat [
+ text "customLayout(";
+ indent (
+ concat [
+ line;
+ join ~sep:(concat [text ","; line])
+ (List.map toDoc docs) ;
+ ]
+ );
+ line;
+ text ")"
+ ]
+ )
+ | Indent doc ->
+ concat [
+ text "indent(";
+ softLine;
+ toDoc doc;
+ softLine;
+ text ")";
+ ]
+ | IfBreaks {yes = trueDoc; no = falseDoc} ->
+ group(
+ concat [
+ text "ifBreaks(";
+ indent (
+ concat [
+ line;
+ toDoc trueDoc;
+ concat [text ","; line];
+ toDoc falseDoc;
+ ]
+ );
+ line;
+ text ")"
+ ]
+ )
+ | LineBreak break ->
+ let breakTxt = match break with
+ | Classic -> "Classic"
+ | Soft -> "Soft"
+ | Hard -> "Hard"
+ in
+ text ("LineBreak(" ^ breakTxt ^ ")")
+ | Group {shouldBreak; doc} ->
+ group(
+ concat [
+ text "Group(";
+ indent (
+ concat [
+ line;
+ text ("shouldBreak: " ^ (string_of_bool shouldBreak));
+ concat [text ","; line];
+ toDoc doc;
+ ]
+ );
+ line;
+ text ")"
+ ]
+ )
+ in
+ let doc = toDoc t in
+ toString ~width:10 doc |> print_endline
+ [@@live]
+end
+
+module Sexp: sig
+ type t
+
+ val atom: string -> t
+ val list: t list -> t
+ val toString: t -> string
+end = struct
+ type t =
+ | Atom of string
+ | List of t list
+
+ let atom s = Atom s
+ let list l = List l
+
+ let rec toDoc t =
+ match t with
+ | Atom s -> Doc.text s
+ | List [] -> Doc.text "()"
+ | List [sexpr] -> Doc.concat [Doc.lparen; toDoc sexpr; Doc.rparen;]
+ | List (hd::tail) ->
+ Doc.group (
+ Doc.concat [
+ Doc.lparen;
+ toDoc hd;
+ Doc.indent (
+ Doc.concat [
+ Doc.line;
+ Doc.join ~sep:Doc.line (List.map toDoc tail);
+ ]
+ );
+ Doc.rparen;
+ ]
+ )
+
+ let toString sexpr =
+ let doc = toDoc sexpr in
+ Doc.toString ~width:80 doc
+end
+
+module SexpAst: sig
+ val implementation: Parsetree.structure -> Sexp.t
+ val interface: Parsetree.signature -> Sexp.t
+end = struct
+ open Parsetree
+
+ let mapEmpty ~f items =
+ match items with
+ | [] -> [Sexp.list []]
+ | items -> List.map f items
+
+ let string txt =
+ Sexp.atom ("\"" ^ txt ^ "\"")
+
+ let char c =
+ Sexp.atom ("'" ^ (Char.escaped c) ^ "'")
+
+ let optChar oc =
+ match oc with
+ | None -> Sexp.atom "None"
+ | Some c ->
+ Sexp.list [
+ Sexp.atom "Some";
+ char c
+ ]
+
+ let longident l =
+ let rec loop l = match l with
+ | Longident.Lident ident -> Sexp.list [
+ Sexp.atom "Lident";
+ string ident;
+ ]
+ | Longident.Ldot (lident, txt) ->
+ Sexp.list [
+ Sexp.atom "Ldot";
+ loop lident;
+ string txt;
+ ]
+ | Longident.Lapply (l1, l2) ->
+ Sexp.list [
+ Sexp.atom "Lapply";
+ loop l1;
+ loop l2;
+ ]
+ in
+ Sexp.list [
+ Sexp.atom "longident";
+ loop l;
+ ]
+
+ let closedFlag flag = match flag with
+ | Asttypes.Closed -> Sexp.atom "Closed"
+ | Open -> Sexp.atom "Open"
+
+ let directionFlag flag = match flag with
+ | Asttypes.Upto -> Sexp.atom "Upto"
+ | Downto -> Sexp.atom "Downto"
+
+ let recFlag flag = match flag with
+ | Asttypes.Recursive -> Sexp.atom "Recursive"
+ | Nonrecursive -> Sexp.atom "Nonrecursive"
+
+ let overrideFlag flag = match flag with
+ | Asttypes.Override -> Sexp.atom "Override"
+ | Fresh -> Sexp.atom "Fresh"
+
+ let privateFlag flag = match flag with
+ | Asttypes.Public -> Sexp.atom "Public"
+ | Private -> Sexp.atom "Private"
+
+ let mutableFlag flag = match flag with
+ | Asttypes.Immutable -> Sexp.atom "Immutable"
+ | Mutable -> Sexp.atom "Mutable"
+
+ let variance v = match v with
+ | Asttypes.Covariant -> Sexp.atom "Covariant"
+ | Contravariant -> Sexp.atom "Contravariant"
+ | Invariant -> Sexp.atom "Invariant"
+
+ let argLabel lbl = match lbl with
+ | Asttypes.Nolabel -> Sexp.atom "Nolabel"
+ | Labelled txt -> Sexp.list [
+ Sexp.atom "Labelled";
+ string txt;
+ ]
+ | Optional txt -> Sexp.list [
+ Sexp.atom "Optional";
+ string txt;
+ ]
+
+ let constant c =
+ let sexpr = match c with
+ | Pconst_integer (txt, tag) ->
+ Sexp.list [
+ Sexp.atom "Pconst_integer";
+ string txt;
+ optChar tag;
+ ]
+ | Pconst_char c ->
+ Sexp.list [
+ Sexp.atom "Pconst_char";
+ Sexp.atom (Char.escaped c);
+ ]
+ | Pconst_string (txt, tag) ->
+ Sexp.list [
+ Sexp.atom "Pconst_string";
+ string txt;
+ match tag with
+ | Some txt -> Sexp.list [
+ Sexp.atom "Some";
+ string txt;
+ ]
+ | None -> Sexp.atom "None";
+ ]
+ | Pconst_float (txt, tag) ->
+ Sexp.list [
+ Sexp.atom "Pconst_float";
+ string txt;
+ optChar tag;
+ ]
+ in
+ Sexp.list [
+ Sexp.atom "constant";
+ sexpr
+ ]
+
+ let rec structure s =
+ Sexp.list (
+ (Sexp.atom "structure")::(List.map structureItem s)
+ )
+
+ and structureItem si =
+ let desc = match si.pstr_desc with
+ | Pstr_eval (expr, attrs) ->
+ Sexp.list [
+ Sexp.atom "Pstr_eval";
+ expression expr;
+ attributes attrs;
+ ]
+ | Pstr_value (flag, vbs) ->
+ Sexp.list [
+ Sexp.atom "Pstr_value";
+ recFlag flag;
+ Sexp.list (mapEmpty ~f:valueBinding vbs)
+ ]
+ | Pstr_primitive (vd) ->
+ Sexp.list [
+ Sexp.atom "Pstr_primitive";
+ valueDescription vd;
+ ]
+ | Pstr_type (flag, tds) ->
+ Sexp.list [
+ Sexp.atom "Pstr_type";
+ recFlag flag;
+ Sexp.list (mapEmpty ~f:typeDeclaration tds)
+ ]
+ | Pstr_typext typext ->
+ Sexp.list [
+ Sexp.atom "Pstr_type";
+ typeExtension typext;
+ ]
+ | Pstr_exception ec ->
+ Sexp.list [
+ Sexp.atom "Pstr_exception";
+ extensionConstructor ec;
+ ]
+ | Pstr_module mb ->
+ Sexp.list [
+ Sexp.atom "Pstr_module";
+ moduleBinding mb;
+ ]
+ | Pstr_recmodule mbs ->
+ Sexp.list [
+ Sexp.atom "Pstr_recmodule";
+ Sexp.list (mapEmpty ~f:moduleBinding mbs);
+ ]
+ | Pstr_modtype modTypDecl ->
+ Sexp.list [
+ Sexp.atom "Pstr_modtype";
+ moduleTypeDeclaration modTypDecl;
+ ]
+ | Pstr_open openDesc ->
+ Sexp.list [
+ Sexp.atom "Pstr_open";
+ openDescription openDesc;
+ ]
+ | Pstr_class _ -> Sexp.atom "Pstr_class"
+ | Pstr_class_type _ -> Sexp.atom "Pstr_class_type"
+ | Pstr_include id ->
+ Sexp.list [
+ Sexp.atom "Pstr_include";
+ includeDeclaration id;
+ ]
+ | Pstr_attribute attr ->
+ Sexp.list [
+ Sexp.atom "Pstr_attribute";
+ attribute attr;
+ ]
+ | Pstr_extension (ext, attrs) ->
+ Sexp.list [
+ Sexp.atom "Pstr_extension";
+ extension ext;
+ attributes attrs;
+ ]
+ in
+ Sexp.list [
+ Sexp.atom "structure_item";
+ desc;
+ ]
+
+ and includeDeclaration id =
+ Sexp.list [
+ Sexp.atom "include_declaration";
+ moduleExpression id.pincl_mod;
+ attributes id.pincl_attributes;
+ ]
+
+ and openDescription od =
+ Sexp.list [
+ Sexp.atom "open_description";
+ longident od.popen_lid.Asttypes.txt;
+ attributes od.popen_attributes;
+ ]
+
+ and moduleTypeDeclaration mtd =
+ Sexp.list [
+ Sexp.atom "module_type_declaration";
+ string mtd.pmtd_name.Asttypes.txt;
+ (match mtd.pmtd_type with
+ | None -> Sexp.atom "None"
+ | Some modType -> Sexp.list [
+ Sexp.atom "Some";
+ moduleType modType;
+ ]);
+ attributes mtd.pmtd_attributes;
+ ]
+
+ and moduleBinding mb =
+ Sexp.list [
+ Sexp.atom "module_binding";
+ string mb.pmb_name.Asttypes.txt;
+ moduleExpression mb.pmb_expr;
+ attributes mb.pmb_attributes;
+ ]
+
+ and moduleExpression me =
+ let desc = match me.pmod_desc with
+ | Pmod_ident modName ->
+ Sexp.list [
+ Sexp.atom "Pmod_ident";
+ longident modName.Asttypes.txt;
+ ]
+ | Pmod_structure s ->
+ Sexp.list [
+ Sexp.atom "Pmod_structure";
+ structure s;
+ ]
+ | Pmod_functor (lbl, optModType, modExpr) ->
+ Sexp.list [
+ Sexp.atom "Pmod_functor";
+ string lbl.Asttypes.txt;
+ (match optModType with
+ | None -> Sexp.atom "None"
+ | Some modType -> Sexp.list [
+ Sexp.atom "Some";
+ moduleType modType;
+ ]);
+ moduleExpression modExpr;
+ ]
+ | Pmod_apply (callModExpr, modExprArg) ->
+ Sexp.list [
+ Sexp.atom "Pmod_apply";
+ moduleExpression callModExpr;
+ moduleExpression modExprArg;
+ ]
+ | Pmod_constraint (modExpr, modType) ->
+ Sexp.list [
+ Sexp.atom "Pmod_constraint";
+ moduleExpression modExpr;
+ moduleType modType;
+ ]
+ | Pmod_unpack expr ->
+ Sexp.list [
+ Sexp.atom "Pmod_unpack";
+ expression expr;
+ ]
+ | Pmod_extension ext ->
+ Sexp.list [
+ Sexp.atom "Pmod_extension";
+ extension ext;
+ ]
+ in
+ Sexp.list [
+ Sexp.atom "module_expr";
+ desc;
+ attributes me.pmod_attributes;
+ ]
+
+ and moduleType mt =
+ let desc = match mt.pmty_desc with
+ | Pmty_ident longidentLoc ->
+ Sexp.list [
+ Sexp.atom "Pmty_ident";
+ longident longidentLoc.Asttypes.txt;
+ ]
+ | Pmty_signature s ->
+ Sexp.list [
+ Sexp.atom "Pmty_signature";
+ signature s;
+ ]
+ | Pmty_functor (lbl, optModType, modType) ->
+ Sexp.list [
+ Sexp.atom "Pmty_functor";
+ string lbl.Asttypes.txt;
+ (match optModType with
+ | None -> Sexp.atom "None"
+ | Some modType -> Sexp.list [
+ Sexp.atom "Some";
+ moduleType modType;
+ ]);
+ moduleType modType;
+ ]
+ | Pmty_alias longidentLoc ->
+ Sexp.list [
+ Sexp.atom "Pmty_alias";
+ longident longidentLoc.Asttypes.txt;
+ ]
+ | Pmty_extension ext ->
+ Sexp.list [
+ Sexp.atom "Pmty_extension";
+ extension ext;
+ ]
+ | Pmty_typeof modExpr ->
+ Sexp.list [
+ Sexp.atom "Pmty_typeof";
+ moduleExpression modExpr;
+ ]
+ | Pmty_with (modType, withConstraints) ->
+ Sexp.list [
+ Sexp.atom "Pmty_with";
+ moduleType modType;
+ Sexp.list (mapEmpty ~f:withConstraint withConstraints);
+ ]
+ in
+ Sexp.list [
+ Sexp.atom "module_type";
+ desc;
+ attributes mt.pmty_attributes;
+ ]
+
+ and withConstraint wc = match wc with
+ | Pwith_type (longidentLoc, td) ->
+ Sexp.list [
+      Sexp.atom "Pwith_type";
+ longident longidentLoc.Asttypes.txt;
+ typeDeclaration td;
+ ]
+ | Pwith_module (l1, l2) ->
+ Sexp.list [
+ Sexp.atom "Pwith_module";
+ longident l1.Asttypes.txt;
+ longident l2.Asttypes.txt;
+ ]
+ | Pwith_typesubst (longidentLoc, td) ->
+ Sexp.list [
+ Sexp.atom "Pwith_typesubst";
+ longident longidentLoc.Asttypes.txt;
+ typeDeclaration td;
+ ]
+ | Pwith_modsubst (l1, l2) ->
+ Sexp.list [
+ Sexp.atom "Pwith_modsubst";
+ longident l1.Asttypes.txt;
+ longident l2.Asttypes.txt;
+ ]
+
+ and signature s =
+ Sexp.list (
+ (Sexp.atom "signature")::(List.map signatureItem s)
+ )
+
+ and signatureItem si =
+ let descr = match si.psig_desc with
+ | Psig_value vd ->
+ Sexp.list [
+ Sexp.atom "Psig_value";
+ valueDescription vd;
+ ]
+ | Psig_type (flag, typeDeclarations) ->
+ Sexp.list [
+ Sexp.atom "Psig_type";
+ recFlag flag;
+ Sexp.list (mapEmpty ~f:typeDeclaration typeDeclarations);
+ ]
+ | Psig_typext typExt ->
+ Sexp.list [
+ Sexp.atom "Psig_typext";
+ typeExtension typExt;
+ ]
+ | Psig_exception extConstr ->
+ Sexp.list [
+ Sexp.atom "Psig_exception";
+ extensionConstructor extConstr;
+ ]
+ | Psig_module modDecl ->
+ Sexp.list [
+ Sexp.atom "Psig_module";
+ moduleDeclaration modDecl;
+ ]
+ | Psig_recmodule modDecls ->
+ Sexp.list [
+ Sexp.atom "Psig_recmodule";
+ Sexp.list (mapEmpty ~f:moduleDeclaration modDecls);
+ ]
+ | Psig_modtype modTypDecl ->
+ Sexp.list [
+ Sexp.atom "Psig_modtype";
+ moduleTypeDeclaration modTypDecl;
+ ]
+ | Psig_open openDesc ->
+ Sexp.list [
+ Sexp.atom "Psig_open";
+ openDescription openDesc;
+ ]
+ | Psig_include inclDecl ->
+ Sexp.list [
+ Sexp.atom "Psig_include";
+ includeDescription inclDecl
+ ]
+ | Psig_class _ -> Sexp.list [Sexp.atom "Psig_class";]
+ | Psig_class_type _ -> Sexp.list [ Sexp.atom "Psig_class_type"; ]
+ | Psig_attribute attr ->
+ Sexp.list [
+ Sexp.atom "Psig_attribute";
+ attribute attr;
+ ]
+ | Psig_extension (ext, attrs) ->
+ Sexp.list [
+ Sexp.atom "Psig_extension";
+ extension ext;
+ attributes attrs;
+ ]
+ in
+ Sexp.list [
+ Sexp.atom "signature_item";
+ descr;
+ ]
+
+ and includeDescription id =
+ Sexp.list [
+ Sexp.atom "include_description";
+ moduleType id.pincl_mod;
+ attributes id.pincl_attributes;
+ ]
+
+ and moduleDeclaration md =
+ Sexp.list [
+ Sexp.atom "module_declaration";
+ string md.pmd_name.Asttypes.txt;
+ moduleType md.pmd_type;
+ attributes md.pmd_attributes;
+ ]
+
+ and valueBinding vb =
+ Sexp.list [
+ Sexp.atom "value_binding";
+ pattern vb.pvb_pat;
+ expression vb.pvb_expr;
+ attributes vb.pvb_attributes;
+ ]
+
+ and valueDescription vd =
+ Sexp.list [
+ Sexp.atom "value_description";
+ string vd.pval_name.Asttypes.txt;
+ coreType vd.pval_type;
+ Sexp.list (mapEmpty ~f:string vd.pval_prim);
+ attributes vd.pval_attributes;
+ ]
+
+ and typeDeclaration td =
+ Sexp.list [
+ Sexp.atom "type_declaration";
+ string td.ptype_name.Asttypes.txt;
+ Sexp.list [
+ Sexp.atom "ptype_params";
+ Sexp.list (mapEmpty ~f:(fun (typexpr, var) ->
+ Sexp.list [
+ coreType typexpr;
+ variance var;
+ ]) td.ptype_params)
+ ];
+ Sexp.list [
+ Sexp.atom "ptype_cstrs";
+ Sexp.list (mapEmpty ~f:(fun (typ1, typ2, _loc) ->
+ Sexp.list [
+ coreType typ1;
+ coreType typ2;
+ ]) td.ptype_cstrs)
+ ];
+ Sexp.list [
+ Sexp.atom "ptype_kind";
+ typeKind td.ptype_kind;
+ ];
+ Sexp.list [
+ Sexp.atom "ptype_manifest";
+ match td.ptype_manifest with
+ | None -> Sexp.atom "None"
+ | Some typ -> Sexp.list [
+ Sexp.atom "Some";
+ coreType typ;
+ ]
+ ];
+ Sexp.list [
+ Sexp.atom "ptype_private";
+ privateFlag td.ptype_private;
+ ];
+ attributes td.ptype_attributes;
+ ]
+
+ and extensionConstructor ec =
+ Sexp.list [
+ Sexp.atom "extension_constructor";
+ string ec.pext_name.Asttypes.txt;
+ extensionConstructorKind ec.pext_kind;
+ attributes ec.pext_attributes;
+ ]
+
+ and extensionConstructorKind kind = match kind with
+ | Pext_decl (args, optTypExpr) ->
+ Sexp.list [
+ Sexp.atom "Pext_decl";
+ constructorArguments args;
+ match optTypExpr with
+ | None -> Sexp.atom "None"
+ | Some typ -> Sexp.list [
+ Sexp.atom "Some";
+ coreType typ;
+ ]
+ ]
+ | Pext_rebind longidentLoc ->
+ Sexp.list [
+ Sexp.atom "Pext_rebind";
+ longident longidentLoc.Asttypes.txt;
+ ]
+
+ and typeExtension te =
+ Sexp.list [
+ Sexp.atom "type_extension";
+ Sexp.list [
+ Sexp.atom "ptyext_path";
+ longident te.ptyext_path.Asttypes.txt;
+ ];
+ Sexp.list [
+      Sexp.atom "ptyext_params";
+ Sexp.list (mapEmpty ~f:(fun (typexpr, var) ->
+ Sexp.list [
+ coreType typexpr;
+ variance var;
+ ]) te.ptyext_params)
+ ];
+ Sexp.list [
+ Sexp.atom "ptyext_constructors";
+ Sexp.list (mapEmpty ~f:extensionConstructor te.ptyext_constructors);
+ ];
+ Sexp.list [
+ Sexp.atom "ptyext_private";
+ privateFlag te.ptyext_private;
+ ];
+ attributes te.ptyext_attributes;
+ ]
+
+ and typeKind kind = match kind with
+ | Ptype_abstract -> Sexp.atom "Ptype_abstract"
+ | Ptype_variant constrDecls ->
+ Sexp.list [
+ Sexp.atom "Ptype_variant";
+ Sexp.list (mapEmpty ~f:constructorDeclaration constrDecls);
+ ]
+ | Ptype_record lblDecls ->
+ Sexp.list [
+ Sexp.atom "Ptype_record";
+ Sexp.list (mapEmpty ~f:labelDeclaration lblDecls);
+ ]
+ | Ptype_open -> Sexp.atom "Ptype_open"
+
+ and constructorDeclaration cd =
+ Sexp.list [
+ Sexp.atom "constructor_declaration";
+ string cd.pcd_name.Asttypes.txt;
+ Sexp.list [
+ Sexp.atom "pcd_args";
+ constructorArguments cd.pcd_args;
+ ];
+ Sexp.list [
+ Sexp.atom "pcd_res";
+ match cd.pcd_res with
+ | None -> Sexp.atom "None"
+ | Some typ -> Sexp.list [
+ Sexp.atom "Some";
+ coreType typ;
+ ]
+ ];
+ attributes cd.pcd_attributes;
+ ]
+
+ and constructorArguments args = match args with
+ | Pcstr_tuple types ->
+ Sexp.list [
+ Sexp.atom "Pcstr_tuple";
+ Sexp.list (mapEmpty ~f:coreType types)
+ ]
+ | Pcstr_record lds ->
+ Sexp.list [
+ Sexp.atom "Pcstr_record";
+ Sexp.list (mapEmpty ~f:labelDeclaration lds)
+ ]
+
+ and labelDeclaration ld =
+ Sexp.list [
+ Sexp.atom "label_declaration";
+ string ld.pld_name.Asttypes.txt;
+ mutableFlag ld.pld_mutable;
+ coreType ld.pld_type;
+ attributes ld.pld_attributes;
+ ]
+
+ and expression expr =
+ let desc = match expr.pexp_desc with
+ | Pexp_ident longidentLoc ->
+ Sexp.list [
+ Sexp.atom "Pexp_ident";
+ longident longidentLoc.Asttypes.txt;
+ ]
+ | Pexp_constant c ->
+ Sexp.list [
+ Sexp.atom "Pexp_constant";
+ constant c
+ ]
+ | Pexp_let (flag, vbs, expr) ->
+ Sexp.list [
+ Sexp.atom "Pexp_let";
+ recFlag flag;
+ Sexp.list (mapEmpty ~f:valueBinding vbs);
+ expression expr;
+ ]
+ | Pexp_function cases ->
+ Sexp.list [
+ Sexp.atom "Pexp_function";
+ Sexp.list (mapEmpty ~f:case cases);
+ ]
+ | Pexp_fun (argLbl, exprOpt, pat, expr) ->
+ Sexp.list [
+ Sexp.atom "Pexp_fun";
+ argLabel argLbl;
+ (match exprOpt with
+ | None -> Sexp.atom "None"
+ | Some expr -> Sexp.list [
+ Sexp.atom "Some";
+ expression expr;
+ ]);
+ pattern pat;
+ expression expr;
+ ]
+ | Pexp_apply (expr, args) ->
+ Sexp.list [
+ Sexp.atom "Pexp_apply";
+ expression expr;
+ Sexp.list (mapEmpty ~f:(fun (argLbl, expr) -> Sexp.list [
+ argLabel argLbl;
+ expression expr
+ ]) args);
+ ]
+ | Pexp_match (expr, cases) ->
+ Sexp.list [
+ Sexp.atom "Pexp_match";
+ expression expr;
+ Sexp.list (mapEmpty ~f:case cases);
+ ]
+ | Pexp_try (expr, cases) ->
+ Sexp.list [
+ Sexp.atom "Pexp_try";
+ expression expr;
+ Sexp.list (mapEmpty ~f:case cases);
+ ]
+ | Pexp_tuple exprs ->
+ Sexp.list [
+ Sexp.atom "Pexp_tuple";
+ Sexp.list (mapEmpty ~f:expression exprs);
+ ]
+ | Pexp_construct (longidentLoc, exprOpt) ->
+ Sexp.list [
+ Sexp.atom "Pexp_construct";
+ longident longidentLoc.Asttypes.txt;
+ match exprOpt with
+ | None -> Sexp.atom "None"
+ | Some expr ->
+ Sexp.list [
+ Sexp.atom "Some";
+ expression expr;
+ ]
+ ]
+ | Pexp_variant (lbl, exprOpt) ->
+ Sexp.list [
+ Sexp.atom "Pexp_variant";
+ string lbl;
+ match exprOpt with
+ | None -> Sexp.atom "None"
+ | Some expr ->
+ Sexp.list [
+ Sexp.atom "Some";
+ expression expr;
+ ]
+ ]
+ | Pexp_record (rows, optExpr) ->
+ Sexp.list [
+ Sexp.atom "Pexp_record";
+ Sexp.list (mapEmpty ~f:(fun (longidentLoc, expr) -> Sexp.list [
+ longident longidentLoc.Asttypes.txt;
+ expression expr;
+ ]) rows);
+ (match optExpr with
+ | None -> Sexp.atom "None"
+ | Some expr ->
+ Sexp.list [
+ Sexp.atom "Some";
+ expression expr;
+ ]);
+ ]
+ | Pexp_field (expr, longidentLoc) ->
+ Sexp.list [
+ Sexp.atom "Pexp_field";
+ expression expr;
+ longident longidentLoc.Asttypes.txt;
+ ]
+ | Pexp_setfield (expr1, longidentLoc, expr2) ->
+ Sexp.list [
+ Sexp.atom "Pexp_setfield";
+ expression expr1;
+ longident longidentLoc.Asttypes.txt;
+ expression expr2;
+ ]
+ | Pexp_array exprs ->
+ Sexp.list [
+ Sexp.atom "Pexp_array";
+ Sexp.list (mapEmpty ~f:expression exprs);
+ ]
+ | Pexp_ifthenelse (expr1, expr2, optExpr) ->
+ Sexp.list [
+ Sexp.atom "Pexp_ifthenelse";
+ expression expr1;
+ expression expr2;
+ (match optExpr with
+ | None -> Sexp.atom "None"
+ | Some expr ->
+ Sexp.list [
+ Sexp.atom "Some";
+ expression expr;
+ ]);
+ ]
+ | Pexp_sequence (expr1, expr2) ->
+ Sexp.list [
+ Sexp.atom "Pexp_sequence";
+ expression expr1;
+ expression expr2;
+ ]
+ | Pexp_while (expr1, expr2) ->
+ Sexp.list [
+ Sexp.atom "Pexp_while";
+ expression expr1;
+ expression expr2;
+ ]
+ | Pexp_for (pat, e1, e2, flag, e3) ->
+ Sexp.list [
+ Sexp.atom "Pexp_for";
+ pattern pat;
+ expression e1;
+ expression e2;
+ directionFlag flag;
+ expression e3;
+ ]
+ | Pexp_constraint (expr, typexpr) ->
+ Sexp.list [
+ Sexp.atom "Pexp_constraint";
+ expression expr;
+ coreType typexpr;
+ ]
+ | Pexp_coerce (expr, optTyp, typexpr) ->
+ Sexp.list [
+ Sexp.atom "Pexp_coerce";
+ expression expr;
+ (match optTyp with
+ | None -> Sexp.atom "None"
+ | Some typ -> Sexp.list [
+ Sexp.atom "Some";
+ coreType typ;
+ ]);
+ coreType typexpr;
+ ]
+ | Pexp_send _ ->
+ Sexp.list [
+ Sexp.atom "Pexp_send";
+ ]
+ | Pexp_new _ ->
+ Sexp.list [
+ Sexp.atom "Pexp_new";
+ ]
+ | Pexp_setinstvar _ ->
+ Sexp.list [
+ Sexp.atom "Pexp_setinstvar";
+ ]
+ | Pexp_override _ ->
+ Sexp.list [
+ Sexp.atom "Pexp_override";
+ ]
+ | Pexp_letmodule (modName, modExpr, expr) ->
+ Sexp.list [
+ Sexp.atom "Pexp_letmodule";
+ string modName.Asttypes.txt;
+ moduleExpression modExpr;
+ expression expr;
+ ]
+ | Pexp_letexception (extConstr, expr) ->
+ Sexp.list [
+ Sexp.atom "Pexp_letexception";
+ extensionConstructor extConstr;
+ expression expr;
+ ]
+ | Pexp_assert expr ->
+ Sexp.list [
+ Sexp.atom "Pexp_assert";
+ expression expr;
+ ]
+ | Pexp_lazy expr ->
+ Sexp.list [
+ Sexp.atom "Pexp_lazy";
+ expression expr;
+ ]
+ | Pexp_poly _ ->
+ Sexp.list [
+ Sexp.atom "Pexp_poly";
+ ]
+ | Pexp_object _ ->
+ Sexp.list [
+ Sexp.atom "Pexp_object";
+ ]
+ | Pexp_newtype (lbl, expr) ->
+ Sexp.list [
+ Sexp.atom "Pexp_newtype";
+ string lbl.Asttypes.txt;
+ expression expr;
+ ]
+ | Pexp_pack modExpr ->
+ Sexp.list [
+ Sexp.atom "Pexp_pack";
+ moduleExpression modExpr;
+ ]
+ | Pexp_open (flag, longidentLoc, expr) ->
+ Sexp.list [
+ Sexp.atom "Pexp_open";
+ overrideFlag flag;
+ longident longidentLoc.Asttypes.txt;
+ expression expr;
+ ]
+ | Pexp_extension ext ->
+ Sexp.list [
+ Sexp.atom "Pexp_extension";
+ extension ext;
+ ]
+ | Pexp_unreachable -> Sexp.atom "Pexp_unreachable"
+ in
+ Sexp.list [
+ Sexp.atom "expression";
+ desc;
+ ]
+
+ and case c =
+ Sexp.list [
+ Sexp.atom "case";
+ Sexp.list [
+ Sexp.atom "pc_lhs";
+ pattern c.pc_lhs;
+ ];
+ Sexp.list [
+ Sexp.atom "pc_guard";
+ match c.pc_guard with
+ | None -> Sexp.atom "None"
+ | Some expr -> Sexp.list [
+ Sexp.atom "Some";
+ expression expr;
+ ]
+ ];
+ Sexp.list [
+ Sexp.atom "pc_rhs";
+ expression c.pc_rhs;
+ ]
+ ]
+
+ and pattern p =
+ let descr = match p.ppat_desc with
+ | Ppat_any ->
+ Sexp.atom "Ppat_any"
+ | Ppat_var var ->
+ Sexp.list [
+ Sexp.atom "Ppat_var";
+ string var.Location.txt;
+ ]
+ | Ppat_alias (p, alias) ->
+ Sexp.list [
+ Sexp.atom "Ppat_alias";
+ pattern p;
+ string alias.txt;
+ ]
+ | Ppat_constant c ->
+ Sexp.list [
+ Sexp.atom "Ppat_constant";
+ constant c;
+ ]
+ | Ppat_interval (lo, hi) ->
+ Sexp.list [
+ Sexp.atom "Ppat_interval";
+ constant lo;
+ constant hi;
+ ]
+ | Ppat_tuple (patterns) ->
+ Sexp.list [
+ Sexp.atom "Ppat_tuple";
+ Sexp.list (mapEmpty ~f:pattern patterns);
+ ]
+ | Ppat_construct (longidentLoc, optPattern) ->
+ Sexp.list [
+ Sexp.atom "Ppat_construct";
+ longident longidentLoc.Location.txt;
+ match optPattern with
+ | None -> Sexp.atom "None"
+ | Some p -> Sexp.list [
+          Sexp.atom "Some";
+ pattern p;
+ ]
+ ]
+ | Ppat_variant (lbl, optPattern) ->
+ Sexp.list [
+ Sexp.atom "Ppat_variant";
+ string lbl;
+ match optPattern with
+ | None -> Sexp.atom "None"
+ | Some p -> Sexp.list [
+ Sexp.atom "Some";
+ pattern p;
+ ]
+ ]
+ | Ppat_record (rows, flag) ->
+ Sexp.list [
+ Sexp.atom "Ppat_record";
+ closedFlag flag;
+ Sexp.list (mapEmpty ~f:(fun (longidentLoc, p) ->
+ Sexp.list [
+ longident longidentLoc.Location.txt;
+ pattern p;
+ ]
+ ) rows)
+ ]
+ | Ppat_array patterns ->
+ Sexp.list [
+ Sexp.atom "Ppat_array";
+ Sexp.list (mapEmpty ~f:pattern patterns);
+ ]
+ | Ppat_or (p1, p2) ->
+ Sexp.list [
+ Sexp.atom "Ppat_or";
+ pattern p1;
+ pattern p2;
+ ]
+ | Ppat_constraint (p, typexpr) ->
+ Sexp.list [
+ Sexp.atom "Ppat_constraint";
+ pattern p;
+ coreType typexpr;
+ ]
+ | Ppat_type longidentLoc ->
+ Sexp.list [
+ Sexp.atom "Ppat_type";
+ longident longidentLoc.Location.txt
+ ]
+ | Ppat_lazy p ->
+ Sexp.list [
+ Sexp.atom "Ppat_lazy";
+ pattern p;
+ ]
+ | Ppat_unpack stringLoc ->
+ Sexp.list [
+ Sexp.atom "Ppat_unpack";
+ string stringLoc.Location.txt;
+ ]
+ | Ppat_exception p ->
+ Sexp.list [
+ Sexp.atom "Ppat_exception";
+ pattern p;
+ ]
+ | Ppat_extension ext ->
+ Sexp.list [
+ Sexp.atom "Ppat_extension";
+ extension ext;
+ ]
+ | Ppat_open (longidentLoc, p) ->
+ Sexp.list [
+ Sexp.atom "Ppat_open";
+ longident longidentLoc.Location.txt;
+ pattern p;
+ ]
+ in
+ Sexp.list [
+ Sexp.atom "pattern";
+ descr;
+ ]
+
+ and objectField field = match field with
+ | Otag (lblLoc, attrs, typexpr) ->
+ Sexp.list [
+ Sexp.atom "Otag";
+ string lblLoc.txt;
+ attributes attrs;
+ coreType typexpr;
+ ]
+ | Oinherit typexpr ->
+ Sexp.list [
+ Sexp.atom "Oinherit";
+ coreType typexpr;
+ ]
+
+ and rowField field = match field with
+ | Rtag (labelLoc, attrs, truth, types) ->
+ Sexp.list [
+ Sexp.atom "Rtag";
+ string labelLoc.txt;
+ attributes attrs;
+ Sexp.atom (if truth then "true" else "false");
+ Sexp.list (mapEmpty ~f:coreType types);
+ ]
+ | Rinherit typexpr ->
+ Sexp.list [
+ Sexp.atom "Rinherit";
+ coreType typexpr;
+ ]
+
+ and packageType (modNameLoc, packageConstraints) =
+ Sexp.list [
+ Sexp.atom "package_type";
+ longident modNameLoc.Asttypes.txt;
+ Sexp.list (mapEmpty ~f:(fun (modNameLoc, typexpr) ->
+ Sexp.list [
+ longident modNameLoc.Asttypes.txt;
+ coreType typexpr;
+ ]
+ ) packageConstraints)
+ ]
+
+ and coreType typexpr =
+ let desc = match typexpr.ptyp_desc with
+ | Ptyp_any -> Sexp.atom "Ptyp_any"
+ | Ptyp_var var -> Sexp.list [
+ Sexp.atom "Ptyp_var";
+ string var
+ ]
+ | Ptyp_arrow (argLbl, typ1, typ2) ->
+ Sexp.list [
+ Sexp.atom "Ptyp_arrow";
+ argLabel argLbl;
+ coreType typ1;
+ coreType typ2;
+ ]
+ | Ptyp_tuple types ->
+ Sexp.list [
+ Sexp.atom "Ptyp_tuple";
+ Sexp.list (mapEmpty ~f:coreType types);
+ ]
+ | Ptyp_constr (longidentLoc, types) ->
+ Sexp.list [
+ Sexp.atom "Ptyp_constr";
+ longident longidentLoc.txt;
+ Sexp.list (mapEmpty ~f:coreType types);
+ ]
+ | Ptyp_alias (typexpr, alias) ->
+ Sexp.list [
+ Sexp.atom "Ptyp_alias";
+ coreType typexpr;
+ string alias;
+ ]
+ | Ptyp_object (fields, flag) ->
+ Sexp.list [
+ Sexp.atom "Ptyp_object";
+ closedFlag flag;
+ Sexp.list (mapEmpty ~f:objectField fields)
+ ]
+ | Ptyp_class (longidentLoc, types) ->
+ Sexp.list [
+ Sexp.atom "Ptyp_class";
+ longident longidentLoc.Location.txt;
+ Sexp.list (mapEmpty ~f:coreType types)
+ ]
+ | Ptyp_variant (fields, flag, optLabels) ->
+ Sexp.list [
+ Sexp.atom "Ptyp_variant";
+ Sexp.list (mapEmpty ~f:rowField fields);
+ closedFlag flag;
+ match optLabels with
+ | None -> Sexp.atom "None"
+ | Some lbls -> Sexp.list (mapEmpty ~f:string lbls);
+ ]
+ | Ptyp_poly (lbls, typexpr) ->
+ Sexp.list [
+ Sexp.atom "Ptyp_poly";
+ Sexp.list (mapEmpty ~f:(fun lbl -> string lbl.Asttypes.txt) lbls);
+ coreType typexpr;
+ ]
+ | Ptyp_package (package) ->
+ Sexp.list [
+ Sexp.atom "Ptyp_package";
+ packageType package;
+ ]
+ | Ptyp_extension (ext) ->
+ Sexp.list [
+ Sexp.atom "Ptyp_extension";
+ extension ext;
+ ]
+ in
+ Sexp.list [
+ Sexp.atom "core_type";
+ desc;
+ ]
+
+ and payload p =
+ match p with
+ | PStr s ->
+ Sexp.list (
+ (Sexp.atom "PStr")::(mapEmpty ~f:structureItem s)
+ )
+ | PSig s ->
+ Sexp.list [
+ Sexp.atom "PSig";
+ signature s;
+ ]
+ | PTyp ct ->
+ Sexp.list [
+ Sexp.atom "PTyp";
+ coreType ct
+ ]
+ | PPat (pat, optExpr) ->
+ Sexp.list [
+ Sexp.atom "PPat";
+ pattern pat;
+ match optExpr with
+ | Some expr -> Sexp.list [
+ Sexp.atom "Some";
+ expression expr;
+ ]
+ | None -> Sexp.atom "None";
+ ]
+
+ and attribute (stringLoc, p) =
+ Sexp.list [
+ Sexp.atom "attribute";
+ Sexp.atom stringLoc.Asttypes.txt;
+ payload p;
+ ]
+
+ and extension (stringLoc, p) =
+ Sexp.list [
+ Sexp.atom "extension";
+ Sexp.atom stringLoc.Asttypes.txt;
+ payload p;
+ ]
+
+ and attributes attrs =
+ let sexprs = mapEmpty ~f:attribute attrs in
+ Sexp.list ((Sexp.atom "attributes")::sexprs)
+
+ let implementation = structure
+ let interface = signature
+end
+
+module IO: sig
+ val readFile: string -> string
+ val readStdin: unit -> string
+end = struct
+ (* random chunk size: 2^15, TODO: why do we guess randomly? *)
+ let chunkSize = 32768
+
+ let readFile filename =
+ let chan = open_in filename in
+ let buffer = Buffer.create chunkSize in
+ let chunk = (Bytes.create [@doesNotRaise]) chunkSize in
+ let rec loop () =
+ let len = try input chan chunk 0 chunkSize with Invalid_argument _ -> 0 in
+ if len == 0 then (
+ close_in_noerr chan;
+ Buffer.contents buffer
+ ) else (
+ Buffer.add_subbytes buffer chunk 0 len;
+ loop ()
+ )
+ in
+ loop ()
+
+ let readStdin () =
+ let buffer = Buffer.create chunkSize in
+ let chunk = (Bytes.create [@doesNotRaise]) chunkSize in
+ let rec loop () =
+ let len = try input stdin chunk 0 chunkSize with Invalid_argument _ -> 0 in
+ if len == 0 then (
+ close_in_noerr stdin;
+ Buffer.contents buffer
+ ) else (
+ Buffer.add_subbytes buffer chunk 0 len;
+ loop ()
+ )
+ in
+ loop ()
+end
+
+module CharacterCodes = struct
+ let eof = -1
+
+ let space = 0x0020
+ let newline = 0x0A (* \n *) [@@live]
+ let lineFeed = 0x0A (* \n *)
+ let carriageReturn = 0x0D (* \r *)
+ let lineSeparator = 0x2028
+ let paragraphSeparator = 0x2029
+
+ let tab = 0x09
+
+ let bang = 0x21
+ let dot = 0x2E
+ let colon = 0x3A
+ let comma = 0x2C
+ let backtick = 0x60
+ (* let question = 0x3F *)
+ let semicolon = 0x3B
+ let underscore = 0x5F
+ let singleQuote = 0x27
+ let doubleQuote = 0x22
+ let equal = 0x3D
+ let bar = 0x7C
+ let tilde = 0x7E
+ let question = 0x3F
+ let ampersand = 0x26
+ let at = 0x40
+ let dollar = 0x24
+ let percent = 0x25
+
+ let lparen = 0x28
+ let rparen = 0x29
+ let lbracket = 0x5B
+ let rbracket = 0x5D
+ let lbrace = 0x7B
+ let rbrace = 0x7D
+
+ let forwardslash = 0x2F (* / *)
+ let backslash = 0x5C (* \ *)
+
+ let greaterThan = 0x3E
+ let hash = 0x23
+ let lessThan = 0x3C
+
+ let minus = 0x2D
+ let plus = 0x2B
+ let asterisk = 0x2A
+
+ let _0 = 0x30
+ let _1 = 0x31 [@@live]
+ let _2 = 0x32 [@@live]
+ let _3 = 0x33 [@@live]
+ let _4 = 0x34 [@@live]
+ let _5 = 0x35 [@@live]
+ let _6 = 0x36 [@@live]
+ let _7 = 0x37 [@@live]
+ let _8 = 0x38 [@@live]
+ let _9 = 0x39
+
+ module Lower = struct
+ let a = 0x61
+ let b = 0x62
+ let c = 0x63 [@@live]
+ let d = 0x64 [@@live]
+ let e = 0x65
+ let f = 0x66
+ let g = 0x67
+ let h = 0x68 [@@live]
+ let i = 0x69 [@@live]
+ let j = 0x6A [@@live]
+ let k = 0x6B [@@live]
+ let l = 0x6C [@@live]
+ let m = 0x6D [@@live]
+ let n = 0x6E
+ let o = 0x6F
+ let p = 0x70
+ let q = 0x71 [@@live]
+ let r = 0x72
+ let s = 0x73 [@@live]
+ let t = 0x74
+ let u = 0x75 [@@live]
+ let v = 0x76 [@@live]
+ let w = 0x77 [@@live]
+ let x = 0x78
+ let y = 0x79 [@@live]
+ let z = 0x7A
+ end
+
+ module Upper = struct
+ let a = 0x41
+ (* let b = 0x42 *)
+ let c = 0x43 [@@live]
+ let d = 0x44 [@@live]
+ let e = 0x45 [@@live]
+ let f = 0x46 [@@live]
+ let g = 0x47
+ let h = 0x48 [@@live]
+ let i = 0x49 [@@live]
+ let j = 0x4A [@@live]
+ let k = 0x4B [@@live]
+ let l = 0x4C [@@live]
+ let m = 0x4D [@@live]
+    let n = 0x4E [@@live]
+ let o = 0x4F [@@live]
+ let p = 0x50 [@@live]
+ let q = 0x51 [@@live]
+ let r = 0x52 [@@live]
+ let s = 0x53 [@@live]
+ let t = 0x54 [@@live]
+ let u = 0x55 [@@live]
+ let v = 0x56 [@@live]
+ let w = 0x57 [@@live]
+ let x = 0x58 [@@live]
+ let y = 0x59 [@@live]
+ let z = 0x5a
+ end
+
+ (* returns lower-case ch, ch should be ascii *)
+ let lower ch =
+ (* if ch >= Lower.a && ch <= Lower.z then ch else ch + 32 *)
+ 32 lor ch
+
+ let isLetter ch =
+ Lower.a <= ch && ch <= Lower.z ||
+ Upper.a <= ch && ch <= Upper.z
+
+ let isUpperCase ch =
+ Upper.a <= ch && ch <= Upper.z
+
+ let isDigit ch = _0 <= ch && ch <= _9
+
+ let isHex ch =
+ (_0 <= ch && ch <= _9) ||
+ (Lower.a <= (lower ch) && (lower ch) <= Lower.f)
+
+ (*
+ // ES5 7.3:
+ // The ECMAScript line terminator characters are listed in Table 3.
+ // Table 3: Line Terminator Characters
+ // Code Unit Value Name Formal Name
+ // \u000A Line Feed
+ // \u000D Carriage Return
+ // \u2028 Line separator
+ // \u2029 Paragraph separator
+ // Only the characters in Table 3 are treated as line terminators. Other new line or line
+ // breaking characters are treated as white space but not as line terminators.
+ *)
+ let isLineBreak ch =
+ ch == lineFeed
+ || ch == carriageReturn
+ || ch == lineSeparator
+ || ch == paragraphSeparator
+
+ let digitValue ch =
+ if _0 <= ch && ch <= _9 then
+ ch - 48
+ else if Lower.a <= (lower ch) && (lower ch) <= Lower.f then
+ (lower ch) - Lower.a + 10
+ else
+ 16 (* larger than any legal value *)
+end
+
+module Comment: sig
+ type t
+
+ val toString: t -> string
+
+ val loc: t -> Location.t
+ val txt: t -> string
+ val prevTokEndPos: t -> Lexing.position
+
+ val setPrevTokEndPos: t -> Lexing.position -> unit
+
+ val isSingleLineComment: t -> bool
+
+ val makeSingleLineComment: loc:Location.t -> string -> t
+ val makeMultiLineComment: loc:Location.t -> string -> t
+ val fromOcamlComment:
+ loc:Location.t -> txt:string -> prevTokEndPos:Lexing.position -> t
+ val trimSpaces: string -> string
+end = struct
+ type style =
+ | SingleLine
+ | MultiLine
+
+ let styleToString s = match s with
+ | SingleLine -> "SingleLine"
+ | MultiLine -> "MultiLine"
+
+ type t = {
+ txt: string;
+ style: style;
+ loc: Location.t;
+ mutable prevTokEndPos: Lexing.position;
+ }
+
+ let loc t = t.loc
+ let txt t = t.txt
+ let prevTokEndPos t = t.prevTokEndPos
+
+ let setPrevTokEndPos t pos =
+ t.prevTokEndPos <- pos
+
+ let isSingleLineComment t = match t.style with
+ | SingleLine -> true
+ | MultiLine -> false
+
+ let toString t =
+ Format.sprintf
+ "(txt: %s\nstyle: %s\nlines: %d-%d)"
+ t.txt
+ (styleToString t.style)
+ t.loc.loc_start.pos_lnum
+ t.loc.loc_end.pos_lnum
+
+ let makeSingleLineComment ~loc txt = {
+ txt;
+ loc;
+ style = SingleLine;
+ prevTokEndPos = Lexing.dummy_pos;
+ }
+
+ let makeMultiLineComment ~loc txt = {
+ txt;
+ loc;
+ style = MultiLine;
+ prevTokEndPos = Lexing.dummy_pos;
+ }
+
+ let fromOcamlComment ~loc ~txt ~prevTokEndPos = {
+ txt;
+ loc;
+ style = MultiLine;
+ prevTokEndPos = prevTokEndPos
+ }
+
+ let trimSpaces s =
+ let len = String.length s in
+ if len = 0 then s
+ else if String.unsafe_get s 0 = ' ' || String.unsafe_get s (len - 1) = ' ' then (
+ let b = Bytes.of_string s in
+ let i = ref 0 in
+ while !i < len && (Bytes.unsafe_get b !i) = ' ' do
+ incr i
+ done;
+ let j = ref (len - 1) in
+ while !j >= !i && (Bytes.unsafe_get b !j) = ' ' do
+ decr j
+ done;
+ if !j >= !i then
+ (Bytes.sub [@doesNotRaise]) b !i (!j - !i + 1) |> Bytes.to_string
+ else
+ ""
+ ) else s
+end
+
+module Token = struct
+ type t =
+ | Open
+ | True | False
+ | Character of char
+ | Int of {i: string; suffix: char option}
+ | Float of {f: string; suffix: char option}
+ | String of string
+ | Lident of string
+ | Uident of string
+ | As
+ | Dot | DotDot | DotDotDot
+ | Bang
+ | Semicolon
+ | Let
+ | And
+ | Rec
+ | Underscore
+ | SingleQuote
+ | Equal | EqualEqual | EqualEqualEqual
+ | Bar
+ | Lparen
+ | Rparen
+ | Lbracket
+ | Rbracket
+ | Lbrace
+ | Rbrace
+ | Colon
+ | Comma
+ | Eof
+ | Exception
+ | Backslash [@live]
+ | Forwardslash | ForwardslashDot
+ | Asterisk | AsteriskDot | Exponentiation
+ | Minus | MinusDot
+ | Plus | PlusDot | PlusPlus | PlusEqual
+ | ColonGreaterThan
+ | GreaterThan
+ | LessThan
+ | LessThanSlash
+ | Hash | HashEqual | HashHash
+ | Assert
+ | Lazy
+ | Tilde
+ | Question
+ | If | Else | For | In | To | Downto | While | Switch
+ | When
+ | EqualGreater | MinusGreater
+ | External
+ | Typ
+ | Private
+ | Mutable
+ | Constraint
+ | Include
+ | Module
+ | Of
+ | With
+ | Land | Lor
+ | Band (* Bitwise and: & *)
+ | BangEqual | BangEqualEqual
+ | LessEqual | GreaterEqual
+ | ColonEqual
+ | At | AtAt
+ | Percent | PercentPercent
+ | Comment of Comment.t
+ | List
+ | TemplateTail of string
+ | TemplatePart of string
+ | Backtick
+ | BarGreater
+ | Try | Catch
+ | Import
+ | Export
+
+ let precedence = function
+ | HashEqual | ColonEqual -> 1
+ | Lor -> 2
+ | Land -> 3
+ | Equal | EqualEqual | EqualEqualEqual | LessThan | GreaterThan
+ | BangEqual | BangEqualEqual | LessEqual | GreaterEqual | BarGreater -> 4
+ | Plus | PlusDot | Minus | MinusDot | PlusPlus -> 5
+ | Asterisk | AsteriskDot | Forwardslash | ForwardslashDot -> 6
+ | Exponentiation -> 7
+ | MinusGreater -> 8
+ | Dot -> 9
+ | _ -> 0
+
+ let toString = function
+ | Open -> "open"
+ | True -> "true" | False -> "false"
+ | Character c -> "'" ^ (Char.escaped c) ^ "'"
+ | String s -> s
+ | Lident str -> str
+ | Uident str -> str
+ | Dot -> "." | DotDot -> ".." | DotDotDot -> "..."
+ | Int {i} -> "int " ^ i
+ | Float {f} -> "Float: " ^ f
+ | Bang -> "!"
+ | Semicolon -> ";"
+ | Let -> "let"
+ | And -> "and"
+ | Rec -> "rec"
+ | Underscore -> "_"
+ | SingleQuote -> "'"
+ | Equal -> "=" | EqualEqual -> "==" | EqualEqualEqual -> "==="
+ | Eof -> "eof"
+ | Bar -> "|"
+ | As -> "as"
+ | Lparen -> "(" | Rparen -> ")"
+ | Lbracket -> "[" | Rbracket -> "]"
+ | Lbrace -> "{" | Rbrace -> "}"
+ | ColonGreaterThan -> ":>"
+ | Colon -> ":"
+ | Comma -> ","
+ | Minus -> "-" | MinusDot -> "-."
+ | Plus -> "+" | PlusDot -> "+." | PlusPlus -> "++" | PlusEqual -> "+="
+ | Backslash -> "\\"
+ | Forwardslash -> "/" | ForwardslashDot -> "/."
+ | Exception -> "exception"
+ | Hash -> "#" | HashHash -> "##" | HashEqual -> "#="
+ | GreaterThan -> ">"
+ | LessThan -> "<"
+ | LessThanSlash -> ""
+ | Asterisk -> "*" | AsteriskDot -> "*." | Exponentiation -> "**"
+ | Assert -> "assert"
+ | Lazy -> "lazy"
+ | Tilde -> "tilde"
+ | Question -> "?"
+ | If -> "if"
+ | Else -> "else"
+ | For -> "for"
+ | In -> "in"
+ | To -> "to"
+ | Downto -> "downto"
+ | While -> "while"
+ | Switch -> "switch"
+ | When -> "when"
+ | EqualGreater -> "=>" | MinusGreater -> "->"
+ | External -> "external"
+ | Typ -> "type"
+ | Private -> "private"
+ | Constraint -> "constraint"
+ | Mutable -> "mutable"
+ | Include -> "include"
+ | Module -> "module"
+ | Of -> "of"
+ | With -> "with"
+ | Lor -> "||"
+ | Band -> "&" | Land -> "&&"
+ | BangEqual -> "!=" | BangEqualEqual -> "!=="
+ | GreaterEqual -> ">=" | LessEqual -> "<="
+ | ColonEqual -> ":="
+ | At -> "@" | AtAt -> "@@"
+ | Percent -> "%" | PercentPercent -> "%%"
+ | Comment c -> "Comment(" ^ (Comment.toString c) ^ ")"
+ | List -> "list"
+ | TemplatePart text -> text ^ "${"
+ | TemplateTail text -> "TemplateTail(" ^ text ^ ")"
+ | Backtick -> "`"
+ | BarGreater -> "|>"
+ | Try -> "try" | Catch -> "catch"
+ | Import -> "import"
+ | Export -> "export"
+
+ let keywordTable = function
+ | "true" -> True
+ | "false" -> False
+ | "open" -> Open
+ | "let" -> Let
+ | "rec" -> Rec
+ | "and" -> And
+ | "as" -> As
+ | "exception" -> Exception
+ | "assert" -> Assert
+ | "lazy" -> Lazy
+ | "if" -> If
+ | "else" -> Else
+ | "for" -> For
+ | "in" -> In
+ | "to" -> To
+ | "downto" -> Downto
+ | "while" -> While
+ | "switch" -> Switch
+ | "when" -> When
+ | "external" -> External
+ | "type" -> Typ
+ | "private" -> Private
+ | "mutable" -> Mutable
+ | "constraint" -> Constraint
+ | "include" -> Include
+ | "module" -> Module
+ | "of" -> Of
+ | "list" -> List
+ | "with" -> With
+ | "try" -> Try
+ | "catch" -> Catch
+ | "import" -> Import
+ | "export" -> Export
+ | _ -> raise Not_found
+ [@@raises Not_found]
+
+ let isKeyword = function
+ | True | False | Open | Let | Rec | And | As
+ | Exception | Assert | Lazy | If | Else | For | In | To
+ | Downto | While | Switch | When | External | Typ | Private
+ | Mutable | Constraint | Include | Module | Of
+ | Land | Lor | List | With
+ | Try | Catch | Import | Export -> true
+ | _ -> false
+
+ let lookupKeyword str =
+ try keywordTable str with
+ | Not_found ->
+ if CharacterCodes.isUpperCase (int_of_char (str.[0] [@doesNotRaise])) then
+ Uident str
+ else Lident str
+
+ let isKeywordTxt str =
+ try let _ = keywordTable str in true with
+ | Not_found -> false
+end
+
+(* Grammar items of the language. Used by the parser for error reporting
+ * ("expected the start of <item>") and for deciding, while parsing a list
+ * of things, whether a token starts a new element or terminates the list. *)
+module Grammar = struct
+  type t =
+    | OpenDescription (* open Belt *)
+    | ModuleLongIdent (* Foo or Foo.Bar *) [@live]
+    | Ternary (* condExpr ? trueExpr : falseExpr *)
+    | Es6ArrowExpr
+    | Jsx
+    | JsxAttribute
+    | JsxChild [@live]
+    | ExprOperand
+    | ExprUnary
+    | ExprSetField
+    | ExprBinaryAfterOp of Token.t
+    | ExprBlock
+    | ExprCall
+    | ExprList
+    | ExprArrayAccess
+    | ExprArrayMutation
+    | ExprIf
+    | IfCondition | IfBranch | ElseBranch
+    | TypeExpression
+    | External
+    | PatternMatching
+    | PatternMatchCase
+    | LetBinding
+    | PatternList
+    | PatternOcamlList
+    | PatternRecord
+
+    | TypeDef
+    | TypeConstrName
+    | TypeParams
+    | TypeParam [@live]
+    | PackageConstraint
+
+    | TypeRepresentation
+
+    | RecordDecl
+    | ConstructorDeclaration
+    | ParameterList
+    | StringFieldDeclarations
+    | FieldDeclarations
+    | TypExprList
+    | FunctorArgs
+    | ModExprList
+    | TypeParameters
+    | RecordRows
+    | RecordRowsStringKey
+    | ArgumentList
+    | Signature
+    | Specification
+    | Structure
+    | Implementation
+    | Attribute
+    | TypeConstraint
+    | Primitive
+    | AtomicTypExpr
+    | ListExpr
+    | JsFfiImport
+
+  (* Human-readable description, interpolated into parse-error messages. *)
+  let toString = function
+    | OpenDescription -> "an open description"
+    | ModuleLongIdent -> "a module identifier"
+    | Ternary -> "a ternary expression"
+    | Es6ArrowExpr -> "an es6 arrow function"
+    | Jsx -> "a jsx expression"
+    | JsxAttribute -> "a jsx attribute"
+    | ExprOperand -> "a basic expression"
+    | ExprUnary -> "a unary expression"
+    | ExprBinaryAfterOp op -> "an expression after the operator \"" ^ Token.toString op ^ "\""
+    | ExprIf -> "an if expression"
+    | IfCondition -> "the condition of an if expression"
+    | IfBranch -> "the true-branch of an if expression"
+    | ElseBranch -> "the else-branch of an if expression"
+    | TypeExpression -> "a type"
+    | External -> "an external"
+    | PatternMatching -> "the cases of a pattern match"
+    | ExprBlock -> "a block with expressions"
+    | ExprSetField -> "a record field mutation"
+    | ExprCall -> "a function application"
+    | ExprArrayAccess -> "an array access expression"
+    | ExprArrayMutation -> "an array mutation"
+    | LetBinding -> "a let binding"
+    | TypeDef -> "a type definition"
+    | TypeParams -> "type parameters"
+    | TypeParam -> "a type parameter"
+    | TypeConstrName -> "a type-constructor name"
+    | TypeRepresentation -> "a type representation"
+    | RecordDecl -> "a record declaration"
+    | PatternMatchCase -> "a pattern match case"
+    | ConstructorDeclaration -> "a constructor declaration"
+    | ExprList -> "multiple expressions"
+    | PatternList -> "multiple patterns"
+    | PatternOcamlList -> "a list pattern"
+    | PatternRecord -> "a record pattern"
+    | ParameterList -> "parameters"
+    | StringFieldDeclarations -> "string field declarations"
+    | FieldDeclarations -> "field declarations"
+    | TypExprList -> "list of types"
+    | FunctorArgs -> "functor arguments"
+    | ModExprList -> "list of module expressions"
+    | TypeParameters -> "list of type parameters"
+    | RecordRows -> "rows of a record"
+    | RecordRowsStringKey -> "rows of a record with string keys"
+    | ArgumentList -> "arguments"
+    | Signature -> "signature"
+    | Specification -> "specification"
+    | Structure -> "structure"
+    | Implementation -> "implementation"
+    | Attribute -> "an attribute"
+    | TypeConstraint -> "constraints on a type"
+    | Primitive -> "an external primitive"
+    | AtomicTypExpr -> "a type"
+    | ListExpr -> "an ocaml list expr"
+    | PackageConstraint -> "a package constraint"
+    | JsFfiImport -> "js ffi import"
+
+  (* The is*Start predicates answer: can [token] begin this grammar item?
+   * They are first-token approximations used for recovery decisions. *)
+  let isSignatureItemStart = function
+    | Token.At
+    | Let
+    | Typ
+    | External
+    | Exception
+    | Open
+    | Include
+    | Module
+    | AtAt
+    | PercentPercent -> true
+    | _ -> false
+
+  let isAtomicPatternStart = function
+    | Token.Int _ | String _ | Character _
+    | Lparen | Lbracket | Lbrace
+    | Underscore
+    | Lident _ | Uident _ | List
+    | Exception | Lazy
+    | Percent -> true
+    | _ -> false
+
+  let isAtomicExprStart = function
+    | Token.True | False
+    | Int _ | String _ | Float _ | Character _
+    | Backtick
+    | Uident _ | Lident _ | Hash
+    | Lparen
+    | List
+    | Lbracket
+    | Lbrace
+    | LessThan
+    | Module
+    | Percent -> true
+    | _ -> false
+
+  let isAtomicTypExprStart = function
+    | Token.SingleQuote | Underscore
+    | Lparen | Lbrace
+    | Uident _ | Lident _ | List
+    | Percent -> true
+    | _ -> false
+
+  let isExprStart = function
+    | Token.True | False
+    | Int _ | String _ | Float _ | Character _ | Backtick
+    | Underscore (* _ => doThings() *)
+    | Uident _ | Lident _ | Hash
+    | Lparen | List | Module | Lbracket | Lbrace
+    | LessThan
+    | Minus | MinusDot | Plus | PlusDot | Bang
+    | Percent | At
+    | If | Switch | While | For | Assert | Lazy | Try -> true
+    | _ -> false
+
+  let isJsxAttributeStart = function
+    | Token.Lident _ | Question -> true
+    | _ -> false
+
+  let isStructureItemStart = function
+    | Token.Open
+    | Let
+    | Typ
+    | External | Import | Export
+    | Exception
+    | Include
+    | Module
+    | AtAt
+    | PercentPercent
+    | At -> true
+    | t when isExprStart t -> true
+    | _ -> false
+
+  let isPatternStart = function
+    | Token.Int _ | Float _ | String _ | Character _ | True | False | Minus | Plus
+    | Lparen | Lbracket | Lbrace | List
+    | Underscore
+    | Lident _ | Uident _ | Hash | HashHash
+    | Exception | Lazy | Percent | Module
+    | At -> true
+    | _ -> false
+
+  let isParameterStart = function
+    | Token.Typ | Tilde | Dot -> true
+    | token when isPatternStart token -> true
+    | _ -> false
+
+  (* TODO: overparse Uident ? *)
+  let isStringFieldDeclStart = function
+    | Token.String _ | At -> true
+    | _ -> false
+
+  (* TODO: overparse Uident ? *)
+  let isFieldDeclStart = function
+    | Token.At | Mutable | Lident _ | List -> true
+    (* recovery, TODO: this is not ideal… *)
+    | Uident _ -> true
+    | t when Token.isKeyword t -> true
+    | _ -> false
+
+  let isRecordDeclStart = function
+    | Token.At
+    | Mutable
+    | Lident _ | List -> true
+    | _ -> false
+
+  let isTypExprStart = function
+    | Token.At
+    | SingleQuote
+    | Underscore
+    | Lparen | Lbracket
+    | Uident _ | Lident _ | List
+    | Module
+    | Percent
+    | Lbrace -> true
+    | _ -> false
+
+  let isTypeParameterStart = function
+    | Token.Tilde | Dot -> true
+    | token when isTypExprStart token -> true
+    | _ -> false
+
+  let isTypeParamStart = function
+    | Token.Plus | Minus | SingleQuote | Underscore -> true
+    | _ -> false
+
+  let isFunctorArgStart = function
+    | Token.At | Uident _ | Underscore
+    | Percent
+    | Lbrace
+    | Lparen -> true
+    | _ -> false
+
+  let isModExprStart = function
+    | Token.At | Percent
+    | Uident _ | Lbrace | Lparen -> true
+    | _ -> false
+
+  let isRecordRowStart = function
+    | Token.DotDotDot -> true
+    | Token.Uident _ | Lident _ | List -> true
+    (* TODO *)
+    | t when Token.isKeyword t -> true
+    | _ -> false
+
+  let isRecordRowStringKeyStart = function
+    | Token.String _ -> true
+    | _ -> false
+
+  let isArgumentStart = function
+    | Token.Tilde | Dot | Underscore -> true
+    | t when isExprStart t -> true
+    | _ -> false
+
+  let isPatternMatchStart = function
+    | Token.Bar -> true
+    | t when isPatternStart t -> true
+    | _ -> false
+
+  let isPatternOcamlListStart = function
+    | Token.DotDotDot -> true
+    | t when isPatternStart t -> true
+    | _ -> false
+
+  let isPatternRecordItemStart = function
+    | Token.DotDotDot | Uident _ | Lident _ | List | Underscore -> true
+    | _ -> false
+
+  let isAttributeStart = function
+    | Token.At -> true
+    | _ -> false
+
+  let isJsFfiImportStart = function
+    | Token.Lident _ | At -> true
+    | _ -> false
+
+  let isJsxChildStart = isAtomicExprStart
+
+  let isBlockExprStart = function
+    | Token.At | Hash | Percent | Minus | MinusDot | Plus | PlusDot | Bang
+    | True | False | Int _ | String _ | Character _ | Lident _ | Uident _
+    | Lparen | List | Lbracket | Lbrace | Forwardslash | Assert
+    | Lazy | If | For | While | Switch | Open | Module | Exception | Let
+    | LessThan | Backtick | Try | Underscore -> true
+    | _ -> false
+
+  (* While parsing a list of [grammar] items: does [token] begin the next
+   * element of the list? *)
+  let isListElement grammar token =
+    match grammar with
+    | ExprList -> token = Token.DotDotDot || isExprStart token
+    | ListExpr -> token = DotDotDot || isExprStart token
+    | PatternList -> token = DotDotDot || isPatternStart token
+    | ParameterList -> isParameterStart token
+    | StringFieldDeclarations -> isStringFieldDeclStart token
+    | FieldDeclarations -> isFieldDeclStart token
+    | RecordDecl -> isRecordDeclStart token
+    | TypExprList -> isTypExprStart token || token = Token.LessThan
+    | TypeParams -> isTypeParamStart token
+    | FunctorArgs -> isFunctorArgStart token
+    | ModExprList -> isModExprStart token
+    | TypeParameters -> isTypeParameterStart token
+    | RecordRows -> isRecordRowStart token
+    | RecordRowsStringKey -> isRecordRowStringKeyStart token
+    | ArgumentList -> isArgumentStart token
+    | Signature | Specification -> isSignatureItemStart token
+    | Structure | Implementation -> isStructureItemStart token
+    | PatternMatching -> isPatternMatchStart token
+    | PatternOcamlList -> isPatternOcamlListStart token
+    | PatternRecord -> isPatternRecordItemStart token
+    | Attribute -> isAttributeStart token
+    | TypeConstraint -> token = Constraint
+    | PackageConstraint -> token = And
+    | ConstructorDeclaration -> token = Bar
+    | Primitive -> begin match token with Token.String _ -> true | _ -> false end
+    | JsxAttribute -> isJsxAttributeStart token
+    | JsFfiImport -> isJsFfiImportStart token
+    | _ -> false
+
+  (* While parsing a list of [grammar] items: does [token] end the list?
+   * Eof terminates every list so recovery always makes progress. *)
+  let isListTerminator grammar token =
+    match grammar, token with
+    | _, Token.Eof
+    | ExprList, (Rparen | Forwardslash | Rbracket)
+    | ListExpr, Rparen
+    | ArgumentList, Rparen
+    | TypExprList, (Rparen | Forwardslash | GreaterThan | Equal)
+    | ModExprList, Rparen
+    | (PatternList | PatternOcamlList | PatternRecord),
+      (Forwardslash | Rbracket | Rparen | EqualGreater (* pattern matching => *) | In (* for expressions *) | Equal (* let {x} = foo *))
+    | ExprBlock, Rbrace
+    | (Structure | Signature), Rbrace
+    | TypeParams, Rparen
+    | ParameterList, (EqualGreater | Lbrace)
+    | JsxAttribute, (Forwardslash | GreaterThan)
+    | JsFfiImport, Rbrace
+    | StringFieldDeclarations, Rbrace -> true
+
+    | Attribute, token when token <> At -> true
+    | TypeConstraint, token when token <> Constraint -> true
+    | PackageConstraint, token when token <> And -> true
+    | ConstructorDeclaration, token when token <> Bar -> true
+    | Primitive, Semicolon -> true
+    | Primitive, token when isStructureItemStart token -> true
+
+    | _ -> false
+
+  (* A token "belongs" to a list if it either starts an element or ends it. *)
+  let isPartOfList grammar token =
+    isListElement grammar token || isListTerminator grammar token
+end
+
+(* Error-report rendering: a tiny terminal document language plus helpers to
+ * print a highlighted frame of source code around a parse error. *)
+module Reporting = struct
+  (* Minimal pretty-printing document type rendered straight to a string. *)
+  module TerminalDoc = struct
+    type break =
+      | Never
+      | Always
+
+    type document =
+      | Nil
+      | Group of {break: break; doc: document}
+      | Text of string
+      | Indent of {amount: int; doc: document}
+      | Append of {doc1: document; doc2: document}
+
+    let group ~break doc = Group {break; doc}
+    let text txt = Text (txt)
+    let indent i d = Indent {amount = i; doc = d}
+    let append d1 d2 = Append {doc1 = d1; doc2 = d2}
+    let nil = Nil
+
+    (* Explicit work stack so rendering stays tail-recursive. *)
+    type stack =
+      | Empty
+      | Cons of {doc: document; stack: stack}
+
+    let push stack doc = Cons {doc; stack}
+
+    type mode =
+      | Flat
+      | Break
+
+    (* Renders [doc]. In Break mode an Append emits a newline between its two
+     * halves; in Flat mode the halves are joined directly. A Group switches
+     * the mode for its contents. *)
+    let toString (* ~width *) (doc : document) =
+      let buffer = Buffer.create 100 in
+      let rec loop stack mode offset =
+        match stack with
+        | Empty -> ()
+        | Cons {doc; stack = rest} ->
+          begin match doc with
+          | Nil -> loop rest mode offset
+          | Text txt ->
+            Buffer.add_string buffer txt;
+            loop rest mode (offset + (String.length txt))
+          | Indent {amount = i; doc} ->
+            let indentation = (String.make [@doesNotRaise]) i ' ' in
+            Buffer.add_string buffer indentation;
+            loop (push rest doc) mode (offset + i)
+          | Append {doc1; doc2} ->
+            let rest = push rest doc2 in
+            let rest = push rest
+              (match mode = Flat with
+              | true -> Nil
+              | false -> text "\n")
+            in
+            let rest = push rest doc1 in
+            loop rest mode offset
+          | Group {break; doc} ->
+            let rest = push rest doc in
+            begin match break with
+            | Always -> loop rest Break offset
+            | Never -> loop rest Flat offset
+            end
+          end
+      in
+      loop (push Empty doc) Flat 0;
+      Buffer.contents buffer
+  end
+
+  type color =
+    | NoColor [@live]
+    | Red [@live]
+
+  type style = {
+    underline: bool; [@live]
+    color: color; [@live]
+  }
+
+  (* Wraps txt.[from .. from+len) in ANSI red escape codes; returns txt
+   * unchanged when the range starts outside the string. *)
+  let highlight ~from ~len txt =
+    if from < 0 || (String.length txt) == 0 || (from >= String.length txt) then txt else
+    let before = try String.sub txt 0 from with Invalid_argument _ -> "" in
+    let content =
+      "\027[31m" ^ (try String.sub txt from len with Invalid_argument _ -> "") ^ "\027[0m"
+    in
+    let after = try String.sub txt (from + len) (String.length txt - (from + len)) with Invalid_argument _ -> "" in
+    before ^ content ^ after
+
+  (* Emits txt followed by a highlighted caret line (^^^) under [from..from+len). *)
+  let underline ~from ~len txt =
+    let open TerminalDoc in
+    let indent = (String.make [@doesNotRaise]) from ' ' in
+    let underline = (String.make [@doesNotRaise]) len '^' in
+    let line = highlight ~from:0 ~len underline in
+    group ~break:Always
+      (append (text txt) (text (indent ^ line)))
+
+  (* 1-based drop: [drop n l] removes the first n-1 elements; [drop 1] is the
+   * identity. Callers pass 1-based line numbers. *)
+  let rec drop n l =
+    if n == 1 then l
+    else drop (n - 1) (match l with | _x::xs -> xs | _ -> l)
+
+  (* First n elements of l (fewer when l is shorter than n). *)
+  let rec take n l =
+    match l with
+    | _ when n == 0 -> []
+    | [] -> []
+    | x::xs -> x::(take (n -1) xs)
+
+  (* TODO: cleanup *)
+  (* Renders a frame of [src] around startPos..endPos: row numbers, a gutter,
+   * and either red highlighting of the offending range or (when [missing]
+   * carries a token length) a caret underline where the token should be. *)
+  let renderCodeContext ~missing (src : string) startPos endPos =
+    let open Lexing in
+    let startCol = (startPos.pos_cnum - startPos.pos_bol) in
+    let endCol = endPos.pos_cnum - startPos.pos_cnum + startCol in
+    let startLine = max 1 (startPos.pos_lnum - 2) in (* 2 lines before *)
+    let lines = String.split_on_char '\n' src in
+    let endLine =
+      let len = List.length lines in
+      min len (startPos.pos_lnum + 3) (* 2 lines after *)
+    in
+    let lines =
+      lines
+      |> drop startLine
+      |> take (endLine - startLine)
+      |> Array.of_list
+    in
+
+    (* Renders one source line [x] with row number [ix]; the error line gets
+     * padding (for the underline case) and highlighting. *)
+    let renderLine x ix =
+      let x = if ix = startPos.pos_lnum then
+          begin match missing with
+          | Some _len -> x ^ (String.make 10 ' ' [@doesNotRaise])
+          | None -> x
+          end
+        else
+          x
+      in
+
+      let open TerminalDoc in
+      let rowNr =
+        let txt = string_of_int ix in
+        let len = String.length txt in
+        if ix = startPos.pos_lnum then
+          highlight ~from:0 ~len txt
+        else txt
+      in
+      let len =
+        let len = if endCol >= 0 then
+          endCol - startCol
+        else
+          1
+        in
+        if (startCol + len) > String.length x then String.length x - startCol - 1 else len
+      in
+      let line =
+        if ix = startPos.pos_lnum then
+          begin match missing with
+          | Some len ->
+            underline
+              ~from:(
+                startCol + String.length (String.length (string_of_int ix) |> string_of_int) + 5
+              ) ~len x
+          | None ->
+            let len = if startCol + len > String.length x then
+              (String.length x) - startCol
+            else
+              len
+            in
+            text (highlight ~from:startCol ~len x)
+          end
+        else text x
+      in
+      group ~break:Never
+        (append
+          (append (text rowNr) (text " │"))
+          (indent 2 line))
+    in
+
+    let reportDoc = ref TerminalDoc.nil in
+
+    let linesLen = Array.length lines in
+    for i = 0 to (linesLen - 1) do
+      let line = try (Array.get [@doesNotRaise]) lines i with Invalid_argument _ -> "" in
+      reportDoc :=
+        let open TerminalDoc in
+        let ix = startLine + i in
+        group ~break:Always (append !reportDoc (renderLine line ix))
+    done;
+
+    TerminalDoc.toString !reportDoc
+
+  (* Kinds of parse problems; constructors are kept via [@live] even when a
+   * variant is currently unused. *)
+  type problem =
+    | Unexpected of Token.t [@live]
+    | Expected of {token: Token.t; pos: Lexing.position; context: Grammar.t option} [@live]
+    | Message of string [@live]
+    | Uident [@live]
+    | Lident [@live]
+    | Unbalanced of Token.t [@live]
+
+  type parseError = Lexing.position * problem
+end
+
+(* Diagnostics: construction and printing of parse-error reports.
+ * The signature keeps [t]/[category]/[report] abstract; errors are built via
+ * the smart constructors below and rendered with [stringOfReport]. *)
+module Diagnostics: sig
+  type t
+  type category
+  type report
+
+  type reportStyle
+  val parseReportStyle: string -> reportStyle
+
+  val unexpected: Token.t -> (Grammar.t * Lexing.position) list -> category
+  val expected: ?grammar:Grammar.t -> Lexing.position -> Token.t -> category
+  val uident: Token.t -> category
+  val lident: Token.t -> category
+  val unclosedString: category
+  val unclosedTemplate: category
+  val unclosedComment: category
+  val unknownUchar: int -> category
+  val message: string -> category
+
+  val make:
+    filename: string
+    -> startPos: Lexing.position
+    -> endPos: Lexing.position
+    -> category
+    -> t
+
+  val stringOfReport: style:reportStyle -> t list -> string -> string
+end = struct
+  type category =
+    | Unexpected of {token: Token.t; context: (Grammar.t * Lexing.position) list}
+    | Expected of {context: Grammar.t option; pos: Lexing.position (* prev token end*); token: Token.t}
+    | Message of string
+    | Uident of Token.t
+    | Lident of Token.t
+    | UnclosedString
+    | UnclosedTemplate
+    | UnclosedComment
+    | UnknownUchar of int
+
+  (* One diagnostic: a source span in [filename] plus what went wrong. *)
+  type t = {
+    filename: string;
+    startPos: Lexing.position;
+    endPos: Lexing.position;
+    category: category;
+  }
+
+  type report = t list
+
+  (* TODO: add json here *)
+  type reportStyle =
+    | Pretty
+    | Plain
+
+  (* Anything other than "plain" (case-insensitive) selects Pretty. *)
+  let parseReportStyle txt = match (String.lowercase_ascii txt) with
+    | "plain" -> Plain
+    | _ -> Pretty
+
+  let defaultUnexpected token =
+    "I'm not sure what to parse here when looking at \"" ^ (Token.toString token) ^ "\"."
+
+  (* Human-friendly explanation of a diagnostic, tailored to its category and
+   * (for Unexpected) the grammar breadcrumbs recorded at the error site. *)
+  let explain t =
+    match t.category with
+    | Uident currentToken ->
+      begin match currentToken with
+      | Lident lident ->
+        let guess = String.capitalize_ascii lident in
+        "Did you mean `" ^ guess ^"` instead of `" ^ lident ^ "`?"
+      | t when Token.isKeyword t ->
+        let token = Token.toString t in
+        "`" ^ token ^ "` is a reserved keyword."
+      | _ ->
+        "At this point, I'm looking for an uppercased identifier like `Belt` or `Array`"
+      end
+    | Lident currentToken ->
+      begin match currentToken with
+      | Uident uident ->
+        let guess = String.uncapitalize_ascii uident in
+        "Did you mean `" ^ guess ^"` instead of `" ^ uident ^ "`?"
+      | t when Token.isKeyword t ->
+        let token = Token.toString t in
+        "`" ^ token ^ "` is a reserved keyword. Keywords need to be escaped: \\\"" ^ token ^ "\""
+      | Underscore ->
+        "`_` isn't a valid name."
+      | _ ->
+        (* fix: "an lowercased" -> "a lowercased" in the user-facing message *)
+        "I'm expecting a lowercased identifier like `name` or `age`"
+      end
+    | Message txt -> txt
+    | UnclosedString ->
+      "This string is missing a double quote at the end"
+    | UnclosedTemplate ->
+      "Did you forget to close this template expression with a backtick?"
+    | UnclosedComment ->
+      "This comment seems to be missing a closing `*/`"
+    | UnknownUchar uchar ->
+      begin match uchar with
+      | 94 (* ^ *) ->
+        "Hmm, not sure what I should do here with this character.\nIf you're trying to deref an expression, use `foo.contents` instead."
+      | _ ->
+        "Hmm, I have no idea what this character means…"
+      end
+    | Expected {context; token = t} ->
+      let hint = match context with
+      | Some grammar -> "It signals the start of " ^ (Grammar.toString grammar)
+      | None -> ""
+      in
+      "Did you forget a `" ^ (Token.toString t) ^ "` here? " ^ hint
+    | Unexpected {token = t; context = breadcrumbs} ->
+      let name = (Token.toString t) in
+      begin match breadcrumbs with
+      | (AtomicTypExpr, _)::breadcrumbs ->
+        begin match breadcrumbs, t with
+        | ((StringFieldDeclarations | FieldDeclarations) , _) :: _, (String _ | At | Rbrace | Comma | Eof) ->
+          "I'm missing a type here"
+        | _, t when Grammar.isStructureItemStart t || t = Eof ->
+          "Missing a type here"
+        | _ ->
+          defaultUnexpected t
+        end
+      | (ExprOperand, _)::breadcrumbs ->
+        begin match breadcrumbs, t with
+        | (ExprBlock, _) :: _, Rbrace ->
+          "It seems that this expression block is empty"
+        | (ExprBlock, _) :: _, Bar -> (* Pattern matching *)
+          "Looks like there might be an expression missing here"
+        | (ExprSetField, _) :: _, _ ->
+          "It seems that this record field mutation misses an expression"
+        | (ExprArrayMutation, _) :: _, _ ->
+          "Seems that an expression is missing, with what do I mutate the array?"
+        | ((ExprBinaryAfterOp _ | ExprUnary), _) ::_, _ ->
+          "Did you forget to write an expression here?"
+        | (Grammar.LetBinding, _)::_, _ ->
+          "This let-binding misses an expression"
+        | _::_, (Rbracket | Rbrace) ->
+          "Missing expression"
+        | _ ->
+          "I'm not sure what to parse here when looking at \"" ^ name ^ "\"."
+        end
+      | (TypeParam, _)::_ ->
+        begin match t with
+        | Lident ident ->
+          "Did you mean '" ^ ident ^"? A Type parameter starts with a quote."
+        | _ ->
+          "I'm not sure what to parse here when looking at \"" ^ name ^ "\"."
+        end
+      | _ ->
+        (* TODO: match on circumstance to verify Lident needed ? *)
+        if Token.isKeyword t then
+          "`" ^ name ^ "` is a reserved keyword. Keywords need to be escaped: \\\"" ^ (Token.toString t) ^ "\""
+        else
+          "I'm not sure what to parse here when looking at \"" ^ name ^ "\"."
+      end
+
+  (* Machine-friendly one-liner: filename(startOffset,endOffset):explanation *)
+  let toPlainString t buffer =
+    Buffer.add_string buffer t.filename;
+    Buffer.add_char buffer '(';
+    Buffer.add_string buffer (string_of_int t.startPos.pos_cnum);
+    Buffer.add_char buffer ',';
+    Buffer.add_string buffer (string_of_int t.endPos.pos_cnum);
+    Buffer.add_char buffer ')';
+    Buffer.add_char buffer ':';
+    Buffer.add_string buffer (explain t)
+
+  (* Pretty format: location header (kept compatible with the Reason language
+   * server), a highlighted code frame, then the explanation. *)
+  let toString t src =
+    let open Lexing in
+    let startchar = t.startPos.pos_cnum - t.startPos.pos_bol in
+    let endchar = t.endPos.pos_cnum - t.startPos.pos_cnum + startchar in
+    let locationInfo =
+      Printf.sprintf (* ReasonLanguageServer requires the following format *)
+        "File \"%s\", line %d, characters %d-%d:"
+        t.filename
+        t.startPos.pos_lnum
+        startchar
+        endchar
+    in
+    let code =
+      let missing = match t.category with
+      | Expected {token = t} ->
+        Some (String.length (Token.toString t))
+      | _ -> None
+      in
+      Reporting.renderCodeContext ~missing src t.startPos t.endPos
+    in
+    let explanation = explain t in
+    Printf.sprintf "%s\n\n%s\n\n%s\n\n" locationInfo code explanation
+
+  let make ~filename ~startPos ~endPos category = {
+    filename;
+    startPos;
+    endPos;
+    category
+  }
+
+  (* Renders all diagnostics (oldest first in Pretty mode) against [src]. *)
+  let stringOfReport ~style diagnostics src =
+    match style with
+    | Pretty ->
+      List.fold_left (fun report diagnostic ->
+        report ^ (toString diagnostic src) ^ "\n"
+      ) "\n" (List.rev diagnostics)
+    | Plain ->
+      let buffer = Buffer.create 100 in
+      List.iter (fun diagnostic ->
+        toPlainString diagnostic buffer;
+        Buffer.add_char buffer '\n';
+      ) diagnostics;
+      Buffer.contents buffer
+
+  (* Smart constructors so [category] can stay abstract in the signature. *)
+  let unexpected token context =
+    Unexpected {token; context}
+
+  let expected ?grammar pos token =
+    Expected {context = grammar; pos; token}
+
+  let uident currentToken = Uident currentToken
+  let lident currentToken = Lident currentToken
+  let unclosedString = UnclosedString
+  let unclosedComment = UnclosedComment
+  let unclosedTemplate = UnclosedTemplate
+  let unknownUchar code = UnknownUchar code
+  let message txt = Message txt
+end
+
+(* Collection of utilities to view the ast in a more convenient form,
+ * allowing for easier processing.
+ * Example: given a ptyp_arrow type, what are its arguments and what is the
+ * returnType? *)
+module ParsetreeViewer : sig
+ (* Restructures a nested tree of arrow types into its args & returnType
+ * The parsetree contains: a => b => c => d, for printing purposes
+ * we restructure the tree into (a, b, c) and its returnType d *)
+ val arrowType: Parsetree.core_type ->
+ Parsetree.attributes *
+ (Parsetree.attributes * Asttypes.arg_label * Parsetree.core_type) list *
+ Parsetree.core_type
+
+ val functorType: Parsetree.module_type ->
+ (Parsetree.attributes * string Asttypes.loc * Parsetree.module_type option) list *
+ Parsetree.module_type
+
+ (* filters @bs out of the provided attributes *)
+ val processUncurriedAttribute: Parsetree.attributes -> bool * Parsetree.attributes
+
+ (* if ... else if ... else ... is represented as nested expressions: if ... else { if ... }
+ * The purpose of this function is to flatten nested ifs into one sequence.
+ * Basically compute: ([if, else if, else if, else if], else) *)
+ val collectIfExpressions:
+ Parsetree.expression ->
+ (Parsetree.expression * Parsetree.expression) list * Parsetree.expression option
+
+ val collectListExpressions:
+ Parsetree.expression -> (Parsetree.expression list * Parsetree.expression option)
+
+ type funParamKind =
+ | Parameter of {
+ attrs: Parsetree.attributes;
+ lbl: Asttypes.arg_label;
+ defaultExpr: Parsetree.expression option;
+ pat: Parsetree.pattern;
+ }
+ | NewTypes of {attrs: Parsetree.attributes; locs: string Asttypes.loc list}
+
+ val funExpr:
+ Parsetree.expression ->
+ Parsetree.attributes *
+ funParamKind list *
+ Parsetree.expression
+
+ (* example:
+ * `makeCoordinate({
+ * x: 1,
+ * y: 2,
+ * })`
+   * Notice how `({` and `})` "hug" or stick to each other *)
+ val isHuggableExpression: Parsetree.expression -> bool
+
+ val isHuggablePattern: Parsetree.pattern -> bool
+
+ val isHuggableRhs: Parsetree.expression -> bool
+
+ val operatorPrecedence: string -> int
+
+ val isUnaryExpression: Parsetree.expression -> bool
+ val isBinaryOperator: string -> bool
+ val isBinaryExpression: Parsetree.expression -> bool
+
+ val flattenableOperators: string -> string -> bool
+
+ val hasAttributes: Parsetree.attributes -> bool
+
+ val isArrayAccess: Parsetree.expression -> bool
+ val isTernaryExpr: Parsetree.expression -> bool
+
+ val collectTernaryParts: Parsetree.expression -> ((Parsetree.expression * Parsetree.expression) list * Parsetree.expression)
+
+ val parametersShouldHug:
+ funParamKind list -> bool
+
+ val filterTernaryAttributes: Parsetree.attributes -> Parsetree.attributes
+
+ val isJsxExpression: Parsetree.expression -> bool
+ val hasJsxAttribute: Parsetree.attributes -> bool
+
+ val shouldIndentBinaryExpr: Parsetree.expression -> bool
+ val shouldInlineRhsBinaryExpr: Parsetree.expression -> bool
+ val filterPrinteableAttributes: Parsetree.attributes -> Parsetree.attributes
+ val partitionPrinteableAttributes: Parsetree.attributes -> (Parsetree.attributes * Parsetree.attributes)
+
+ val requiresSpecialCallbackPrintingLastArg: (Asttypes.arg_label * Parsetree.expression) list -> bool
+ val requiresSpecialCallbackPrintingFirstArg: (Asttypes.arg_label * Parsetree.expression) list -> bool
+
+ val modExprApply : Parsetree.module_expr -> (
+ Parsetree.module_expr list * Parsetree.module_expr
+ )
+
+ val modExprFunctor : Parsetree.module_expr -> (
+ (Parsetree.attributes * string Asttypes.loc * Parsetree.module_type option) list *
+ Parsetree.module_expr
+ )
+
+ val splitGenTypeAttr : Parsetree.attributes -> (bool * Parsetree.attributes)
+
+ val collectPatternsFromListConstruct:
+ Parsetree.pattern list -> Parsetree.pattern ->
+ (Parsetree.pattern list * Parsetree.pattern)
+
+ val isBlockExpr : Parsetree.expression -> bool
+
+ val isTemplateLiteral: Parsetree.expression -> bool
+
+ val collectOrPatternChain:
+ Parsetree.pattern -> Parsetree.pattern list
+
+ val processBracesAttr : Parsetree.expression -> (Parsetree.attribute option * Parsetree.expression)
+
+ val filterParsingAttrs : Parsetree.attributes -> Parsetree.attributes
+
+ val isBracedExpr : Parsetree.expression -> bool
+
+ val isPipeExpr : Parsetree.expression -> bool
+
+ val extractValueDescriptionFromModExpr: Parsetree.module_expr -> Parsetree.value_description list
+
+ type jsImportScope =
+ | JsGlobalImport (* nothing *)
+ | JsModuleImport of string (* from "path" *)
+ | JsScopedImport of string list (* window.location *)
+
+ val classifyJsImport: Parsetree.value_description -> jsImportScope
+
+ (* (__x) => f(a, __x, c) -----> f(a, _, c) *)
+ val rewriteUnderscoreApply: Parsetree.expression -> Parsetree.expression
+
+ (* (__x) => f(a, __x, c) -----> f(a, _, c) *)
+ val isUnderscoreApplySugar: Parsetree.expression -> bool
+end = struct
+ open Parsetree
+
+  (* Flattens a nested arrow type a => b => c => d into its argument list
+   * ([a; b; c]) and return type d, carrying each arrow's attributes along.
+   * Collecting stops at an unlabelled arrow whose attributes are anything
+   * other than a lone @bs, so those attributes stay on that arrow. *)
+  let arrowType ct =
+    let rec process attrsBefore acc typ = match typ with
+    | {ptyp_desc = Ptyp_arrow (Nolabel as lbl, typ1, typ2); ptyp_attributes = []} ->
+      let arg = ([], lbl, typ1) in
+      process attrsBefore (arg::acc) typ2
+    | {ptyp_desc = Ptyp_arrow (Nolabel as lbl, typ1, typ2); ptyp_attributes = [({txt ="bs"}, _) ] as attrs} ->
+      let arg = (attrs, lbl, typ1) in
+      process attrsBefore (arg::acc) typ2
+    | {ptyp_desc = Ptyp_arrow (Nolabel, _typ1, _typ2); ptyp_attributes = _attrs} as returnType ->
+      let args = List.rev acc in
+      (attrsBefore, args, returnType)
+    | {ptyp_desc = Ptyp_arrow ((Labelled _ | Optional _) as lbl, typ1, typ2); ptyp_attributes = attrs} ->
+      let arg = (attrs, lbl, typ1) in
+      process attrsBefore (arg::acc) typ2
+    | typ ->
+      (attrsBefore, List.rev acc, typ)
+    in
+    begin match ct with
+    | {ptyp_desc = Ptyp_arrow (Nolabel, _typ1, _typ2); ptyp_attributes = attrs} as typ ->
+      process attrs [] {typ with ptyp_attributes = []}
+    | typ -> process [] [] typ
+    end
+
+  (* Flattens a nested functor type into its argument list and final return
+   * module type; each argument keeps the attributes of its arrow. *)
+  let functorType modtype =
+    let rec flatten args current = match current with
+    | {pmty_desc = Pmty_functor (lbl, argType, returnType); pmty_attributes = attrs} ->
+      flatten ((attrs, lbl, argType)::args) returnType
+    | final ->
+      (List.rev args, final)
+    in
+    flatten [] modtype
+
+  (* Detects and strips the @bs (uncurried) attribute.
+   * Returns whether it was present and the remaining attributes, with the
+   * relative order of the kept attributes preserved. *)
+  let processUncurriedAttribute attrs =
+    let isBs = function
+      | ({Location.txt = "bs"}, _) -> true
+      | _ -> false
+    in
+    let (bs, rest) = List.partition isBs attrs in
+    (bs <> [], rest)
+
+  (* Flattens nested if/else-if chains into ([(cond, branch); ...], else).
+   * The else component is None when the chain ends without a final else. *)
+  let collectIfExpressions expr =
+    let rec collect acc expr = match expr.pexp_desc with
+    | Pexp_ifthenelse (ifExpr, thenExpr, Some elseExpr) ->
+      collect ((ifExpr, thenExpr)::acc) elseExpr
+    | Pexp_ifthenelse (ifExpr, thenExpr, (None as elseExpr)) ->
+      let ifs = List.rev ((ifExpr, thenExpr)::acc) in
+      (ifs, elseExpr)
+    | _ ->
+      (List.rev acc, Some expr)
+    in
+    collect [] expr
+
+  (* Flattens a cons-chain (a :: b :: rest) into ([a; b], spread): the spread
+   * is None for a proper []-terminated list, Some expr for `...expr`. *)
+  let collectListExpressions expr =
+    let rec collect acc expr = match expr.pexp_desc with
+    | Pexp_construct ({txt = Longident.Lident "[]"}, _) ->
+      (List.rev acc, None)
+    | Pexp_construct (
+        {txt = Longident.Lident "::"},
+        Some {pexp_desc = Pexp_tuple (hd::[tail])}
+      ) ->
+      collect (hd::acc) tail
+    | _ ->
+      (List.rev acc, Some expr)
+    in
+    collect [] expr
+
+  (* (__x) => f(a, __x, c) -----> f(a, _, c)
+   * Undoes the parser's desugaring of the `_` placeholder argument: replaces
+   * every reference to the synthetic __x binder inside the application with
+   * the identifier `_` again. Leaves any other expression untouched. *)
+  let rewriteUnderscoreApply expr =
+    match expr.pexp_desc with
+    | Pexp_fun (
+        Nolabel,
+        None,
+        {ppat_desc = Ppat_var {txt="__x"}},
+        ({pexp_desc = Pexp_apply (callExpr, args)} as e)
+      ) ->
+      let newArgs = List.map (fun arg ->
+        match arg with
+        | (
+            lbl,
+            ({pexp_desc = Pexp_ident ({txt = Longident.Lident "__x"} as lid)} as argExpr)
+          ) ->
+          (lbl, {argExpr with pexp_desc = Pexp_ident ({lid with txt = Longident.Lident "_"})})
+        | arg -> arg
+      ) args in
+      {e with pexp_desc = Pexp_apply (callExpr, newArgs)}
+    | _ -> expr
+
+  (* One parameter of a function literal: either an ordinary (possibly
+   * labelled/optional, possibly defaulted) pattern parameter, or a run of
+   * locally abstract types: (type t, type u, ...). *)
+  type funParamKind =
+    | Parameter of {
+        attrs: Parsetree.attributes;
+        lbl: Asttypes.arg_label;
+        defaultExpr: Parsetree.expression option;
+        pat: Parsetree.pattern;
+      }
+    | NewTypes of {attrs: Parsetree.attributes; locs: string Asttypes.loc list}
+
+  (* Flattens nested Pexp_fun/Pexp_newtype nodes into (attrs, params, body).
+   * Collecting stops at a Pexp_fun carrying attributes other than a lone @bs
+   * (those attributes must stay on that arrow), and the underscore-apply
+   * sugar case is handed off to rewriteUnderscoreApply unchanged. *)
+  let funExpr expr =
+    (* Turns (type t, type u, type z) into "type t u z" *)
+    let rec collectNewTypes acc returnExpr =
+      match returnExpr with
+      | {pexp_desc = Pexp_newtype (stringLoc, returnExpr); pexp_attributes = []} ->
+        collectNewTypes (stringLoc::acc) returnExpr
+      | returnExpr ->
+        (List.rev acc, returnExpr)
+    in
+    let rec collect attrsBefore acc expr = match expr with
+    | {pexp_desc = Pexp_fun (
+        Nolabel,
+        None,
+        {ppat_desc = Ppat_var {txt="__x"}},
+        {pexp_desc = Pexp_apply _}
+      )} ->
+      (attrsBefore, List.rev acc, rewriteUnderscoreApply expr)
+    | {pexp_desc = Pexp_fun (lbl, defaultExpr, pattern, returnExpr); pexp_attributes = []} ->
+      let parameter = Parameter {
+        attrs = [];
+        lbl = lbl;
+        defaultExpr = defaultExpr;
+        pat = pattern;
+      } in
+      collect attrsBefore (parameter::acc) returnExpr
+    | {pexp_desc = Pexp_newtype (stringLoc, rest); pexp_attributes = attrs} ->
+      let (stringLocs, returnExpr) = collectNewTypes [stringLoc] rest in
+      let param = NewTypes {attrs; locs = stringLocs} in
+      collect attrsBefore (param::acc) returnExpr
+    | {pexp_desc = Pexp_fun (lbl, defaultExpr, pattern, returnExpr); pexp_attributes = [({txt = "bs"}, _)] as attrs} ->
+      let parameter = Parameter {
+        attrs = attrs;
+        lbl = lbl;
+        defaultExpr = defaultExpr;
+        pat = pattern;
+      } in
+      collect attrsBefore (parameter::acc) returnExpr
+    | {
+        pexp_desc = Pexp_fun ((Labelled _ | Optional _) as lbl, defaultExpr, pattern, returnExpr);
+        pexp_attributes = attrs
+      } ->
+      let parameter = Parameter {
+        attrs = attrs;
+        lbl = lbl;
+        defaultExpr = defaultExpr;
+        pat = pattern;
+      } in
+      collect attrsBefore (parameter::acc) returnExpr
+    | expr ->
+      (attrsBefore, List.rev acc, expr)
+    in
+    begin match expr with
+    | {pexp_desc = Pexp_fun (Nolabel, _defaultExpr, _pattern, _returnExpr); pexp_attributes = attrs} as expr ->
+      collect attrs [] {expr with pexp_attributes = []}
+    | expr -> collect [] [] expr
+    end
+
+  (* Splits off a leading ns.braces attribute (the parser's marker for an
+   * explicitly braced expression); returns it with the stripped expression. *)
+  let processBracesAttr expr =
+    match expr.pexp_attributes with
+    | (({txt = "ns.braces"}, _) as attr)::attrs ->
+      (Some attr, {expr with pexp_attributes = attrs})
+    | _ ->
+      (None, expr)
+
+  (* Drops the parser's internal bookkeeping attributes so they are not
+   * printed back out. *)
+  let filterParsingAttrs attrs =
+    List.filter (fun attr ->
+      match attr with
+      | ({Location.txt = ("ns.ternary" | "ns.braces" | "bs" | "ns.namedArgLoc")}, _) -> false
+      | _ -> true
+    ) attrs
+
+  (* true for expression forms that print as a {...} block: let, open,
+   * letmodule, letexception and sequences. *)
+  let isBlockExpr expr =
+    match expr.pexp_desc with
+    | Pexp_letmodule _
+    | Pexp_letexception _
+    | Pexp_let _
+    | Pexp_open _
+    | Pexp_sequence _ -> true
+    | _ -> false
+
+  (* An expression the user explicitly wrapped in braces carries the
+   * ns.braces attribute — detected via processBracesAttr. *)
+  let isBracedExpr expr =
+    let (bracesAttr, _) = processBracesAttr expr in
+    bracesAttr <> None
+
+  (* true for expressions whose delimiters may "hug" the surrounding call
+   * parens, e.g. f({...}) — arrays, tuples, lists, records, bs.obj,
+   * block expressions and explicitly braced expressions. *)
+  let isHuggableExpression expr =
+    match expr.pexp_desc with
+    | Pexp_array _
+    | Pexp_tuple _
+    | Pexp_construct ({txt = Longident.Lident ("::" | "[]")}, _)
+    | Pexp_extension ({txt = "bs.obj"}, _)
+    | Pexp_record _ -> true
+    | _ when isBlockExpr expr -> true
+    | _ when isBracedExpr expr -> true
+    | _ -> false
+
+ let isHuggableRhs expr =
+ match expr.pexp_desc with
+ | Pexp_array _
+ | Pexp_tuple _
+ | Pexp_construct ({txt = Longident.Lident ("::" | "[]")}, _)
+ | Pexp_extension ({txt = "bs.obj"}, _)
+ | Pexp_record _ -> true
+ | _ when isBracedExpr expr -> true
+ | _ -> false
+
+ let isHuggablePattern pattern =
+ match pattern.ppat_desc with
+ | Ppat_array _
+ | Ppat_tuple _
+ | Ppat_record _
+ | Ppat_construct _ -> true
+ | _ -> false
+
+ (* Printer-side precedence table: higher number = binds tighter;
+  * 0 means "not a known operator". *)
+ let operatorPrecedence operator = match operator with
+ | ":=" -> 1
+ | "||" -> 2
+ | "&&" -> 3
+ | "=" | "==" | "<" | ">" | "!=" | "<>" | "!==" | "<=" | ">=" | "|>" -> 4
+ | "+" | "+." | "-" | "-." | "^" -> 5
+ | "*" | "*." | "/" | "/." -> 6
+ | "**" -> 7
+ | "#" | "##" | "|." -> 8
+ | _ -> 0
+
+ (* Recognized unary (prefix) operators. *)
+ let isUnaryOperator operator = match operator with
+ | "~+" | "~+." | "~-" | "~-." | "not" -> true
+ | _ -> false
+
+ (* A unary operator applied to exactly one unlabelled argument. *)
+ let isUnaryExpression expr = match expr.pexp_desc with
+ | Pexp_apply(
+ {pexp_desc = Pexp_ident {txt = Longident.Lident operator}},
+ [Nolabel, _arg]
+ ) when isUnaryOperator operator -> true
+ | _ -> false
+
+ (* Recognized infix operators (kept in sync with operatorPrecedence). *)
+ let isBinaryOperator operator = match operator with
+ | ":="
+ | "||"
+ | "&&"
+ | "=" | "==" | "<" | ">" | "!=" | "!==" | "<=" | ">=" | "|>"
+ | "+" | "+." | "-" | "-." | "^"
+ | "*" | "*." | "/" | "/."
+ | "**"
+ | "|." | "<>" -> true
+ | _ -> false
+
+ (* A binary operator applied to exactly two unlabelled operands. *)
+ let isBinaryExpression expr = match expr.pexp_desc with
+ | Pexp_apply(
+ {pexp_desc = Pexp_ident {txt = Longident.Lident operator}},
+ [(Nolabel, _operand1); (Nolabel, _operand2)]
+ ) when isBinaryOperator operator -> true
+ | _ -> false
+
+ (* Equality-family operators; chains of these are never flattened. *)
+ let isEqualityOperator operator = match operator with
+ | "=" | "==" | "<>" | "!=" -> true
+ | _ -> false
+
+ (* Two operators of equal precedence may print on one flattened line,
+  * except equality operators (a == b == c stays parenthesized). *)
+ let flattenableOperators parentOperator childOperator =
+ let precParent = operatorPrecedence parentOperator in
+ let precChild = operatorPrecedence childOperator in
+ if precParent == precChild then
+ not (
+ isEqualityOperator parentOperator &&
+ isEqualityOperator childOperator
+ )
+ else
+ false
+
+ (* True when there is at least one attribute that will actually be
+  * printed (internal parser markers are ignored). *)
+ let hasAttributes attrs =
+ List.exists (fun attr -> match attr with
+ | ({Location.txt = "bs" | "ns.ternary" | "ns.braces"}, _) -> false
+ | _ -> true
+ ) attrs
+
+ (* Matches the desugared form of arr[i], i.e. Array.get arr i. *)
+ let isArrayAccess expr = match expr.pexp_desc with
+ | Pexp_apply (
+ {pexp_desc = Pexp_ident {txt = Longident.Ldot (Lident "Array", "get")}},
+ [Nolabel, _parentExpr; Nolabel, _memberExpr]
+ ) -> true
+ | _ -> false
+
+ (* Scan the attribute list for the "ns.ternary" parser marker. *)
+ let rec hasTernaryAttribute attrs =
+ match attrs with
+ | [] -> false
+ | ({Location.txt="ns.ternary"},_)::_ -> true
+ | _::attrs -> hasTernaryAttribute attrs
+
+ (* An if/then/else that the parser tagged as a ternary (a ? b : c). *)
+ let isTernaryExpr expr = match expr with
+ | {
+ pexp_attributes = attrs;
+ pexp_desc = Pexp_ifthenelse _
+ } when hasTernaryAttribute attrs -> true
+ | _ -> false
+
+ (* Flatten a chain of nested ternaries into
+  * ([(condition, consequent); ...], final-alternate). *)
+ let collectTernaryParts expr =
+ let rec collect acc expr = match expr with
+ | {
+ pexp_attributes = attrs;
+ pexp_desc = Pexp_ifthenelse (condition, consequent, Some(alternate))
+ } when hasTernaryAttribute attrs -> collect ((condition, consequent)::acc) alternate
+ | alternate -> (List.rev acc, alternate)
+ in
+ collect [] expr
+
+ (* A single plain (no attrs/label/default) huggable pattern parameter
+  * lets the whole parameter list hug the parens. *)
+ let parametersShouldHug parameters = match parameters with
+ | [Parameter {
+ attrs = [];
+ lbl = Asttypes.Nolabel;
+ defaultExpr = None;
+ pat = pat
+ }] when isHuggablePattern pat -> true
+ | _ -> false
+
+ (* Remove the internal "ns.ternary" marker before printing. *)
+ let filterTernaryAttributes attrs =
+ List.filter (fun attr -> match attr with
+ |({Location.txt="ns.ternary"},_) -> false
+ | _ -> true
+ ) attrs
+
+ (* An application carrying the "JSX" attribute is a JSX element. *)
+ let isJsxExpression expr =
+ let rec loop attrs =
+ match attrs with
+ | [] -> false
+ | ({Location.txt = "JSX"}, _)::_ -> true
+ | _::attrs -> loop attrs
+ in
+ match expr.pexp_desc with
+ | Pexp_apply _ ->
+ loop expr.Parsetree.pexp_attributes
+ | _ -> false
+
+ (* Only checks the head of the attribute list (cheap fast-path). *)
+ let hasJsxAttribute attributes = match attributes with
+ | ({Location.txt = "JSX"},_)::_ -> true
+ | _ -> false
+
+ (* Decide whether a binary expression gets an indented continuation when
+  * it breaks: equality chains, := assignments, and lhs sub-expressions of
+  * different precedence all indent. *)
+ let shouldIndentBinaryExpr expr =
+ let samePrecedenceSubExpression operator subExpression =
+ match subExpression with
+ | {pexp_desc = Pexp_apply (
+ {pexp_desc = Pexp_ident {txt = Longident.Lident subOperator}},
+ [Nolabel, _lhs; Nolabel, _rhs]
+ )} when isBinaryOperator subOperator ->
+ flattenableOperators operator subOperator
+ | _ -> true
+ in
+ match expr with
+ | {pexp_desc = Pexp_apply (
+ {pexp_desc = Pexp_ident {txt = Longident.Lident operator}},
+ [Nolabel, lhs; Nolabel, _rhs]
+ )} when isBinaryOperator operator ->
+ isEqualityOperator operator ||
+ not (samePrecedenceSubExpression operator lhs) ||
+ operator = ":="
+ | _ -> false
+
+ (* Right-hand sides that stay on the same line as the operator
+  * (constants, blocks, control flow, literals). *)
+ let shouldInlineRhsBinaryExpr rhs = match rhs.pexp_desc with
+ | Parsetree.Pexp_constant _
+ | Pexp_let _
+ | Pexp_letmodule _
+ | Pexp_letexception _
+ | Pexp_sequence _
+ | Pexp_open _
+ | Pexp_ifthenelse _
+ | Pexp_for _
+ | Pexp_while _
+ | Pexp_try _
+ | Pexp_array _
+ | Pexp_record _ -> true
+ | _ -> false
+
+ (* Keep only attributes that should appear in output ("bs" and
+  * "ns.ternary" are internal markers). *)
+ let filterPrinteableAttributes attrs =
+ List.filter (fun attr -> match attr with
+ | ({Location.txt="bs" | "ns.ternary"}, _) -> false
+ | _ -> true
+ ) attrs
+
+ (* Like filterPrinteableAttributes but returns (internal, printable). *)
+ let partitionPrinteableAttributes attrs =
+ List.partition (fun attr -> match attr with
+ | ({Location.txt="bs" | "ns.ternary"}, _) -> false
+ | _ -> true
+ ) attrs
+
+ (* True when ONLY the last argument is a function/newtype literal, so the
+  * callback can be printed trailing-style: foo(x, y => ...). *)
+ let requiresSpecialCallbackPrintingLastArg args =
+ let rec loop args = match args with
+ | [] -> false
+ | [(_, {pexp_desc = Pexp_fun _ | Pexp_newtype _})] -> true
+ | (_, {pexp_desc = Pexp_fun _ | Pexp_newtype _})::_ -> false
+ | _::rest -> loop rest
+ in
+ loop args
+
+ (* True when the FIRST argument is a callback and no other argument is:
+  * foo(y => ..., x). A single callback-only argument returns false. *)
+ let requiresSpecialCallbackPrintingFirstArg args =
+ let rec loop args = match args with
+ | [] -> true
+ | (_, {pexp_desc = Pexp_fun _ | Pexp_newtype _})::_ -> false
+ | _::rest -> loop rest
+ in
+ match args with
+ | [(_, {pexp_desc = Pexp_fun _ | Pexp_newtype _})] -> false
+ | (_, {pexp_desc = Pexp_fun _ | Pexp_newtype _})::rest -> loop rest
+ | _ -> false
+
+ (* Unroll Pmod_apply chains: F(A)(B) -> ([A; B], F).
+  * Note the argument list is returned in application order. *)
+ let modExprApply modExpr =
+ let rec loop acc modExpr = match modExpr with
+ | {pmod_desc = Pmod_apply (next, arg)} ->
+ loop (arg::acc) next
+ | _ -> (acc, modExpr)
+ in
+ loop [] modExpr
+
+ (* Unroll a functor into its parameter list and final body:
+  * (A) => (B) => M  ->  ([(attrs,A,ty); (attrs,B,ty)], M). *)
+ let modExprFunctor modExpr =
+ let rec loop acc modExpr = match modExpr with
+ | {pmod_desc = Pmod_functor (lbl, modType, returnModExpr); pmod_attributes = attrs} ->
+ let param = (attrs, lbl, modType) in
+ loop (param::acc) returnModExpr
+ | returnModExpr ->
+ (List.rev acc, returnModExpr)
+ in
+ loop [] modExpr
+
+ (* Split off a leading @genType attribute: (present?, remaining attrs).
+  * Only checks the head of the list. *)
+ let splitGenTypeAttr attrs =
+ match attrs with
+ | ({Location.txt = "genType"}, _)::attrs -> (true, attrs)
+ | attrs -> (false, attrs)
+
+ (* Unroll a cons-list pattern a::b::tail into ([a; b], tail);
+  * the final non-cons pattern is returned as the spread/tail. *)
+ let rec collectPatternsFromListConstruct acc pattern =
+ let open Parsetree in
+ match pattern.ppat_desc with
+ | Ppat_construct(
+ {txt = Longident.Lident "::"},
+ Some {ppat_desc=Ppat_tuple (pat::rest::[])}
+ ) ->
+ collectPatternsFromListConstruct (pat::acc) rest
+ | _ -> List.rev acc, pattern
+
+ (* A template literal is a string constant with a delimiter (the
+  * Pconst_string second component is Some _), or a ^-concatenation that
+  * (transitively) contains one. *)
+ let rec isTemplateLiteral expr =
+ let isPexpConstantString expr = match expr.pexp_desc with
+ | Pexp_constant (Pconst_string (_, Some _)) -> true
+ | _ -> false
+ in
+ match expr.pexp_desc with
+ | Pexp_apply (
+ {pexp_desc = Pexp_ident {txt = Longident.Lident "^"}},
+ [Nolabel, arg1; Nolabel, arg2]
+ ) when not (isPexpConstantString arg1 && isPexpConstantString arg2) ->
+ isTemplateLiteral arg1 || isTemplateLiteral arg2
+ | Pexp_constant (Pconst_string (_, Some _)) -> true
+ | _ -> false
+
+ (* Blue | Red | Green -> [Blue; Red; Green] *)
+ let collectOrPatternChain pat =
+ let rec loop pattern chain =
+ match pattern.ppat_desc with
+ | Ppat_or (left, right) -> loop left (right::chain)
+ | _ -> pattern::chain
+ in
+ loop pat []
+
+ (* Application of the pipe operators |. or |> to two operands. *)
+ let isPipeExpr expr = match expr.pexp_desc with
+ | Pexp_apply(
+ {pexp_desc = Pexp_ident {txt = Longident.Lident ("|." | "|>") }},
+ [(Nolabel, _operand1); (Nolabel, _operand2)]
+ ) -> true
+ | _ -> false
+
+ (* Collect all `external` (Pstr_primitive) declarations from a
+  * structure-bodied module expression; [] for any other module expr. *)
+ let extractValueDescriptionFromModExpr modExpr =
+ let rec loop structure acc =
+ match structure with
+ | [] -> List.rev acc
+ | structureItem::structure ->
+ begin match structureItem.Parsetree.pstr_desc with
+ | Pstr_primitive vd -> loop structure (vd::acc)
+ | _ -> loop structure acc
+ end
+ in
+ match modExpr.pmod_desc with
+ | Pmod_structure structure -> loop structure []
+ | _ -> []
+
+ (* Where a JS external binds from, derived from its attributes. *)
+ type jsImportScope =
+ | JsGlobalImport (* nothing *)
+ | JsModuleImport of string (* from "path" *)
+ | JsScopedImport of string list (* window.location *)
+
+ (* Classify an external's JS import from its attributes: the first
+  * matching bs.scope / genType.import attribute wins; anything else
+  * falls through to JsGlobalImport. *)
+ let classifyJsImport valueDescription =
+ let rec loop attrs =
+ let open Parsetree in
+ match attrs with
+ | [] -> JsGlobalImport
+ | ({Location.txt = "bs.scope"}, PStr [{pstr_desc = Pstr_eval ({pexp_desc = Pexp_constant (Pconst_string (s, _))}, _)}])::_ ->
+ JsScopedImport [s]
+ | ({Location.txt = "genType.import"}, PStr [{pstr_desc = Pstr_eval ({pexp_desc = Pexp_constant (Pconst_string (s, _))}, _)}])::_ ->
+ JsModuleImport s
+ | ({Location.txt = "bs.scope"}, PStr [{pstr_desc = Pstr_eval ({pexp_desc = Pexp_tuple exprs}, _)}])::_ ->
+ (* tuple form: @bs.scope(("a", "b")); non-string members are skipped *)
+ let scopes = List.fold_left (fun acc curr ->
+ match curr.Parsetree.pexp_desc with
+ | Pexp_constant (Pconst_string (s, _)) -> s::acc
+ | _ -> acc
+ ) [] exprs
+ in
+ JsScopedImport (List.rev scopes)
+ | _::attrs ->
+ loop attrs
+ in
+ loop valueDescription.pval_attributes
+
+ (* Matches the desugared underscore-apply sugar: __x => f(__x, ...),
+  * i.e. a Nolabel fun binding "__x" whose body is an application. *)
+ let isUnderscoreApplySugar expr =
+ match expr.pexp_desc with
+ | Pexp_fun (
+ Nolabel,
+ None,
+ {ppat_desc = Ppat_var {txt="__x"}},
+ {pexp_desc = Pexp_apply _}
+ ) -> true
+ | _ -> false
+end
+
+(* Decides, per syntactic position, whether an expression must be printed
+ * parenthesized, wrapped in the braces the user originally wrote
+ * (Braced loc, via the "ns.braces" attribute), or bare (Nothing). *)
+module Parens: sig
+ type kind = Parenthesized | Braced of Location.t | Nothing
+
+ val expr: Parsetree.expression -> kind
+ val structureExpr: Parsetree.expression -> kind
+
+ val unaryExprOperand: Parsetree.expression -> kind
+
+ val binaryExprOperand: isLhs:bool -> Parsetree.expression -> kind
+ val subBinaryExprOperand: string -> string -> bool
+ val rhsBinaryExprOperand: string -> Parsetree.expression -> bool
+ val flattenOperandRhs: string -> Parsetree.expression -> bool
+
+ val lazyOrAssertExprRhs: Parsetree.expression -> kind
+
+ val fieldExpr: Parsetree.expression -> kind
+
+ val setFieldExprRhs: Parsetree.expression -> kind
+
+ val ternaryOperand: Parsetree.expression -> kind
+
+ val jsxPropExpr: Parsetree.expression -> kind
+ val jsxChildExpr: Parsetree.expression -> kind
+
+ val binaryExpr: Parsetree.expression -> kind
+ val modTypeFunctorReturn: Parsetree.module_type -> bool
+ val modTypeWithOperand: Parsetree.module_type -> bool
+ val modExprFunctorConstraint: Parsetree.module_type -> bool
+
+ val bracedExpr: Parsetree.expression -> bool
+ val callExpr: Parsetree.expression -> kind
+
+ val includeModExpr : Parsetree.module_expr -> bool
+end = struct
+ type kind = Parenthesized | Braced of Location.t | Nothing
+
+ (* Generic expression position: braces win; first-class-module
+  * constraints are already delimited; other constraints need parens. *)
+ let expr expr =
+ let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
+ match optBraces with
+ | Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
+ | _ ->
+ begin match expr with
+ | {Parsetree.pexp_desc = Pexp_constraint (
+ {pexp_desc = Pexp_pack _},
+ {ptyp_desc = Ptyp_package _}
+ )} -> Nothing
+ | {pexp_desc = Pexp_constraint _ } -> Parenthesized
+ | _ -> Nothing
+ end
+
+ (* Callee position of an application: attributes, unary/binary
+  * operators, and statement-like expressions all force parens. *)
+ let callExpr expr =
+ let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
+ match optBraces with
+ | Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
+ | _ ->
+ begin match expr with
+ | {Parsetree.pexp_attributes = attrs} when
+ begin match ParsetreeViewer.filterParsingAttrs attrs with
+ | _::_ -> true
+ | [] -> false
+ end
+ -> Parenthesized
+ | _ when ParsetreeViewer.isUnaryExpression expr || ParsetreeViewer.isBinaryExpression expr -> Parenthesized
+ | {Parsetree.pexp_desc = Pexp_constraint (
+ {pexp_desc = Pexp_pack _},
+ {ptyp_desc = Ptyp_package _}
+ )} -> Nothing
+ | {pexp_desc = Pexp_fun _}
+ when ParsetreeViewer.isUnderscoreApplySugar expr -> Nothing
+ | {pexp_desc =
+ Pexp_lazy _
+ | Pexp_assert _
+ | Pexp_fun _
+ | Pexp_newtype _
+ | Pexp_function _
+ | Pexp_constraint _
+ | Pexp_setfield _
+ | Pexp_match _
+ | Pexp_try _
+ | Pexp_while _
+ | Pexp_for _
+ | Pexp_ifthenelse _
+ } -> Parenthesized
+ | _ -> Nothing
+ end
+
+ (* Top-level structure expression (Pstr_eval): printable attributes
+  * force parens, except on JSX. *)
+ let structureExpr expr =
+ let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
+ match optBraces with
+ | Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
+ | None ->
+ begin match expr with
+ | _ when ParsetreeViewer.hasAttributes expr.pexp_attributes &&
+ not (ParsetreeViewer.isJsxExpression expr) -> Parenthesized
+ | {Parsetree.pexp_desc = Pexp_constraint (
+ {pexp_desc = Pexp_pack _},
+ {ptyp_desc = Ptyp_package _}
+ )} -> Nothing
+ | {pexp_desc = Pexp_constraint _ } -> Parenthesized
+ | _ -> Nothing
+ end
+
+ (* Operand of a unary operator. *)
+ let unaryExprOperand expr =
+ let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
+ match optBraces with
+ | Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
+ | None ->
+ begin match expr with
+ | {Parsetree.pexp_attributes = attrs} when
+ begin match ParsetreeViewer.filterParsingAttrs attrs with
+ | _::_ -> true
+ | [] -> false
+ end
+ -> Parenthesized
+ | expr when
+ ParsetreeViewer.isUnaryExpression expr ||
+ ParsetreeViewer.isBinaryExpression expr
+ -> Parenthesized
+ | {pexp_desc = Pexp_constraint (
+ {pexp_desc = Pexp_pack _},
+ {ptyp_desc = Ptyp_package _}
+ )} -> Nothing
+ | {pexp_desc = Pexp_fun _}
+ when ParsetreeViewer.isUnderscoreApplySugar expr -> Nothing
+ | {pexp_desc =
+ Pexp_lazy _
+ | Pexp_assert _
+ | Pexp_fun _
+ | Pexp_newtype _
+ | Pexp_function _
+ | Pexp_constraint _
+ | Pexp_setfield _
+ | Pexp_extension _ (* readability? maybe remove *)
+ | Pexp_match _
+ | Pexp_try _
+ | Pexp_while _
+ | Pexp_for _
+ | Pexp_ifthenelse _
+ } -> Parenthesized
+ | _ -> Nothing
+ end
+
+ (* Operand of a binary operator; lazy/assert only need parens on the
+  * left-hand side. *)
+ let binaryExprOperand ~isLhs expr =
+ let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
+ match optBraces with
+ | Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
+ | None ->
+ begin match expr with
+ | {Parsetree.pexp_desc = Pexp_constraint (
+ {pexp_desc = Pexp_pack _},
+ {ptyp_desc = Ptyp_package _}
+ )} -> Nothing
+ | {pexp_desc = Pexp_fun _}
+ when ParsetreeViewer.isUnderscoreApplySugar expr -> Nothing
+ | {pexp_desc = Pexp_constraint _ | Pexp_fun _ | Pexp_function _ | Pexp_newtype _} -> Parenthesized
+ | expr when ParsetreeViewer.isBinaryExpression expr -> Parenthesized
+ | expr when ParsetreeViewer.isTernaryExpr expr -> Parenthesized
+ | {pexp_desc =
+ Pexp_lazy _
+ | Pexp_assert _
+ } when isLhs -> Parenthesized
+ | _ -> Nothing
+ end
+
+ (* Parenthesize a nested binary operand when precedence demands it, or
+  * when mixing && under || (for readability). *)
+ let subBinaryExprOperand parentOperator childOperator =
+ let precParent = ParsetreeViewer.operatorPrecedence parentOperator in
+ let precChild = ParsetreeViewer.operatorPrecedence childOperator in
+ precParent > precChild ||
+ (precParent == precChild &&
+ not (ParsetreeViewer.flattenableOperators parentOperator childOperator)) ||
+ (* a && b || c, add parens to (a && b) for readability, who knows the difference by heart… *)
+ (parentOperator = "||" && childOperator = "&&")
+
+ (* Rhs that is itself an attribute-free binary expr of equal precedence. *)
+ let rhsBinaryExprOperand parentOperator rhs =
+ match rhs.Parsetree.pexp_desc with
+ | Parsetree.Pexp_apply(
+ {pexp_attributes = [];
+ pexp_desc = Pexp_ident {txt = Longident.Lident operator}},
+ [_, _left; _, _right]
+ ) when ParsetreeViewer.isBinaryOperator operator ->
+ let precParent = ParsetreeViewer.operatorPrecedence parentOperator in
+ let precChild = ParsetreeViewer.operatorPrecedence operator in
+ precParent == precChild
+ | _ -> false
+
+ (* Whether a binary rhs must be parenthesized when flattening a chain. *)
+ let flattenOperandRhs parentOperator rhs =
+ match rhs.Parsetree.pexp_desc with
+ | Parsetree.Pexp_apply(
+ {pexp_desc = Pexp_ident {txt = Longident.Lident operator}},
+ [_, _left; _, _right]
+ ) when ParsetreeViewer.isBinaryOperator operator ->
+ let precParent = ParsetreeViewer.operatorPrecedence parentOperator in
+ let precChild = ParsetreeViewer.operatorPrecedence operator in
+ precParent >= precChild || rhs.pexp_attributes <> []
+ | Pexp_constraint (
+ {pexp_desc = Pexp_pack _},
+ {ptyp_desc = Ptyp_package _}
+ ) -> false
+ | Pexp_fun _ when ParsetreeViewer.isUnderscoreApplySugar rhs -> false
+ | Pexp_fun _
+ | Pexp_newtype _
+ | Pexp_setfield _
+ | Pexp_constraint _ -> true
+ | _ when ParsetreeViewer.isTernaryExpr rhs -> true
+ | _ -> false
+
+ (* Argument of lazy/assert. *)
+ let lazyOrAssertExprRhs expr =
+ let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
+ match optBraces with
+ | Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
+ | None ->
+ begin match expr with
+ | {Parsetree.pexp_attributes = attrs} when
+ begin match ParsetreeViewer.filterParsingAttrs attrs with
+ | _::_ -> true
+ | [] -> false
+ end
+ -> Parenthesized
+ | expr when ParsetreeViewer.isBinaryExpression expr -> Parenthesized
+ | {pexp_desc = Pexp_constraint (
+ {pexp_desc = Pexp_pack _},
+ {ptyp_desc = Ptyp_package _}
+ )} -> Nothing
+ | {pexp_desc = Pexp_fun _}
+ when ParsetreeViewer.isUnderscoreApplySugar expr -> Nothing
+ | {pexp_desc =
+ Pexp_lazy _
+ | Pexp_assert _
+ | Pexp_fun _
+ | Pexp_newtype _
+ | Pexp_function _
+ | Pexp_constraint _
+ | Pexp_setfield _
+ | Pexp_match _
+ | Pexp_try _
+ | Pexp_while _
+ | Pexp_for _
+ | Pexp_ifthenelse _
+ } -> Parenthesized
+ | _ -> Nothing
+ end
+
+ (* Numeric literal whose textual form starts with '-'. *)
+ let isNegativeConstant constant =
+ let isNeg txt =
+ let len = String.length txt in
+ len > 0 && (String.get [@doesNotRaise]) txt 0 = '-'
+ in
+ match constant with
+ | Parsetree.Pconst_integer (i, _) | Pconst_float (i, _) when isNeg i -> true
+ | _ -> false
+
+ (* Parent expression of a field access (expr.field); negative constants
+  * also need parens: (-1).foo vs -1.foo. *)
+ let fieldExpr expr =
+ let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
+ match optBraces with
+ | Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
+ | None ->
+ begin match expr with
+ | {Parsetree.pexp_attributes = attrs} when
+ begin match ParsetreeViewer.filterParsingAttrs attrs with
+ | _::_ -> true
+ | [] -> false
+ end
+ -> Parenthesized
+ | expr when
+ ParsetreeViewer.isBinaryExpression expr ||
+ ParsetreeViewer.isUnaryExpression expr
+ -> Parenthesized
+ | {pexp_desc = Pexp_constraint (
+ {pexp_desc = Pexp_pack _},
+ {ptyp_desc = Ptyp_package _}
+ )} -> Nothing
+ | {pexp_desc = Pexp_constant c } when isNegativeConstant c -> Parenthesized
+ | {pexp_desc = Pexp_fun _}
+ when ParsetreeViewer.isUnderscoreApplySugar expr -> Nothing
+ | {pexp_desc =
+ Pexp_lazy _
+ | Pexp_assert _
+ | Pexp_extension _ (* %extension.x vs (%extension).x *)
+ | Pexp_fun _
+ | Pexp_newtype _
+ | Pexp_function _
+ | Pexp_constraint _
+ | Pexp_setfield _
+ | Pexp_match _
+ | Pexp_try _
+ | Pexp_while _
+ | Pexp_for _
+ | Pexp_ifthenelse _
+ } -> Parenthesized
+ | _ -> Nothing
+ end
+
+ (* Rhs of expr.field = rhs. *)
+ let setFieldExprRhs expr =
+ let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
+ match optBraces with
+ | Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
+ | None ->
+ begin match expr with
+ | {Parsetree.pexp_desc = Pexp_constraint (
+ {pexp_desc = Pexp_pack _},
+ {ptyp_desc = Ptyp_package _}
+ )} -> Nothing
+ | {pexp_desc = Pexp_constraint _ } -> Parenthesized
+ | _ -> Nothing
+ end
+
+ (* Branch of a ternary; arrows whose return is constrained need parens. *)
+ let ternaryOperand expr =
+ let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
+ match optBraces with
+ | Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
+ | None ->
+ begin match expr with
+ | {Parsetree.pexp_desc = Pexp_constraint (
+ {pexp_desc = Pexp_pack _},
+ {ptyp_desc = Ptyp_package _}
+ )} -> Nothing
+ | {pexp_desc = Pexp_constraint _ } -> Parenthesized
+ | {pexp_desc = Pexp_fun _ | Pexp_newtype _} ->
+ let (_attrsOnArrow, _parameters, returnExpr) = ParsetreeViewer.funExpr expr in
+ begin match returnExpr.pexp_desc with
+ | Pexp_constraint _ -> Parenthesized
+ | _ -> Nothing
+ end
+ | _ -> Nothing
+ end
+
+ let startsWithMinus txt =
+ let len = String.length txt in
+ if len == 0 then
+ false
+ else
+ let s = (String.get [@doesNotRaise]) txt 0 in
+ s = '-'
+
+ (* Value of a JSX prop: statement-like exprs never need wrapping; simple
+  * attribute-free literals/idents print bare; everything else gets parens. *)
+ let jsxPropExpr expr =
+ match expr.Parsetree.pexp_desc with
+ | Parsetree.Pexp_let _
+ | Pexp_sequence _
+ | Pexp_letexception _
+ | Pexp_letmodule _
+ | Pexp_open _ -> Nothing
+ | _ ->
+ let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
+ begin match optBraces with
+ | Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
+ | None ->
+ begin match expr with
+ | {Parsetree.pexp_desc =
+ Pexp_constant (Pconst_integer (x, _) | Pconst_float (x, _));
+ pexp_attributes = []}
+ when startsWithMinus x -> Parenthesized
+ | {Parsetree.pexp_desc =
+ Pexp_ident _ | Pexp_constant _ | Pexp_field _ | Pexp_construct _ | Pexp_variant _ |
+ Pexp_array _ | Pexp_pack _ | Pexp_record _ | Pexp_extension _ |
+ Pexp_letmodule _ | Pexp_letexception _ | Pexp_open _ | Pexp_sequence _ |
+ Pexp_let _ | Pexp_tuple _;
+ pexp_attributes = []
+ } -> Nothing
+ | {Parsetree.pexp_desc = Pexp_constraint (
+ {pexp_desc = Pexp_pack _},
+ {ptyp_desc = Ptyp_package _}
+ ); pexp_attributes = []} -> Nothing
+ | _ -> Parenthesized
+ end
+ end
+
+ (* Child of a JSX element; like jsxPropExpr but tuples are excluded from
+  * the bare list and nested JSX prints bare. *)
+ let jsxChildExpr expr =
+ match expr.Parsetree.pexp_desc with
+ | Parsetree.Pexp_let _
+ | Pexp_sequence _
+ | Pexp_letexception _
+ | Pexp_letmodule _
+ | Pexp_open _ -> Nothing
+ | _ ->
+ let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
+ begin match optBraces with
+ | Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
+ | _ ->
+ begin match expr with
+ | {Parsetree.pexp_desc = Pexp_constant (Pconst_integer (x, _) | Pconst_float (x, _));
+ pexp_attributes = []
+ } when startsWithMinus x -> Parenthesized
+ | {Parsetree.pexp_desc =
+ Pexp_ident _ | Pexp_constant _ | Pexp_field _ | Pexp_construct _ | Pexp_variant _ |
+ Pexp_array _ | Pexp_pack _ | Pexp_record _ | Pexp_extension _ |
+ Pexp_letmodule _ | Pexp_letexception _ | Pexp_open _ | Pexp_sequence _ |
+ Pexp_let _;
+ pexp_attributes = []
+ } -> Nothing
+ | {Parsetree.pexp_desc = Pexp_constraint (
+ {pexp_desc = Pexp_pack _},
+ {ptyp_desc = Ptyp_package _}
+ ); pexp_attributes = []} -> Nothing
+ | expr when ParsetreeViewer.isJsxExpression expr -> Nothing
+ | _ -> Parenthesized
+ end
+ end
+
+ (* A binary expression that still carries attributes needs parens so the
+  * attributes scope over the whole expression. *)
+ let binaryExpr expr =
+ let optBraces, _ = ParsetreeViewer.processBracesAttr expr in
+ match optBraces with
+ | Some ({Location.loc = bracesLoc}, _) -> Braced(bracesLoc)
+ | None ->
+ begin match expr with
+ | {Parsetree.pexp_attributes = _::_} as expr
+ when ParsetreeViewer.isBinaryExpression expr -> Parenthesized
+ | _ -> Nothing
+ end
+
+ let modTypeFunctorReturn modType = match modType with
+ | {Parsetree.pmty_desc = Pmty_with _} -> true
+ | _ -> false
+
+ (* Add parens for readability:
+ module type Functor = SetLike => Set with type t = A.t
+ This is actually:
+ module type Functor = (SetLike => Set) with type t = A.t
+ *)
+ let modTypeWithOperand modType = match modType with
+ | {Parsetree.pmty_desc = Pmty_functor _} -> true
+ | _ -> false
+
+ let modExprFunctorConstraint modType = match modType with
+ | {Parsetree.pmty_desc = Pmty_functor _ | Pmty_with _} -> true
+ | _ -> false
+
+ (* Whether an expression inside user braces still needs its own parens
+  * (only non-package constraints do). *)
+ let bracedExpr expr = match expr.Parsetree.pexp_desc with
+ | Pexp_constraint (
+ {pexp_desc = Pexp_pack _},
+ {ptyp_desc = Ptyp_package _}
+ ) -> false
+ | Pexp_constraint _ -> true
+ | _ -> false
+
+ let includeModExpr modExpr = match modExpr.Parsetree.pmod_desc with
+ | Parsetree.Pmod_constraint _ -> true
+ | _ -> false
+end
+
+module CommentTable = struct
+ (* Comments bucketed by the AST location they attach to:
+  * leading = before the node, inside = within it, trailing = after it. *)
+ type t = {
+ leading: (Location.t, Comment.t list) Hashtbl.t;
+ inside: (Location.t, Comment.t list) Hashtbl.t;
+ trailing: (Location.t, Comment.t list) Hashtbl.t;
+ }
+
+ let make () = {
+ leading = Hashtbl.create 100;
+ inside = Hashtbl.create 100;
+ trailing = Hashtbl.create 100;
+ }
+
+ (* NOTE(review): a single shared mutable table — every user of `empty`
+  * sees (and can mutate) the same hashtables; confirm callers never
+  * write through it. *)
+ let empty = make ()
+
+ (* Debug helper ([@@live]): pretty-print the leading and trailing tables
+  * as "[line:col-line:col] comment, …" to stdout. The inside table is
+  * not printed. *)
+ let log t =
+ let open Location in
+ let leadingStuff = Hashtbl.fold (fun (k : Location.t) (v : Comment.t list) acc ->
+ let loc = Doc.concat [
+ Doc.lbracket;
+ Doc.text (string_of_int k.loc_start.pos_lnum);
+ Doc.text ":";
+ Doc.text (string_of_int (k.loc_start.pos_cnum - k.loc_start.pos_bol));
+ Doc.text "-";
+ Doc.text (string_of_int k.loc_end.pos_lnum);
+ Doc.text ":";
+ Doc.text (string_of_int (k.loc_end.pos_cnum - k.loc_end.pos_bol));
+ Doc.rbracket;
+ ] in
+ let doc = Doc.breakableGroup ~forceBreak:true (
+ Doc.concat [
+ loc;
+ Doc.indent (
+ Doc.concat [
+ Doc.line;
+ Doc.join ~sep:Doc.comma (List.map (fun c -> Doc.text (Comment.txt c)) v)
+ ]
+ );
+ Doc.line;
+ ]
+ ) in
+ doc::acc
+ ) t.leading []
+ in
+ let trailingStuff = Hashtbl.fold (fun (k : Location.t) (v : Comment.t list) acc ->
+ let loc = Doc.concat [
+ Doc.lbracket;
+ Doc.text (string_of_int k.loc_start.pos_lnum);
+ Doc.text ":";
+ Doc.text (string_of_int (k.loc_start.pos_cnum - k.loc_start.pos_bol));
+ Doc.text "-";
+ Doc.text (string_of_int k.loc_end.pos_lnum);
+ Doc.text ":";
+ Doc.text (string_of_int (k.loc_end.pos_cnum - k.loc_end.pos_bol));
+ Doc.rbracket;
+ ] in
+ let doc = Doc.breakableGroup ~forceBreak:true (
+ Doc.concat [
+ loc;
+ Doc.indent (
+ Doc.concat [
+ Doc.line;
+ (* NOTE(review): trailing uses "comma + line" as separator while
+  * leading above uses comma only — presumably intentional for this
+  * debug output; confirm. *)
+ Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (List.map (fun c -> Doc.text (Comment.txt c)) v)
+ ]
+ );
+ Doc.line;
+ ]
+ ) in
+ doc::acc
+ ) t.trailing []
+ in
+ Doc.breakableGroup ~forceBreak:true (
+ Doc.concat [
+ Doc.text "leading comments:";
+ Doc.line;
+ Doc.indent (Doc.concat leadingStuff);
+ Doc.line;
+ Doc.line;
+ Doc.text "trailing comments:";
+ Doc.indent (Doc.concat trailingStuff);
+ Doc.line;
+ Doc.line;
+ ]
+ ) |> Doc.toString ~width:80 |> print_endline
+ [@@live]
+ (* Record comments under loc; an empty list leaves the table untouched.
+  * Hashtbl.replace overwrites any previous binding for loc. *)
+ let attach tbl loc comments =
+ match comments with
+ | [] -> ()
+ | comments -> Hashtbl.replace tbl loc comments
+
+ (* Split comments relative to loc by character offset:
+  * (entirely before, overlapping/inside, entirely after). *)
+ let partitionByLoc comments loc =
+ let rec loop (leading, inside, trailing) comments =
+ let open Location in
+ match comments with
+ | comment::rest ->
+ let cmtLoc = Comment.loc comment in
+ if cmtLoc.loc_end.pos_cnum <= loc.loc_start.pos_cnum then
+ loop (comment::leading, inside, trailing) rest
+ else if cmtLoc.loc_start.pos_cnum >= loc.loc_end.pos_cnum then
+ loop (leading, inside, comment::trailing) rest
+ else
+ loop (leading, comment::inside, trailing) rest
+ | [] -> (List.rev leading, List.rev inside, List.rev trailing)
+ in
+ loop ([], [], []) comments
+
+ (* Two-way split: comments ending before loc starts vs everything else. *)
+ let partitionLeadingTrailing comments loc =
+ let rec loop (leading, trailing) comments =
+ let open Location in
+ match comments with
+ | comment::rest ->
+ let cmtLoc = Comment.loc comment in
+ if cmtLoc.loc_end.pos_cnum <= loc.loc_start.pos_cnum then
+ loop (comment::leading, trailing) rest
+ else
+ loop (leading, comment::trailing) rest
+ | [] -> (List.rev leading, List.rev trailing)
+ in
+ loop ([], []) comments
+
+ (* Split by line: comments starting on the same line loc ends on vs the
+  * rest. *)
+ let partitionByOnSameLine loc comments =
+ let rec loop (onSameLine, onOtherLine) comments =
+ let open Location in
+ match comments with
+ | [] -> (List.rev onSameLine, List.rev onOtherLine)
+ | comment::rest ->
+ let cmtLoc = Comment.loc comment in
+ if cmtLoc.loc_start.pos_lnum == loc.loc_end.pos_lnum then
+ loop (comment::onSameLine, onOtherLine) rest
+ else
+ loop (onSameLine, comment::onOtherLine) rest
+ in
+ loop ([], []) comments
+
+ (* Peel off the comments that directly follow loc1 with no token in
+  * between (each comment's recorded previous-token end must equal the
+  * running end position); stops at the first gap. *)
+ let partitionAdjacentTrailing loc1 comments =
+ let open Location in
+ let open Lexing in
+ let rec loop ~prevEndPos afterLoc1 comments =
+ match comments with
+ | [] -> (List.rev afterLoc1, [])
+ | (comment::rest) as comments ->
+ let cmtPrevEndPos = Comment.prevTokEndPos comment in
+ if prevEndPos.Lexing.pos_cnum == cmtPrevEndPos.pos_cnum then
+ let commentEnd = (Comment.loc comment).loc_end in
+ loop ~prevEndPos:commentEnd (comment::afterLoc1) rest
+ else
+ (List.rev afterLoc1, comments)
+ in
+ loop ~prevEndPos:loc1.loc_end [] comments
+
+ (* Flatten a cons-pattern a::b::[] into [a; b]; a non-[] tail (spread)
+  * is appended as the last element. *)
+ let rec collectListPatterns acc pattern =
+ let open Parsetree in
+ match pattern.ppat_desc with
+ | Ppat_construct(
+ {txt = Longident.Lident "::"},
+ Some {ppat_desc=Ppat_tuple (pat::rest::[])}
+ ) ->
+ collectListPatterns (pat::acc) rest
+ | Ppat_construct ({txt = Longident.Lident "[]"}, None) ->
+ List.rev acc
+ | _ -> List.rev (pattern::acc)
+
+ (* Expression counterpart of collectListPatterns. *)
+ let rec collectListExprs acc expr =
+ let open Parsetree in
+ match expr.pexp_desc with
+ | Pexp_construct(
+ {txt = Longident.Lident "::"},
+ Some {pexp_desc=Pexp_tuple (expr::rest::[])}
+ ) ->
+ collectListExprs (expr::acc) rest
+ | Pexp_construct ({txt = Longident.Lident "[]"}, _) ->
+ List.rev acc
+ | _ -> List.rev (expr::acc)
+
+ (* TODO: use ParsetreeViewer *)
+ (* Unroll an arrow type a => b => c into
+  * (attrs-on-arrow, [(attrs, label, argType); ...], returnType).
+  * An unlabelled arrow with non-"bs" attributes terminates the unroll. *)
+ let arrowType ct =
+ let open Parsetree in
+ let rec process attrsBefore acc typ = match typ with
+ | {ptyp_desc = Ptyp_arrow (Nolabel as lbl, typ1, typ2); ptyp_attributes = []} ->
+ let arg = ([], lbl, typ1) in
+ process attrsBefore (arg::acc) typ2
+ | {ptyp_desc = Ptyp_arrow (Nolabel as lbl, typ1, typ2); ptyp_attributes = [({txt ="bs"}, _) ] as attrs} ->
+ let arg = (attrs, lbl, typ1) in
+ process attrsBefore (arg::acc) typ2
+ | {ptyp_desc = Ptyp_arrow (Nolabel, _typ1, _typ2); ptyp_attributes = _attrs} as returnType ->
+ let args = List.rev acc in
+ (attrsBefore, args, returnType)
+ | {ptyp_desc = Ptyp_arrow ((Labelled _ | Optional _) as lbl, typ1, typ2); ptyp_attributes = attrs} ->
+ let arg = (attrs, lbl, typ1) in
+ process attrsBefore (arg::acc) typ2
+ | typ ->
+ (attrsBefore, List.rev acc, typ)
+ in
+ begin match ct with
+ | {ptyp_desc = Ptyp_arrow (Nolabel, _typ1, _typ2); ptyp_attributes = attrs} as typ ->
+ process attrs [] {typ with ptyp_attributes = []}
+ | typ -> process [] [] typ
+ end
+
+ (* TODO: avoiding the dependency on ParsetreeViewer here, is this a good idea? *)
+ (* Unlike ParsetreeViewer.modExprApply this returns a single list with
+  * the callee consed on front, not an (args, callee) pair. *)
+ let modExprApply modExpr =
+ let rec loop acc modExpr = match modExpr with
+ | {Parsetree.pmod_desc = Pmod_apply (next, arg)} ->
+ loop (arg::acc) next
+ | _ -> (modExpr::acc)
+ in
+ loop [] modExpr
+
+ (* TODO: avoiding the dependency on ParsetreeViewer here, is this a good idea? *)
+ (* Unroll a functor into ([(attrs, label, paramType); ...], body). *)
+ let modExprFunctor modExpr =
+ let rec loop acc modExpr = match modExpr with
+ | {Parsetree.pmod_desc = Pmod_functor (lbl, modType, returnModExpr); pmod_attributes = attrs} ->
+ let param = (attrs, lbl, modType) in
+ loop (param::acc) returnModExpr
+ | returnModExpr ->
+ (List.rev acc, returnModExpr)
+ in
+ loop [] modExpr
+
+ (* Unroll a functor TYPE into ([(attrs, label, argType); ...], return). *)
+ let functorType modtype =
+ let rec process acc modtype = match modtype with
+ | {Parsetree.pmty_desc = Pmty_functor (lbl, argType, returnType); pmty_attributes = attrs} ->
+ let arg = (attrs, lbl, argType) in
+ process (arg::acc) returnType
+ | modType ->
+ (List.rev acc, modType)
+ in
+ process [] modtype
+
+ (* Unroll a curried fun into (attrs-on-arrow, parameters, body), where
+  * each parameter is (attrs, label, defaultExpr, pattern). *)
+ let funExpr expr =
+ let open Parsetree in
+ (* Turns (type t, type u, type z) into "type t u z" *)
+ let rec collectNewTypes acc returnExpr =
+ match returnExpr with
+ | {pexp_desc = Pexp_newtype (stringLoc, returnExpr); pexp_attributes = []} ->
+ collectNewTypes (stringLoc::acc) returnExpr
+ | returnExpr ->
+ (* NOTE(review): {endLoc.loc with loc_end = endLoc.loc.loc_end} is a
+  * no-op update — presumably this meant to span from the first to the
+  * last newtype location; confirm against upstream. *)
+ let loc = match (acc, List.rev acc) with
+ | (_startLoc::_, endLoc::_) -> { endLoc.loc with loc_end = endLoc.loc.loc_end }
+ | _ -> Location.none
+ in
+ let txt = List.fold_right (fun curr acc -> acc ^ " " ^ curr.Location.txt) acc "type" in
+ (Location.mkloc txt loc, returnExpr)
+ in
+ (* For simplicity reason Pexp_newtype gets converted to a Nolabel parameter,
+ * otherwise this function would need to return a variant:
+ * | NormalParamater(...)
+ * | NewType(...)
+ * This complicates printing with an extra variant/boxing/allocation for a code-path
+ * that is not often used. Lets just keep it simple for now *)
+ let rec collect attrsBefore acc expr = match expr with
+ | {pexp_desc = Pexp_fun (lbl, defaultExpr, pattern, returnExpr); pexp_attributes = []} ->
+ let parameter = ([], lbl, defaultExpr, pattern) in
+ collect attrsBefore (parameter::acc) returnExpr
+ | {pexp_desc = Pexp_newtype (stringLoc, rest); pexp_attributes = attrs} ->
+ let (var, returnExpr) = collectNewTypes [stringLoc] rest in
+ let parameter = (
+ attrs,
+ Asttypes.Nolabel,
+ None,
+ Ast_helper.Pat.var ~loc:stringLoc.loc var
+ ) in
+ collect attrsBefore (parameter::acc) returnExpr
+ | {pexp_desc = Pexp_fun (lbl, defaultExpr, pattern, returnExpr); pexp_attributes = [({txt = "bs"}, _)] as attrs} ->
+ let parameter = (attrs, lbl, defaultExpr, pattern) in
+ collect attrsBefore (parameter::acc) returnExpr
+ | {
+ pexp_desc = Pexp_fun ((Labelled _ | Optional _) as lbl, defaultExpr, pattern, returnExpr);
+ pexp_attributes = attrs
+ } ->
+ let parameter = (attrs, lbl, defaultExpr, pattern) in
+ collect attrsBefore (parameter::acc) returnExpr
+ | expr ->
+ (attrsBefore, List.rev acc, expr)
+ in
+ begin match expr with
+ | {pexp_desc = Pexp_fun (Nolabel, _defaultExpr, _pattern, _returnExpr); pexp_attributes = attrs} as expr ->
+ collect attrs [] {expr with pexp_attributes = []}
+ | expr -> collect [] [] expr
+ end
+
+ let rec isBlockExpr expr =
+ let open Parsetree in
+ match expr.pexp_desc with
+ | Pexp_letmodule _
+ | Pexp_letexception _
+ | Pexp_let _
+ | Pexp_open _
+ | Pexp_sequence _ -> true
+ | Pexp_apply (callExpr, _) when isBlockExpr callExpr -> true
+ | Pexp_constraint (expr, _) when isBlockExpr expr -> true
+ | Pexp_field (expr, _) when isBlockExpr expr -> true
+ | Pexp_setfield (expr, _, _) when isBlockExpr expr -> true
+ | _ -> false
+
+ let rec walkStructure s t comments =
+ match s with
+ | _ when comments = [] -> ()
+ | [] -> attach t.inside Location.none comments
+ | s ->
+ walkList
+ ~getLoc:(fun n -> n.Parsetree.pstr_loc)
+ ~walkNode:walkStructureItem
+ s
+ t
+ comments
+
+ and walkStructureItem si t comments =
+ match si.Parsetree.pstr_desc with
+ | _ when comments = [] -> ()
+ | Pstr_primitive valueDescription ->
+ walkValueDescription valueDescription t comments
+ | Pstr_open openDescription ->
+ walkOpenDescription openDescription t comments
+ | Pstr_value (_, valueBindings) ->
+ walkValueBindings valueBindings t comments
+ | Pstr_type (_, typeDeclarations) ->
+ walkTypeDeclarations typeDeclarations t comments
+ | Pstr_eval (expr, _) ->
+ walkExpr expr t comments
+ | Pstr_module moduleBinding ->
+ walkModuleBinding moduleBinding t comments
+ | Pstr_recmodule moduleBindings ->
+ walkList
+ ~getLoc:(fun mb -> mb.Parsetree.pmb_loc)
+ ~walkNode:walkModuleBinding
+ moduleBindings
+ t
+ comments
+ | Pstr_modtype modTypDecl ->
+ walkModuleTypeDeclaration modTypDecl t comments
+ | Pstr_attribute attribute ->
+ walkAttribute attribute t comments
+ | Pstr_extension (extension, _) ->
+ walkExtension extension t comments
+ | Pstr_include includeDeclaration ->
+ walkIncludeDeclaration includeDeclaration t comments
+ | Pstr_exception extensionConstructor ->
+ walkExtConstr extensionConstructor t comments
+ | Pstr_typext typeExtension ->
+ walkTypeExtension typeExtension t comments
+ | Pstr_class_type _ | Pstr_class _ -> ()
+
+ and walkValueDescription vd t comments =
+ let (leading, trailing) =
+ partitionLeadingTrailing comments vd.pval_name.loc in
+ attach t.leading vd.pval_name.loc leading;
+ let (afterName, rest) =
+ partitionAdjacentTrailing vd.pval_name.loc trailing in
+ attach t.trailing vd.pval_name.loc afterName;
+ let (before, inside, after) =
+ partitionByLoc rest vd.pval_type.ptyp_loc
+ in
+ attach t.leading vd.pval_type.ptyp_loc before;
+ walkTypExpr vd.pval_type t inside;
+ attach t.trailing vd.pval_type.ptyp_loc after
+
+ and walkTypeExtension te t comments =
+ let (leading, trailing) =
+ partitionLeadingTrailing comments te.ptyext_path.loc in
+ attach t.leading te.ptyext_path.loc leading;
+ let (afterPath, rest) =
+ partitionAdjacentTrailing te.ptyext_path.loc trailing in
+ attach t.trailing te.ptyext_path.loc afterPath;
+
+ (* type params *)
+ let rest = match te.ptyext_params with
+ | [] -> rest
+ | typeParams ->
+ visitListButContinueWithRemainingComments
+ ~getLoc:(fun (typexpr, _variance) -> typexpr.Parsetree.ptyp_loc)
+ ~walkNode:walkTypeParam
+ ~newlineDelimited:false
+ typeParams
+ t
+ rest
+ in
+ walkList
+ ~getLoc:(fun n -> n.Parsetree.pext_loc)
+ ~walkNode:walkExtConstr
+ te.ptyext_constructors
+ t
+ rest
+
+ and walkIncludeDeclaration inclDecl t comments =
+ let (before, inside, after) =
+ partitionByLoc comments inclDecl.pincl_mod.pmod_loc in
+ attach t.leading inclDecl.pincl_mod.pmod_loc before;
+ walkModExpr inclDecl.pincl_mod t inside;
+ attach t.trailing inclDecl.pincl_mod.pmod_loc after
+
+ and walkModuleTypeDeclaration mtd t comments =
+ let (leading, trailing) =
+ partitionLeadingTrailing comments mtd.pmtd_name.loc in
+ attach t.leading mtd.pmtd_name.loc leading;
+ begin match mtd.pmtd_type with
+ | None ->
+ attach t.trailing mtd.pmtd_name.loc trailing
+ | Some modType ->
+ let (afterName, rest) = partitionAdjacentTrailing mtd.pmtd_name.loc trailing in
+ attach t.trailing mtd.pmtd_name.loc afterName;
+ let (before, inside, after) = partitionByLoc rest modType.pmty_loc in
+ attach t.leading modType.pmty_loc before;
+ walkModType modType t inside;
+ attach t.trailing modType.pmty_loc after
+ end
+
+ and walkModuleBinding mb t comments =
+ let (leading, trailing) = partitionLeadingTrailing comments mb.pmb_name.loc in
+ attach t.leading mb.pmb_name.loc leading;
+ let (afterName, rest) = partitionAdjacentTrailing mb.pmb_name.loc trailing in
+ attach t.trailing mb.pmb_name.loc afterName;
+ let (leading, inside, trailing) = partitionByLoc rest mb.pmb_expr.pmod_loc in
+ begin match mb.pmb_expr.pmod_desc with
+ | Pmod_constraint _ ->
+ walkModExpr mb.pmb_expr t (List.concat [leading; inside]);
+ | _ ->
+ attach t.leading mb.pmb_expr.pmod_loc leading;
+ walkModExpr mb.pmb_expr t inside;
+ end;
+ attach t.trailing mb.pmb_expr.pmod_loc trailing
+
+ and walkSignature signature t comments =
+ match signature with
+ | _ when comments = [] -> ()
+ | [] -> attach t.inside Location.none comments
+ | _s ->
+ walkList
+ ~getLoc:(fun n -> n.Parsetree.psig_loc)
+ ~walkNode:walkSignatureItem
+ signature
+ t
+ comments
+
+ and walkSignatureItem si t comments =
+ match si.psig_desc with
+ | _ when comments = [] -> ()
+ | Psig_value valueDescription ->
+ walkValueDescription valueDescription t comments
+ | Psig_type (_, typeDeclarations) ->
+ walkTypeDeclarations typeDeclarations t comments
+ | Psig_typext typeExtension ->
+ walkTypeExtension typeExtension t comments
+ | Psig_exception extensionConstructor ->
+ walkExtConstr extensionConstructor t comments
+ | Psig_module moduleDeclaration ->
+ walkModuleDeclaration moduleDeclaration t comments
+ | Psig_recmodule moduleDeclarations ->
+ walkList
+ ~getLoc:(fun n -> n.Parsetree.pmd_loc)
+ ~walkNode:walkModuleDeclaration
+ moduleDeclarations
+ t
+ comments
+ | Psig_modtype moduleTypeDeclaration ->
+ walkModuleTypeDeclaration moduleTypeDeclaration t comments
+ | Psig_open openDescription ->
+ walkOpenDescription openDescription t comments
+ | Psig_include includeDescription ->
+ walkIncludeDescription includeDescription t comments
+ | Psig_attribute attribute ->
+ walkAttribute attribute t comments
+ | Psig_extension (extension, _) ->
+ walkExtension extension t comments
+ | Psig_class _ | Psig_class_type _ -> ()
+
+ and walkIncludeDescription id t comments =
+ let (before, inside, after) =
+ partitionByLoc comments id.pincl_mod.pmty_loc in
+ attach t.leading id.pincl_mod.pmty_loc before;
+ walkModType id.pincl_mod t inside;
+ attach t.trailing id.pincl_mod.pmty_loc after
+
+ and walkModuleDeclaration md t comments =
+ let (leading, trailing) = partitionLeadingTrailing comments md.pmd_name.loc in
+ attach t.leading md.pmd_name.loc leading;
+ let (afterName, rest) = partitionAdjacentTrailing md.pmd_name.loc trailing in
+ attach t.trailing md.pmd_name.loc afterName;
+ let (leading, inside, trailing) = partitionByLoc rest md.pmd_type.pmty_loc in
+ attach t.leading md.pmd_type.pmty_loc leading;
+ walkModType md.pmd_type t inside;
+ attach t.trailing md.pmd_type.pmty_loc trailing
+
+ and walkList:
+ 'node.
+ ?prevLoc:Location.t ->
+ getLoc:('node -> Location.t) ->
+ walkNode:('node -> t -> Comment.t list -> unit) ->
+ 'node list -> t -> Comment.t list -> unit
+ = fun ?prevLoc ~getLoc ~walkNode l t comments ->
+ let open Location in
+ match l with
+ | _ when comments = [] -> ()
+ | [] ->
+ begin match prevLoc with
+ | Some loc ->
+ attach t.trailing loc comments
+ | None -> ()
+ end
+ | node::rest ->
+ let currLoc = getLoc node in
+ let (leading, inside, trailing) = partitionByLoc comments currLoc in
+ begin match prevLoc with
+ | None -> (* first node, all leading comments attach here *)
+ attach t.leading currLoc leading
+ | Some prevLoc ->
+ (* Same line *)
+ if prevLoc.loc_end.pos_lnum == currLoc.loc_start.pos_lnum then
+ let (afterPrev, beforeCurr) = partitionAdjacentTrailing prevLoc leading in
+ let () = attach t.trailing prevLoc afterPrev in
+ attach t.leading currLoc beforeCurr
+ else
+ let (onSameLineAsPrev, afterPrev) = partitionByOnSameLine prevLoc leading in
+ let () = attach t.trailing prevLoc onSameLineAsPrev in
+ let (leading, _inside, _trailing) = partitionByLoc afterPrev currLoc in
+ attach t.leading currLoc leading
+ end;
+ walkNode node t inside;
+ walkList ~prevLoc:currLoc ~getLoc ~walkNode rest t trailing
+
+ (* The parsetree doesn't always contain location info about the opening or
+ * closing token of a "list-of-things". This routine visits the whole list,
+ * but returns any remaining comments that likely fall after the whole list. *)
+ and visitListButContinueWithRemainingComments:
+ 'node.
+ ?prevLoc:Location.t ->
+ newlineDelimited:bool ->
+ getLoc:('node -> Location.t) ->
+ walkNode:('node -> t -> Comment.t list -> unit) ->
+ 'node list -> t -> Comment.t list -> Comment.t list
+ = fun ?prevLoc ~newlineDelimited ~getLoc ~walkNode l t comments ->
+ let open Location in
+ match l with
+ | _ when comments = [] -> []
+ | [] ->
+ begin match prevLoc with
+ | Some loc ->
+ let (afterPrev, rest) =
+ if newlineDelimited then
+ partitionByOnSameLine loc comments
+ else
+ partitionAdjacentTrailing loc comments
+ in
+ attach t.trailing loc afterPrev;
+ rest
+ | None -> comments
+ end
+ | node::rest ->
+ let currLoc = getLoc node in
+ let (leading, inside, trailing) = partitionByLoc comments currLoc in
+ let () = match prevLoc with
+ | None -> (* first node, all leading comments attach here *)
+ attach t.leading currLoc leading;
+ ()
+ | Some prevLoc ->
+ (* Same line *)
+ if prevLoc.loc_end.pos_lnum == currLoc.loc_start.pos_lnum then
+ let (afterPrev, beforeCurr) = partitionAdjacentTrailing prevLoc leading in
+ let () = attach t.trailing prevLoc afterPrev in
+ let () = attach t.leading currLoc beforeCurr in
+ ()
+ else
+ let (onSameLineAsPrev, afterPrev) = partitionByOnSameLine prevLoc leading in
+ let () = attach t.trailing prevLoc onSameLineAsPrev in
+ let (leading, _inside, _trailing) = partitionByLoc afterPrev currLoc in
+ let () = attach t.leading currLoc leading in
+ ()
+ in
+ walkNode node t inside;
+ visitListButContinueWithRemainingComments
+ ~prevLoc:currLoc ~getLoc ~walkNode ~newlineDelimited
+ rest t trailing
+
+ and walkValueBindings vbs t comments =
+ walkList
+ ~getLoc:(fun n -> n.Parsetree.pvb_loc)
+ ~walkNode:walkValueBinding
+ vbs
+ t
+ comments
+
+ and walkOpenDescription openDescription t comments =
+ let loc = openDescription.popen_lid.loc in
+ let (leading, trailing) = partitionLeadingTrailing comments loc in
+ attach t.leading loc leading;
+ attach t.trailing loc trailing;
+
+ and walkTypeDeclarations typeDeclarations t comments =
+ walkList
+ ~getLoc:(fun n -> n.Parsetree.ptype_loc)
+ ~walkNode:walkTypeDeclaration
+ typeDeclarations
+ t
+ comments
+
+ and walkTypeParam (typexpr, _variance) t comments =
+ walkTypExpr typexpr t comments
+
+ and walkTypeDeclaration td t comments =
+ let (beforeName, rest) =
+ partitionLeadingTrailing comments td.ptype_name.loc in
+ attach t.leading td.ptype_name.loc beforeName;
+
+ let (afterName, rest) =
+ partitionAdjacentTrailing td.ptype_name.loc rest in
+ attach t.trailing td.ptype_name.loc afterName;
+
+ (* type params *)
+ let rest = match td.ptype_params with
+ | [] -> rest
+ | typeParams ->
+ visitListButContinueWithRemainingComments
+ ~getLoc:(fun (typexpr, _variance) -> typexpr.Parsetree.ptyp_loc)
+ ~walkNode:walkTypeParam
+ ~newlineDelimited:false
+ typeParams
+ t
+ rest
+ in
+
+ (* manifest: = typexpr *)
+ let rest = match td.ptype_manifest with
+ | Some typexpr ->
+ let (beforeTyp, insideTyp, afterTyp) =
+ partitionByLoc rest typexpr.ptyp_loc in
+ attach t.leading typexpr.ptyp_loc beforeTyp;
+ walkTypExpr typexpr t insideTyp;
+ let (afterTyp, rest) =
+ partitionAdjacentTrailing typexpr.Parsetree.ptyp_loc afterTyp in
+ attach t.trailing typexpr.ptyp_loc afterTyp;
+ rest
+ | None -> rest
+ in
+
+ let rest = match td.ptype_kind with
+ | Ptype_abstract | Ptype_open -> rest
+ | Ptype_record labelDeclarations ->
+ let () = walkList
+ ~getLoc:(fun ld -> ld.Parsetree.pld_loc)
+ ~walkNode:walkLabelDeclaration
+ labelDeclarations
+ t
+ rest
+ in
+ []
+ | Ptype_variant constructorDeclarations ->
+ walkConstructorDeclarations constructorDeclarations t rest
+ in
+ attach t.trailing td.ptype_loc rest
+
+ and walkLabelDeclarations lds t comments =
+ visitListButContinueWithRemainingComments
+ ~getLoc:(fun ld -> ld.Parsetree.pld_loc)
+ ~walkNode:walkLabelDeclaration
+ ~newlineDelimited:false
+ lds
+ t
+ comments
+
+ and walkLabelDeclaration ld t comments =
+ let (beforeName, rest) =
+ partitionLeadingTrailing comments ld.pld_name.loc in
+ attach t.leading ld.pld_name.loc beforeName;
+ let (afterName, rest) = partitionAdjacentTrailing ld.pld_name.loc rest in
+ attach t.trailing ld.pld_name.loc afterName;
+ let (beforeTyp, insideTyp, afterTyp) =
+ partitionByLoc rest ld.pld_type.ptyp_loc in
+ attach t.leading ld.pld_type.ptyp_loc beforeTyp;
+ walkTypExpr ld.pld_type t insideTyp;
+ attach t.trailing ld.pld_type.ptyp_loc afterTyp
+
+ and walkConstructorDeclarations cds t comments =
+ visitListButContinueWithRemainingComments
+ ~getLoc:(fun cd -> cd.Parsetree.pcd_loc)
+ ~walkNode:walkConstructorDeclaration
+ ~newlineDelimited:false
+ cds
+ t
+ comments
+
+ and walkConstructorDeclaration cd t comments =
+ let (beforeName, rest) =
+ partitionLeadingTrailing comments cd.pcd_name.loc in
+ attach t.leading cd.pcd_name.loc beforeName;
+ let (afterName, rest) =
+ partitionAdjacentTrailing cd.pcd_name.loc rest in
+ attach t.trailing cd.pcd_name.loc afterName;
+ let rest = walkConstructorArguments cd.pcd_args t rest in
+
+ let rest = match cd.pcd_res with
+ | Some typexpr ->
+ let (beforeTyp, insideTyp, afterTyp) =
+ partitionByLoc rest typexpr.ptyp_loc in
+ attach t.leading typexpr.ptyp_loc beforeTyp;
+ walkTypExpr typexpr t insideTyp;
+ let (afterTyp, rest) =
+ partitionAdjacentTrailing typexpr.Parsetree.ptyp_loc afterTyp in
+ attach t.trailing typexpr.ptyp_loc afterTyp;
+ rest
+ | None -> rest
+ in
+ attach t.trailing cd.pcd_loc rest
+
+ and walkConstructorArguments args t comments =
+ match args with
+ | Pcstr_tuple typexprs ->
+ visitListButContinueWithRemainingComments
+ ~getLoc:(fun n -> n.Parsetree.ptyp_loc)
+ ~walkNode:walkTypExpr
+ ~newlineDelimited:false
+ typexprs
+ t
+ comments
+ | Pcstr_record labelDeclarations ->
+ walkLabelDeclarations labelDeclarations t comments
+
+ and walkValueBinding vb t comments =
+ let open Location in
+
+ let vb =
+ let open Parsetree in
+ match (vb.pvb_pat, vb.pvb_expr) with
+ | {ppat_desc = Ppat_constraint (pat, {ptyp_desc = Ptyp_poly ([], t)})},
+ {pexp_desc = Pexp_constraint (expr, _typ)} ->
+ {vb with
+ pvb_pat = Ast_helper.Pat.constraint_
+ ~loc:{pat.ppat_loc with loc_end = t.Parsetree.ptyp_loc.loc_end} pat t;
+ pvb_expr = expr;
+ }
+ | {ppat_desc = Ppat_constraint (pat, {ptyp_desc = Ptyp_poly (_::_, t)})},
+ {pexp_desc = Pexp_fun _} ->
+ {vb with
+ pvb_pat = {vb.pvb_pat with
+ ppat_loc = {pat.ppat_loc with loc_end = t.ptyp_loc.loc_end}}}
+ | _ -> vb
+ in
+ let patternLoc = vb.Parsetree.pvb_pat.ppat_loc in
+ let exprLoc = vb.Parsetree.pvb_expr.pexp_loc in
+
+ let (leading, inside, trailing) =
+ partitionByLoc comments patternLoc in
+
+ (* everything before start of pattern can only be leading on the pattern:
+ * let |* before *| a = 1 *)
+ attach t.leading patternLoc leading;
+ walkPattern vb.Parsetree.pvb_pat t inside;
+ (* let pattern = expr -> pattern and expr on the same line *)
+ (* if patternLoc.loc_end.pos_lnum == exprLoc.loc_start.pos_lnum then ( *)
+ let (afterPat, surroundingExpr) =
+ partitionAdjacentTrailing patternLoc trailing
+ in
+ attach t.trailing patternLoc afterPat;
+ let (beforeExpr, insideExpr, afterExpr) =
+ partitionByLoc surroundingExpr exprLoc in
+ if isBlockExpr vb.pvb_expr then (
+ walkExpr vb.pvb_expr t (List.concat [beforeExpr; insideExpr; afterExpr])
+ ) else (
+ attach t.leading exprLoc beforeExpr;
+ walkExpr vb.Parsetree.pvb_expr t insideExpr;
+ attach t.trailing exprLoc afterExpr
+ )
+
+ and walkExpr expr t comments =
+ let open Location in
+ match expr.Parsetree.pexp_desc with
+ | _ when comments = [] -> ()
+ | Pexp_constant _ ->
+ let (leading, trailing) =
+ partitionLeadingTrailing comments expr.pexp_loc in
+ attach t.leading expr.pexp_loc leading;
+ attach t.trailing expr.pexp_loc trailing;
+ | Pexp_ident longident ->
+ let (leading, trailing) =
+ partitionLeadingTrailing comments longident.loc in
+ attach t.leading longident.loc leading;
+ attach t.trailing longident.loc trailing;
+ | Pexp_let (_recFlag, valueBindings, expr2) ->
+ let comments = visitListButContinueWithRemainingComments
+ ~getLoc:(fun n ->
+ if n.Parsetree.pvb_pat.ppat_loc.loc_ghost then
+ n.pvb_expr.pexp_loc
+ else
+ n.Parsetree.pvb_loc
+ )
+ ~walkNode:walkValueBinding
+ ~newlineDelimited:true
+ valueBindings
+ t
+ comments
+ in
+ if isBlockExpr expr2 then (
+ walkExpr expr2 t comments;
+ ) else (
+ let (leading, inside, trailing) = partitionByLoc comments expr2.pexp_loc in
+ attach t.leading expr2.pexp_loc leading;
+ walkExpr expr2 t inside;
+ attach t.trailing expr2.pexp_loc trailing
+ )
+ | Pexp_sequence (expr1, expr2) ->
+ let (leading, inside, trailing) = partitionByLoc comments expr1.pexp_loc in
+ let comments = if isBlockExpr expr1 then (
+ let (afterExpr, comments) = partitionByOnSameLine expr1.pexp_loc trailing in
+ walkExpr expr1 t (List.concat [leading; inside; afterExpr]);
+ comments
+ ) else (
+ attach t.leading expr1.pexp_loc leading;
+ walkExpr expr1 t inside;
+ let (afterExpr, comments) = partitionByOnSameLine expr1.pexp_loc trailing in
+ attach t.trailing expr1.pexp_loc afterExpr;
+ comments
+ ) in
+ if isBlockExpr expr2 then (
+ walkExpr expr2 t comments
+ ) else (
+ let (leading, inside, trailing) = partitionByLoc comments expr2.pexp_loc in
+ attach t.leading expr2.pexp_loc leading;
+ walkExpr expr2 t inside;
+ attach t.trailing expr2.pexp_loc trailing
+ )
+ | Pexp_open (_override, longident, expr2) ->
+ let (leading, comments) =
+ partitionLeadingTrailing comments expr.pexp_loc in
+ attach
+ t.leading
+ {expr.pexp_loc with loc_end = longident.loc.loc_end}
+ leading;
+ let (leading, trailing) =
+ partitionLeadingTrailing comments longident.loc in
+ attach t.leading longident.loc leading;
+ let (afterLongident, rest) =
+ partitionByOnSameLine longident.loc trailing in
+ attach t.trailing longident.loc afterLongident;
+ if isBlockExpr expr2 then (
+ walkExpr expr2 t rest
+ ) else (
+ let (leading, inside, trailing) = partitionByLoc rest expr2.pexp_loc in
+ attach t.leading expr2.pexp_loc leading;
+ walkExpr expr2 t inside;
+ attach t.trailing expr2.pexp_loc trailing
+ )
+ | Pexp_extension (
+ {txt = "bs.obj"},
+ PStr [{
+ pstr_desc = Pstr_eval({pexp_desc = Pexp_record (rows, _)}, [])
+ }]
+ ) ->
+ walkList
+ ~getLoc:(fun (
+ (longident, expr): (Longident.t Asttypes.loc * Parsetree.expression)
+ ) -> {
+ longident.loc with loc_end = expr.pexp_loc.loc_end
+ })
+ ~walkNode:walkExprRecordRow
+ rows
+ t
+ comments
+ | Pexp_extension extension ->
+ walkExtension extension t comments
+ | Pexp_letexception (extensionConstructor, expr2) ->
+ let (leading, comments) =
+ partitionLeadingTrailing comments expr.pexp_loc in
+ attach
+ t.leading
+ {expr.pexp_loc with loc_end = extensionConstructor.pext_loc.loc_end}
+ leading;
+ let (leading, inside, trailing) =
+ partitionByLoc comments extensionConstructor.pext_loc in
+ attach t.leading extensionConstructor.pext_loc leading;
+ walkExtConstr extensionConstructor t inside;
+ let (afterExtConstr, rest) =
+ partitionByOnSameLine extensionConstructor.pext_loc trailing in
+ attach t.trailing extensionConstructor.pext_loc afterExtConstr;
+ if isBlockExpr expr2 then (
+ walkExpr expr2 t rest
+ ) else (
+ let (leading, inside, trailing) = partitionByLoc rest expr2.pexp_loc in
+ attach t.leading expr2.pexp_loc leading;
+ walkExpr expr2 t inside;
+ attach t.trailing expr2.pexp_loc trailing
+ )
+ | Pexp_letmodule (stringLoc, modExpr, expr2) ->
+ let (leading, comments) =
+ partitionLeadingTrailing comments expr.pexp_loc in
+ attach t.leading {expr.pexp_loc with loc_end = modExpr.pmod_loc.loc_end} leading;
+ let (leading, trailing) = partitionLeadingTrailing comments stringLoc.loc in
+ attach t.leading stringLoc.loc leading;
+ let (afterString, rest) =
+ partitionAdjacentTrailing stringLoc.loc trailing in
+ attach t.trailing stringLoc.loc afterString;
+ let (beforeModExpr, insideModExpr, afterModExpr) =
+ partitionByLoc rest modExpr.pmod_loc in
+ attach t.leading modExpr.pmod_loc beforeModExpr;
+ walkModExpr modExpr t insideModExpr;
+ let (afterModExpr, rest) =
+ partitionByOnSameLine modExpr.pmod_loc afterModExpr in
+ attach t.trailing modExpr.pmod_loc afterModExpr;
+ if isBlockExpr expr2 then (
+ walkExpr expr2 t rest;
+ ) else (
+ let (leading, inside, trailing) = partitionByLoc rest expr2.pexp_loc in
+ attach t.leading expr2.pexp_loc leading;
+ walkExpr expr2 t inside;
+ attach t.trailing expr2.pexp_loc trailing
+ )
+ | Pexp_assert expr
+ | Pexp_lazy expr ->
+ if isBlockExpr expr then (
+ walkExpr expr t comments
+ ) else (
+ let (leading, inside, trailing) = partitionByLoc comments expr.pexp_loc in
+ attach t.leading expr.pexp_loc leading;
+ walkExpr expr t inside;
+ attach t.trailing expr.pexp_loc trailing
+ )
+ | Pexp_coerce (expr, optTypexpr, typexpr) ->
+ let (leading, inside, trailing) = partitionByLoc comments expr.pexp_loc in
+ attach t.leading expr.pexp_loc leading;
+ walkExpr expr t inside;
+ let (afterExpr, rest) =
+ partitionAdjacentTrailing expr.pexp_loc trailing in
+ attach t.trailing expr.pexp_loc afterExpr;
+ let rest = match optTypexpr with
+ | Some typexpr ->
+ let (leading, inside, trailing) = partitionByLoc comments typexpr.ptyp_loc in
+ attach t.leading typexpr.ptyp_loc leading;
+ walkTypExpr typexpr t inside;
+ let (afterTyp, rest) =
+ partitionAdjacentTrailing typexpr.ptyp_loc trailing in
+ attach t.trailing typexpr.ptyp_loc afterTyp;
+ rest
+ | None -> rest
+ in
+ let (leading, inside, trailing) = partitionByLoc rest typexpr.ptyp_loc in
+ attach t.leading typexpr.ptyp_loc leading;
+ walkTypExpr typexpr t inside;
+ attach t.trailing typexpr.ptyp_loc trailing
+ | Pexp_constraint (expr, typexpr) ->
+ let (leading, inside, trailing) = partitionByLoc comments expr.pexp_loc in
+ attach t.leading expr.pexp_loc leading;
+ walkExpr expr t inside;
+ let (afterExpr, rest) =
+ partitionAdjacentTrailing expr.pexp_loc trailing in
+ attach t.trailing expr.pexp_loc afterExpr;
+ let (leading, inside, trailing) = partitionByLoc rest typexpr.ptyp_loc in
+ attach t.leading typexpr.ptyp_loc leading;
+ walkTypExpr typexpr t inside;
+ attach t.trailing typexpr.ptyp_loc trailing
+ | Pexp_tuple []
+ | Pexp_array []
+ | Pexp_construct({txt = Longident.Lident "[]"}, _) ->
+ attach t.inside expr.pexp_loc comments
+ | Pexp_construct({txt = Longident.Lident "::"}, _) ->
+ walkList
+ ~getLoc:(fun n -> n.Parsetree.pexp_loc)
+ ~walkNode:walkExpr
+ (collectListExprs [] expr)
+ t
+ comments
+ | Pexp_construct (longident, args) ->
+ let (leading, trailing) =
+ partitionLeadingTrailing comments longident.loc in
+ attach t.leading longident.loc leading;
+ begin match args with
+ | Some expr ->
+ let (afterLongident, rest) =
+ partitionAdjacentTrailing longident.loc trailing in
+ attach t.trailing longident.loc afterLongident;
+ walkExpr expr t rest
+ | None ->
+ attach t.trailing longident.loc trailing
+ end
+ | Pexp_variant (_label, None) ->
+ ()
+ | Pexp_variant (_label, Some expr) ->
+ walkExpr expr t comments
+ | Pexp_array exprs | Pexp_tuple exprs ->
+ walkList
+ ~getLoc:(fun n -> n.Parsetree.pexp_loc)
+ ~walkNode:walkExpr
+ exprs
+ t
+ comments
+ | Pexp_record (rows, spreadExpr) ->
+ let comments = match spreadExpr with
+ | None -> comments
+ | Some expr ->
+ let (leading, inside, trailing) = partitionByLoc comments expr.pexp_loc in
+ attach t.leading expr.pexp_loc leading;
+ walkExpr expr t inside;
+ let (afterExpr, rest) = partitionAdjacentTrailing expr.pexp_loc trailing in
+ attach t.trailing expr.pexp_loc afterExpr;
+ rest
+ in
+ walkList
+ ~getLoc:(fun (
+ (longident, expr): (Longident.t Asttypes.loc * Parsetree.expression)
+ ) -> {
+ longident.loc with loc_end = expr.pexp_loc.loc_end
+ })
+ ~walkNode:walkExprRecordRow
+ rows
+ t
+ comments
+ | Pexp_field (expr, longident) ->
+ let (leading, inside, trailing) = partitionByLoc comments expr.pexp_loc in
+ let trailing = if isBlockExpr expr then (
+ let (afterExpr, rest) =
+ partitionAdjacentTrailing expr.pexp_loc trailing in
+ walkExpr expr t (List.concat [leading; inside; afterExpr]);
+ rest
+ ) else (
+ attach t.leading expr.pexp_loc leading;
+ walkExpr expr t inside;
+ trailing
+ ) in
+ let (afterExpr, rest) = partitionAdjacentTrailing expr.pexp_loc trailing in
+ attach t.trailing expr.pexp_loc afterExpr;
+ let (leading, trailing) = partitionLeadingTrailing rest longident.loc in
+ attach t.leading longident.loc leading;
+ attach t.trailing longident.loc trailing
+ | Pexp_setfield (expr1, longident, expr2) ->
+ let (leading, inside, trailing) = partitionByLoc comments expr1.pexp_loc in
+ let rest = if isBlockExpr expr1 then (
+ let (afterExpr, rest) =
+ partitionAdjacentTrailing expr1.pexp_loc trailing in
+ walkExpr expr1 t (List.concat [leading; inside; afterExpr]);
+ rest
+ ) else (
+ let (afterExpr, rest) =
+ partitionAdjacentTrailing expr1.pexp_loc trailing in
+ attach t.leading expr1.pexp_loc leading;
+ walkExpr expr1 t inside;
+ attach t.trailing expr1.pexp_loc afterExpr;
+ rest
+ ) in
+ let (beforeLongident, afterLongident) = partitionLeadingTrailing rest longident.loc in
+ attach t.leading longident.loc beforeLongident;
+ let (afterLongident, rest) = partitionAdjacentTrailing longident.loc afterLongident in
+ attach t.trailing longident.loc afterLongident;
+ if isBlockExpr expr2 then
+ walkExpr expr2 t rest
+ else (
+ let (leading, inside, trailing) = partitionByLoc rest expr2.pexp_loc in
+ attach t.leading expr2.pexp_loc leading;
+ walkExpr expr2 t inside;
+ attach t.trailing expr2.pexp_loc trailing
+ )
+ | Pexp_ifthenelse (ifExpr, thenExpr, elseExpr) ->
+ let (leading, inside, trailing) = partitionByLoc comments ifExpr.pexp_loc in
+ let comments = if isBlockExpr ifExpr then (
+ let (afterExpr, comments) = partitionAdjacentTrailing ifExpr.pexp_loc trailing in
+ walkExpr ifExpr t (List.concat [leading; inside; afterExpr]);
+ comments
+ ) else (
+ attach t.leading ifExpr.pexp_loc leading;
+ walkExpr ifExpr t inside;
+ let (afterExpr, comments) = partitionAdjacentTrailing ifExpr.pexp_loc trailing in
+ attach t.trailing ifExpr.pexp_loc afterExpr;
+ comments
+ ) in
+ let (leading, inside, trailing) = partitionByLoc comments thenExpr.pexp_loc in
+ let comments = if isBlockExpr thenExpr then (
+ let (afterExpr, trailing) = partitionAdjacentTrailing thenExpr.pexp_loc trailing in
+ walkExpr thenExpr t (List.concat [leading; inside; afterExpr]);
+ trailing
+ ) else (
+ attach t.leading thenExpr.pexp_loc leading;
+ walkExpr thenExpr t inside;
+ let (afterExpr, comments) = partitionAdjacentTrailing thenExpr.pexp_loc trailing in
+ attach t.trailing thenExpr.pexp_loc afterExpr;
+ comments
+ ) in
+ begin match elseExpr with
+ | None -> ()
+ | Some expr ->
+ if isBlockExpr expr then
+ walkExpr expr t comments
+ else (
+ let (leading, inside, trailing) = partitionByLoc comments expr.pexp_loc in
+ attach t.leading expr.pexp_loc leading;
+ walkExpr expr t inside;
+ attach t.trailing expr.pexp_loc trailing
+ )
+ end
+ | Pexp_while (expr1, expr2) ->
+ let (leading, inside, trailing) = partitionByLoc comments expr1.pexp_loc in
+ let rest = if isBlockExpr expr1 then
+ let (afterExpr, rest) = partitionAdjacentTrailing expr1.pexp_loc trailing in
+ walkExpr expr1 t (List.concat [leading; inside; afterExpr]);
+ rest
+ else (
+ attach t.leading expr1.pexp_loc leading;
+ walkExpr expr1 t inside;
+ let (afterExpr, rest) = partitionAdjacentTrailing expr1.pexp_loc trailing in
+ attach t.trailing expr1.pexp_loc afterExpr;
+ rest
+ ) in
+ if isBlockExpr expr2 then (
+ walkExpr expr2 t rest
+ ) else (
+ let (leading, inside, trailing) = partitionByLoc rest expr2.pexp_loc in
+ attach t.leading expr2.pexp_loc leading;
+ walkExpr expr2 t inside;
+ attach t.trailing expr2.pexp_loc trailing
+ )
+ | Pexp_for (pat, expr1, expr2, _, expr3) ->
+ let (leading, inside, trailing) = partitionByLoc comments pat.ppat_loc in
+ attach t.leading pat.ppat_loc leading;
+ walkPattern pat t inside;
+ let (afterPat, rest) = partitionAdjacentTrailing pat.ppat_loc trailing in
+ attach t.trailing pat.ppat_loc afterPat;
+ let (leading, inside, trailing) = partitionByLoc rest expr1.pexp_loc in
+ attach t.leading expr1.pexp_loc leading;
+ walkExpr expr1 t inside;
+ let (afterExpr, rest) = partitionAdjacentTrailing expr1.pexp_loc trailing in
+ attach t.trailing expr1.pexp_loc afterExpr;
+ let (leading, inside, trailing) = partitionByLoc rest expr2.pexp_loc in
+ attach t.leading expr2.pexp_loc leading;
+ walkExpr expr2 t inside;
+ let (afterExpr, rest) = partitionAdjacentTrailing expr2.pexp_loc trailing in
+ attach t.trailing expr2.pexp_loc afterExpr;
+ if isBlockExpr expr3 then (
+ walkExpr expr3 t rest
+ ) else (
+ let (leading, inside, trailing) = partitionByLoc rest expr3.pexp_loc in
+ attach t.leading expr3.pexp_loc leading;
+ walkExpr expr3 t inside;
+ attach t.trailing expr3.pexp_loc trailing
+ )
+ | Pexp_pack modExpr ->
+ let (before, inside, after) = partitionByLoc comments modExpr.pmod_loc in
+ attach t.leading modExpr.pmod_loc before;
+ walkModExpr modExpr t inside;
+ attach t.trailing modExpr.pmod_loc after
+ | Pexp_match (expr, cases) | Pexp_try (expr, cases) ->
+ let (before, inside, after) = partitionByLoc comments expr.pexp_loc in
+ let after = if isBlockExpr expr then (
+ let (afterExpr, rest) =
+ partitionAdjacentTrailing expr.pexp_loc after in
+ walkExpr expr t (List.concat [before; inside; afterExpr]);
+ rest
+ ) else (
+ attach t.leading expr.pexp_loc before;
+ walkExpr expr t inside;
+ after
+ ) in
+ let (afterExpr, rest) = partitionAdjacentTrailing expr.pexp_loc after in
+ attach t.trailing expr.pexp_loc afterExpr;
+ walkList
+ ~getLoc:(fun n -> {n.Parsetree.pc_lhs.ppat_loc with
+ loc_end = n.pc_rhs.pexp_loc.loc_end})
+ ~walkNode:walkCase
+ cases
+ t
+ rest
+ (* unary expression: TODO use parsetreeviewer *)
+ | Pexp_apply (
+ {pexp_desc = Pexp_ident {txt = Longident.Lident
+ ("~+" | "~+." | "~-" | "~-." | "not" | "!")
+ }},
+ [Nolabel, argExpr]
+ ) ->
+ let (before, inside, after) = partitionByLoc comments argExpr.pexp_loc in
+ attach t.leading argExpr.pexp_loc before;
+ walkExpr argExpr t inside;
+ attach t.trailing argExpr.pexp_loc after
+ (* binary expression *)
+ | Pexp_apply(
+ {pexp_desc = Pexp_ident {txt = Longident.Lident
+ (":=" | "||" | "&&" | "=" | "==" | "<" | ">"
+ | "!=" | "!==" | "<=" | ">=" | "|>" | "+" | "+."
+ | "-" | "-." | "++" | "^" | "*" | "*." | "/"
+ | "/." | "**" | "|." | "<>") }},
+ [(Nolabel, operand1); (Nolabel, operand2)]
+ ) ->
+ let (before, inside, after) = partitionByLoc comments operand1.pexp_loc in
+ attach t.leading operand1.pexp_loc before;
+ walkExpr operand1 t inside;
+ let (afterOperand1, rest) =
+ partitionAdjacentTrailing operand1.pexp_loc after in
+ attach t.trailing operand1.pexp_loc afterOperand1;
+ let (before, inside, after) = partitionByLoc rest operand2.pexp_loc in
+ attach t.leading operand2.pexp_loc before;
+ walkExpr operand2 t inside; (* (List.concat [inside; after]); *)
+ attach t.trailing operand2.pexp_loc after;
+ | Pexp_apply (callExpr, arguments) ->
+ let (before, inside, after) = partitionByLoc comments callExpr.pexp_loc in
+ let after = if isBlockExpr callExpr then (
+ let (afterExpr, rest) =
+ partitionAdjacentTrailing callExpr.pexp_loc after in
+ walkExpr callExpr t (List.concat [before; inside; afterExpr]);
+ rest
+ ) else (
+ attach t.leading callExpr.pexp_loc before;
+ walkExpr callExpr t inside;
+ after
+ ) in
+ let (afterExpr, rest) = partitionAdjacentTrailing callExpr.pexp_loc after in
+ attach t.trailing callExpr.pexp_loc afterExpr;
+ walkList
+ ~getLoc:(fun (_argLabel, expr) ->
+ let loc = match expr.Parsetree.pexp_attributes with
+ | ({Location.txt = "ns.namedArgLoc"; loc}, _)::_attrs ->
+ {loc with loc_end = expr.pexp_loc.loc_end}
+ | _ ->
+ expr.pexp_loc
+ in
+ loc)
+ ~walkNode:walkExprArgument
+ arguments
+ t
+ rest
+ | Pexp_fun (_, _, _, _) | Pexp_newtype _ ->
+ let (_, parameters, returnExpr) = funExpr expr in
+ let comments = visitListButContinueWithRemainingComments
+ ~newlineDelimited:false
+ ~walkNode:walkExprPararameter
+ ~getLoc:(fun (_attrs, _argLbl, exprOpt, pattern) ->
+ let open Parsetree in
+ let startPos = match pattern.ppat_attributes with
+ | ({Location.txt = "ns.namedArgLoc"; loc}, _)::_attrs ->
+ loc.loc_start
+ | _ ->
+ pattern.ppat_loc.loc_start
+ in
+ match exprOpt with
+ | None -> {pattern.ppat_loc with loc_start = startPos}
+ | Some expr -> {
+ pattern.ppat_loc with
+ loc_start = startPos;
+ loc_end = expr.pexp_loc.loc_end
+ }
+ )
+ parameters
+ t
+ comments
+ in
+ begin match returnExpr.pexp_desc with
+ | Pexp_constraint (expr, typ)
+ when expr.pexp_loc.loc_start.pos_cnum >= typ.ptyp_loc.loc_end.pos_cnum
+ ->
+ let (leading, inside, trailing) = partitionByLoc comments typ.ptyp_loc in
+ attach t.leading typ.ptyp_loc leading;
+ walkTypExpr typ t inside;
+ let (afterTyp, comments) =
+ partitionAdjacentTrailing typ.ptyp_loc trailing in
+ attach t.trailing typ.ptyp_loc afterTyp;
+ if isBlockExpr expr then
+ walkExpr expr t comments
+ else (
+ let (leading, inside, trailing) =
+ partitionByLoc comments expr.pexp_loc in
+ attach t.leading expr.pexp_loc leading;
+ walkExpr expr t inside;
+ attach t.trailing expr.pexp_loc trailing
+ )
+ | _ ->
+ if isBlockExpr returnExpr then
+ walkExpr returnExpr t comments
+ else (
+ let (leading, inside, trailing) =
+ partitionByLoc comments returnExpr.pexp_loc in
+ attach t.leading returnExpr.pexp_loc leading;
+ walkExpr returnExpr t inside;
+ attach t.trailing returnExpr.pexp_loc trailing
+ )
+ end
+ | _ -> ()
+
+ (* Attaches comments around one function parameter: the pattern first,
+  * then the optional default-value expression. *)
+ and walkExprPararameter (_attrs, _argLbl, exprOpt, pattern) t comments =
+ let (leading, inside, trailing) = partitionByLoc comments pattern.ppat_loc in
+ attach t.leading pattern.ppat_loc leading;
+ walkPattern pattern t inside;
+ begin match exprOpt with
+ | Some expr ->
+ let (_afterPat, rest) =
+ partitionAdjacentTrailing pattern.ppat_loc trailing in
+ (* NOTE(review): the full [trailing] list is attached even though it was
+  * just partitioned and [_afterPat] is unused — confirm intended, since
+  * comments in [rest] also flow into the default expression below. *)
+ attach t.trailing pattern.ppat_loc trailing;
+ if isBlockExpr expr then
+ walkExpr expr t rest
+ else (
+ let (leading, inside, trailing) = partitionByLoc rest expr.pexp_loc in
+ attach t.leading expr.pexp_loc leading;
+ walkExpr expr t inside;
+ attach t.trailing expr.pexp_loc trailing
+ )
+ | None ->
+ (* no default value: everything after the pattern trails the pattern *)
+ attach t.trailing pattern.ppat_loc trailing
+ end
+
+ (* Attaches comments around a call argument. A labelled argument carries a
+  * "ns.namedArgLoc" attribute holding the label's location, which receives
+  * its own leading/trailing comments before the expression is walked. *)
+ and walkExprArgument (_argLabel, expr) t comments =
+ match expr.Parsetree.pexp_attributes with
+ | ({Location.txt = "ns.namedArgLoc"; loc}, _)::_attrs ->
+ let (leading, trailing) = partitionLeadingTrailing comments loc in
+ attach t.leading loc leading;
+ let (afterLabel, rest) = partitionAdjacentTrailing loc trailing in
+ attach t.trailing loc afterLabel;
+ let (before, inside, after) = partitionByLoc rest expr.pexp_loc in
+ attach t.leading expr.pexp_loc before;
+ walkExpr expr t inside;
+ attach t.trailing expr.pexp_loc after
+ | _ ->
+ (* unlabelled argument: comments wrap the expression directly *)
+ let (before, inside, after) = partitionByLoc comments expr.pexp_loc in
+ attach t.leading expr.pexp_loc before;
+ walkExpr expr t inside;
+ attach t.trailing expr.pexp_loc after
+
+ (* Attaches comments to one match case: pattern, then the optional [when]
+  * guard, then the right-hand side. *)
+ and walkCase case t comments =
+ let (before, inside, after) = partitionByLoc comments case.pc_lhs.ppat_loc in
+ (* cases don't have a location on their own, leading comments should go
+ * after the bar on the pattern *)
+ walkPattern case.pc_lhs t (List.concat [before; inside]);
+ let (afterPat, rest) = partitionAdjacentTrailing case.pc_lhs.ppat_loc after in
+ attach t.trailing case.pc_lhs.ppat_loc afterPat;
+ (* guard clause, when present, consumes its share of [rest] *)
+ let comments = match case.pc_guard with
+ | Some expr ->
+ let (before, inside, after) = partitionByLoc rest expr.pexp_loc in
+ let (afterExpr, rest) = partitionAdjacentTrailing expr.pexp_loc after in
+ if isBlockExpr expr then (
+ walkExpr expr t (List.concat [before; inside; afterExpr])
+ ) else (
+ attach t.leading expr.pexp_loc before;
+ walkExpr expr t inside;
+ attach t.trailing expr.pexp_loc afterExpr;
+ );
+ rest
+ | None -> rest
+ in
+ (* block expressions absorb all remaining comments themselves *)
+ if isBlockExpr case.pc_rhs then (
+ walkExpr case.pc_rhs t comments
+ ) else (
+ let (before, inside, after) = partitionByLoc comments case.pc_rhs.pexp_loc in
+ attach t.leading case.pc_rhs.pexp_loc before;
+ walkExpr case.pc_rhs t inside;
+ attach t.trailing case.pc_rhs.pexp_loc after
+ )
+
+ (* Attaches comments around one [field: expr] row of a record expression:
+  * label first, then the value expression. *)
+ and walkExprRecordRow (longident, expr) t comments =
+ let (beforeLongident, afterLongident) =
+ partitionLeadingTrailing comments longident.loc
+ in
+ attach t.leading longident.loc beforeLongident;
+ let (afterLongident, rest) =
+ partitionAdjacentTrailing longident.loc afterLongident in
+ attach t.trailing longident.loc afterLongident;
+ let (leading, inside, trailing) = partitionByLoc rest expr.pexp_loc in
+ attach t.leading expr.pexp_loc leading;
+ walkExpr expr t inside;
+ attach t.trailing expr.pexp_loc trailing
+
+ (* Attaches comments around an extension constructor: the constructor name
+  * first; the remainder flows into its kind. *)
+ and walkExtConstr extConstr t comments =
+ let (leading, trailing) =
+ partitionLeadingTrailing comments extConstr.pext_name.loc in
+ attach t.leading extConstr.pext_name.loc leading;
+ let (afterName, rest) =
+ partitionAdjacentTrailing extConstr.pext_name.loc trailing in
+ attach t.trailing extConstr.pext_name.loc afterName;
+ walkExtensionConstructorKind extConstr.pext_kind t rest
+
+ (* [Pext_rebind]: comments hug the rebound identifier.
+  * [Pext_decl]: constructor arguments are walked first; leftover comments
+  * wrap the optional result type. *)
+ and walkExtensionConstructorKind kind t comments =
+ match kind with
+ | Pext_rebind longident ->
+ let (leading, trailing) =
+ partitionLeadingTrailing comments longident.loc in
+ attach t.leading longident.loc leading;
+ attach t.trailing longident.loc trailing
+ | Pext_decl (constructorArguments, maybeTypExpr) ->
+ let rest = walkConstructorArguments constructorArguments t comments in
+ begin match maybeTypExpr with
+ | None -> ()
+ | Some typexpr ->
+ let (before, inside, after) = partitionByLoc rest typexpr.ptyp_loc in
+ attach t.leading typexpr.ptyp_loc before;
+ walkTypExpr typexpr t inside;
+ attach t.trailing typexpr.ptyp_loc after
+ end
+
+ (* Attaches comments to a module expression. For constraints and functors
+  * the sub-nodes are walked in source order, which is recovered by
+  * comparing their locations. *)
+ and walkModExpr modExpr t comments =
+ match modExpr.pmod_desc with
+ | Pmod_ident longident ->
+ let (before, after) = partitionLeadingTrailing comments longident.loc in
+ attach t.leading longident.loc before;
+ attach t.trailing longident.loc after
+ | Pmod_structure structure ->
+ walkStructure structure t comments
+ | Pmod_extension extension ->
+ walkExtension extension t comments
+ | Pmod_unpack expr ->
+ let (before, inside, after) = partitionByLoc comments expr.pexp_loc in
+ attach t.leading expr.pexp_loc before;
+ walkExpr expr t inside;
+ attach t.trailing expr.pexp_loc after
+ | Pmod_constraint (modexpr, modtype) ->
+ (* walk whichever of the module expression / module type comes first in
+  * the source text, feeding its leftover comments to the other *)
+ if modtype.pmty_loc.loc_start >= modexpr.pmod_loc.loc_end then (
+ let (before, inside, after) = partitionByLoc comments modexpr.pmod_loc in
+ attach t.leading modexpr.pmod_loc before;
+ walkModExpr modexpr t inside;
+ let (after, rest) = partitionAdjacentTrailing modexpr.pmod_loc after in
+ attach t.trailing modexpr.pmod_loc after;
+ let (before, inside, after) = partitionByLoc rest modtype.pmty_loc in
+ attach t.leading modtype.pmty_loc before;
+ walkModType modtype t inside;
+ attach t.trailing modtype.pmty_loc after
+ ) else (
+ let (before, inside, after) = partitionByLoc comments modtype.pmty_loc in
+ attach t.leading modtype.pmty_loc before;
+ walkModType modtype t inside;
+ let (after, rest) = partitionAdjacentTrailing modtype.pmty_loc after in
+ attach t.trailing modtype.pmty_loc after;
+ let (before, inside, after) = partitionByLoc rest modexpr.pmod_loc in
+ attach t.leading modexpr.pmod_loc before;
+ walkModExpr modexpr t inside;
+ attach t.trailing modexpr.pmod_loc after;
+ )
+ | Pmod_apply (_callModExpr, _argModExpr) ->
+ (* flatten nested applications into one list before walking *)
+ let modExprs = modExprApply modExpr in
+ walkList
+ ~getLoc:(fun n -> n.Parsetree.pmod_loc)
+ ~walkNode:walkModExpr
+ modExprs
+ t
+ comments
+ | Pmod_functor _ ->
+ (* functor parameters are walked first; leftover comments belong to the
+  * return module expression (or its constraint, when written first) *)
+ let (parameters, returnModExpr) = modExprFunctor modExpr in
+ let comments = visitListButContinueWithRemainingComments
+ ~getLoc:(fun
+ (_, lbl, modTypeOption) -> match modTypeOption with
+ | None -> lbl.Asttypes.loc
+ | Some modType -> {lbl.loc with loc_end = modType.Parsetree.pmty_loc.loc_end}
+ )
+ ~walkNode:walkModExprParameter
+ ~newlineDelimited:false
+ parameters
+ t
+ comments
+ in
+ begin match returnModExpr.pmod_desc with
+ | Pmod_constraint (modExpr, modType)
+ when modType.pmty_loc.loc_end.pos_cnum <= modExpr.pmod_loc.loc_start.pos_cnum ->
+ let (before, inside, after) = partitionByLoc comments modType.pmty_loc in
+ attach t.leading modType.pmty_loc before;
+ walkModType modType t inside;
+ let (after, rest) = partitionAdjacentTrailing modType.pmty_loc after in
+ attach t.trailing modType.pmty_loc after;
+ let (before, inside, after) = partitionByLoc rest modExpr.pmod_loc in
+ attach t.leading modExpr.pmod_loc before;
+ walkModExpr modExpr t inside;
+ attach t.trailing modExpr.pmod_loc after
+ | _ ->
+ let (before, inside, after) = partitionByLoc comments returnModExpr.pmod_loc in
+ attach t.leading returnModExpr.pmod_loc before;
+ walkModExpr returnModExpr t inside;
+ attach t.trailing returnModExpr.pmod_loc after
+ end
+
+ (* Attaches comments around a functor parameter: label first, then the
+  * optional module-type annotation. *)
+ and walkModExprParameter parameter t comments =
+ let (_attrs, lbl, modTypeOption) = parameter in
+ let (leading, trailing) = partitionLeadingTrailing comments lbl.loc in
+ attach t.leading lbl.loc leading;
+ begin match modTypeOption with
+ | None -> attach t.trailing lbl.loc trailing
+ | Some modType ->
+ let (afterLbl, rest) = partitionAdjacentTrailing lbl.loc trailing in
+ attach t.trailing lbl.loc afterLbl;
+ let (before, inside, after) = partitionByLoc rest modType.pmty_loc in
+ attach t.leading modType.pmty_loc before;
+ walkModType modType t inside;
+ attach t.trailing modType.pmty_loc after;
+ end
+
+ (* Attaches comments to a module type expression. Functor types walk their
+  * parameters first, then the return module type. *)
+ and walkModType modType t comments =
+ match modType.pmty_desc with
+ | Pmty_ident longident | Pmty_alias longident ->
+ let (leading, trailing) = partitionLeadingTrailing comments longident.loc in
+ attach t.leading longident.loc leading;
+ attach t.trailing longident.loc trailing;
+ | Pmty_signature signature ->
+ walkSignature signature t comments
+ | Pmty_extension extension ->
+ walkExtension extension t comments
+ | Pmty_typeof modExpr ->
+ let (before, inside, after) = partitionByLoc comments modExpr.pmod_loc in
+ attach t.leading modExpr.pmod_loc before;
+ walkModExpr modExpr t inside;
+ attach t.trailing modExpr.pmod_loc after;
+ | Pmty_with (modType, _withConstraints) ->
+ let (before, inside, after) = partitionByLoc comments modType.pmty_loc in
+ attach t.leading modType.pmty_loc before;
+ walkModType modType t inside;
+ attach t.trailing modType.pmty_loc after
+ (* TODO: withConstraints*)
+ | Pmty_functor _ ->
+ let (parameters, returnModType) = functorType modType in
+ let comments = visitListButContinueWithRemainingComments
+ ~getLoc:(fun
+ (_, lbl, modTypeOption) -> match modTypeOption with
+ | None -> lbl.Asttypes.loc
+ | Some modType ->
+ (* a "_" label is invisible in the source: use the type's own loc *)
+ if lbl.txt = "_" then modType.Parsetree.pmty_loc
+ else {lbl.loc with loc_end = modType.Parsetree.pmty_loc.loc_end}
+ )
+ ~walkNode:walkModTypeParameter
+ ~newlineDelimited:false
+ parameters
+ t
+ comments
+ in
+ let (before, inside, after) = partitionByLoc comments returnModType.pmty_loc in
+ attach t.leading returnModType.pmty_loc before;
+ walkModType returnModType t inside;
+ attach t.trailing returnModType.pmty_loc after
+
+ (* Attaches comments around a functor-type parameter: label first, then
+  * the optional module type. *)
+ and walkModTypeParameter (_, lbl, modTypeOption) t comments =
+ let (leading, trailing) = partitionLeadingTrailing comments lbl.loc in
+ attach t.leading lbl.loc leading;
+ begin match modTypeOption with
+ | None -> attach t.trailing lbl.loc trailing
+ | Some modType ->
+ let (afterLbl, rest) = partitionAdjacentTrailing lbl.loc trailing in
+ attach t.trailing lbl.loc afterLbl;
+ let (before, inside, after) = partitionByLoc rest modType.pmty_loc in
+ attach t.leading modType.pmty_loc before;
+ walkModType modType t inside;
+ attach t.trailing modType.pmty_loc after;
+ end
+
+ (* Attaches comments to a pattern node, recursing into sub-patterns and
+  * partitioning [comments] into leading/inside/trailing groups for each. *)
+ and walkPattern pat t comments =
+ let open Location in
+ match pat.Parsetree.ppat_desc with
+ | _ when comments = [] -> ()
+ | Ppat_alias (pat, alias) ->
+ let (leading, inside, trailing) = partitionByLoc comments pat.ppat_loc in
+ attach t.leading pat.ppat_loc leading;
+ walkPattern pat t inside;
+ let (afterPat, rest) = partitionAdjacentTrailing pat.ppat_loc trailing in
+ (* fix: dropped a second [attach t.leading pat.ppat_loc leading] that was
+  * a duplicate of the call above and could attach the same leading
+  * comments twice *)
+ attach t.trailing pat.ppat_loc afterPat;
+ let (beforeAlias, afterAlias) = partitionLeadingTrailing rest alias.loc in
+ attach t.leading alias.loc beforeAlias;
+ attach t.trailing alias.loc afterAlias
+ | Ppat_tuple []
+ | Ppat_array []
+ | Ppat_construct({txt = Longident.Lident "()"}, _)
+ | Ppat_construct({txt = Longident.Lident "[]"}, _) ->
+ (* empty containers: everything lives "inside" the delimiters *)
+ attach t.inside pat.ppat_loc comments;
+ | Ppat_array patterns ->
+ walkList
+ ~getLoc:(fun n -> n.Parsetree.ppat_loc)
+ ~walkNode:walkPattern
+ patterns
+ t
+ comments
+ | Ppat_tuple patterns ->
+ walkList
+ ~getLoc:(fun n -> n.Parsetree.ppat_loc)
+ ~walkNode:walkPattern
+ patterns
+ t
+ comments
+ | Ppat_construct({txt = Longident.Lident "::"}, _) ->
+ (* list literal: flatten the cons cells before walking *)
+ walkList
+ ~getLoc:(fun n -> n.Parsetree.ppat_loc)
+ ~walkNode:walkPattern
+ (collectListPatterns [] pat)
+ t
+ comments
+ | Ppat_construct (constr, None) ->
+ let (beforeConstr, afterConstr) =
+ partitionLeadingTrailing comments constr.loc
+ in
+ attach t.leading constr.loc beforeConstr;
+ attach t.trailing constr.loc afterConstr
+ | Ppat_construct (constr, Some pat) ->
+ let (leading, trailing) = partitionLeadingTrailing comments constr.loc in
+ attach t.leading constr.loc leading;
+ let (leading, inside, trailing) = partitionByLoc trailing pat.ppat_loc in
+ attach t.leading pat.ppat_loc leading;
+ walkPattern pat t inside;
+ attach t.trailing pat.ppat_loc trailing
+ | Ppat_variant (_label, None) ->
+ ()
+ | Ppat_variant (_label, Some pat) ->
+ walkPattern pat t comments
+ | Ppat_type _ ->
+ ()
+ | Ppat_record (recordRows, _) ->
+ (* a row spans from its label through its pattern *)
+ walkList
+ ~getLoc:(fun (
+ (longidentLoc, pattern): (Longident.t Asttypes.loc * Parsetree.pattern)
+ ) -> {
+ longidentLoc.loc with
+ loc_end = pattern.Parsetree.ppat_loc.loc_end
+ })
+ ~walkNode:walkPatternRecordRow
+ recordRows
+ t
+ comments
+ | Ppat_or (pattern1, pattern2) ->
+ let (beforePattern1, insidePattern1, afterPattern1) =
+ partitionByLoc comments pattern1.ppat_loc
+ in
+ attach t.leading pattern1.ppat_loc beforePattern1;
+ walkPattern pattern1 t insidePattern1;
+ let (afterPattern1, rest) =
+ partitionAdjacentTrailing pattern1.ppat_loc afterPattern1
+ in
+ attach t.trailing pattern1.ppat_loc afterPattern1;
+ let (beforePattern2, insidePattern2, afterPattern2) =
+ partitionByLoc rest pattern2.ppat_loc
+ in
+ attach t.leading pattern2.ppat_loc beforePattern2;
+ walkPattern pattern2 t insidePattern2;
+ attach t.trailing pattern2.ppat_loc afterPattern2
+ | Ppat_constraint (pattern, typ) ->
+ let (beforePattern, insidePattern, afterPattern) =
+ partitionByLoc comments pattern.ppat_loc
+ in
+ attach t.leading pattern.ppat_loc beforePattern;
+ walkPattern pattern t insidePattern;
+ let (afterPattern, rest) =
+ partitionAdjacentTrailing pattern.ppat_loc afterPattern
+ in
+ attach t.trailing pattern.ppat_loc afterPattern;
+ let (beforeTyp, insideTyp, afterTyp) =
+ partitionByLoc rest typ.ptyp_loc
+ in
+ attach t.leading typ.ptyp_loc beforeTyp;
+ walkTypExpr typ t insideTyp;
+ attach t.trailing typ.ptyp_loc afterTyp
+ | Ppat_lazy pattern | Ppat_exception pattern ->
+ let (leading, inside, trailing) = partitionByLoc comments pattern.ppat_loc in
+ attach t.leading pattern.ppat_loc leading;
+ walkPattern pattern t inside;
+ attach t.trailing pattern.ppat_loc trailing
+ | Ppat_unpack stringLoc ->
+ let (leading, trailing) = partitionLeadingTrailing comments stringLoc.loc in
+ attach t.leading stringLoc.loc leading;
+ attach t.trailing stringLoc.loc trailing
+ | Ppat_extension extension ->
+ walkExtension extension t comments
+ | _ -> ()
+
+ (* name: firstName *)
+ (* Attaches comments around one row of a record pattern; a punned row
+  * (label and variable share the same name) only has the label to hang
+  * comments on. *)
+ and walkPatternRecordRow row t comments =
+ match row with
+ (* punned {x}*)
+ | ({Location.txt=Longident.Lident ident; loc = longidentLoc},
+ {Parsetree.ppat_desc=Ppat_var {txt;_}}) when ident = txt ->
+ let (beforeLbl, afterLbl) =
+ partitionLeadingTrailing comments longidentLoc
+ in
+ attach t.leading longidentLoc beforeLbl;
+ attach t.trailing longidentLoc afterLbl
+ | (longident, pattern) ->
+ let (beforeLbl, afterLbl) =
+ partitionLeadingTrailing comments longident.loc
+ in
+ attach t.leading longident.loc beforeLbl;
+ let (afterLbl, rest) = partitionAdjacentTrailing longident.loc afterLbl in
+ attach t.trailing longident.loc afterLbl;
+ let (leading, inside, trailing) = partitionByLoc rest pattern.ppat_loc in
+ attach t.leading pattern.ppat_loc leading;
+ walkPattern pattern t inside;
+ attach t.trailing pattern.ppat_loc trailing
+
+ (* Attaches comments to a core type expression, recursing into its
+  * components. *)
+ and walkTypExpr typ t comments =
+ match typ.Parsetree.ptyp_desc with
+ | _ when comments = [] -> ()
+ | Ptyp_tuple typexprs ->
+ walkList
+ ~getLoc:(fun n -> n.Parsetree.ptyp_loc)
+ ~walkNode:walkTypExpr
+ typexprs
+ t
+ comments
+ | Ptyp_extension extension ->
+ walkExtension extension t comments
+ | Ptyp_package packageType ->
+ walkPackageType packageType t comments
+ | Ptyp_alias (typexpr, _alias) ->
+ let (beforeTyp, insideTyp, afterTyp) =
+ partitionByLoc comments typexpr.ptyp_loc in
+ attach t.leading typexpr.ptyp_loc beforeTyp;
+ walkTypExpr typexpr t insideTyp;
+ attach t.trailing typexpr.ptyp_loc afterTyp;
+ | Ptyp_poly (strings, typexpr) ->
+ (* the quantified variables are walked first; leftovers wrap the body *)
+ let comments = visitListButContinueWithRemainingComments
+ ~getLoc:(fun n -> n.Asttypes.loc)
+ ~walkNode:(fun longident t comments ->
+ let (beforeLongident, afterLongident) =
+ partitionLeadingTrailing comments longident.loc in
+ attach t.leading longident.loc beforeLongident;
+ attach t.trailing longident.loc afterLongident
+ )
+ ~newlineDelimited:false
+ strings
+ t
+ comments
+ in
+ let (beforeTyp, insideTyp, afterTyp) =
+ partitionByLoc comments typexpr.ptyp_loc in
+ attach t.leading typexpr.ptyp_loc beforeTyp;
+ walkTypExpr typexpr t insideTyp;
+ attach t.trailing typexpr.ptyp_loc afterTyp
+ | Ptyp_constr (longident, typexprs) ->
+ let (beforeLongident, _afterLongident) =
+ partitionLeadingTrailing comments longident.loc in
+ (* NOTE(review): partitions the full [comments] (not [_afterLongident])
+  * for the adjacent-trailing split — confirm leading comments cannot
+  * leak into [rest] here *)
+ let (afterLongident, rest) =
+ partitionAdjacentTrailing longident.loc comments in
+ attach t.leading longident.loc beforeLongident;
+ attach t.trailing longident.loc afterLongident;
+ walkList
+ ~getLoc:(fun n -> n.Parsetree.ptyp_loc)
+ ~walkNode:walkTypExpr
+ typexprs
+ t
+ rest
+ | Ptyp_arrow _ ->
+ (* flatten the arrow into parameters + return type before walking *)
+ let (_, parameters, typexpr) = arrowType typ in
+ let comments = walkTypeParameters parameters t comments in
+ let (beforeTyp, insideTyp, afterTyp) =
+ partitionByLoc comments typexpr.ptyp_loc in
+ attach t.leading typexpr.ptyp_loc beforeTyp;
+ walkTypExpr typexpr t insideTyp;
+ attach t.trailing typexpr.ptyp_loc afterTyp
+ | Ptyp_object (fields, _) ->
+ walkTypObjectFields fields t comments
+ | _ -> ()
+
+ (* Walks the rows of an object type. A tagged field spans from its label
+  * through its type; other rows fall back to Location.none. *)
+ and walkTypObjectFields fields t comments =
+ let getLoc field =
+ match field with
+ | Parsetree.Otag (lbl, _, typ) ->
+ {lbl.loc with loc_end = typ.ptyp_loc.loc_end}
+ | _ -> Location.none
+ in
+ walkList ~getLoc ~walkNode:walkTypObjectField fields t comments
+
+ (* Attaches comments around one object-type field: label, then its type.
+  * Non-[Otag] rows receive no comments. *)
+ and walkTypObjectField field t comments =
+ match field with
+ | Otag (lbl, _, typexpr) ->
+ let (beforeLbl, afterLbl) = partitionLeadingTrailing comments lbl.loc in
+ attach t.leading lbl.loc beforeLbl;
+ let (afterLbl, rest) = partitionAdjacentTrailing lbl.loc afterLbl in
+ attach t.trailing lbl.loc afterLbl;
+ let (beforeTyp, insideTyp, afterTyp) =
+ partitionByLoc rest typexpr.ptyp_loc in
+ attach t.leading typexpr.ptyp_loc beforeTyp;
+ walkTypExpr typexpr t insideTyp;
+ attach t.trailing typexpr.ptyp_loc afterTyp
+ | _ -> ()
+
+ (* Walks arrow-type parameters in order and returns the comments that
+  * remain after the last one (they belong to the return type). *)
+ and walkTypeParameters typeParameters t comments =
+ let getLoc (_, _, typexpr) = typexpr.Parsetree.ptyp_loc in
+ visitListButContinueWithRemainingComments
+ ~newlineDelimited:false
+ ~getLoc
+ ~walkNode:walkTypeParameter
+ typeParameters
+ t
+ comments
+
+ (* Attaches comments around a single arrow-type parameter's type. *)
+ and walkTypeParameter (_attrs, _lbl, typexpr) t comments =
+ let (before, inside, after) = partitionByLoc comments typexpr.ptyp_loc in
+ attach t.leading typexpr.ptyp_loc before;
+ walkTypExpr typexpr t inside;
+ attach t.trailing typexpr.ptyp_loc after
+
+ (* Attaches comments around a first-class module type: the module path
+  * first, then its [with type ...] constraints. *)
+ and walkPackageType packageType t comments =
+ let (longident, packageConstraints) = packageType in
+ let (beforeLongident, afterLongident) =
+ partitionLeadingTrailing comments longident.loc in
+ attach t.leading longident.loc beforeLongident;
+ let (afterLongident, rest) =
+ partitionAdjacentTrailing longident.loc afterLongident in
+ attach t.trailing longident.loc afterLongident;
+ walkPackageConstraints packageConstraints t rest
+
+ (* Walks package-type constraints; each one spans from its longident
+  * through the constrained type. *)
+ and walkPackageConstraints packageConstraints t comments =
+ let getLoc (longident, typexpr) =
+ {longident.Asttypes.loc with
+ loc_end = typexpr.Parsetree.ptyp_loc.loc_end}
+ in
+ walkList ~getLoc ~walkNode:walkPackageConstraint packageConstraints t comments
+
+ (* Attaches comments around one [with type lid = typ] package constraint:
+  * longident first, then the type. *)
+ and walkPackageConstraint packageConstraint t comments =
+ let (longident, typexpr) = packageConstraint in
+ let (beforeLongident, afterLongident) =
+ partitionLeadingTrailing comments longident.loc in
+ attach t.leading longident.loc beforeLongident;
+ let (afterLongident, rest) =
+ partitionAdjacentTrailing longident.loc afterLongident in
+ attach t.trailing longident.loc afterLongident;
+ let (beforeTyp, insideTyp, afterTyp) =
+ partitionByLoc rest typexpr.ptyp_loc in
+ attach t.leading typexpr.ptyp_loc beforeTyp;
+ walkTypExpr typexpr t insideTyp;
+ attach t.trailing typexpr.ptyp_loc afterTyp;
+
+ (* Attaches comments around an extension point: the id first, then the
+  * remainder flows into the payload. *)
+ and walkExtension extension t comments =
+ let (id, payload) = extension in
+ let (leadingId, trailingId) = partitionLeadingTrailing comments id.loc in
+ attach t.leading id.loc leadingId;
+ let (adjacentAfterId, remaining) = partitionAdjacentTrailing id.loc trailingId in
+ attach t.trailing id.loc adjacentAfterId;
+ walkPayload payload t remaining
+
+ (* Attaches comments around an attribute: the id first, then the remainder
+  * flows into the payload (mirrors [walkExtension]). *)
+ and walkAttribute (id, payload) t comments =
+ let (beforeId, afterId) = partitionLeadingTrailing comments id.loc in
+ attach t.leading id.loc beforeId;
+ let (afterId, rest) = partitionAdjacentTrailing id.loc afterId in
+ attach t.trailing id.loc afterId;
+ walkPayload payload t rest
+
+ (* Only structure payloads carry comments we know how to place; other
+  * payload kinds are ignored. *)
+ and walkPayload payload t comments =
+ match payload with
+ | PStr structure -> walkStructure structure t comments
+ | _ -> ()
+
+end
+
+module Printer = struct
+ (* Wraps [doc] in parentheses; when the group breaks, the content moves
+  * to its own indented line. *)
+ let addParens doc =
+ let indentedBody = Doc.indent (Doc.concat [Doc.softLine; doc]) in
+ Doc.group (
+ Doc.concat [Doc.lparen; indentedBody; Doc.softLine; Doc.rparen]
+ )
+
+ (* Wraps [doc] in braces as a single group. *)
+ let addBraces doc =
+ Doc.group (Doc.concat [Doc.lbrace; doc; Doc.rbrace])
+
+ (* Returns the first leading comment attached to [loc], if any. *)
+ let getFirstLeadingComment tbl loc =
+ match Hashtbl.find tbl.CommentTable.leading loc with
+ | exception Not_found -> None
+ | [] -> None
+ | firstComment::_ -> Some firstComment
+
+ (* Renders the body of a multi-line comment. *)
+ let printMultilineCommentContent txt =
+ (* Turns
+ * |* first line
+ * * second line
+ * * third line *|
+ * Into
+ * |* first line
+ * * second line
+ * * third line *|
+ *
+ * What makes a comment suitable for this kind of indentation?
+ * -> multiple lines + every line starts with a star
+ *)
+ let rec indentStars lines acc =
+ match lines with
+ | [] -> Doc.nil
+ | [lastLine] ->
+ let line = String.trim lastLine in
+ let doc = Doc.text (" " ^ line) in
+ let trailingSpace = if String.length line > 0 then Doc.space else Doc.nil in
+ List.rev (trailingSpace::doc::acc) |> Doc.concat
+ | line::lines ->
+ let line = String.trim line in
+ let len = String.length line in
+ if len > 0 && (String.get [@doesNotRaise]) line 0 == '*' then
+ let doc = Doc.text (" " ^ (String.trim line)) in
+ indentStars lines (Doc.hardLine::doc::acc)
+ else
+ (* a line that doesn't start with '*': abandon the star-aligned
+  * rendering built so far ([acc] is discarded) and fall back to
+  * printing the whole original [txt] verbatim *)
+ let trailingSpace =
+ let len = String.length txt in
+ if len > 0 && (String.unsafe_get txt (len - 1) = ' ') then
+ Doc.space
+ else Doc.nil
+ in
+ let content = Comment.trimSpaces txt in
+ Doc.concat [Doc.text content; trailingSpace]
+ in
+ let lines = String.split_on_char '\n' txt in
+ match lines with
+ | [] -> Doc.text "/* */"
+ | [line] -> Doc.concat [
+ Doc.text "/* ";
+ Doc.text (Comment.trimSpaces line);
+ Doc.text " */";
+ ]
+ | first::rest ->
+ let firstLine = Comment.trimSpaces first in
+ Doc.concat [
+ Doc.text "/*";
+ (* no space before the body when the first line is empty or just "*" *)
+ if String.length firstLine > 0 && not (String.equal firstLine "*") then
+ Doc.space else Doc.nil;
+ indentStars rest [Doc.hardLine; Doc.text firstLine];
+ Doc.text "*/";
+ ]
+
+ (* Prints a comment that trails [nodeLoc]. Comments separated from the
+  * previous token by a blank line, or located below the node, are emitted
+  * as a line suffix on their own line(s); otherwise they stay inline. *)
+ let printTrailingComment (nodeLoc : Location.t) comment =
+ let singleLine = Comment.isSingleLineComment comment in
+ let content =
+ let txt = Comment.txt comment in
+ if singleLine then
+ Doc.text ("// " ^ String.trim txt)
+ else
+ printMultilineCommentContent txt
+ in
+ (* line distance between the previous token and the comment start *)
+ let diff =
+ let cmtStart = (Comment.loc comment).loc_start in
+ let prevTokEndPos = Comment.prevTokEndPos comment in
+ cmtStart.pos_lnum - prevTokEndPos.pos_lnum
+ in
+ let isBelow =
+ (Comment.loc comment).loc_start.pos_lnum > nodeLoc.loc_end.pos_lnum in
+ if diff > 0 || isBelow then
+ Doc.concat [
+ Doc.breakParent;
+ Doc.lineSuffix(
+ (Doc.concat [Doc.hardLine; if diff > 1 then Doc.hardLine else Doc.nil; content])
+ )
+ ]
+ else if not singleLine then
+ Doc.concat [Doc.space; content]
+ else
+ (* single-line comments would swallow the rest of the line: defer them *)
+ Doc.lineSuffix (Doc.concat [Doc.space; content])
+
+ (* Prints a leading comment plus the separator joining it to the next
+  * comment (when [nextComment] is given). Blank lines between comments
+  * are preserved as at most one empty line. *)
+ let printLeadingComment ?nextComment comment =
+ let singleLine = Comment.isSingleLineComment comment in
+ let content =
+ let txt = Comment.txt comment in
+ if singleLine then
+ Doc.text ("// " ^ String.trim txt)
+ else
+ printMultilineCommentContent txt
+ in
+ let separator = Doc.concat [
+ (* a single-line comment must always be followed by a line break *)
+ if singleLine then Doc.concat [
+ Doc.hardLine;
+ Doc.breakParent;
+ ] else Doc.nil;
+ (match nextComment with
+ | Some next ->
+ let nextLoc = Comment.loc next in
+ let currLoc = Comment.loc comment in
+ let diff =
+ nextLoc.Location.loc_start.pos_lnum -
+ currLoc.Location.loc_end.pos_lnum
+ in
+ (* simplified: the former [singleLine && nextSingleLine] and
+  * [singleLine && not nextSingleLine] branches were identical, so
+  * only the current comment's kind matters here *)
+ if singleLine then
+ if diff > 1 then Doc.hardLine else Doc.nil
+ else
+ if diff > 1 then Doc.concat [Doc.hardLine; Doc.hardLine]
+ else if diff == 1 then Doc.hardLine
+ else
+ Doc.space
+ | None -> Doc.nil)
+ ]
+ in
+ Doc.concat [
+ content;
+ separator;
+ ]
+
+ (* Prints all comments registered "inside" [loc] (e.g. inside empty
+  * delimiters), removing them from the table so they print only once. *)
+ let printCommentsInside cmtTbl loc =
+ let rec loop acc comments =
+ match comments with
+ | [] -> Doc.nil
+ | [comment] ->
+ (* last comment: flush the accumulated docs *)
+ let cmtDoc = printLeadingComment comment in
+ let doc = Doc.group (
+ Doc.concat [
+ Doc.concat (List.rev (cmtDoc::acc));
+ ]
+ )
+ in
+ doc
+ | comment::((nextComment::_comments) as rest) ->
+ let cmtDoc = printLeadingComment ~nextComment comment in
+ loop (cmtDoc::acc) rest
+ in
+ match Hashtbl.find cmtTbl.CommentTable.inside loc with
+ | exception Not_found -> Doc.nil
+ | comments ->
+ Hashtbl.remove cmtTbl.inside loc;
+ Doc.group (
+ loop [] comments
+ )
+
+ (* Prints [node] preceded by the leading comments registered for [loc] in
+  * [tbl]; the separator between the last comment and the node depends on
+  * the line distance between them. *)
+ let printLeadingComments node tbl loc =
+ let rec loop acc comments =
+ match comments with
+ | [] -> node
+ | [comment] ->
+ let cmtDoc = printLeadingComment comment in
+ (* line distance between the last comment and the node *)
+ let diff =
+ loc.Location.loc_start.pos_lnum -
+ (Comment.loc comment).Location.loc_end.pos_lnum
+ in
+ let separator =
+ if Comment.isSingleLineComment comment then
+ if diff > 1 then Doc.hardLine else Doc.nil
+ else if diff == 0 then
+ Doc.space
+ else if diff > 1 then Doc.concat [Doc.hardLine; Doc.hardLine]
+ else
+ Doc.hardLine
+ in
+ let doc = Doc.group (
+ Doc.concat [
+ Doc.concat (List.rev (cmtDoc::acc));
+ separator;
+ node
+ ]
+ )
+ in
+ doc
+ | comment::((nextComment::_comments) as rest) ->
+ let cmtDoc = printLeadingComment ~nextComment comment in
+ loop (cmtDoc::acc) rest
+ in
+ match Hashtbl.find tbl loc with
+ | exception Not_found -> node
+ | comments ->
+ (* Remove comments from tbl: Some ast nodes have the same location.
+ * We only want to print comments once *)
+ Hashtbl.remove tbl loc;
+ loop [] comments
+
+ (* Prints [node] followed by the trailing comments registered for [loc]
+  * in [tbl], removing them from the table so they print only once. *)
+ let printTrailingComments node tbl loc =
+ let rec loop acc comments =
+ match comments with
+ | [] -> Doc.concat (List.rev acc)
+ | comment::comments ->
+ let cmtDoc = printTrailingComment loc comment in
+ loop (cmtDoc::acc) comments
+ in
+ match Hashtbl.find tbl loc with
+ | exception Not_found -> node
+ | [] -> node
+ | (_first::_) as comments ->
+ (* Remove comments from tbl: Some ast nodes have the same location.
+ * We only want to print comments once *)
+ Hashtbl.remove tbl loc;
+ let cmtsDoc = loop [] comments in
+ Doc.concat [
+ node;
+ cmtsDoc;
+ ]
+
+ (* Surrounds [doc] with the leading and trailing comments registered for
+  * [loc] in the comment table. *)
+ let printComments doc (tbl: CommentTable.t) loc =
+ let withLeading = printLeadingComments doc tbl.leading loc in
+ printTrailingComments withLeading tbl.trailing loc
+
+ (* Prints [nodes] separated by hard lines, preserving at most one blank
+  * line between consecutive items (measured from a node's first leading
+  * comment, if any). The group breaks when the list spans several lines. *)
+ let printList ~getLoc ~nodes ~print ?(forceBreak=false) t =
+ let rec loop (prevLoc: Location.t) acc nodes =
+ match nodes with
+ | [] -> (prevLoc, Doc.concat (List.rev acc))
+ | node::nodes ->
+ let loc = getLoc node in
+ (* a node visually starts at its first leading comment *)
+ let startPos = match getFirstLeadingComment t loc with
+ | None -> loc.loc_start
+ | Some comment -> (Comment.loc comment).loc_start
+ in
+ let sep = if startPos.pos_lnum - prevLoc.loc_end.pos_lnum > 1 then
+ Doc.concat [Doc.hardLine; Doc.hardLine]
+ else
+ Doc.hardLine
+ in
+ let doc = printComments (print node t) t loc in
+ loop loc (doc::sep::acc) nodes
+ in
+ match nodes with
+ | [] -> Doc.nil
+ | node::nodes ->
+ let firstLoc = getLoc node in
+ let doc = printComments (print node t) t firstLoc in
+ let (lastLoc, docs) = loop firstLoc [doc] nodes in
+ let forceBreak =
+ forceBreak ||
+ firstLoc.loc_start.pos_lnum != lastLoc.loc_end.pos_lnum
+ in
+ Doc.breakableGroup ~forceBreak docs
+
+ (* Like [printList] but passes the node's index to [print]. *)
+ let printListi ~getLoc ~nodes ~print ?(forceBreak=false) t =
+ let rec loop i (prevLoc: Location.t) acc nodes =
+ match nodes with
+ | [] -> (prevLoc, Doc.concat (List.rev acc))
+ | node::nodes ->
+ let loc = getLoc node in
+ let startPos = match getFirstLeadingComment t loc with
+ | None -> loc.loc_start
+ | Some comment -> (Comment.loc comment).loc_start
+ in
+ (* NOTE(review): default separator is [Doc.line] here, while
+  * [printList] uses [Doc.hardLine] — presumably so indexed lists may
+  * collapse onto one line; confirm this asymmetry is intended *)
+ let sep = if startPos.pos_lnum - prevLoc.loc_end.pos_lnum > 1 then
+ Doc.concat [Doc.hardLine; Doc.hardLine]
+ else
+ Doc.line
+ in
+ let doc = printComments (print node t i) t loc in
+ loop (i + 1) loc (doc::sep::acc) nodes
+ in
+ match nodes with
+ | [] -> Doc.nil
+ | node::nodes ->
+ let firstLoc = getLoc node in
+ let doc = printComments (print node t 0) t firstLoc in
+ let (lastLoc, docs) = loop 1 firstLoc [doc] nodes in
+ let forceBreak =
+ forceBreak ||
+ firstLoc.loc_start.pos_lnum != lastLoc.loc_end.pos_lnum
+ in
+ Doc.breakableGroup ~forceBreak docs
+
+ (* Accumulates the components of a [Longident.t] into [accu], outermost
+  * last; [Lapply] renders as [fn(arg)]. *)
+ let rec printLongidentAux accu = function
+ | Longident.Lident s -> Doc.text s :: accu
+ | Ldot (lid, s) -> printLongidentAux (Doc.text s :: accu) lid
+ | Lapply (lid1, lid2) ->
+ let fnDoc = Doc.join ~sep:Doc.dot (printLongidentAux [] lid1) in
+ let argDoc = Doc.join ~sep:Doc.dot (printLongidentAux [] lid2) in
+ Doc.concat [fnDoc; Doc.lparen; argDoc; Doc.rparen] :: accu
+
+ (* Prints a longident with [.] separators; a bare [Lident] needs no join. *)
+ let printLongident = function
+ | Longident.Lident txt -> Doc.text txt
+ | longident -> Doc.join ~sep:Doc.dot (printLongidentAux [] longident)
+
+ (* Whether an identifier can be printed verbatim or needs the exotic
+  * \"..." escape. *)
+ type identifierStyle =
+ | ExoticIdent
+ | NormalIdent
+
+ (* Classifies [txt] as a normal or exotic identifier. Keywords (except
+  * "list") are exotic; so is any character outside [A-Za-z0-9_'], and a
+  * first character that is ['] or — unless [allowUident] — uppercase. *)
+ let classifyIdentContent ?(allowUident=false) txt =
+ if Token.isKeywordTxt txt && txt <> "list" then ExoticIdent
+ else
+ let len = String.length txt in
+ let rec scan i =
+ if i == len then NormalIdent
+ else
+ let c = String.unsafe_get txt i in
+ let okFirst =
+ (allowUident && (c >= 'A' && c <= 'Z'))
+ || (c >= 'a' && c <= 'z') || c = '_' || (c >= '0' && c <= '9')
+ in
+ let okRest =
+ (c >= 'a' && c <= 'z')
+ || (c >= 'A' && c <= 'Z')
+ || c = '\''
+ || c = '_'
+ || (c >= '0' && c <= '9')
+ in
+ if (i == 0 && not okFirst) || not okRest then ExoticIdent
+ else scan (i + 1)
+ in
+ scan 0
+
+ (* Prints an identifier, escaping it as \"ident\" when it is exotic. *)
+ let printIdentLike ?allowUident txt =
+ match classifyIdentContent ?allowUident txt with
+ | NormalIdent -> Doc.text txt
+ | ExoticIdent ->
+ Doc.concat [Doc.text "\\\""; Doc.text txt; Doc.text "\""]
+
+ (* Prints a value path like [Module.Sub.x]; only the final component may
+  * need exotic escaping. [Lapply] is not supported by this syntax. *)
+ let printLident l = match l with
+ | Longident.Lident txt -> printIdentLike txt
+ | Longident.Ldot (path, txt) ->
+ let prefix =
+ Doc.join ~sep:Doc.dot (List.map Doc.text (Longident.flatten path))
+ in
+ Doc.concat [prefix; Doc.dot; printIdentLike txt]
+ | _ -> Doc.text "printLident: Longident.Lapply is not supported"
+
+ (* Print a located Longident and attach any comments recorded at its loc. *)
+ let printLongidentLocation l cmtTbl =
+ let doc = printLongident l.Location.txt in
+ printComments doc cmtTbl l.loc
+
+ (* Module.SubModule.x — lowercase-terminated path, with comment attachment. *)
+ let printLidentPath path cmtTbl =
+ let doc = printLident path.Location.txt in
+ printComments doc cmtTbl path.loc
+
+ (* Module.SubModule.x or Module.SubModule.X
+  * NOTE(review): currently identical to printLidentPath; kept separate,
+  * presumably for intent/documentation — confirm before merging the two. *)
+ let printIdentPath path cmtTbl =
+ let doc = printLident path.Location.txt in
+ printComments doc cmtTbl path.loc
+
+ (* Print a located string as an identifier (escaping if exotic),
+  * attaching comments at its location. *)
+ let printStringLoc sloc cmtTbl =
+ let doc = printIdentLike sloc.Location.txt in
+ printComments doc cmtTbl sloc.loc
+
+ (* Print a Parsetree constant.
+  * Integers keep their suffix char; strings with a delimiter prefix are
+  * rendered as template literals `...` (prefix printed when non-empty). *)
+ let printConstant c = match c with
+ | Parsetree.Pconst_integer (s, suffix) ->
+ begin match suffix with
+ | Some c -> Doc.text (s ^ (Char.escaped c))
+ | None -> Doc.text s
+ end
+ | Pconst_string (txt, None) ->
+ Doc.text ("\"" ^ txt ^ "\"")
+ | Pconst_string (txt, Some prefix) ->
+ Doc.concat [
+ if prefix = "" then Doc.nil else Doc.text prefix;
+ Doc.text ("`" ^ txt ^ "`")
+ ]
+ | Pconst_float (s, _) -> Doc.text s
+ | Pconst_char c -> Doc.text ("'" ^ (Char.escaped c) ^ "'")
+
+ (* Print a whole structure (implementation file). An empty structure still
+  * prints the comments attached inside the (empty) file. *)
+ let rec printStructure (s : Parsetree.structure) t =
+ match s with
+ | [] -> printCommentsInside t Location.none
+ | structure ->
+ printList
+ ~getLoc:(fun s -> s.Parsetree.pstr_loc)
+ ~nodes:structure
+ ~print:printStructureItem
+ t
+
+ (* Dispatch on the structure-item kind and delegate to the matching
+  * specialised printer. Class items are not supported and print nothing. *)
+ and printStructureItem (si: Parsetree.structure_item) cmtTbl =
+ match si.pstr_desc with
+ | Pstr_value(rec_flag, valueBindings) ->
+ let recFlag = match rec_flag with
+ | Asttypes.Nonrecursive -> Doc.nil
+ | Asttypes.Recursive -> Doc.text "rec "
+ in
+ printValueBindings ~recFlag valueBindings cmtTbl
+ | Pstr_type(recFlag, typeDeclarations) ->
+ let recFlag = match recFlag with
+ | Asttypes.Nonrecursive -> Doc.nil
+ | Asttypes.Recursive -> Doc.text "rec "
+ in
+ printTypeDeclarations ~recFlag typeDeclarations cmtTbl
+ | Pstr_primitive valueDescription ->
+ printValueDescription valueDescription cmtTbl
+ | Pstr_eval (expr, attrs) ->
+ (* top-level expression: may need parens or braces depending on shape *)
+ let exprDoc =
+ let doc = printExpressionWithComments expr cmtTbl in
+ match Parens.structureExpr expr with
+ | Parens.Parenthesized -> addParens doc
+ | Braced braces -> printBraces doc expr braces
+ | Nothing -> doc
+ in
+ Doc.concat [
+ printAttributes attrs;
+ exprDoc;
+ ]
+ | Pstr_attribute attr -> Doc.concat [
+ Doc.text "@";
+ printAttributeWithComments attr cmtTbl
+ ]
+ | Pstr_extension (extension, attrs) -> Doc.concat [
+ printAttributes attrs;
+ Doc.concat [printExtensionWithComments ~atModuleLvl:true extension cmtTbl];
+ ]
+ | Pstr_include includeDeclaration ->
+ printIncludeDeclaration includeDeclaration cmtTbl
+ | Pstr_open openDescription ->
+ printOpenDescription openDescription cmtTbl
+ | Pstr_modtype modTypeDecl ->
+ printModuleTypeDeclaration modTypeDecl cmtTbl
+ | Pstr_module moduleBinding ->
+ printModuleBinding ~isRec:false moduleBinding cmtTbl 0
+ | Pstr_recmodule moduleBindings ->
+ printListi
+ ~getLoc:(fun mb -> mb.Parsetree.pmb_loc)
+ ~nodes:moduleBindings
+ ~print:(printModuleBinding ~isRec:true)
+ cmtTbl
+ | Pstr_exception extensionConstructor ->
+ printExceptionDef extensionConstructor cmtTbl
+ | Pstr_typext typeExtension ->
+ printTypeExtension typeExtension cmtTbl
+ | Pstr_class _ | Pstr_class_type _ -> Doc.nil
+
+ (* Print an open-type extension: `type t += | A | B`.
+  * The constructor list is force-broken when the constructors don't all fit
+  * on the line of the type path, or span multiple lines themselves. *)
+ and printTypeExtension (te : Parsetree.type_extension) cmtTbl =
+ let prefix = Doc.text "type " in
+ let name = printLidentPath te.ptyext_path cmtTbl in
+ let typeParams = printTypeParams te.ptyext_params cmtTbl in
+ let extensionConstructors =
+ let ecs = te.ptyext_constructors in
+ let forceBreak =
+ match (ecs, List.rev ecs) with
+ | (first::_, last::_) ->
+ first.pext_loc.loc_start.pos_lnum > te.ptyext_path.loc.loc_end.pos_lnum ||
+ first.pext_loc.loc_start.pos_lnum < last.pext_loc.loc_end.pos_lnum
+ | _ -> false
+ in
+ let privateFlag = match te.ptyext_private with
+ | Asttypes.Private -> Doc.concat [
+ Doc.text "private";
+ Doc.line;
+ ]
+ | Public -> Doc.nil
+ in
+ let rows =
+ printListi
+ ~getLoc:(fun n -> n.Parsetree.pext_loc)
+ ~print:printExtensionConstructor
+ ~nodes:ecs
+ ~forceBreak
+ cmtTbl
+ in
+ Doc.breakableGroup ~forceBreak (
+ Doc.indent (
+ Doc.concat [
+ Doc.line;
+ privateFlag;
+ rows;
+ ]
+ )
+ )
+ in
+ Doc.group (
+ Doc.concat [
+ printAttributes ~loc: te.ptyext_path.loc te.ptyext_attributes;
+ prefix;
+ name;
+ typeParams;
+ Doc.text " +=";
+ extensionConstructors;
+ ]
+ )
+
+ (* Print one module binding; [i] = 0 gets the "module [rec ]" prefix,
+  * later bindings in a rec group get "and ". A Pmod_constraint is split
+  * into the module expression plus a ": ModType" annotation. *)
+ and printModuleBinding ~isRec moduleBinding cmtTbl i =
+ let prefix = if i = 0 then
+ Doc.concat [
+ Doc.text "module ";
+ if isRec then Doc.text "rec " else Doc.nil;
+ ]
+ else
+ Doc.text "and "
+ in
+ let (modExprDoc, modConstraintDoc) =
+ match moduleBinding.pmb_expr with
+ | {pmod_desc = Pmod_constraint (modExpr, modType)} ->
+ (
+ printModExpr modExpr cmtTbl,
+ Doc.concat [
+ Doc.text ": ";
+ printModType modType cmtTbl
+ ]
+ )
+ | modExpr ->
+ (printModExpr modExpr cmtTbl, Doc.nil)
+ in
+ let modName =
+ let doc = Doc.text moduleBinding.pmb_name.Location.txt in
+ printComments doc cmtTbl moduleBinding.pmb_name.loc
+ in
+ let doc = Doc.concat [
+ printAttributes ~loc:moduleBinding.pmb_name.loc moduleBinding.pmb_attributes;
+ prefix;
+ modName;
+ modConstraintDoc;
+ Doc.text " = ";
+ modExprDoc;
+ ] in
+ printComments doc cmtTbl moduleBinding.pmb_loc
+
+ (* Print `module type X` or `module type X = MT` (abstract when pmtd_type
+  * is None). *)
+ and printModuleTypeDeclaration (modTypeDecl : Parsetree.module_type_declaration) cmtTbl =
+ let modName =
+ let doc = Doc.text modTypeDecl.pmtd_name.txt in
+ printComments doc cmtTbl modTypeDecl.pmtd_name.loc
+ in
+ Doc.concat [
+ printAttributes modTypeDecl.pmtd_attributes;
+ Doc.text "module type ";
+ modName;
+ (match modTypeDecl.pmtd_type with
+ | None -> Doc.nil
+ | Some modType -> Doc.concat [
+ Doc.text " = ";
+ printModType modType cmtTbl;
+ ]);
+ ]
+
+ (* Print a module type. Attributes are printed inside the per-case docs for
+  * ident/signature/functor cases (so they interleave correctly with the
+  * printed form) and once at the end for the remaining cases — see
+  * [attrsAlreadyPrinted] below. *)
+ and printModType modType cmtTbl =
+ let modTypeDoc = match modType.pmty_desc with
+ | Parsetree.Pmty_ident longident ->
+ Doc.concat [
+ printAttributes ~loc:longident.loc modType.pmty_attributes;
+ printLongidentLocation longident cmtTbl
+ ]
+ | Pmty_signature signature ->
+ (* signatures always break across lines: { <items> } *)
+ let signatureDoc = Doc.breakableGroup ~forceBreak:true (
+ Doc.concat [
+ Doc.lbrace;
+ Doc.indent (
+ Doc.concat [
+ Doc.line;
+ printSignature signature cmtTbl;
+ ]
+ );
+ Doc.line;
+ Doc.rbrace;
+ ]
+ ) in
+ Doc.concat [
+ printAttributes modType.pmty_attributes;
+ signatureDoc
+ ]
+ | Pmty_functor _ ->
+ let (parameters, returnType) = ParsetreeViewer.functorType modType in
+ let parametersDoc = match parameters with
+ | [] -> Doc.nil
+ (* single anonymous parameter: print the module type without parens *)
+ | [attrs, {Location.txt = "_"; loc}, Some modType] ->
+ let cmtLoc =
+ {loc with loc_end = modType.Parsetree.pmty_loc.loc_end}
+ in
+ let attrs = match attrs with
+ | [] -> Doc.nil
+ | attrs -> Doc.concat [
+ Doc.join ~sep:Doc.line (List.map printAttribute attrs);
+ Doc.line;
+ ] in
+ let doc = Doc.concat [
+ attrs;
+ printModType modType cmtTbl
+ ] in
+ printComments doc cmtTbl cmtLoc
+ (* general case: (P1: MT1, P2: MT2, ...) *)
+ | params ->
+ Doc.group (
+ Doc.concat [
+ Doc.lparen;
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (
+ List.map (fun (attrs, lbl, modType) ->
+ let cmtLoc = match modType with
+ | None -> lbl.Asttypes.loc
+ | Some modType ->
+ {lbl.Asttypes.loc with loc_end = modType.Parsetree.pmty_loc.loc_end}
+ in
+ let attrs = match attrs with
+ | [] -> Doc.nil
+ | attrs -> Doc.concat [
+ Doc.join ~sep:Doc.line (List.map printAttribute attrs);
+ Doc.line;
+ ] in
+ (* "_" parameters print neither the label nor the ": " *)
+ let lblDoc = if lbl.Location.txt = "_" then Doc.nil
+ else
+ let doc = Doc.text lbl.txt in
+ printComments doc cmtTbl lbl.loc
+ in
+ let doc = Doc.concat [
+ attrs;
+ lblDoc;
+ (match modType with
+ | None -> Doc.nil
+ | Some modType -> Doc.concat [
+ if lbl.txt = "_" then Doc.nil else Doc.text ": ";
+ printModType modType cmtTbl;
+ ]);
+ ] in
+ printComments doc cmtTbl cmtLoc
+ ) params
+ );
+ ]
+ );
+ Doc.trailingComma;
+ Doc.softLine;
+ Doc.rparen;
+ ]
+ )
+ in
+ let returnDoc =
+ let doc = printModType returnType cmtTbl in
+ if Parens.modTypeFunctorReturn returnType then addParens doc else doc
+ in
+ Doc.group (
+ Doc.concat [
+ parametersDoc;
+ Doc.group (
+ Doc.concat [
+ Doc.text " =>";
+ Doc.line;
+ returnDoc;
+ ]
+ )
+ ]
+ )
+ | Pmty_typeof modExpr -> Doc.concat [
+ Doc.text "module type of ";
+ printModExpr modExpr cmtTbl
+ ]
+ | Pmty_extension extension -> printExtensionWithComments ~atModuleLvl:false extension cmtTbl
+ | Pmty_alias longident -> Doc.concat [
+ Doc.text "module ";
+ printLongidentLocation longident cmtTbl;
+ ]
+ | Pmty_with (modType, withConstraints) ->
+ let operand =
+ let doc = printModType modType cmtTbl in
+ if Parens.modTypeWithOperand modType then addParens doc else doc
+ in
+ Doc.group (
+ Doc.concat [
+ operand;
+ Doc.indent (
+ Doc.concat [
+ Doc.line;
+ printWithConstraints withConstraints cmtTbl;
+ ]
+ )
+ ]
+ )
+ in
+ (* these cases printed their own attributes above; avoid doubling them *)
+ let attrsAlreadyPrinted = match modType.pmty_desc with
+ | Pmty_functor _ | Pmty_signature _ | Pmty_ident _ -> true
+ | _ -> false
+ in
+ let doc =Doc.concat [
+ if attrsAlreadyPrinted then Doc.nil else printAttributes modType.pmty_attributes;
+ modTypeDoc;
+ ] in
+ printComments doc cmtTbl modType.pmty_loc
+
+ (* Print a `with ... and ...` constraint list: the first row is prefixed
+  * with "with ", subsequent rows with "and ". *)
+ and printWithConstraints withConstraints cmtTbl =
+ let rows = List.mapi (fun i withConstraint ->
+ Doc.group (
+ Doc.concat [
+ if i == 0 then Doc.text "with " else Doc.text "and ";
+ printWithConstraint withConstraint cmtTbl;
+ ]
+ )
+ ) withConstraints
+ in
+ Doc.join ~sep:Doc.line rows
+
+ (* Print one with-constraint. Type constraints reuse the type-declaration
+  * printer with "=" (Pwith_type) or ":=" (Pwith_typesubst); an empty
+  * CommentTable is passed because the declaration's comments were already
+  * consumed by the surrounding context. *)
+ and printWithConstraint (withConstraint : Parsetree.with_constraint) cmtTbl =
+ match withConstraint with
+ (* with type X.t = ... *)
+ | Pwith_type (longident, typeDeclaration) ->
+ Doc.group (printTypeDeclaration
+ ~name:(printLidentPath longident cmtTbl)
+ ~equalSign:"="
+ ~recFlag:Doc.nil
+ 0
+ typeDeclaration
+ CommentTable.empty)
+ (* with module X.Y = Z *)
+ | Pwith_module ({txt = longident1}, {txt = longident2}) ->
+ Doc.concat [
+ Doc.text "module ";
+ printLongident longident1;
+ Doc.text " =";
+ Doc.indent (
+ Doc.concat [
+ Doc.line;
+ printLongident longident2;
+ ]
+ )
+ ]
+ (* with type X.t := ..., same format as [Pwith_type] *)
+ | Pwith_typesubst (longident, typeDeclaration) ->
+ Doc.group(printTypeDeclaration
+ ~name:(printLidentPath longident cmtTbl)
+ ~equalSign:":="
+ ~recFlag:Doc.nil
+ 0
+ typeDeclaration
+ CommentTable.empty)
+ | Pwith_modsubst ({txt = longident1}, {txt = longident2}) ->
+ Doc.concat [
+ Doc.text "module ";
+ printLongident longident1;
+ Doc.text " :=";
+ Doc.indent (
+ Doc.concat [
+ Doc.line;
+ printLongident longident2;
+ ]
+ )
+ ]
+
+ (* Print a whole signature (interface); mirrors printStructure. *)
+ and printSignature signature cmtTbl =
+ match signature with
+ | [] -> printCommentsInside cmtTbl Location.none
+ | signature ->
+ printList
+ ~getLoc:(fun s -> s.Parsetree.psig_loc)
+ ~nodes:signature
+ ~print:printSignatureItem
+ cmtTbl
+
+ (* Dispatch on the signature-item kind; mirrors printStructureItem.
+  * Class items are not supported and print nothing. *)
+ and printSignatureItem (si : Parsetree.signature_item) cmtTbl =
+ match si.psig_desc with
+ | Parsetree.Psig_value valueDescription ->
+ printValueDescription valueDescription cmtTbl
+ | Psig_type (recFlag, typeDeclarations) ->
+ let recFlag = match recFlag with
+ | Asttypes.Nonrecursive -> Doc.nil
+ | Asttypes.Recursive -> Doc.text "rec "
+ in
+ printTypeDeclarations ~recFlag typeDeclarations cmtTbl
+ | Psig_typext typeExtension ->
+ printTypeExtension typeExtension cmtTbl
+ | Psig_exception extensionConstructor ->
+ printExceptionDef extensionConstructor cmtTbl
+ | Psig_module moduleDeclaration ->
+ printModuleDeclaration moduleDeclaration cmtTbl
+ | Psig_recmodule moduleDeclarations ->
+ printRecModuleDeclarations moduleDeclarations cmtTbl
+ | Psig_modtype modTypeDecl ->
+ printModuleTypeDeclaration modTypeDecl cmtTbl
+ | Psig_open openDescription ->
+ printOpenDescription openDescription cmtTbl
+ | Psig_include includeDescription ->
+ printIncludeDescription includeDescription cmtTbl
+ | Psig_attribute attr -> Doc.concat [
+ Doc.text "@";
+ printAttributeWithComments attr cmtTbl
+ ]
+ | Psig_extension (extension, attrs) -> Doc.concat [
+ printAttributes attrs;
+ Doc.concat [printExtensionWithComments ~atModuleLvl:true extension cmtTbl];
+ ]
+ | Psig_class _ | Psig_class_type _ -> Doc.nil
+
+ (* Print a `module rec ... and ...` group in a signature. *)
+ and printRecModuleDeclarations moduleDeclarations cmtTbl =
+ printListi
+ ~getLoc:(fun n -> n.Parsetree.pmd_loc)
+ ~nodes:moduleDeclarations
+ ~print:printRecModuleDeclaration
+ cmtTbl
+
+ (* Print one member of a recursive module-declaration group; the first
+  * gets "module rec ", the rest "and ". Aliases print as "= Path", other
+  * module types as ": MT" (with-constraints parenthesized). *)
+ and printRecModuleDeclaration md cmtTbl i =
+ let body = match md.pmd_type.pmty_desc with
+ | Parsetree.Pmty_alias longident ->
+ Doc.concat [Doc.text " = "; printLongidentLocation longident cmtTbl]
+ | _ ->
+ let needsParens = match md.pmd_type.pmty_desc with
+ | Pmty_with _ -> true
+ | _ -> false
+ in
+ let modTypeDoc =
+ let doc = printModType md.pmd_type cmtTbl in
+ if needsParens then addParens doc else doc
+ in
+ Doc.concat [Doc.text ": "; modTypeDoc]
+ in
+ let prefix = if i < 1 then "module rec " else "and " in
+ Doc.concat [
+ printAttributes ~loc:md.pmd_name.loc md.pmd_attributes;
+ Doc.text prefix;
+ printComments (Doc.text md.pmd_name.txt) cmtTbl md.pmd_name.loc;
+ body
+ ]
+
+ (* Print a single (non-recursive) module declaration in a signature:
+  * `module X: MT` or `module X = Path` for aliases. *)
+ and printModuleDeclaration (md: Parsetree.module_declaration) cmtTbl =
+ let body = match md.pmd_type.pmty_desc with
+ | Parsetree.Pmty_alias longident ->
+ Doc.concat [Doc.text " = "; printLongidentLocation longident cmtTbl]
+ | _ -> Doc.concat [Doc.text ": "; printModType md.pmd_type cmtTbl]
+ in
+ Doc.concat [
+ printAttributes ~loc:md.pmd_name.loc md.pmd_attributes;
+ Doc.text "module ";
+ printComments (Doc.text md.pmd_name.txt) cmtTbl md.pmd_name.loc;
+ body
+ ]
+
+ (* Print `open Path` (or `open! Path` for an overriding open).
+  * Parameter renamed p -> cmtTbl for consistency with the sibling printers. *)
+ and printOpenDescription (openDescription : Parsetree.open_description) cmtTbl =
+ Doc.concat [
+ printAttributes openDescription.popen_attributes;
+ Doc.text "open";
+ (match openDescription.popen_override with
+ | Asttypes.Fresh -> Doc.space
+ | Asttypes.Override -> Doc.text "! ");
+ printLongidentLocation openDescription.popen_lid cmtTbl
+ ]
+
+ (* Print `include MT` in a signature. *)
+ and printIncludeDescription (includeDescription: Parsetree.include_description) cmtTbl =
+ Doc.concat [
+ printAttributes includeDescription.pincl_attributes;
+ Doc.text "include ";
+ printModType includeDescription.pincl_mod cmtTbl;
+ ]
+
+ (* Print `include ModExpr` in a structure. An include carrying the
+  * "ns.jsFfi" attribute is really a JS FFI import block and is rendered
+  * with the dedicated import syntax instead. *)
+ and printIncludeDeclaration (includeDeclaration : Parsetree.include_declaration) cmtTbl =
+ let isJsFfiImport = List.exists (fun attr ->
+ match attr with
+ | ({Location.txt = "ns.jsFfi"}, _) -> true
+ | _ -> false
+ ) includeDeclaration.pincl_attributes
+ in
+ if isJsFfiImport then
+ printJsFfiImportDeclaration includeDeclaration cmtTbl
+ else
+ Doc.concat [
+ printAttributes includeDeclaration.pincl_attributes;
+ Doc.text "include ";
+ let includeDoc =
+ printModExpr includeDeclaration.pincl_mod cmtTbl
+ in
+ if Parens.includeModExpr includeDeclaration.pincl_mod then
+ addParens includeDoc
+ else includeDoc;
+ ]
+
+ (* Print one imported binding inside an `import {...}` block:
+  * `primitive [as localName]: type`. FFI bookkeeping attributes
+  * (bs.val / genType.import / bs.scope) are filtered out; the "as" alias is
+  * only printed when the primitive name differs from the local name. *)
+ and printJsFfiImport (valueDescription: Parsetree.value_description) cmtTbl =
+ let attrs = List.filter (fun attr ->
+ match attr with
+ | ({Location.txt = "bs.val" | "genType.import" | "bs.scope" }, _) -> false
+ | _ -> true
+ ) valueDescription.pval_attributes in
+ let (ident, alias) = match valueDescription.pval_prim with
+ | primitive::_ ->
+ if primitive <> valueDescription.pval_name.txt then
+ (
+ printIdentLike primitive,
+ Doc.concat [
+ Doc.text " as ";
+ printIdentLike valueDescription.pval_name.txt;
+ ]
+ )
+ else
+ (printIdentLike primitive, Doc.nil)
+ | _ ->
+ (printIdentLike valueDescription.pval_name.txt, Doc.nil)
+ in
+ Doc.concat [
+ printAttributes ~loc:valueDescription.pval_name.loc attrs;
+ ident;
+ alias;
+ Doc.text ": ";
+ printTypExpr valueDescription.pval_type cmtTbl;
+ ]
+
+ (* Print the trailing ` from ...` part of a JS FFI import:
+  * nothing for globals, a quoted module name, or a dotted scope path. *)
+ and printJsFfiImportScope (scope: ParsetreeViewer.jsImportScope) =
+ match scope with
+ | JsGlobalImport -> Doc.nil
+ | JsModuleImport modName ->
+ Doc.concat [
+ Doc.text " from ";
+ Doc.doubleQuote;
+ Doc.text modName;
+ Doc.doubleQuote;
+ ]
+ | JsScopedImport idents ->
+ Doc.concat [
+ Doc.text " from ";
+ Doc.join ~sep:Doc.dot (List.map Doc.text idents)
+ ]
+
+ (* Print an `import {a: t, b: t} from "module"` block. The imported value
+  * descriptions are recovered from the include's module expression, and
+  * the scope (` from ...`) is taken from the first import. *)
+ and printJsFfiImportDeclaration (includeDeclaration: Parsetree.include_declaration) cmtTbl =
+ let attrs = List.filter (fun attr ->
+ match attr with
+ | ({Location.txt = "ns.jsFfi"}, _) -> false
+ | _ -> true
+ ) includeDeclaration.pincl_attributes
+ in
+ let imports = ParsetreeViewer.extractValueDescriptionFromModExpr includeDeclaration.pincl_mod in
+ let scope = match imports with
+ | vd::_ -> ParsetreeViewer.classifyJsImport vd
+ | [] -> ParsetreeViewer.JsGlobalImport
+ in
+ let scopeDoc = printJsFfiImportScope scope in
+ Doc.group (
+ Doc.concat [
+ printAttributes attrs;
+ Doc.text "import ";
+ Doc.group (
+ Doc.concat [
+ Doc.lbrace;
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (
+ List.map (fun vd -> printJsFfiImport vd cmtTbl) imports
+ )
+ ]
+ );
+ Doc.trailingComma;
+ Doc.softLine;
+ Doc.rbrace;
+ ]
+ );
+ scopeDoc;
+ ]
+ )
+
+ (* Print a `let [rec] ... and ...` binding group. *)
+ and printValueBindings ~recFlag (vbs: Parsetree.value_binding list) cmtTbl =
+ printListi
+ ~getLoc:(fun vb -> vb.Parsetree.pvb_loc)
+ ~nodes:vbs
+ ~print:(printValueBinding ~recFlag)
+ cmtTbl
+
+ (* Print a value description: `let name: type` in signatures, or
+  * `external name: type = "prim" ...` when primitive strings are present. *)
+ and printValueDescription valueDescription cmtTbl =
+ let isExternal =
+ match valueDescription.pval_prim with | [] -> false | _ -> true
+ in
+ Doc.group (
+ Doc.concat [
+ printAttributes valueDescription.pval_attributes;
+ Doc.text (if isExternal then "external " else "let ");
+ printComments
+ (printIdentLike valueDescription.pval_name.txt)
+ cmtTbl
+ valueDescription.pval_name.loc;
+ Doc.text ": ";
+ printTypExpr valueDescription.pval_type cmtTbl;
+ if isExternal then
+ Doc.group (
+ Doc.concat [
+ Doc.text " =";
+ Doc.indent(
+ Doc.concat [
+ Doc.line;
+ Doc.join ~sep:Doc.line (
+ List.map(fun s -> Doc.concat [
+ Doc.text "\"";
+ Doc.text s;
+ Doc.text "\"";
+ ])
+ valueDescription.pval_prim
+ );
+ ]
+ )
+ ]
+ )
+ else Doc.nil
+ ]
+ )
+
+ (* Print a `type [rec] ... and ...` declaration group. *)
+ and printTypeDeclarations ~recFlag typeDeclarations cmtTbl =
+ printListi
+ ~getLoc:(fun n -> n.Parsetree.ptype_loc)
+ ~nodes:typeDeclarations
+ ~print:(printTypeDeclaration2 ~recFlag)
+ cmtTbl
+
+ (*
+ * type_declaration = {
+ * ptype_name: string loc;
+ * ptype_params: (core_type * variance) list;
+ * (* ('a1,...'an) t; None represents _*)
+ * ptype_cstrs: (core_type * core_type * Location.t) list;
+ * (* ... constraint T1=T1' ... constraint Tn=Tn' *)
+ * ptype_kind: type_kind;
+ * ptype_private: private_flag; (* = private ... *)
+ * ptype_manifest: core_type option; (* = T *)
+ * ptype_attributes: attributes; (* ... [@@id1] [@@id2] *)
+ * ptype_loc: Location.t;
+ * }
+ *
+ *
+ * type t (abstract, no manifest)
+ * type t = T0 (abstract, manifest=T0)
+ * type t = C of T | ... (variant, no manifest)
+ * type t = T0 = C of T | ... (variant, manifest=T0)
+ * type t = {l: T; ...} (record, no manifest)
+ * type t = T0 = {l : T; ...} (record, manifest=T0)
+ * type t = .. (open, no manifest)
+ *
+ *
+ * and type_kind =
+ * | Ptype_abstract
+ * | Ptype_variant of constructor_declaration list
+ * (* Invariant: non-empty list *)
+ * | Ptype_record of label_declaration list
+ * (* Invariant: non-empty list *)
+ * | Ptype_open
+ *)
+ (* Print one type declaration with a caller-supplied [name] doc and
+  * [equalSign] ("=" or ":="); used by with-constraints.
+  * NOTE(review): near-duplicate of printTypeDeclaration2 below, which fixes
+  * name/equalSign and reorders args for printListi — consider sharing. *)
+ and printTypeDeclaration ~name ~equalSign ~recFlag i (td: Parsetree.type_declaration) cmtTbl =
+ let (hasGenType, attrs) = ParsetreeViewer.splitGenTypeAttr td.ptype_attributes in
+ let attrs = printAttributes ~loc:td.ptype_loc attrs in
+ let prefix = if i > 0 then
+ Doc.concat [
+ Doc.text "and ";
+ if hasGenType then Doc.text "export " else Doc.nil
+ ]
+ else
+ Doc.concat [
+ Doc.text (if hasGenType then "export type " else "type ");
+ recFlag
+ ]
+ in
+ let typeName = name in
+ let typeParams = printTypeParams td.ptype_params cmtTbl in
+ let manifestAndKind = match td.ptype_kind with
+ | Ptype_abstract ->
+ begin match td.ptype_manifest with
+ | None -> Doc.nil
+ | Some(typ) ->
+ Doc.concat [
+ Doc.concat [Doc.space; Doc.text equalSign; Doc.space];
+ printPrivateFlag td.ptype_private;
+ printTypExpr typ cmtTbl;
+ ]
+ end
+ | Ptype_open -> Doc.concat [
+ Doc.concat [Doc.space; Doc.text equalSign; Doc.space];
+ printPrivateFlag td.ptype_private;
+ Doc.text "..";
+ ]
+ | Ptype_record(lds) ->
+ (* type t = T0 = {…}: manifest printed before the record body *)
+ let manifest = match td.ptype_manifest with
+ | None -> Doc.nil
+ | Some(typ) -> Doc.concat [
+ Doc.concat [Doc.space; Doc.text equalSign; Doc.space];
+ printTypExpr typ cmtTbl;
+ ]
+ in
+ Doc.concat [
+ manifest;
+ Doc.concat [Doc.space; Doc.text equalSign; Doc.space];
+ printPrivateFlag td.ptype_private;
+ printRecordDeclaration lds cmtTbl;
+ ]
+ | Ptype_variant(cds) ->
+ let manifest = match td.ptype_manifest with
+ | None -> Doc.nil
+ | Some(typ) -> Doc.concat [
+ Doc.concat [Doc.space; Doc.text equalSign; Doc.space];
+ printTypExpr typ cmtTbl;
+ ]
+ in
+ Doc.concat [
+ manifest;
+ Doc.concat [Doc.space; Doc.text equalSign];
+ printConstructorDeclarations ~privateFlag:td.ptype_private cds cmtTbl;
+ ]
+ in
+ let constraints = printTypeDefinitionConstraints td.ptype_cstrs in
+ Doc.group (
+ Doc.concat [
+ attrs;
+ prefix;
+ typeName;
+ typeParams;
+ manifestAndKind;
+ constraints;
+ ]
+ )
+
+ (* Variant of printTypeDeclaration used by printTypeDeclarations: the name
+  * comes from the declaration itself, the equal sign is always "=", and the
+  * argument order (td cmtTbl i) matches printListi's ~print signature.
+  * NOTE(review): the body duplicates printTypeDeclaration — keep both in
+  * sync, or factor out the shared part. *)
+ and printTypeDeclaration2 ~recFlag (td: Parsetree.type_declaration) cmtTbl i =
+ let name =
+ let doc = printIdentLike td.Parsetree.ptype_name.txt in
+ printComments doc cmtTbl td.ptype_name.loc
+ in
+ let equalSign = "=" in
+ let (hasGenType, attrs) = ParsetreeViewer.splitGenTypeAttr td.ptype_attributes in
+ let attrs = printAttributes ~loc:td.ptype_loc attrs in
+ let prefix = if i > 0 then
+ Doc.concat [
+ Doc.text "and ";
+ if hasGenType then Doc.text "export " else Doc.nil
+ ]
+ else
+ Doc.concat [
+ Doc.text (if hasGenType then "export type " else "type ");
+ recFlag
+ ]
+ in
+ let typeName = name in
+ let typeParams = printTypeParams td.ptype_params cmtTbl in
+ let manifestAndKind = match td.ptype_kind with
+ | Ptype_abstract ->
+ begin match td.ptype_manifest with
+ | None -> Doc.nil
+ | Some(typ) ->
+ Doc.concat [
+ Doc.concat [Doc.space; Doc.text equalSign; Doc.space];
+ printPrivateFlag td.ptype_private;
+ printTypExpr typ cmtTbl;
+ ]
+ end
+ | Ptype_open -> Doc.concat [
+ Doc.concat [Doc.space; Doc.text equalSign; Doc.space];
+ printPrivateFlag td.ptype_private;
+ Doc.text "..";
+ ]
+ | Ptype_record(lds) ->
+ let manifest = match td.ptype_manifest with
+ | None -> Doc.nil
+ | Some(typ) -> Doc.concat [
+ Doc.concat [Doc.space; Doc.text equalSign; Doc.space];
+ printTypExpr typ cmtTbl;
+ ]
+ in
+ Doc.concat [
+ manifest;
+ Doc.concat [Doc.space; Doc.text equalSign; Doc.space];
+ printPrivateFlag td.ptype_private;
+ printRecordDeclaration lds cmtTbl;
+ ]
+ | Ptype_variant(cds) ->
+ let manifest = match td.ptype_manifest with
+ | None -> Doc.nil
+ | Some(typ) -> Doc.concat [
+ Doc.concat [Doc.space; Doc.text equalSign; Doc.space];
+ printTypExpr typ cmtTbl;
+ ]
+ in
+ Doc.concat [
+ manifest;
+ Doc.concat [Doc.space; Doc.text equalSign];
+ printConstructorDeclarations ~privateFlag:td.ptype_private cds cmtTbl;
+ ]
+ in
+ let constraints = printTypeDefinitionConstraints td.ptype_cstrs in
+ Doc.group (
+ Doc.concat [
+ attrs;
+ prefix;
+ typeName;
+ typeParams;
+ manifestAndKind;
+ constraints;
+ ]
+ )
+
+ (* Print the trailing `constraint T1 = T2` clauses of a type declaration,
+  * indented on their own (possibly broken) lines. *)
+ and printTypeDefinitionConstraints cstrs =
+ match cstrs with
+ | [] -> Doc.nil
+ | cstrs -> Doc.indent (
+ Doc.group (
+ Doc.concat [
+ Doc.line;
+ Doc.group(
+ Doc.join ~sep:Doc.line (
+ List.map printTypeDefinitionConstraint cstrs
+ )
+ )
+ ]
+ )
+ )
+
+ (* Print one `constraint T1 = T2` clause; no comment table is threaded
+  * here, so an empty one is used for the inner type expressions. *)
+ and printTypeDefinitionConstraint ((typ1, typ2, _loc ): Parsetree.core_type * Parsetree.core_type * Location.t) =
+ Doc.concat [
+ Doc.text "constraint ";
+ printTypExpr typ1 CommentTable.empty;
+ Doc.text " = ";
+ printTypExpr typ2 CommentTable.empty;
+ ]
+
+ (* Print "private " for private declarations, nothing otherwise. *)
+ and printPrivateFlag (flag : Asttypes.private_flag) = match flag with
+ | Private -> Doc.text "private "
+ | Public -> Doc.nil
+
+ (* Print a type-parameter list `<'a, 'b>`; empty lists print nothing. *)
+ and printTypeParams typeParams cmtTbl =
+ match typeParams with
+ | [] -> Doc.nil
+ | typeParams ->
+ Doc.group (
+ Doc.concat [
+ Doc.lessThan;
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (
+ List.map (fun typeParam ->
+ let doc = printTypeParam typeParam cmtTbl in
+ printComments doc cmtTbl (fst typeParam).Parsetree.ptyp_loc
+ ) typeParams
+ )
+ ]
+ );
+ Doc.trailingComma;
+ Doc.softLine;
+ Doc.greaterThan;
+ ]
+ )
+
+ (* Print one type parameter with its variance marker (+/-/none). *)
+ and printTypeParam (param : (Parsetree.core_type * Asttypes.variance)) cmtTbl =
+ let (typ, variance) = param in
+ let printedVariance = match variance with
+ | Covariant -> Doc.text "+"
+ | Contravariant -> Doc.text "-"
+ | Invariant -> Doc.nil
+ in
+ Doc.concat [
+ printedVariance;
+ printTypExpr typ cmtTbl
+ ]
+
+ (* Print a record type body `{a: t, b: t}`. The group is force-broken
+  * when the source fields already spanned multiple lines. *)
+ and printRecordDeclaration (lds: Parsetree.label_declaration list) cmtTbl =
+ let forceBreak = match (lds, List.rev lds) with
+ | (first::_, last::_) ->
+ first.pld_loc.loc_start.pos_lnum < last.pld_loc.loc_end.pos_lnum
+ | _ -> false
+ in
+ Doc.breakableGroup ~forceBreak (
+ Doc.concat [
+ Doc.lbrace;
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line])
+ (List.map (fun ld ->
+ let doc = printLabelDeclaration ld cmtTbl in
+ printComments doc cmtTbl ld.Parsetree.pld_loc
+ ) lds)
+ ]
+ );
+ Doc.trailingComma;
+ Doc.softLine;
+ Doc.rbrace;
+ ]
+ )
+
+ (* Print a variant's constructor list, optionally prefixed by "private".
+  * Force-broken when the source constructors spanned multiple lines. *)
+ and printConstructorDeclarations
+ ~privateFlag (cds: Parsetree.constructor_declaration list) cmtTbl
+ =
+ let forceBreak = match (cds, List.rev cds) with
+ | (first::_, last::_) ->
+ first.pcd_loc.loc_start.pos_lnum < last.pcd_loc.loc_end.pos_lnum
+ | _ -> false
+ in
+ let privateFlag = match privateFlag with
+ | Asttypes.Private -> Doc.concat [
+ Doc.text "private";
+ Doc.line;
+ ]
+ | Public -> Doc.nil
+ in
+ let rows =
+ printListi
+ ~getLoc:(fun cd -> cd.Parsetree.pcd_loc)
+ ~nodes:cds
+ ~print:(fun cd cmtTbl i ->
+ let doc = printConstructorDeclaration2 i cd cmtTbl in
+ printComments doc cmtTbl cd.Parsetree.pcd_loc
+ )
+ ~forceBreak
+ cmtTbl
+ in
+ Doc.breakableGroup ~forceBreak (
+ Doc.indent (
+ Doc.concat [
+ Doc.line;
+ privateFlag;
+ rows;
+ ]
+ )
+ )
+
+ (* Print one constructor: `| Name(args): gadtReturn`. The leading bar is
+  * unconditional after the first row, and only appears on the first row
+  * when the group breaks. *)
+ and printConstructorDeclaration2 i (cd : Parsetree.constructor_declaration) cmtTbl =
+ let attrs = printAttributes cd.pcd_attributes in
+ let bar = if i > 0 then Doc.text "| "
+ else Doc.ifBreaks (Doc.text "| ") Doc.nil
+ in
+ let constrName =
+ let doc = Doc.text cd.pcd_name.txt in
+ printComments doc cmtTbl cd.pcd_name.loc
+ in
+ let constrArgs = printConstructorArguments ~indent:true cd.pcd_args cmtTbl in
+ let gadt = match cd.pcd_res with
+ | None -> Doc.nil
+ | Some(typ) -> Doc.indent (
+ Doc.concat [
+ Doc.text ": ";
+ printTypExpr typ cmtTbl;
+ ]
+ )
+ in
+ Doc.concat [
+ bar;
+ Doc.group (
+ Doc.concat [
+ attrs; (* TODO: fix parsing of attributes, so when can print them above the bar? *)
+ constrName;
+ constrArgs;
+ gadt;
+ ]
+ )
+ ]
+
+ (* Print constructor arguments: a tuple `(t1, t2)` or an inline record
+  * `({a: t})`. [indent] wraps the args in an extra indent level (used when
+  * printing inside a constructor row). *)
+ and printConstructorArguments ~indent (cdArgs : Parsetree.constructor_arguments) cmtTbl =
+ match cdArgs with
+ | Pcstr_tuple [] -> Doc.nil
+ | Pcstr_tuple types ->
+ let args = Doc.concat [
+ Doc.lparen;
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (
+ List.map (fun typexpr ->
+ printTypExpr typexpr cmtTbl
+ ) types
+ )
+ ]
+ );
+ Doc.trailingComma;
+ Doc.softLine;
+ Doc.rparen;
+ ] in
+ Doc.group (
+ if indent then Doc.indent args else args
+ )
+ | Pcstr_record lds ->
+ let args = Doc.concat [
+ Doc.lparen;
+ (* manually inline the printRecordDeclaration, gives better layout *)
+ Doc.lbrace;
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line])
+ (List.map (fun ld ->
+ let doc = printLabelDeclaration ld cmtTbl in
+ printComments doc cmtTbl ld.Parsetree.pld_loc
+ ) lds)
+ ]
+ );
+ Doc.trailingComma;
+ Doc.softLine;
+ Doc.rbrace;
+ Doc.rparen;
+ ] in
+ if indent then Doc.indent args else args
+
+ (* Print one record field: `[mutable ]name: type`, with attributes and
+  * comment attachment on the field name. *)
+ and printLabelDeclaration (ld : Parsetree.label_declaration) cmtTbl =
+ let attrs = printAttributes ~loc:ld.pld_name.loc ld.pld_attributes in
+ let mutableFlag = match ld.pld_mutable with
+ | Mutable -> Doc.text "mutable "
+ | Immutable -> Doc.nil
+ in
+ let name =
+ let doc = printIdentLike ld.pld_name.txt in
+ printComments doc cmtTbl ld.pld_name.loc
+ in
+ Doc.group (
+ Doc.concat [
+ attrs;
+ mutableFlag;
+ name;
+ Doc.text ": ";
+ printTypExpr ld.pld_type cmtTbl;
+ ]
+ )
+
+ and printTypExpr (typExpr : Parsetree.core_type) cmtTbl =
+ let renderedType = match typExpr.ptyp_desc with
+ | Ptyp_any -> Doc.text "_"
+ | Ptyp_var var -> Doc.concat [
+ Doc.text "'";
+ printIdentLike var;
+ ]
+ | Ptyp_extension(extension) ->
+ printExtensionWithComments ~atModuleLvl:false extension cmtTbl
+ | Ptyp_alias(typ, alias) ->
+ let typ =
+ (* Technically type t = (string, float) => unit as 'x, doesn't require
+ * parens around the arrow expression. This is very confusing though.
+ * Is the "as" part of "unit" or "(string, float) => unit". By printing
+ * parens we guide the user towards its meaning.*)
+ let needsParens = match typ.ptyp_desc with
+ | Ptyp_arrow _ -> true
+ | _ -> false
+ in
+ let doc = printTypExpr typ cmtTbl in
+ if needsParens then
+ Doc.concat [Doc.lparen; doc; Doc.rparen]
+ else
+ doc
+ in
+ Doc.concat [typ; Doc.text " as "; Doc.concat [Doc.text "'"; printIdentLike alias]]
+ | Ptyp_constr({txt = Longident.Ldot(Longident.Lident "Js", "t")}, [{ptyp_desc = Ptyp_object (_fields, _openFlag)} as typ]) ->
+ let bsObject = printTypExpr typ cmtTbl in
+ begin match typExpr.ptyp_attributes with
+ | [] -> bsObject
+ | attrs ->
+ Doc.concat [
+ Doc.group (
+ Doc.join ~sep:Doc.line (List.map printAttribute attrs)
+ );
+ Doc.space;
+ printTypExpr typ cmtTbl;
+ ]
+ end
+ | Ptyp_constr(longidentLoc, [{ ptyp_desc = Parsetree.Ptyp_tuple tuple }]) ->
+ let constrName = printLidentPath longidentLoc cmtTbl in
+ Doc.group(
+ Doc.concat([
+ constrName;
+ Doc.lessThan;
+ printTupleType ~inline:true tuple cmtTbl;
+ Doc.greaterThan;
+ ])
+ )
+ | Ptyp_constr(longidentLoc, constrArgs) ->
+ let constrName = printLidentPath longidentLoc cmtTbl in
+ begin match constrArgs with
+ | [] -> constrName
+ | [{
+ Parsetree.ptyp_desc =
+ Ptyp_constr({txt = Longident.Ldot(Longident.Lident "Js", "t")},
+ [{ptyp_desc = Ptyp_object (fields, openFlag)}])
+ }] ->
+ Doc.concat([
+ constrName;
+ Doc.lessThan;
+ printBsObjectSugar ~inline:true fields openFlag cmtTbl;
+ Doc.greaterThan;
+ ])
+ | _args -> Doc.group(
+ Doc.concat([
+ constrName;
+ Doc.lessThan;
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (
+ List.map
+ (fun typexpr -> printTypExpr typexpr cmtTbl)
+ constrArgs
+ )
+ ]
+ );
+ Doc.trailingComma;
+ Doc.softLine;
+ Doc.greaterThan;
+ ])
+ )
+ end
+ | Ptyp_arrow _ ->
+ let (attrsBefore, args, returnType) = ParsetreeViewer.arrowType typExpr in
+ let returnTypeNeedsParens = match returnType.ptyp_desc with
+ | Ptyp_alias _ -> true
+ | _ -> false
+ in
+ let returnDoc =
+ let doc = printTypExpr returnType cmtTbl in
+ if returnTypeNeedsParens then
+ Doc.concat [Doc.lparen; doc; Doc.rparen]
+ else doc
+ in
+ let (isUncurried, attrs) =
+ ParsetreeViewer.processUncurriedAttribute attrsBefore
+ in
+ begin match args with
+ | [] -> Doc.nil
+ | [([], Nolabel, n)] when not isUncurried ->
+ let hasAttrsBefore = not (attrs = []) in
+ let attrs = if hasAttrsBefore then
+ Doc.concat [
+ Doc.join ~sep:Doc.line (List.map printAttribute attrsBefore);
+ Doc.space;
+ ]
+ else Doc.nil
+ in
+ let typDoc =
+ let doc = printTypExpr n cmtTbl in
+ match n.ptyp_desc with
+ | Ptyp_arrow _ | Ptyp_tuple _ -> addParens doc
+ | _ -> doc
+ in
+ Doc.group (
+ Doc.concat [
+ Doc.group attrs;
+ Doc.group (
+ if hasAttrsBefore then
+ Doc.concat [
+ Doc.lparen;
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ typDoc;
+ Doc.text " => ";
+ returnDoc;
+ ]
+ );
+ Doc.softLine;
+ Doc.rparen
+ ]
+ else
+ Doc.concat [
+ typDoc;
+ Doc.text " => ";
+ returnDoc;
+ ]
+ )
+ ]
+ )
+ | args ->
+ let attrs = match attrs with
+ | [] -> Doc.nil
+ | attrs -> Doc.concat [
+ Doc.join ~sep:Doc.line (List.map printAttribute attrs);
+ Doc.space;
+ ]
+ in
+ let renderedArgs = Doc.concat [
+ attrs;
+ Doc.text "(";
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ if isUncurried then Doc.concat [Doc.dot; Doc.space] else Doc.nil;
+ Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (
+ List.map (fun tp -> printTypeParameter tp cmtTbl) args
+ )
+ ]
+ );
+ Doc.trailingComma;
+ Doc.softLine;
+ Doc.text ")";
+ ] in
+ Doc.group (
+ Doc.concat [
+ renderedArgs;
+ Doc.text " => ";
+ returnDoc;
+ ]
+ )
+ end
+ | Ptyp_tuple types -> printTupleType ~inline:false types cmtTbl
+ | Ptyp_object (fields, openFlag) ->
+ printBsObjectSugar ~inline:false fields openFlag cmtTbl
+ | Ptyp_poly([], typ) ->
+ printTypExpr typ cmtTbl
+ | Ptyp_poly(stringLocs, typ) ->
+ Doc.concat [
+ Doc.join ~sep:Doc.space (List.map (fun {Location.txt; loc} ->
+ let doc = Doc.concat [Doc.text "'"; Doc.text txt] in
+ printComments doc cmtTbl loc
+ ) stringLocs);
+ Doc.dot;
+ Doc.space;
+ printTypExpr typ cmtTbl
+ ]
+ | Ptyp_package packageType ->
+ printPackageType ~printModuleKeywordAndParens:true packageType cmtTbl
+ | Ptyp_class _ ->
+ Doc.text "classes are not supported in types"
+ | Ptyp_variant (rowFields, closedFlag, labelsOpt) ->
+ let printRowField = function
+ | Parsetree.Rtag ({txt}, attrs, true, []) ->
+ Doc.concat [
+ printAttributes attrs;
+ Doc.concat [Doc.text "#"; printIdentLike ~allowUident:true txt]
+ ]
+ | Rtag ({txt}, attrs, truth, types) ->
+ let doType t = match t.Parsetree.ptyp_desc with
+ | Ptyp_tuple _ -> printTypExpr t cmtTbl
+ | _ -> Doc.concat [ Doc.lparen; printTypExpr t cmtTbl; Doc.rparen ]
+ in
+ let printedTypes = List.map doType types in
+ let cases = Doc.join ~sep:(Doc.concat [Doc.line; Doc.text "& "]) printedTypes in
+ let cases = if truth then Doc.concat [Doc.line; Doc.text "& "; cases] else cases in
+ Doc.group (Doc.concat [
+ printAttributes attrs;
+ Doc.concat [Doc.text "#"; printIdentLike ~allowUident:true txt];
+ cases
+ ])
+ | Rinherit coreType ->
+ printTypExpr coreType cmtTbl
+ in
+ let docs = List.map printRowField rowFields in
+ let cases = Doc.join ~sep:(Doc.concat [Doc.line; Doc.text "| "]) docs in
+ let cases = if docs = [] then cases else Doc.concat [Doc.text "| "; cases] in
+ let openingSymbol =
+ if closedFlag = Open
+ then Doc.greaterThan
+ else if labelsOpt = None
+ then Doc.nil
+ else Doc.lessThan in
+ let hasLabels = labelsOpt <> None && labelsOpt <> Some [] in
+ let labels = match labelsOpt with
+ | None
+ | Some([]) ->
+ Doc.nil
+ | Some(labels) ->
+ Doc.concat (List.map (fun label -> Doc.concat [Doc.line; Doc.text "#" ; printIdentLike ~allowUident:true label] ) labels)
+ in
+ let closingSymbol = if hasLabels then Doc.text " >" else Doc.nil in
+ Doc.group (Doc.concat [Doc.lbracket; openingSymbol; Doc.line; cases; closingSymbol; labels; Doc.line; Doc.rbracket])
+ in
+ let shouldPrintItsOwnAttributes = match typExpr.ptyp_desc with
+ | Ptyp_arrow _ (* es6 arrow types print their own attributes *)
+ | Ptyp_constr({txt = Longident.Ldot(Longident.Lident "Js", "t")}, _) -> true
+ | _ -> false
+ in
+ let doc = begin match typExpr.ptyp_attributes with
+ | _::_ as attrs when not shouldPrintItsOwnAttributes ->
+ Doc.group (
+ Doc.concat [
+ printAttributes attrs;
+ renderedType;
+ ]
+ )
+ | _ -> renderedType
+ end
+ in
+ printComments doc cmtTbl typExpr.ptyp_loc
+
+ (* Prints the object-sugar type syntax.
+  * An empty field list renders as {.} (Closed) or {..} (Open); a non-empty
+  * list renders each field via printObjectField, comma-separated, with a
+  * leading ".." when the object row is Open.
+  * ~inline:true skips the outer Doc.group so the caller controls breaking. *)
+ and printBsObjectSugar ~inline fields openFlag cmtTbl =
+   let doc = match fields with
+   | [] -> Doc.concat [
+       Doc.lbrace;
+       (match openFlag with
+       | Asttypes.Closed -> Doc.dot
+       | Open -> Doc.dotdot);
+       Doc.rbrace
+     ]
+   | fields ->
+     Doc.concat [
+       Doc.lbrace;
+       (match openFlag with
+       | Asttypes.Closed -> Doc.nil
+       | Open -> Doc.dotdot);
+       Doc.indent (
+         Doc.concat [
+           Doc.softLine;
+           Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (
+             List.map (fun field -> printObjectField field cmtTbl) fields
+           )
+         ]
+       );
+       (* only materializes when the group breaks over multiple lines *)
+       Doc.trailingComma;
+       Doc.softLine;
+       Doc.rbrace;
+     ]
+   in
+   if inline then doc else Doc.group doc
+
+ (* Prints a tuple type, e.g. (int, string).
+  * Members are comma-separated and indented inside parens; a trailing comma
+  * appears only when the layout breaks.
+  * ~inline:true returns the raw concat (no Doc.group) so an enclosing
+  * group can decide the breaking behaviour. *)
+ and printTupleType ~inline (types: Parsetree.core_type list) cmtTbl =
+   let tuple = Doc.concat([
+     Doc.lparen;
+     Doc.indent (
+       Doc.concat([
+         Doc.softLine;
+         Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (
+           List.map (fun typexpr -> printTypExpr typexpr cmtTbl) types
+         )
+       ])
+     );
+     Doc.trailingComma;
+     Doc.softLine;
+     Doc.rparen;
+   ])
+   in
+   if inline == false then Doc.group(tuple) else tuple
+
+ (* Prints a single object-type field as "label": typ, with the field's
+  * attributes in front. Comments are attached twice: around the quoted
+  * label itself, and around the whole row (label start .. type end).
+  * Non-Otag fields (inherited rows) are not printed. *)
+ and printObjectField (field : Parsetree.object_field) cmtTbl =
+   match field with
+   | Otag (labelLoc, attrs, typ) ->
+     let lbl =
+       let doc = Doc.text ("\"" ^ labelLoc.txt ^ "\"") in
+       printComments doc cmtTbl labelLoc.loc
+     in
+     let doc = Doc.concat [
+       printAttributes ~loc:labelLoc.loc attrs;
+       lbl;
+       Doc.text ": ";
+       printTypExpr typ cmtTbl;
+     ] in
+     (* comment range spans from the label to the end of the type *)
+     let cmtLoc = {labelLoc.loc with loc_end = typ.ptyp_loc.loc_end} in
+     printComments doc cmtTbl cmtLoc
+   | _ -> Doc.nil
+
+ (* es6 arrow type arg
+  * type t = (~foo: string, ~bar: float=?, unit) => unit
+  * i.e. ~foo: string, ~bar: float
+  * Prints one parameter of an arrow type: optional uncurried dot,
+  * attributes, the ~label prefix (Labelled/Optional), the type itself,
+  * and a "=?" suffix for Optional labels. *)
+ and printTypeParameter (attrs, lbl, typ) cmtTbl =
+   (* the "bs"/uncurried attribute is rendered as a leading dot, not as @attr *)
+   let (isUncurried, attrs) = ParsetreeViewer.processUncurriedAttribute attrs in
+   let uncurried = if isUncurried then Doc.concat [Doc.dot; Doc.space] else Doc.nil in
+   let attrs = match attrs with
+   | [] -> Doc.nil
+   | attrs -> Doc.concat [
+       Doc.join ~sep:Doc.line (List.map printAttribute attrs);
+       Doc.line;
+     ] in
+   (* Labelled and Optional print the same "~name: " prefix here; the
+    * optionality is expressed by the "=?" suffix below *)
+   let label = match lbl with
+   | Asttypes.Nolabel -> Doc.nil
+   | Labelled lbl -> Doc.concat [
+       Doc.text "~";
+       printIdentLike lbl;
+       Doc.text ": ";
+     ]
+   | Optional lbl -> Doc.concat [
+       Doc.text "~";
+       printIdentLike lbl;
+       Doc.text ": ";
+     ]
+   in
+   let optionalIndicator = match lbl with
+   | Asttypes.Nolabel
+   | Labelled _ -> Doc.nil
+   | Optional _lbl -> Doc.text "=?"
+   in
+   let doc = Doc.group (
+     Doc.concat [
+       uncurried;
+       attrs;
+       label;
+       printTypExpr typ cmtTbl;
+       optionalIndicator;
+     ]
+   ) in
+   printComments doc cmtTbl typ.ptyp_loc
+
+ (* Prints one value binding of a let-group.
+  * i is the binding's index in the group: index 0 gets the "let "/"export "
+  * keyword (plus the rec flag), subsequent bindings get "and ".
+  * A "genType" attribute turns the keyword into "export ".
+  * Special case: a binding of the shape
+  *   let f: type t. ... = (type t) => ...   (Ppat_constraint with Ptyp_poly
+  * on the pattern, Pexp_newtype on the expression) is printed with the
+  * locally-abstract-type syntax "type t." inside the annotation. *)
+ and printValueBinding ~recFlag vb cmtTbl i =
+   let (hasGenType, attrs) = ParsetreeViewer.splitGenTypeAttr vb.pvb_attributes in
+   let attrs = printAttributes ~loc:vb.pvb_pat.ppat_loc attrs in
+   let header =
+     if i == 0 then
+       Doc.concat [
+         if hasGenType then Doc.text "export " else Doc.text "let ";
+         recFlag
+       ] else
+       Doc.concat [
+         Doc.text "and ";
+         if hasGenType then Doc.text "export " else Doc.nil
+       ]
+   in
+   match vb with
+   (* locally abstract types: let f: type t. t => t = ... *)
+   | {pvb_pat =
+       {ppat_desc = Ppat_constraint (pattern, {ptyp_desc = Ptyp_poly _})};
+      pvb_expr =
+       {pexp_desc = Pexp_newtype _} as expr
+     } ->
+     let (_attrs, parameters, returnExpr) = ParsetreeViewer.funExpr expr in
+     (* "type a b." prefix built from the newtype parameters *)
+     let abstractType = match parameters with
+     | [NewTypes {locs = vars}] ->
+       Doc.concat [
+         Doc.text "type ";
+         Doc.join ~sep:Doc.space (List.map (fun var -> Doc.text var.Asttypes.txt) vars);
+         Doc.dot;
+       ]
+     | _ -> Doc.nil
+     in
+     begin match returnExpr.pexp_desc with
+     | Pexp_constraint (expr, typ) ->
+       Doc.group (
+         Doc.concat [
+           attrs;
+           header;
+           printPattern pattern cmtTbl;
+           Doc.text ":";
+           Doc.indent (
+             Doc.concat [
+               Doc.line;
+               abstractType;
+               Doc.space;
+               printTypExpr typ cmtTbl;
+               Doc.text " =";
+               Doc.concat [
+                 Doc.line;
+                 printExpressionWithComments expr cmtTbl;
+               ]
+             ]
+           )
+         ]
+       )
+     | _ -> Doc.nil
+     end
+   | _ ->
+   let (optBraces, expr) = ParsetreeViewer.processBracesAttr vb.pvb_expr in
+   let printedExpr =
+     let doc = printExpressionWithComments vb.pvb_expr cmtTbl in
+     match Parens.expr vb.pvb_expr with
+     | Parens.Parenthesized -> addParens doc
+     | Braced braces -> printBraces doc expr braces
+     | Nothing -> doc
+   in
+   if ParsetreeViewer.isPipeExpr vb.pvb_expr then
+     (* pipe chains get two candidate layouts: everything on one line,
+      * or the rhs indented on the next line; customLayout picks the
+      * first that fits *)
+     Doc.customLayout [
+       Doc.group (
+         Doc.concat [
+           attrs;
+           header;
+           printPattern vb.pvb_pat cmtTbl;
+           Doc.text " =";
+           Doc.space;
+           printedExpr;
+         ]
+       );
+       Doc.group (
+         Doc.concat [
+           attrs;
+           header;
+           printPattern vb.pvb_pat cmtTbl;
+           Doc.text " =";
+           Doc.indent (
+             Doc.concat [
+               Doc.line;
+               printedExpr;
+             ]
+           )
+         ]
+       );
+     ]
+   else
+     (* decide whether the rhs hangs indented on its own line; braced
+      * expressions never indent, binary/ternary/attributed ones do *)
+     let shouldIndent =
+       match optBraces with
+       | Some _ -> false
+       | _ ->
+         ParsetreeViewer.isBinaryExpression expr ||
+         (match vb.pvb_expr with
+         | {
+             pexp_attributes = [({Location.txt="ns.ternary"}, _)];
+             pexp_desc = Pexp_ifthenelse (ifExpr, _, _)
+           } ->
+           ParsetreeViewer.isBinaryExpression ifExpr || ParsetreeViewer.hasAttributes ifExpr.pexp_attributes
+         | { pexp_desc = Pexp_newtype _} -> false
+         | e ->
+           ParsetreeViewer.hasAttributes e.pexp_attributes ||
+           ParsetreeViewer.isArrayAccess e
+         )
+     in
+     Doc.group (
+       Doc.concat [
+         attrs;
+         header;
+         printPattern vb.pvb_pat cmtTbl;
+         Doc.text " =";
+         if shouldIndent then
+           Doc.indent (
+             Doc.concat [
+               Doc.line;
+               printedExpr;
+             ]
+           )
+         else
+           Doc.concat [
+             Doc.space;
+             printedExpr;
+           ]
+       ]
+     )
+
+ (* Prints a first-class-module (package) type: module(S) or
+  * module(S with type t = int and type u = string).
+  * ~printModuleKeywordAndParens:false emits only the inner part, for
+  * contexts that print "module(" themselves (e.g. pattern unpacking). *)
+ and printPackageType ~printModuleKeywordAndParens (packageType: Parsetree.package_type) cmtTbl =
+   let doc = match packageType with
+   | (longidentLoc, []) -> Doc.group(
+       Doc.concat [
+         printLongidentLocation longidentLoc cmtTbl;
+       ]
+     )
+   | (longidentLoc, packageConstraints) -> Doc.group(
+       Doc.concat [
+         printLongidentLocation longidentLoc cmtTbl;
+         printPackageConstraints packageConstraints cmtTbl;
+         Doc.softLine;
+       ]
+     )
+   in
+   if printModuleKeywordAndParens then
+     Doc.concat[
+       Doc.text "module(";
+       doc;
+       Doc.rparen
+     ]
+   else
+     doc
+
+ (* Prints the " with type ... and type ..." tail of a package type.
+  * Each constraint is printed by printPackageConstraint; comments are
+  * attached over the span of each (longident, type) pair. *)
+ and printPackageConstraints packageConstraints cmtTbl =
+   Doc.concat [
+     Doc.text " with";
+     Doc.indent (
+       Doc.concat [
+         Doc.line;
+         Doc.join ~sep:Doc.line (
+           List.mapi (fun i pc ->
+             let (longident, typexpr) = pc in
+             (* comment range: from the constrained path to the end of its type *)
+             let cmtLoc = {longident.Asttypes.loc with
+               loc_end = typexpr.Parsetree.ptyp_loc.loc_end
+             } in
+             let doc = printPackageConstraint i cmtTbl pc in
+             printComments doc cmtTbl cmtLoc
+           ) packageConstraints
+         )
+       ]
+     )
+   ]
+
+ (* Prints one package-type constraint: "type t = typ".
+  * The first constraint (i = 0) reads "type ..."; later ones read
+  * "and type ..." so the joined list forms "with type ... and type ...". *)
+ and printPackageConstraint i cmtTbl (longidentLoc, typ) =
+   let prefix = if i == 0 then Doc.text "type " else Doc.text "and type " in
+   Doc.concat [
+     prefix;
+     printLongidentLocation longidentLoc cmtTbl;
+     Doc.text " = ";
+     printTypExpr typ cmtTbl;
+   ]
+
+ (* Prints an extension node: %name or %%name at module level.
+  * When the payload is a single evaluated expression, it prints as
+  * %name(expr); payload attributes force an extra set of parens around
+  * the expression. Any other payload prints just the extension name. *)
+ and printExtensionWithComments ~atModuleLvl (stringLoc, payload) cmtTbl =
+   let extName =
+     let doc = Doc.concat [
+       Doc.text "%";
+       if atModuleLvl then Doc.text "%" else Doc.nil;
+       Doc.text stringLoc.Location.txt;
+     ] in
+     printComments doc cmtTbl stringLoc.Location.loc
+   in
+   match payload with
+   | Parsetree.PStr [{pstr_desc = Pstr_eval (expr, attrs)}] ->
+     let exprDoc = printExpressionWithComments expr cmtTbl in
+     let needsParens = match attrs with | [] -> false | _ -> true in
+     Doc.group (
+       Doc.concat [
+         extName;
+         addParens (
+           Doc.concat [
+             printAttributes attrs;
+             if needsParens then addParens exprDoc else exprDoc;
+           ]
+         )
+       ]
+     )
+   | _ -> extName
+
+ (* Prints a pattern in ReScript/Napkin syntax.
+  * Covers every Ppat_* constructor: wildcards, vars, constants, tuples,
+  * arrays, list sugar (list[...]), constructors, polymorphic variants,
+  * records, or-patterns, exception/lazy/alias patterns, first-class
+  * module unpacking, constraints, intervals and extensions.
+  * The pattern's attributes (if any) are printed in front of the result,
+  * and comments are attached over the pattern's full location. *)
+ and printPattern (p : Parsetree.pattern) cmtTbl =
+   let patternWithoutAttributes = match p.ppat_desc with
+   | Ppat_any -> Doc.text "_"
+   | Ppat_var var -> printIdentLike var.txt
+   | Ppat_constant c -> printConstant c
+   | Ppat_tuple patterns ->
+     Doc.group(
+       Doc.concat([
+         Doc.lparen;
+         Doc.indent (
+           Doc.concat([
+             Doc.softLine;
+             Doc.join ~sep:(Doc.concat [Doc.text ","; Doc.line])
+               (List.map (fun pat ->
+                 printPattern pat cmtTbl) patterns)
+           ])
+         );
+         Doc.trailingComma;
+         Doc.softLine;
+         Doc.rparen
+       ])
+     )
+   | Ppat_array [] ->
+     (* keep comments written inside the empty brackets *)
+     Doc.concat [
+       Doc.lbracket;
+       printCommentsInside cmtTbl p.ppat_loc;
+       Doc.rbracket;
+     ]
+   | Ppat_array patterns ->
+     Doc.group(
+       Doc.concat([
+         Doc.text "[";
+         Doc.indent (
+           Doc.concat([
+             Doc.softLine;
+             Doc.join ~sep:(Doc.concat [Doc.text ","; Doc.line])
+               (List.map (fun pat ->
+                 printPattern pat cmtTbl) patterns)
+           ])
+         );
+         Doc.trailingComma;
+         Doc.softLine;
+         Doc.text "]";
+       ])
+     )
+   | Ppat_construct({txt = Longident.Lident "()"}, _) ->
+     Doc.concat [
+       Doc.lparen;
+       printCommentsInside cmtTbl p.ppat_loc;
+       Doc.rparen;
+     ]
+   | Ppat_construct({txt = Longident.Lident "[]"}, _) ->
+     Doc.concat [
+       Doc.text "list[";
+       printCommentsInside cmtTbl p.ppat_loc;
+       Doc.rbracket;
+     ]
+   | Ppat_construct({txt = Longident.Lident "::"}, _) ->
+     (* cons chain printed as list[a, b, ...rest] *)
+     let (patterns, tail) = ParsetreeViewer.collectPatternsFromListConstruct [] p in
+     (* a single huggable element with an empty tail hugs the brackets *)
+     let shouldHug = match (patterns, tail) with
+     | ([pat],
+       {ppat_desc = Ppat_construct({txt = Longident.Lident "[]"}, _)}) when ParsetreeViewer.isHuggablePattern pat -> true
+     | _ -> false
+     in
+     let children = Doc.concat([
+       if shouldHug then Doc.nil else Doc.softLine;
+       Doc.join ~sep:(Doc.concat [Doc.text ","; Doc.line])
+         (List.map (fun pat ->
+           printPattern pat cmtTbl) patterns);
+       begin match tail.Parsetree.ppat_desc with
+       | Ppat_construct({txt = Longident.Lident "[]"}, _) -> Doc.nil
+       | _ ->
+         (* non-empty tail becomes a "...rest" spread *)
+         let doc = Doc.concat [Doc.text "..."; printPattern tail cmtTbl] in
+         let tail = printComments doc cmtTbl tail.ppat_loc in
+         Doc.concat([Doc.text ","; Doc.line; tail])
+       end;
+     ]) in
+     Doc.group(
+       Doc.concat([
+         Doc.text "list[";
+         if shouldHug then children else Doc.concat [
+           Doc.indent children;
+           Doc.ifBreaks (Doc.text ",") Doc.nil;
+           Doc.softLine;
+         ];
+         Doc.rbracket;
+       ])
+     )
+   | Ppat_construct(constrName, constructorArgs) ->
+     let constrName = printLongident constrName.txt in
+     let argsDoc = match constructorArgs with
+     | None -> Doc.nil
+     | Some({ppat_loc; ppat_desc = Ppat_construct ({txt = Longident.Lident "()"}, _)}) ->
+       Doc.concat [
+         Doc.lparen;
+         printCommentsInside cmtTbl ppat_loc;
+         Doc.rparen;
+       ]
+     | Some({ppat_desc = Ppat_tuple []; ppat_loc = loc}) ->
+       Doc.concat [
+         Doc.lparen;
+         Doc.softLine;
+         printCommentsInside cmtTbl loc;
+         Doc.rparen;
+       ]
+     (* Some((1, 2) *)
+     | Some({ppat_desc = Ppat_tuple [{ppat_desc = Ppat_tuple _} as arg]}) ->
+       Doc.concat [
+         Doc.lparen;
+         printPattern arg cmtTbl;
+         Doc.rparen;
+       ]
+     | Some({ppat_desc = Ppat_tuple patterns}) ->
+       Doc.concat [
+         Doc.lparen;
+         Doc.indent (
+           Doc.concat [
+             Doc.softLine;
+             Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (
+               List.map (fun pat -> printPattern pat cmtTbl) patterns
+             );
+           ]
+         );
+         Doc.trailingComma;
+         Doc.softLine;
+         Doc.rparen;
+       ]
+     | Some(arg) ->
+       let argDoc = printPattern arg cmtTbl in
+       let shouldHug = ParsetreeViewer.isHuggablePattern arg in
+       Doc.concat [
+         Doc.lparen;
+         if shouldHug then argDoc
+         else Doc.concat [
+           Doc.indent (
+             Doc.concat [
+               Doc.softLine;
+               argDoc;
+             ]
+           );
+           Doc.trailingComma;
+           Doc.softLine;
+         ];
+         Doc.rparen;
+       ]
+     in
+     Doc.group(Doc.concat [constrName; argsDoc])
+   | Ppat_variant (label, None) ->
+     Doc.concat [Doc.text "#"; printIdentLike ~allowUident:true label]
+   | Ppat_variant (label, variantArgs) ->
+     (* mirrors the Ppat_construct argument handling above, for #variants *)
+     let variantName =
+       Doc.concat [Doc.text "#"; printIdentLike ~allowUident:true label] in
+     let argsDoc = match variantArgs with
+     | None -> Doc.nil
+     | Some({ppat_desc = Ppat_construct ({txt = Longident.Lident "()"}, _)}) ->
+       Doc.text "()"
+     | Some({ppat_desc = Ppat_tuple []; ppat_loc = loc}) ->
+       Doc.concat [
+         Doc.lparen;
+         Doc.softLine;
+         printCommentsInside cmtTbl loc;
+         Doc.rparen;
+       ]
+     (* Some((1, 2) *)
+     | Some({ppat_desc = Ppat_tuple [{ppat_desc = Ppat_tuple _} as arg]}) ->
+       Doc.concat [
+         Doc.lparen;
+         printPattern arg cmtTbl;
+         Doc.rparen;
+       ]
+     | Some({ppat_desc = Ppat_tuple patterns}) ->
+       Doc.concat [
+         Doc.lparen;
+         Doc.indent (
+           Doc.concat [
+             Doc.softLine;
+             Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (
+               List.map (fun pat -> printPattern pat cmtTbl) patterns
+             );
+           ]
+         );
+         Doc.trailingComma;
+         Doc.softLine;
+         Doc.rparen;
+       ]
+     | Some(arg) ->
+       let argDoc = printPattern arg cmtTbl in
+       let shouldHug = ParsetreeViewer.isHuggablePattern arg in
+       Doc.concat [
+         Doc.lparen;
+         if shouldHug then argDoc
+         else Doc.concat [
+           Doc.indent (
+             Doc.concat [
+               Doc.softLine;
+               argDoc;
+             ]
+           );
+           Doc.trailingComma;
+           Doc.softLine;
+         ];
+         Doc.rparen;
+       ]
+     in
+     Doc.group(Doc.concat [variantName; argsDoc])
+   | Ppat_type ident ->
+     Doc.concat [Doc.text "##"; printIdentPath ident cmtTbl]
+   | Ppat_record(rows, openFlag) ->
+     Doc.group(
+       Doc.concat([
+         Doc.lbrace;
+         Doc.indent (
+           Doc.concat [
+             Doc.softLine;
+             Doc.join ~sep:(Doc.concat [Doc.text ","; Doc.line])
+               (List.map (fun row -> printPatternRecordRow row cmtTbl) rows);
+             (* an open record pattern prints a trailing ", _" *)
+             begin match openFlag with
+             | Open -> Doc.concat [Doc.text ","; Doc.line; Doc.text "_"]
+             | Closed -> Doc.nil
+             end;
+           ]
+         );
+         Doc.ifBreaks (Doc.text ",") Doc.nil;
+         Doc.softLine;
+         Doc.rbrace;
+       ])
+     )
+   | Ppat_exception p ->
+     (* or/alias sub-patterns need parens to bind tighter than "exception" *)
+     let needsParens = match p.ppat_desc with
+     | Ppat_or (_, _) | Ppat_alias (_, _) -> true
+     | _ -> false
+     in
+     let pat =
+       let p = printPattern p cmtTbl in
+       if needsParens then
+         Doc.concat [Doc.text "("; p; Doc.text ")"]
+       else
+         p
+     in
+     Doc.group (
+       Doc.concat [Doc.text "exception"; Doc.line; pat]
+     )
+   | Ppat_or _ ->
+     (* Blue | Red | Green -> [Blue; Red; Green] *)
+     let orChain = ParsetreeViewer.collectOrPatternChain p in
+     let docs = List.mapi (fun i pat ->
+       let patternDoc = printPattern pat cmtTbl in
+       Doc.concat [
+         if i == 0 then Doc.nil else Doc.concat [Doc.line; Doc.text "| "];
+         match pat.ppat_desc with
+         (* (Blue | Red) | (Green | Black) | White *)
+         | Ppat_or _ -> addParens patternDoc
+         | _ -> patternDoc
+       ]
+     ) orChain in
+     Doc.group (Doc.concat docs)
+   | Ppat_extension ext ->
+     printExtensionWithComments ~atModuleLvl:false ext cmtTbl
+   | Ppat_lazy p ->
+     let needsParens = match p.ppat_desc with
+     | Ppat_or (_, _) | Ppat_alias (_, _) -> true
+     | _ -> false
+     in
+     let pat =
+       let p = printPattern p cmtTbl in
+       if needsParens then
+         Doc.concat [Doc.text "("; p; Doc.text ")"]
+       else
+         p
+     in
+     Doc.concat [Doc.text "lazy "; pat]
+   | Ppat_alias (p, aliasLoc) ->
+     let needsParens = match p.ppat_desc with
+     | Ppat_or (_, _) | Ppat_alias (_, _) -> true
+     | _ -> false
+     in
+     let renderedPattern =
+       let p = printPattern p cmtTbl in
+       if needsParens then
+         Doc.concat [Doc.text "("; p; Doc.text ")"]
+       else
+         p
+     in
+     Doc.concat([
+       renderedPattern;
+       Doc.text " as ";
+       printStringLoc aliasLoc cmtTbl;
+     ])
+   (* Note: module(P : S) is represented as *)
+   (* Ppat_constraint(Ppat_unpack, Ptyp_package) *)
+   | Ppat_constraint ({ppat_desc = Ppat_unpack stringLoc}, {ptyp_desc = Ptyp_package packageType; ptyp_loc}) ->
+     Doc.concat [
+       Doc.text "module(";
+       printComments (Doc.text stringLoc.txt) cmtTbl stringLoc.loc;
+       Doc.text ": ";
+       printComments
+         (printPackageType ~printModuleKeywordAndParens:false packageType cmtTbl)
+         cmtTbl
+         ptyp_loc;
+       Doc.rparen;
+     ]
+   | Ppat_constraint (pattern, typ) ->
+     Doc.concat [
+       printPattern pattern cmtTbl;
+       Doc.text ": ";
+       printTypExpr typ cmtTbl;
+     ]
+   (* Note: module(P : S) is represented as *)
+   (* Ppat_constraint(Ppat_unpack, Ptyp_package) *)
+   | Ppat_unpack stringLoc ->
+     Doc.concat [
+       Doc.text "module(";
+       printComments (Doc.text stringLoc.txt) cmtTbl stringLoc.loc;
+       Doc.rparen;
+     ]
+   | Ppat_interval (a, b) ->
+     Doc.concat [
+       printConstant a;
+       Doc.text " .. ";
+       printConstant b;
+     ]
+   | Ppat_open _ -> Doc.nil
+   in
+   (* attributes (if any) are printed before the pattern itself *)
+   let doc = match p.ppat_attributes with
+   | [] -> patternWithoutAttributes
+   | attrs ->
+     Doc.group (
+       Doc.concat [
+         printAttributes attrs;
+         patternWithoutAttributes;
+       ]
+     )
+   in
+   printComments doc cmtTbl p.ppat_loc
+
+ (* Prints one row of a record pattern.
+  * Punning: {x: x} collapses to {x} when the field name equals the bound
+  * variable. Otherwise prints "path: pattern" with the pattern indented,
+  * and attaches comments over the whole row span. *)
+ and printPatternRecordRow row cmtTbl =
+   match row with
+   (* punned {x}*)
+   | ({Location.txt=Longident.Lident ident} as longident,
+      {Parsetree.ppat_desc=Ppat_var {txt;_}}) when ident = txt ->
+     printLidentPath longident cmtTbl
+   | (longident, pattern) ->
+     let locForComments = {
+       longident.loc with
+       loc_end = pattern.Parsetree.ppat_loc.loc_end
+     } in
+     let doc = Doc.group (
+       Doc.concat([
+         printLidentPath longident cmtTbl;
+         Doc.text ": ";
+         Doc.indent(
+           Doc.concat [
+             Doc.softLine;
+             printPattern pattern cmtTbl;
+           ]
+         )
+       ])
+     ) in
+     printComments doc cmtTbl locForComments
+
+ (* Prints an expression and attaches any comments recorded for its
+  * location in the comment table. *)
+ and printExpressionWithComments expr cmtTbl =
+   printComments (printExpression expr cmtTbl) cmtTbl expr.Parsetree.pexp_loc
+
+ and printExpression (e : Parsetree.expression) cmtTbl =
+ let printedExpression = match e.pexp_desc with
+ | Parsetree.Pexp_constant c -> printConstant c
+ | Pexp_construct _ when ParsetreeViewer.hasJsxAttribute e.pexp_attributes ->
+ printJsxFragment e cmtTbl
+ | Pexp_construct ({txt = Longident.Lident "()"}, _) -> Doc.text "()"
+ | Pexp_construct ({txt = Longident.Lident "[]"}, _) ->
+ Doc.concat [
+ Doc.text "list[";
+ printCommentsInside cmtTbl e.pexp_loc;
+ Doc.rbracket;
+ ]
+ | Pexp_construct ({txt = Longident.Lident "::"}, _) ->
+ let (expressions, spread) = ParsetreeViewer.collectListExpressions e in
+ let spreadDoc = match spread with
+ | Some(expr) -> Doc.concat [
+ Doc.text ",";
+ Doc.line;
+ Doc.dotdotdot;
+ let doc = printExpressionWithComments expr cmtTbl in
+ match Parens.expr expr with
+ | Parens.Parenthesized -> addParens doc
+ | Braced braces -> printBraces doc expr braces
+ | Nothing -> doc
+ ]
+ | None -> Doc.nil
+ in
+ Doc.group(
+ Doc.concat([
+ Doc.text "list[";
+ Doc.indent (
+ Doc.concat([
+ Doc.softLine;
+ Doc.join ~sep:(Doc.concat [Doc.text ","; Doc.line])
+ (List.map
+ (fun expr ->
+ let doc = printExpressionWithComments expr cmtTbl in
+ match Parens.expr expr with
+ | Parens.Parenthesized -> addParens doc
+ | Braced braces -> printBraces doc expr braces
+ | Nothing -> doc
+ )
+ expressions);
+ spreadDoc;
+ ])
+ );
+ Doc.trailingComma;
+ Doc.softLine;
+ Doc.rbracket;
+ ])
+ )
+ | Pexp_construct (longidentLoc, args) ->
+ let constr = printLongidentLocation longidentLoc cmtTbl in
+ let args = match args with
+ | None -> Doc.nil
+ | Some({pexp_desc = Pexp_construct ({txt = Longident.Lident "()"}, _)}) ->
+ Doc.text "()"
+ (* Some((1, 2)) *)
+ | Some({pexp_desc = Pexp_tuple [{pexp_desc = Pexp_tuple _} as arg]}) ->
+ Doc.concat [
+ Doc.lparen;
+ (let doc = printExpressionWithComments arg cmtTbl in
+ match Parens.expr arg with
+ | Parens.Parenthesized -> addParens doc
+ | Braced braces -> printBraces doc arg braces
+ | Nothing -> doc);
+ Doc.rparen;
+ ]
+ | Some({pexp_desc = Pexp_tuple args }) ->
+ Doc.concat [
+ Doc.lparen;
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (
+ List.map
+ (fun expr ->
+ let doc = printExpressionWithComments expr cmtTbl in
+ match Parens.expr expr with
+ | Parens.Parenthesized -> addParens doc
+ | Braced braces -> printBraces doc expr braces
+ | Nothing -> doc)
+ args
+ )
+ ]
+ );
+ Doc.trailingComma;
+ Doc.softLine;
+ Doc.rparen;
+ ]
+ | Some(arg) ->
+ let argDoc =
+ let doc = printExpressionWithComments arg cmtTbl in
+ match Parens.expr arg with
+ | Parens.Parenthesized -> addParens doc
+ | Braced braces -> printBraces doc arg braces
+ | Nothing -> doc
+ in
+ let shouldHug = ParsetreeViewer.isHuggableExpression arg in
+ Doc.concat [
+ Doc.lparen;
+ if shouldHug then argDoc
+ else Doc.concat [
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ argDoc;
+ ]
+ );
+ Doc.trailingComma;
+ Doc.softLine;
+ ];
+ Doc.rparen;
+ ]
+ in
+ Doc.group(Doc.concat [constr; args])
+ | Pexp_ident path ->
+ printLidentPath path cmtTbl
+ | Pexp_tuple exprs ->
+ Doc.group(
+ Doc.concat([
+ Doc.lparen;
+ Doc.indent (
+ Doc.concat([
+ Doc.softLine;
+ Doc.join ~sep:(Doc.concat [Doc.text ","; Doc.line])
+ (List.map (fun expr ->
+ let doc = printExpressionWithComments expr cmtTbl in
+ match Parens.expr expr with
+ | Parens.Parenthesized -> addParens doc
+ | Braced braces -> printBraces doc expr braces
+ | Nothing -> doc)
+ exprs)
+ ])
+ );
+ Doc.ifBreaks (Doc.text ",") Doc.nil;
+ Doc.softLine;
+ Doc.rparen;
+ ])
+ )
+ | Pexp_array [] ->
+ Doc.concat [
+ Doc.lbracket;
+ printCommentsInside cmtTbl e.pexp_loc;
+ Doc.rbracket;
+ ]
+ | Pexp_array exprs ->
+ Doc.group(
+ Doc.concat([
+ Doc.lbracket;
+ Doc.indent (
+ Doc.concat([
+ Doc.softLine;
+ Doc.join ~sep:(Doc.concat [Doc.text ","; Doc.line])
+ (List.map (fun expr ->
+ let doc = printExpressionWithComments expr cmtTbl in
+ match Parens.expr expr with
+ | Parens.Parenthesized -> addParens doc
+ | Braced braces -> printBraces doc expr braces
+ | Nothing -> doc
+ ) exprs)
+ ])
+ );
+ Doc.trailingComma;
+ Doc.softLine;
+ Doc.rbracket;
+ ])
+ )
+ | Pexp_variant (label, args) ->
+ let variantName =
+ Doc.concat [Doc.text "#"; printIdentLike ~allowUident:true label] in
+ let args = match args with
+ | None -> Doc.nil
+ | Some({pexp_desc = Pexp_construct ({txt = Longident.Lident "()"}, _)}) ->
+ Doc.text "()"
+ (* #poly((1, 2) *)
+ | Some({pexp_desc = Pexp_tuple [{pexp_desc = Pexp_tuple _} as arg]}) ->
+ Doc.concat [
+ Doc.lparen;
+ (let doc = printExpressionWithComments arg cmtTbl in
+ match Parens.expr arg with
+ | Parens.Parenthesized -> addParens doc
+ | Braced braces -> printBraces doc arg braces
+ | Nothing -> doc);
+ Doc.rparen;
+ ]
+ | Some({pexp_desc = Pexp_tuple args }) ->
+ Doc.concat [
+ Doc.lparen;
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (
+ List.map
+ (fun expr ->
+ let doc = printExpressionWithComments expr cmtTbl in
+ match Parens.expr expr with
+ | Parens.Parenthesized -> addParens doc
+ | Braced braces -> printBraces doc expr braces
+ | Nothing -> doc)
+ args
+ )
+ ]
+ );
+ Doc.trailingComma;
+ Doc.softLine;
+ Doc.rparen;
+ ]
+ | Some(arg) ->
+ let argDoc =
+ let doc = printExpressionWithComments arg cmtTbl in
+ match Parens.expr arg with
+ | Parens.Parenthesized -> addParens doc
+ | Braced braces -> printBraces doc arg braces
+ | Nothing -> doc
+ in
+ let shouldHug = ParsetreeViewer.isHuggableExpression arg in
+ Doc.concat [
+ Doc.lparen;
+ if shouldHug then argDoc
+ else Doc.concat [
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ argDoc;
+ ]
+ );
+ Doc.trailingComma;
+ Doc.softLine;
+ ];
+ Doc.rparen;
+ ]
+ in
+ Doc.group(Doc.concat [variantName; args])
+ | Pexp_record (rows, spreadExpr) ->
+ let spread = match spreadExpr with
+ | None -> Doc.nil
+ | Some expr -> Doc.concat [
+ Doc.dotdotdot;
+ (let doc = printExpressionWithComments expr cmtTbl in
+ match Parens.expr expr with
+ | Parens.Parenthesized -> addParens doc
+ | Braced braces -> printBraces doc expr braces
+ | Nothing -> doc);
+ Doc.comma;
+ Doc.line;
+ ]
+ in
+ (* If the record is written over multiple lines, break automatically
+ * `let x = {a: 1, b: 3}` -> same line, break when line-width exceeded
+ * `let x = {
+ * a: 1,
+ * b: 2,
+ * }` -> record is written on multiple lines, break the group *)
+ let forceBreak =
+ e.pexp_loc.loc_start.pos_lnum < e.pexp_loc.loc_end.pos_lnum
+ in
+ Doc.breakableGroup ~forceBreak (
+ Doc.concat([
+ Doc.lbrace;
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ spread;
+ Doc.join ~sep:(Doc.concat [Doc.text ","; Doc.line])
+ (List.map (fun row -> printRecordRow row cmtTbl) rows)
+ ]
+ );
+ Doc.trailingComma;
+ Doc.softLine;
+ Doc.rbrace;
+ ])
+ )
+ | Pexp_extension extension ->
+ begin match extension with
+ | (
+ {txt = "bs.obj"},
+ PStr [{
+ pstr_loc = loc;
+ pstr_desc = Pstr_eval({pexp_desc = Pexp_record (rows, _)}, [])
+ }]
+ ) ->
+ (* If the object is written over multiple lines, break automatically
+ * `let x = {"a": 1, "b": 3}` -> same line, break when line-width exceeded
+ * `let x = {
+ * "a": 1,
+ * "b": 2,
+ * }` -> object is written on multiple lines, break the group *)
+ let forceBreak =
+ loc.loc_start.pos_lnum < loc.loc_end.pos_lnum
+ in
+ Doc.breakableGroup ~forceBreak (
+ Doc.concat([
+ Doc.lbrace;
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ Doc.join ~sep:(Doc.concat [Doc.text ","; Doc.line])
+ (List.map (fun row -> printBsObjectRow row cmtTbl) rows)
+ ]
+ );
+ Doc.trailingComma;
+ Doc.softLine;
+ Doc.rbrace;
+ ])
+ )
+ | extension ->
+ printExtensionWithComments ~atModuleLvl:false extension cmtTbl
+ end
+ | Pexp_apply _ ->
+ if ParsetreeViewer.isUnaryExpression e then
+ printUnaryExpression e cmtTbl
+ else if ParsetreeViewer.isTemplateLiteral e then
+ printTemplateLiteral e cmtTbl
+ else if ParsetreeViewer.isBinaryExpression e then
+ printBinaryExpression e cmtTbl
+ else
+ printPexpApply e cmtTbl
+ | Pexp_unreachable -> Doc.dot
+ | Pexp_field (expr, longidentLoc) ->
+ let lhs =
+ let doc = printExpressionWithComments expr cmtTbl in
+ match Parens.fieldExpr expr with
+ | Parens.Parenthesized -> addParens doc
+ | Braced braces -> printBraces doc expr braces
+ | Nothing -> doc
+ in
+ Doc.concat [
+ lhs;
+ Doc.dot;
+ printLidentPath longidentLoc cmtTbl;
+ ]
+ | Pexp_setfield (expr1, longidentLoc, expr2) ->
+ printSetFieldExpr e.pexp_attributes expr1 longidentLoc expr2 e.pexp_loc cmtTbl
+ | Pexp_ifthenelse (_ifExpr, _thenExpr, _elseExpr) ->
+ if ParsetreeViewer.isTernaryExpr e then
+ let (parts, alternate) = ParsetreeViewer.collectTernaryParts e in
+ let ternaryDoc = match parts with
+ | (condition1, consequent1)::rest ->
+ Doc.group (Doc.concat [
+ printTernaryOperand condition1 cmtTbl;
+ Doc.indent (
+ Doc.concat [
+ Doc.line;
+ Doc.indent (
+ Doc.concat [
+ Doc.text "? ";
+ printTernaryOperand consequent1 cmtTbl
+ ]
+ );
+ Doc.concat (
+ List.map (fun (condition, consequent) ->
+ Doc.concat [
+ Doc.line;
+ Doc.text ": ";
+ printTernaryOperand condition cmtTbl;
+ Doc.line;
+ Doc.text "? ";
+ printTernaryOperand consequent cmtTbl;
+ ]
+ ) rest
+ );
+ Doc.line;
+ Doc.text ": ";
+ Doc.indent (printTernaryOperand alternate cmtTbl);
+ ]
+ )
+ ])
+ | _ -> Doc.nil
+ in
+ let attrs = ParsetreeViewer.filterTernaryAttributes e.pexp_attributes in
+ let needsParens = match ParsetreeViewer.filterParsingAttrs attrs with
+ | [] -> false | _ -> true
+ in
+ Doc.concat [
+ printAttributes attrs;
+ if needsParens then addParens ternaryDoc else ternaryDoc;
+ ]
+ else
+ let (ifs, elseExpr) = ParsetreeViewer.collectIfExpressions e in
+ let ifDocs = Doc.join ~sep:Doc.space (
+ List.mapi (fun i (ifExpr, thenExpr) ->
+ let ifTxt = if i > 0 then Doc.text "else if " else Doc.text "if " in
+ let condition =
+ if ParsetreeViewer.isBlockExpr ifExpr then
+ printExpressionBlock ~braces:true ifExpr cmtTbl
+ else
+ let doc = printExpressionWithComments ifExpr cmtTbl in
+ match Parens.expr ifExpr with
+ | Parens.Parenthesized -> addParens doc
+ | Braced braces -> printBraces doc ifExpr braces
+ | Nothing -> Doc.ifBreaks (addParens doc) doc
+ in
+ Doc.concat [
+ ifTxt;
+ Doc.group (condition);
+ Doc.space;
+ let thenExpr = match ParsetreeViewer.processBracesAttr thenExpr with
+ (* This case only happens when coming from Reason, we strip braces *)
+ | (Some _, expr) -> expr
+ | _ -> thenExpr
+ in
+ printExpressionBlock ~braces:true thenExpr cmtTbl;
+ ]
+ ) ifs
+ ) in
+ let elseDoc = match elseExpr with
+ | None -> Doc.nil
+ | Some expr -> Doc.concat [
+ Doc.text " else ";
+ printExpressionBlock ~braces:true expr cmtTbl;
+ ]
+ in
+ Doc.concat [
+ printAttributes e.pexp_attributes;
+ ifDocs;
+ elseDoc;
+ ]
+ | Pexp_while (expr1, expr2) ->
+ let condition =
+ let doc = printExpressionWithComments expr1 cmtTbl in
+ match Parens.expr expr1 with
+ | Parens.Parenthesized -> addParens doc
+ | Braced braces -> printBraces doc expr1 braces
+ | Nothing -> doc
+ in
+ Doc.breakableGroup ~forceBreak:true (
+ Doc.concat [
+ Doc.text "while ";
+ if ParsetreeViewer.isBlockExpr expr1 then
+ condition
+ else
+ Doc.group (
+ Doc.ifBreaks (addParens condition) condition
+ );
+ Doc.space;
+ printExpressionBlock ~braces:true expr2 cmtTbl;
+ ]
+ )
+ | Pexp_for (pattern, fromExpr, toExpr, directionFlag, body) ->
+ Doc.breakableGroup ~forceBreak:true (
+ Doc.concat [
+ Doc.text "for ";
+ printPattern pattern cmtTbl;
+ Doc.text " in ";
+ (let doc = printExpressionWithComments fromExpr cmtTbl in
+ match Parens.expr fromExpr with
+ | Parens.Parenthesized -> addParens doc
+ | Braced braces -> printBraces doc fromExpr braces
+ | Nothing -> doc);
+ printDirectionFlag directionFlag;
+ (let doc = printExpressionWithComments toExpr cmtTbl in
+ match Parens.expr toExpr with
+ | Parens.Parenthesized -> addParens doc
+ | Braced braces -> printBraces doc toExpr braces
+ | Nothing -> doc);
+ Doc.space;
+ printExpressionBlock ~braces:true body cmtTbl;
+ ]
+ )
+ | Pexp_constraint(
+ {pexp_desc = Pexp_pack modExpr},
+ {ptyp_desc = Ptyp_package packageType; ptyp_loc}
+ ) ->
+ Doc.group (
+ Doc.concat [
+ Doc.text "module(";
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ printModExpr modExpr cmtTbl;
+ Doc.text ": ";
+ printComments
+ (printPackageType ~printModuleKeywordAndParens:false packageType cmtTbl)
+ cmtTbl
+ ptyp_loc
+ ]
+ );
+ Doc.softLine;
+ Doc.rparen;
+ ]
+ )
+
+ | Pexp_constraint (expr, typ) ->
+ let exprDoc =
+ let doc = printExpressionWithComments expr cmtTbl in
+ match Parens.expr expr with
+ | Parens.Parenthesized -> addParens doc
+ | Braced braces -> printBraces doc expr braces
+ | Nothing -> doc
+ in
+ Doc.concat [
+ exprDoc;
+ Doc.text ": ";
+ printTypExpr typ cmtTbl;
+ ]
+ | Pexp_letmodule ({txt = _modName}, _modExpr, _expr) ->
+ printExpressionBlock ~braces:true e cmtTbl
+ | Pexp_letexception (_extensionConstructor, _expr) ->
+ printExpressionBlock ~braces:true e cmtTbl
+ | Pexp_assert expr ->
+ let rhs =
+ let doc = printExpressionWithComments expr cmtTbl in
+ match Parens.lazyOrAssertExprRhs expr with
+ | Parens.Parenthesized -> addParens doc
+ | Braced braces -> printBraces doc expr braces
+ | Nothing -> doc
+ in
+ Doc.concat [
+ Doc.text "assert ";
+ rhs;
+ ]
+ | Pexp_lazy expr ->
+ let rhs =
+ let doc = printExpressionWithComments expr cmtTbl in
+ match Parens.lazyOrAssertExprRhs expr with
+ | Parens.Parenthesized -> addParens doc
+ | Braced braces -> printBraces doc expr braces
+ | Nothing -> doc
+ in
+ Doc.group (
+ Doc.concat [
+ Doc.text "lazy ";
+ rhs;
+ ]
+ )
+ | Pexp_open (_overrideFlag, _longidentLoc, _expr) ->
+ printExpressionBlock ~braces:true e cmtTbl
+ | Pexp_pack (modExpr) ->
+ Doc.group (Doc.concat [
+ Doc.text "module(";
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ printModExpr modExpr cmtTbl;
+ ]
+ );
+ Doc.softLine;
+ Doc.rparen;
+ ])
+ | Pexp_sequence _ ->
+ printExpressionBlock ~braces:true e cmtTbl
+ | Pexp_let _ ->
+ printExpressionBlock ~braces:true e cmtTbl
+ | Pexp_fun (Nolabel, None, {ppat_desc = Ppat_var {txt="__x"}}, ({pexp_desc = Pexp_apply _})) ->
+
+ (* (__x) => f(a, __x, c) -----> f(a, _, c) *)
+ printExpressionWithComments (ParsetreeViewer.rewriteUnderscoreApply e) cmtTbl
+ | Pexp_fun _ | Pexp_newtype _ ->
+ let (attrsOnArrow, parameters, returnExpr) = ParsetreeViewer.funExpr e in
+ let (uncurried, attrs) =
+ ParsetreeViewer.processUncurriedAttribute attrsOnArrow
+ in
+ let (returnExpr, typConstraint) = match returnExpr.pexp_desc with
+ | Pexp_constraint (expr, typ) -> (
+ {expr with pexp_attributes = List.concat [
+ expr.pexp_attributes;
+ returnExpr.pexp_attributes;
+ ]},
+ Some typ
+ )
+ | _ -> (returnExpr, None)
+ in
+ let hasConstraint = match typConstraint with | Some _ -> true | None -> false in
+ let parametersDoc = printExprFunParameters
+ ~inCallback:false
+ ~uncurried
+ ~hasConstraint
+ parameters
+ cmtTbl
+ in
+ let returnExprDoc =
+ let (optBraces, _) = ParsetreeViewer.processBracesAttr returnExpr in
+ let shouldInline = match (returnExpr.pexp_desc, optBraces) with
+ | (_, Some _ ) -> true
+ | ((Pexp_array _
+ | Pexp_tuple _
+ | Pexp_construct (_, Some _)
+ | Pexp_record _), _) -> true
+ | _ -> false
+ in
+ let shouldIndent = match returnExpr.pexp_desc with
+ | Pexp_sequence _
+ | Pexp_let _
+ | Pexp_letmodule _
+ | Pexp_letexception _
+ | Pexp_open _ -> false
+ | _ -> true
+ in
+ let returnDoc =
+ let doc = printExpressionWithComments returnExpr cmtTbl in
+ match Parens.expr returnExpr with
+ | Parens.Parenthesized -> addParens doc
+ | Braced braces -> printBraces doc returnExpr braces
+ | Nothing -> doc
+ in
+ if shouldInline then Doc.concat [
+ Doc.space;
+ returnDoc;
+ ] else
+ Doc.group (
+ if shouldIndent then
+ Doc.indent (
+ Doc.concat [
+ Doc.line;
+ returnDoc;
+ ]
+ )
+ else
+ Doc.concat [
+ Doc.space;
+ returnDoc
+ ]
+ )
+ in
+ let typConstraintDoc = match typConstraint with
+ | Some(typ) -> Doc.concat [Doc.text ": "; printTypExpr typ cmtTbl]
+ | _ -> Doc.nil
+ in
+ let attrs = printAttributes attrs in
+ Doc.group (
+ Doc.concat [
+ attrs;
+ parametersDoc;
+ typConstraintDoc;
+ Doc.text " =>";
+ returnExprDoc;
+ ]
+ )
+ | Pexp_try (expr, cases) ->
+ let exprDoc =
+ let doc = printExpressionWithComments expr cmtTbl in
+ match Parens.expr expr with
+ | Parens.Parenthesized -> addParens doc
+ | Braced braces -> printBraces doc expr braces
+ | Nothing -> doc
+ in
+ Doc.concat [
+ Doc.text "try ";
+ exprDoc;
+ Doc.text " catch ";
+ printCases cases cmtTbl;
+ ]
+ | Pexp_match (expr, cases) ->
+ let exprDoc =
+ let doc = printExpressionWithComments expr cmtTbl in
+ match Parens.expr expr with
+ | Parens.Parenthesized -> addParens doc
+ | Braced braces -> printBraces doc expr braces
+ | Nothing -> doc
+ in
+ Doc.concat [
+ Doc.text "switch ";
+ exprDoc;
+ Doc.space;
+ printCases cases cmtTbl;
+ ]
+ | Pexp_function cases ->
+ Doc.concat [
+ Doc.text "x => switch x ";
+ printCases cases cmtTbl;
+ ]
+ | Pexp_coerce (expr, typOpt, typ) ->
+ let docExpr = printExpressionWithComments expr cmtTbl in
+ let docTyp = printTypExpr typ cmtTbl in
+ let ofType = match typOpt with
+ | None -> Doc.nil
+ | Some(typ1) ->
+ Doc.concat [Doc.text ": "; printTypExpr typ1 cmtTbl]
+ in
+ Doc.concat [Doc.lparen; docExpr; ofType; Doc.text " :> "; docTyp; Doc.rparen]
+ | Pexp_send _ ->
+ Doc.text "Pexp_send not impemented in printer"
+ | Pexp_new _ ->
+ Doc.text "Pexp_new not impemented in printer"
+ | Pexp_setinstvar _ ->
+ Doc.text "Pexp_setinstvar not impemented in printer"
+ | Pexp_override _ ->
+ Doc.text "Pexp_override not impemented in printer"
+ | Pexp_poly _ ->
+ Doc.text "Pexp_poly not impemented in printer"
+ | Pexp_object _ ->
+ Doc.text "Pexp_object not impemented in printer"
+ in
+ let shouldPrintItsOwnAttributes = match e.pexp_desc with
+ | Pexp_apply _
+ | Pexp_fun _
+ | Pexp_newtype _
+ | Pexp_setfield _
+ | Pexp_ifthenelse _ -> true
+ | Pexp_construct _ when ParsetreeViewer.hasJsxAttribute e.pexp_attributes -> true
+ | _ -> false
+ in
+ match e.pexp_attributes with
+ | [] -> printedExpression
+ | attrs when not shouldPrintItsOwnAttributes ->
+ Doc.group (
+ Doc.concat [
+ printAttributes attrs;
+ printedExpression;
+ ]
+ )
+ | _ -> printedExpression
+
(* Prints an arrow function (Pexp_fun / Pexp_newtype), e.g. (x, y) => x + y.
 * ~inCallback adjusts the layout when the function is passed as a call
 * argument: an indented return expression then gets a trailing softline so
 * the call's closing paren can hug the callback body. *)
and printPexpFun ~inCallback e cmtTbl =
  let (attrsOnArrow, parameters, returnExpr) = ParsetreeViewer.funExpr e in
  (* Uncurriedness is encoded as an attribute on the arrow; split it off from
   * the attributes that should actually be printed. *)
  let (uncurried, attrs) =
    ParsetreeViewer.processUncurriedAttribute attrsOnArrow
  in
  (* A constrained body, (x) => (expr: typ), is surfaced as a return-type
   * annotation; the inner expression keeps both attribute lists. *)
  let (returnExpr, typConstraint) = match returnExpr.pexp_desc with
  | Pexp_constraint (expr, typ) -> (
      {expr with pexp_attributes = List.concat [
        expr.pexp_attributes;
        returnExpr.pexp_attributes;
      ]},
      Some typ
    )
  | _ -> (returnExpr, None)
  in
  let parametersDoc = printExprFunParameters
    ~inCallback
    ~uncurried
    ~hasConstraint:(match typConstraint with | Some _ -> true | None -> false)
    parameters cmtTbl in
  (* Block-like bodies (let/sequence/...) print their own braces via
   * printExpressionBlock; don't indent them after the arrow. *)
  let returnShouldIndent = match returnExpr.pexp_desc with
  | Pexp_sequence _ | Pexp_let _ | Pexp_letmodule _ | Pexp_letexception _ | Pexp_open _ -> false
  | _ -> true
  in
  let returnExprDoc =
    let (optBraces, _) = ParsetreeViewer.processBracesAttr returnExpr in
    (* Braced bodies and composite literals hug the arrow: (x) => [x]. *)
    let shouldInline = match (returnExpr.pexp_desc, optBraces) with
    | (_, Some _) -> true
    | ((Pexp_array _
      | Pexp_tuple _
      | Pexp_construct (_, Some _)
      | Pexp_record _), _) -> true
    | _ -> false
    in
    let returnDoc =
      let doc = printExpressionWithComments returnExpr cmtTbl in
      match Parens.expr returnExpr with
      | Parens.Parenthesized -> addParens doc
      | Braced braces -> printBraces doc returnExpr braces
      | Nothing -> doc
    in
    if shouldInline then Doc.concat [
      Doc.space;
      returnDoc;
    ] else
      Doc.group (
        if returnShouldIndent then
          Doc.concat [
            Doc.indent (
              Doc.concat [
                Doc.line;
                returnDoc;
              ]
            );
            (* Let the call's closing paren dedent onto its own line. *)
            if inCallback then Doc.softLine else Doc.nil;
          ]
        else
          Doc.concat [
            Doc.space;
            returnDoc;
          ]
      )
  in
  let typConstraintDoc = match typConstraint with
  | Some(typ) -> Doc.concat [
      Doc.text ": ";
      printTypExpr typ cmtTbl
    ]
  | _ -> Doc.nil
  in
  Doc.group (
    Doc.concat [
      printAttributes attrs;
      parametersDoc;
      typConstraintDoc;
      Doc.text " =>";
      returnExprDoc;
    ]
  )
+
(* Prints one operand of a ternary (`cond ? a : b`), adding parens or braces
 * when the parenthesization analysis says the operand needs protection. *)
and printTernaryOperand expr cmtTbl =
  let printed = printExpressionWithComments expr cmtTbl in
  begin match Parens.ternaryOperand expr with
  | Parens.Parenthesized -> addParens printed
  | Parens.Braced braces -> printBraces printed expr braces
  | Parens.Nothing -> printed
  end
+
(* Prints a record-field assignment: person.name = "steve".
 * `attrs` are the attributes of the whole set-field expression and are
 * printed in front of it; `loc` is its location, used to attach comments. *)
and printSetFieldExpr attrs lhs longidentLoc rhs loc cmtTbl =
  let rhsDoc =
    let doc = printExpressionWithComments rhs cmtTbl in
    match Parens.setFieldExprRhs rhs with
    | Parens.Parenthesized -> addParens doc
    | Braced braces -> printBraces doc rhs braces
    | Nothing -> doc
  in
  let lhsDoc =
    let doc = printExpressionWithComments lhs cmtTbl in
    match Parens.fieldExpr lhs with
    | Parens.Parenthesized -> addParens doc
    | Braced braces -> printBraces doc lhs braces
    | Nothing -> doc
  in
  (* A binary rhs may break onto its own indented line after the "=". *)
  let shouldIndent = ParsetreeViewer.isBinaryExpression rhs in
  let doc = Doc.group (Doc.concat [
    lhsDoc;
    Doc.dot;
    printLidentPath longidentLoc cmtTbl;
    Doc.text " =";
    if shouldIndent then Doc.group (
      Doc.indent (
        (Doc.concat [Doc.line; rhsDoc])
      )
    ) else
      Doc.concat [Doc.space; rhsDoc]
  ]) in
  let doc = match attrs with
  | [] -> doc
  | attrs ->
    Doc.group (
      Doc.concat [
        printAttributes attrs;
        doc
      ]
    )
  in
  printComments doc cmtTbl loc
+
(* Prints a template literal, e.g. `foo ${bar}` (or sql`...` for a custom
 * tag). The parser desugars interpolation into a chain of "^" string
 * concatenations; walk that chain back into literal parts and ${...} holes. *)
and printTemplateLiteral expr cmtTbl =
  (* The string-delimiter tag; "j" is the default and is not printed. *)
  let tag = ref "j" in
  let rec walkExpr expr =
    let open Parsetree in
    match expr.pexp_desc with
    | Pexp_apply (
        {pexp_desc = Pexp_ident {txt = Longident.Lident "^"}},
        [Nolabel, arg1; Nolabel, arg2]
      ) ->
      let lhs = walkExpr arg1 in
      let rhs = walkExpr arg2 in
      Doc.concat [lhs; rhs]
    | Pexp_constant (Pconst_string (txt, Some prefix)) ->
      (* A literal chunk; its delimiter records the template's tag. *)
      tag := prefix;
      Doc.text txt
    | _ ->
      (* Anything else is an interpolated hole: ${expr}. *)
      let doc = printExpressionWithComments expr cmtTbl in
      Doc.concat [Doc.text "${"; doc; Doc.rbrace]
  in
  let content = walkExpr expr in
  Doc.concat [
    if !tag = "j" then Doc.nil else Doc.text !tag;
    Doc.text "`";
    content;
    Doc.text "`"
  ]
+
(* Prints a unary operation, e.g. -x, -.f, !condition.
 * Only called for applications recognized as unary expressions; any other
 * shape is a programming error (assert false). *)
and printUnaryExpression expr cmtTbl =
  (* Map the internal operator name onto its ReScript surface syntax. *)
  let printUnaryOperator op =
    let txt = match op with
    | "~+" -> "+"
    | "~+." -> "+."
    | "~-" -> "-"
    | "~-." -> "-."
    | "not" -> "!"
    | _ -> assert false
    in
    Doc.text txt
  in
  match expr.pexp_desc with
  | Pexp_apply (
      {pexp_desc = Pexp_ident {txt = Longident.Lident operator}},
      [Nolabel, operand]
    ) ->
    let operandDoc =
      let doc = printExpressionWithComments operand cmtTbl in
      begin match Parens.unaryExprOperand operand with
      | Parens.Parenthesized -> addParens doc
      | Parens.Braced braces -> printBraces doc operand braces
      | Parens.Nothing -> doc
      end
    in
    printComments
      (Doc.concat [printUnaryOperator operator; operandDoc])
      cmtTbl
      expr.pexp_loc
  | _ -> assert false
+
(* Prints a binary expression, flattening chains of operators with compatible
 * precedence (a + b + c) and choosing where parens, braces, and line breaks
 * go. The pipe operators "|." (->) and "|>" get special spacing. *)
and printBinaryExpression (expr : Parsetree.expression) cmtTbl =
  (* Maps the internal operator name to surface syntax and decides the
   * whitespace around it: "->" hugs both sides, "|>" may break before,
   * everything else gets a space before and a space-or-line after. *)
  let printBinaryOperator ~inlineRhs operator =
    let operatorTxt = match operator with
    | "|." -> "->"
    | "^" -> "++"
    | "=" -> "=="
    | "==" -> "==="
    | "<>" -> "!="
    | "!=" -> "!=="
    | txt -> txt
    in
    let spacingBeforeOperator =
      if operator = "|." then Doc.softLine
      else if operator = "|>" then Doc.line
      else Doc.space;
    in
    let spacingAfterOperator =
      if operator = "|." then Doc.nil
      else if operator = "|>" then Doc.space
      else if inlineRhs then Doc.space else Doc.line
    in
    Doc.concat [
      spacingBeforeOperator;
      Doc.text operatorTxt;
      spacingAfterOperator;
    ]
  in
  (* Prints one operand. Nested binary sub-expressions whose operator can be
   * flattened with the parent's are printed inline in the same chain;
   * otherwise parens are inserted as required. *)
  let printOperand ~isLhs expr parentOperator =
    let rec flatten ~isLhs expr parentOperator =
      if ParsetreeViewer.isBinaryExpression expr then
        begin match expr with
        | {pexp_desc = Pexp_apply (
            {pexp_desc = Pexp_ident {txt = Longident.Lident operator}},
            [_, left; _, right]
          )} ->
          if ParsetreeViewer.flattenableOperators parentOperator operator &&
             not (ParsetreeViewer.hasAttributes expr.pexp_attributes)
          then
            (* Same chain: recurse into the lhs, print the rhs directly. *)
            let leftPrinted = flatten ~isLhs:true left operator in
            let rightPrinted =
              (* Attributes that should be printed are pulled out of the rhs
               * and emitted in front of it. *)
              let (_, rightAttrs) =
                ParsetreeViewer.partitionPrinteableAttributes right.pexp_attributes
              in
              let doc =
                printExpressionWithComments
                  {right with pexp_attributes = rightAttrs}
                  cmtTbl
              in
              let doc = if Parens.flattenOperandRhs parentOperator right then
                Doc.concat [Doc.lparen; doc; Doc.rparen]
              else
                doc
              in
              let printeableAttrs =
                ParsetreeViewer.filterPrinteableAttributes right.pexp_attributes
              in
              Doc.concat [printAttributes printeableAttrs; doc]
            in
            let doc = Doc.concat [
              leftPrinted;
              printBinaryOperator ~inlineRhs:false operator;
              rightPrinted;
            ] in
            let doc =
              if not isLhs && (Parens.rhsBinaryExprOperand operator expr) then
                Doc.concat [Doc.lparen; doc; Doc.rparen]
              else doc
            in
            printComments doc cmtTbl expr.pexp_loc
          else (
            (* Different precedence class: print as a unit, parenthesized
             * when required by the sub-expression rules or by attributes. *)
            let doc = printExpressionWithComments {expr with pexp_attributes = []} cmtTbl in
            let doc = if Parens.subBinaryExprOperand parentOperator operator ||
              (expr.pexp_attributes <> [] &&
                (ParsetreeViewer.isBinaryExpression expr ||
                ParsetreeViewer.isTernaryExpr expr))
            then
              Doc.concat [Doc.lparen; doc; Doc.rparen]
            else doc
            in Doc.concat [
              printAttributes expr.pexp_attributes;
              doc
            ]
          )
        | _ -> assert false
        end
      else
        begin match expr.pexp_desc with
        | Pexp_setfield (lhs, field, rhs) ->
          let doc = printSetFieldExpr expr.pexp_attributes lhs field rhs expr.pexp_loc cmtTbl in
          (* An assignment on the lhs of a binary expr needs parens. *)
          if isLhs then addParens doc else doc
        | Pexp_apply(
            {pexp_desc = Pexp_ident {txt = Longident.Lident "#="}},
            [(Nolabel, lhs); (Nolabel, rhs)]
          ) ->
          let rhsDoc = printExpressionWithComments rhs cmtTbl in
          let lhsDoc = printExpressionWithComments lhs cmtTbl in
          (* TODO: unify indentation of "=" *)
          let shouldIndent = ParsetreeViewer.isBinaryExpression rhs in
          let doc = Doc.group (
            Doc.concat [
              lhsDoc;
              Doc.text " =";
              if shouldIndent then Doc.group (
                Doc.indent (Doc.concat [Doc.line; rhsDoc])
              ) else
                Doc.concat [Doc.space; rhsDoc]
            ]
          ) in
          let doc = match expr.pexp_attributes with
          | [] -> doc
          | attrs ->
            Doc.group (
              Doc.concat [
                printAttributes attrs;
                doc
              ]
            )
          in
          if isLhs then addParens doc else doc
        | _ ->
          let doc = printExpressionWithComments expr cmtTbl in
          begin match Parens.binaryExprOperand ~isLhs expr with
          | Parens.Parenthesized -> addParens doc
          | Braced braces -> printBraces doc expr braces
          | Nothing -> doc
          end
        end
    in
    flatten ~isLhs expr parentOperator
  in
  match expr.pexp_desc with
  (* A lone pipe with non-binary operands prints inline: a->b, a |> b. *)
  | Pexp_apply (
      {pexp_desc = Pexp_ident {txt = Longident.Lident (("|." | "|>") as op)}},
      [Nolabel, lhs; Nolabel, rhs]
    ) when not (
      ParsetreeViewer.isBinaryExpression lhs ||
      ParsetreeViewer.isBinaryExpression rhs
    ) ->
    let lhsDoc = printOperand ~isLhs:true lhs op in
    let rhsDoc = printOperand ~isLhs:false rhs op in
    Doc.group (
      Doc.concat [
        lhsDoc;
        (match op with
        | "|." -> Doc.text "->"
        | "|>" -> Doc.text " |> "
        | _ -> assert false);
        rhsDoc;
      ]
    )
  | Pexp_apply (
      {pexp_desc = Pexp_ident {txt = Longident.Lident operator}},
      [Nolabel, lhs; Nolabel, rhs]
    ) ->
    let right =
      let operatorWithRhs =
        let rhsDoc = printOperand ~isLhs:false rhs operator in
        Doc.concat [
          printBinaryOperator
            ~inlineRhs:(ParsetreeViewer.shouldInlineRhsBinaryExpr rhs) operator;
          rhsDoc;
        ] in
      if ParsetreeViewer.shouldIndentBinaryExpr expr then
        Doc.group (Doc.indent operatorWithRhs)
      else operatorWithRhs
    in
    let doc = Doc.group (
      Doc.concat [
        printOperand ~isLhs:true lhs operator;
        right
      ]
    ) in
    Doc.group (
      Doc.concat [
        printAttributes expr.pexp_attributes;
        (* The ns.braces attribute is handled via printBraces below, so it
         * must not influence the parenthesization decision. *)
        match Parens.binaryExpr {expr with
          pexp_attributes = List.filter (fun attr ->
            match attr with
            | ({Location.txt = ("ns.braces")}, _) -> false
            | _ -> true
          ) expr.pexp_attributes
        } with
        | Braced(bracesLoc) -> printBraces doc expr bracesLoc
        | Parenthesized -> addParens doc
        | Nothing -> doc;
      ]
    )
  | _ -> Doc.nil
+
(* callExpr(arg1, arg2) — prints a function application, including the
 * operator/accessor sugars the parser encodes as applications:
 * obj["member"] (##), lhs = rhs (#=), arr[i] (Array.get), arr[i] = x
 * (Array.set), and JSX elements. *)
and printPexpApply expr cmtTbl =
  match expr.pexp_desc with
  (* parent["member"] — the ## access operator. *)
  | Pexp_apply (
      {pexp_desc = Pexp_ident {txt = Longident.Lident "##"}},
      [Nolabel, parentExpr; Nolabel, memberExpr]
    ) ->
    let parentDoc =
      let doc = printExpressionWithComments parentExpr cmtTbl in
      match Parens.unaryExprOperand parentExpr with
      | Parens.Parenthesized -> addParens doc
      | Braced braces -> printBraces doc parentExpr braces
      | Nothing -> doc
    in
    let member =
      let memberDoc = match memberExpr.pexp_desc with
      | Pexp_ident lident ->
        printComments (printLongident lident.txt) cmtTbl memberExpr.pexp_loc
      | _ -> printExpressionWithComments memberExpr cmtTbl
      in
      (* The member is printed as a quoted string inside the brackets. *)
      Doc.concat [Doc.text "\""; memberDoc; Doc.text "\""]
    in
    Doc.group (Doc.concat [
      printAttributes expr.pexp_attributes;
      parentDoc;
      Doc.lbracket;
      member;
      Doc.rbracket;
    ])
  (* lhs = rhs — the #= mutation operator. *)
  | Pexp_apply (
      {pexp_desc = Pexp_ident {txt = Longident.Lident "#="}},
      [Nolabel, lhs; Nolabel, rhs]
    ) ->
    let rhsDoc =
      let doc = printExpressionWithComments rhs cmtTbl in
      match Parens.expr rhs with
      | Parens.Parenthesized -> addParens doc
      | Braced braces -> printBraces doc rhs braces
      | Nothing -> doc
    in
    (* TODO: unify indentation of "=" *)
    let shouldIndent = not (ParsetreeViewer.isBracedExpr rhs) && ParsetreeViewer.isBinaryExpression rhs in
    let doc = Doc.group(
      Doc.concat [
        printExpressionWithComments lhs cmtTbl;
        Doc.text " =";
        if shouldIndent then Doc.group (
          Doc.indent (
            (Doc.concat [Doc.line; rhsDoc])
          )
        ) else
          Doc.concat [Doc.space; rhsDoc]
      ]
    ) in
    begin match expr.pexp_attributes with
    | [] -> doc
    | attrs ->
      Doc.group (
        Doc.concat [
          printAttributes attrs;
          doc
        ]
      )
    end
  (* arr[idx] — Array.get sugar. *)
  | Pexp_apply (
      {pexp_desc = Pexp_ident {txt = Longident.Ldot (Lident "Array", "get")}},
      [Nolabel, parentExpr; Nolabel, memberExpr]
    ) ->
    let member =
      let memberDoc =
        let doc = printExpressionWithComments memberExpr cmtTbl in
        match Parens.expr memberExpr with
        | Parens.Parenthesized -> addParens doc
        | Braced braces -> printBraces doc memberExpr braces
        | Nothing -> doc
      in
      (* Constants and idents hug the brackets; anything else may break. *)
      let shouldInline = match memberExpr.pexp_desc with
      | Pexp_constant _ | Pexp_ident _ -> true
      | _ -> false
      in
      if shouldInline then memberDoc else (
        Doc.concat [
          Doc.indent (
            Doc.concat [
              Doc.softLine;
              memberDoc;
            ]
          );
          Doc.softLine
        ]
      )
    in
    let parentDoc =
      let doc = printExpressionWithComments parentExpr cmtTbl in
      match Parens.unaryExprOperand parentExpr with
      | Parens.Parenthesized -> addParens doc
      | Braced braces -> printBraces doc parentExpr braces
      | Nothing -> doc
    in
    Doc.group (Doc.concat [
      printAttributes expr.pexp_attributes;
      parentDoc;
      Doc.lbracket;
      member;
      Doc.rbracket;
    ])
  (* arr[idx] = target — Array.set sugar. *)
  | Pexp_apply (
      {pexp_desc = Pexp_ident {txt = Longident.Ldot (Lident "Array", "set")}},
      [Nolabel, parentExpr; Nolabel, memberExpr; Nolabel, targetExpr]
    ) ->
    let member =
      let memberDoc =
        let doc = printExpressionWithComments memberExpr cmtTbl in
        match Parens.expr memberExpr with
        | Parens.Parenthesized -> addParens doc
        | Braced braces -> printBraces doc memberExpr braces
        | Nothing -> doc
      in
      let shouldInline = match memberExpr.pexp_desc with
      | Pexp_constant _ | Pexp_ident _ -> true
      | _ -> false
      in
      if shouldInline then memberDoc else (
        Doc.concat [
          Doc.indent (
            Doc.concat [
              Doc.softLine;
              memberDoc;
            ]
          );
          Doc.softLine
        ]
      )
    in
    (* Decide whether the assigned value breaks onto an indented line. *)
    let shouldIndentTargetExpr =
      if ParsetreeViewer.isBracedExpr targetExpr then
        false
      else
        ParsetreeViewer.isBinaryExpression targetExpr ||
        (match targetExpr with
        | {
            pexp_attributes = [({Location.txt="ns.ternary"}, _)];
            pexp_desc = Pexp_ifthenelse (ifExpr, _, _)
          } ->
          ParsetreeViewer.isBinaryExpression ifExpr || ParsetreeViewer.hasAttributes ifExpr.pexp_attributes
        | { pexp_desc = Pexp_newtype _} -> false
        | e ->
          ParsetreeViewer.hasAttributes e.pexp_attributes ||
          ParsetreeViewer.isArrayAccess e
        )
    in
    let targetExpr =
      let doc = printExpressionWithComments targetExpr cmtTbl in
      match Parens.expr targetExpr with
      | Parens.Parenthesized -> addParens doc
      | Braced braces -> printBraces doc targetExpr braces
      | Nothing -> doc
    in
    let parentDoc =
      let doc = printExpressionWithComments parentExpr cmtTbl in
      match Parens.unaryExprOperand parentExpr with
      | Parens.Parenthesized -> addParens doc
      | Braced braces -> printBraces doc parentExpr braces
      | Nothing -> doc
    in
    Doc.group (
      Doc.concat [
        printAttributes expr.pexp_attributes;
        parentDoc;
        Doc.lbracket;
        member;
        Doc.rbracket;
        Doc.text " =";
        if shouldIndentTargetExpr then
          Doc.indent (
            Doc.concat [
              Doc.line;
              targetExpr;
            ]
          )
        else
          Doc.concat [
            Doc.space;
            targetExpr;
          ]
      ]
    )
  (* TODO: cleanup, are those branches even remotely performant? *)
  | Pexp_apply (
      {pexp_desc = Pexp_ident lident},
      args
    ) when ParsetreeViewer.isJsxExpression expr ->
    printJsxExpression lident args cmtTbl
  | Pexp_apply (callExpr, args) ->
    (* Ordinary application; restore `_` placeholders in the arguments. *)
    let args = List.map (fun (lbl, arg) ->
      (lbl, ParsetreeViewer.rewriteUnderscoreApply arg)
    ) args
    in
    let (uncurried, attrs) =
      ParsetreeViewer.processUncurriedAttribute expr.pexp_attributes
    in
    let callExprDoc =
      let doc = printExpressionWithComments callExpr cmtTbl in
      match Parens.callExpr callExpr with
      | Parens.Parenthesized -> addParens doc
      | Braced braces -> printBraces doc callExpr braces
      | Nothing -> doc
    in
    (* Callback arguments in first or last position get a special layout so
     * the arrow function can hug the surrounding parens. *)
    if ParsetreeViewer.requiresSpecialCallbackPrintingFirstArg args then
      let argsDoc =
        printArgumentsWithCallbackInFirstPosition ~uncurried args cmtTbl
      in
      Doc.concat [
        printAttributes attrs;
        callExprDoc;
        argsDoc;
      ]
    else if ParsetreeViewer.requiresSpecialCallbackPrintingLastArg args then
      let argsDoc =
        printArgumentsWithCallbackInLastPosition ~uncurried args cmtTbl
      in
      Doc.concat [
        printAttributes attrs;
        callExprDoc;
        argsDoc;
      ]
    else
      let argsDoc = printArguments ~uncurried args cmtTbl in
      Doc.concat [
        printAttributes attrs;
        callExprDoc;
        argsDoc;
      ]
  | _ -> assert false
+
(* Prints a JSX element: <Name prop=value> children </Name>, or the
 * self-closing form <Name prop=value /> when there are no children. *)
and printJsxExpression lident args cmtTbl =
  let name = printJsxName lident in
  let (formattedProps, children) = printJsxProps args cmtTbl in
  (* <div className="test" /> *)
  let isSelfClosing = match children with | [] -> true | _ -> false in
  Doc.group (
    Doc.concat [
      Doc.group (
        Doc.concat [
          printComments (Doc.concat [Doc.lessThan; name]) cmtTbl lident.Asttypes.loc;
          formattedProps;
          if isSelfClosing then Doc.concat [Doc.line; Doc.text "/>"] else Doc.nil
        ]
      );
      if isSelfClosing then Doc.nil
      else
        Doc.concat [
          Doc.greaterThan;
          Doc.indent (
            Doc.concat [
              Doc.line;
              printJsxChildren children cmtTbl;
            ]
          );
          Doc.line;
          (* Closing tag: "</" ++ name ++ ">". Printing "" here (as before)
           * dropped the mandatory "</", producing unparseable output. *)
          Doc.text "</";
          name;
          Doc.greaterThan;
        ]
    ]
  )
+
(* Prints a JSX fragment: <> children </>. *)
and printJsxFragment expr cmtTbl =
  let opening = Doc.text "<>" in
  (* A fragment must close with "</>"; the previous bare ">" printed
   * `<> children >`, which does not parse back. *)
  let closing = Doc.text "</>" in
  (* The children arrive as a sugared list expression. *)
  let (children, _) = ParsetreeViewer.collectListExpressions expr in
  Doc.group (
    Doc.concat [
      opening;
      begin match children with
      | [] -> Doc.nil
      | children ->
        Doc.indent (
          Doc.concat [
            Doc.line;
            printJsxChildren children cmtTbl;
          ]
        )
      end;
      Doc.line;
      closing;
    ]
  )
+
(* Prints the children of a JSX element/fragment, separated by lines. *)
and printJsxChildren (children: Parsetree.expression list) cmtTbl =
  let printChild expr =
    let childDoc = printExpressionWithComments expr cmtTbl in
    match Parens.jsxChildExpr expr with
    | Parens.Parenthesized | Parens.Braced _ ->
      (* {(20: int)} — also protect the expression inside the braces. *)
      let inner =
        if Parens.bracedExpr expr then addParens childDoc else childDoc
      in
      addBraces inner
    | Parens.Nothing -> childDoc
  in
  Doc.group (Doc.join ~sep:Doc.line (List.map printChild children))
+
(* Splits the arguments of a JSX application into the printed props and the
 * list of children. The parser always appends ~children plus a trailing
 * unit argument; that final pair terminates the prop walk. *)
and printJsxProps args cmtTbl =
  let rec loop props args =
    match args with
    | [] -> (Doc.nil, [])
    | [
        (Asttypes.Labelled "children", children);
        (
          Asttypes.Nolabel,
          {Parsetree.pexp_desc = Pexp_construct ({txt = Longident.Lident "()"}, None)}
        )
      ] ->
      (* End of the props; props were accumulated in reverse order. *)
      let formattedProps = Doc.indent (
        match props with
        | [] -> Doc.nil
        | props ->
          Doc.concat [
            Doc.line;
            Doc.group (
              Doc.join ~sep:Doc.line (props |> List.rev)
            )
          ]
      ) in
      let (children, _) = ParsetreeViewer.collectListExpressions children in
      (formattedProps, children)
    | arg::args ->
      let propDoc = printJsxProp arg cmtTbl in
      loop (propDoc::props) args
  in
  loop [] args
+
(* Prints a single JSX prop: name=expr, punned `name`, or optional `?name`. *)
and printJsxProp arg cmtTbl =
  match arg with
  (* Punned prop with a location attribute: <div name /> or <div ?name />. *)
  | (
      (Asttypes.Labelled lblTxt | Optional lblTxt) as lbl,
      {
        Parsetree.pexp_attributes = [({Location.txt = "ns.namedArgLoc"; loc = argLoc}, _)];
        pexp_desc = Pexp_ident {txt = Longident.Lident ident}
      }
    ) when lblTxt = ident (* jsx punning *) ->
    begin match lbl with
    | Nolabel -> Doc.nil
    | Labelled _lbl ->
      printComments (printIdentLike ident) cmtTbl argLoc
    | Optional _lbl ->
      let doc = Doc.concat [
        Doc.question;
        printIdentLike ident;
      ] in
      printComments doc cmtTbl argLoc
    end
  | (
      (Asttypes.Labelled lblTxt | Optional lblTxt) as lbl,
      {
        Parsetree.pexp_attributes = [];
        pexp_desc = Pexp_ident {txt = Longident.Lident ident}
      }
    ) when lblTxt = ident (* jsx punning when printing from Reason *) ->
    begin match lbl with
    | Nolabel -> Doc.nil
    | Labelled _lbl -> printIdentLike ident
    | Optional _lbl -> Doc.concat [
        Doc.question;
        printIdentLike ident;
      ]
    end
  (* Regular prop: label=expr. *)
  | (lbl, expr) ->
    (* The label's location is carried as an attribute on the expression;
     * strip it before printing. *)
    let (argLoc, expr) = match expr.pexp_attributes with
    | ({Location.txt = "ns.namedArgLoc"; loc}, _)::attrs ->
      (loc, {expr with pexp_attributes = attrs})
    | _ ->
      Location.none, expr
    in
    let lblDoc = match lbl with
    | Asttypes.Labelled lbl ->
      let lbl = printComments (printIdentLike lbl) cmtTbl argLoc in
      Doc.concat [lbl; Doc.equal]
    | Asttypes.Optional lbl ->
      let lbl = printComments (printIdentLike lbl) cmtTbl argLoc in
      Doc.concat [lbl; Doc.equal; Doc.question]
    | Nolabel -> Doc.nil
    in
    let exprDoc =
      let doc = printExpression expr cmtTbl in
      match Parens.jsxPropExpr expr with
      | Parenthesized | Braced(_) ->
        (* {(20: int)} make sure that we also protect the expression inside *)
        addBraces (if Parens.bracedExpr expr then addParens doc else doc)
      | _ -> doc
    in
    (* Attach comments over the whole label=expr span. *)
    let fullLoc = {argLoc with loc_end = expr.pexp_loc.loc_end} in
    printComments
      (Doc.concat [
        lblDoc;
        exprDoc;
      ])
      cmtTbl
      fullLoc
+
(* Prints the tag name of a JSX element:
 *   div -> div
 *   Navbar.createElement -> Navbar
 *   Staff.Users.createElement -> Staff.Users *)
and printJsxName {txt = lident} =
  (* Collect the path segments in order, dropping any "createElement". *)
  let rec collect acc lid = match lid with
  | Longident.Lident txt -> txt :: acc
  | Longident.Ldot (path, txt) ->
    let acc = if txt = "createElement" then acc else txt :: acc in
    collect acc path
  | _ -> acc
  in
  match lident with
  | Longident.Lident txt -> Doc.text txt
  | _ ->
    let segments = collect [] lident in
    Doc.join ~sep:Doc.dot (List.map Doc.text segments)
+
(* Prints the argument list of a call whose FIRST argument is an arrow
 * function. Two layouts are attempted (Doc.customLayout picks the first
 * that fits): everything on one line, or all arguments broken. *)
and printArgumentsWithCallbackInFirstPosition ~uncurried args cmtTbl =
  let (callback, printedArgs) = match args with
  | (lbl, expr)::args ->
    let lblDoc = match lbl with
    | Asttypes.Nolabel -> Doc.nil
    | Asttypes.Labelled txt ->
      Doc.concat [
        Doc.tilde; printIdentLike txt; Doc.equal;
      ]
    | Asttypes.Optional txt ->
      Doc.concat [
        Doc.tilde; printIdentLike txt; Doc.equal; Doc.question;
      ]
    in
    let callback = Doc.concat [
      lblDoc;
      printPexpFun ~inCallback:true expr cmtTbl
    ] in
    let printedArgs = List.map (fun arg ->
      printArgument arg cmtTbl
    ) args |> Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line])
    in
    (callback, printedArgs)
  | _ -> assert false  (* guaranteed non-empty by the caller's check *)
  in
  (* Thing.map((arg1, arg2) => MyModuleBlah.toList(argument), foo) *)
  (* Thing.map((arg1, arg2) => {
   *   MyModuleBlah.toList(argument)
   * }, longArgument, veryLooooongArgument)
   *)
  let fitsOnOneLine = Doc.concat [
    if uncurried then Doc.text "(. " else Doc.lparen;
    callback;
    Doc.comma;
    Doc.line;
    printedArgs;
    Doc.rparen;
  ] in

  (* Thing.map(
   *   (param1, parm2) => doStuff(param1, parm2),
   *   arg1,
   *   arg2,
   *   arg3,
   * )
   *)
  let breakAllArgs = printArguments ~uncurried args cmtTbl in
  Doc.customLayout [
    fitsOnOneLine;
    breakAllArgs;
  ]
+
(* Prints the argument list of a call whose LAST argument is an arrow
 * function. Three layouts are attempted (Doc.customLayout picks the first
 * that fits): everything on one line, only the callback broken, or all
 * arguments broken. *)
and printArgumentsWithCallbackInLastPosition ~uncurried args cmtTbl =
  (* Walks the args; returns (printed leading args, printed callback).
   * The accumulator is built in reverse and flipped at the end. *)
  let rec loop acc args = match args with
  | [] -> (Doc.nil, Doc.nil)
  | [lbl, expr] ->
    let lblDoc = match lbl with
    | Asttypes.Nolabel -> Doc.nil
    | Asttypes.Labelled txt ->
      Doc.concat [
        Doc.tilde; printIdentLike txt; Doc.equal;
      ]
    | Asttypes.Optional txt ->
      Doc.concat [
        Doc.tilde; printIdentLike txt; Doc.equal; Doc.question;
      ]
    in
    let callback = printPexpFun ~inCallback:true expr cmtTbl in
    (Doc.concat (List.rev acc), Doc.concat [lblDoc; callback])
  | arg::args ->
    let argDoc = printArgument arg cmtTbl in
    loop (Doc.line::Doc.comma::argDoc::acc) args
  in
  let (printedArgs, callback) = loop [] args in

  (* Thing.map(foo, (arg1, arg2) => MyModuleBlah.toList(argument)) *)
  let fitsOnOneLine = Doc.concat [
    if uncurried then Doc.text "(." else Doc.lparen;
    printedArgs;
    callback;
    Doc.rparen;
  ] in

  (* Thing.map(longArgument, veryLooooongArgument, (arg1, arg2) =>
   *   MyModuleBlah.toList(argument)
   * )
   *)
  let arugmentsFitOnOneLine =
    Doc.concat [
      if uncurried then Doc.text "(." else Doc.lparen;
      Doc.softLine;
      printedArgs;
      Doc.breakableGroup ~forceBreak:true callback;
      Doc.softLine;
      Doc.rparen;
    ]
  in

  (* Thing.map(
   *   arg1,
   *   arg2,
   *   arg3,
   *   (param1, parm2) => doStuff(param1, parm2)
   * )
   *)
  let breakAllArgs = printArguments ~uncurried args cmtTbl in
  Doc.customLayout [
    fitsOnOneLine;
    arugmentsFitOnOneLine;
    breakAllArgs;
  ]
+
(* Prints a plain argument list: (), (arg), or a breakable (a, b, c).
 * ~uncurried switches the opening delimiter to "(." *)
and printArguments ~uncurried (args : (Asttypes.arg_label * Parsetree.expression) list) cmtTbl =
  match args with
  (* A single unit argument prints as an empty argument list. *)
  | [Nolabel, {pexp_desc = Pexp_construct ({txt = Longident.Lident "()"}, _)}] ->
    if uncurried then Doc.text "(.)" else Doc.text "()"
  (* A single huggable argument (list, record, ...) stays on the parens. *)
  | [(Nolabel, arg)] when ParsetreeViewer.isHuggableExpression arg ->
    let argDoc =
      let doc = printExpressionWithComments arg cmtTbl in
      match Parens.expr arg with
      | Parens.Parenthesized -> addParens doc
      | Braced braces -> printBraces doc arg braces
      | Nothing -> doc
    in
    Doc.concat [
      if uncurried then Doc.text "(." else Doc.lparen;
      argDoc;
      Doc.rparen;
    ]
  | args -> Doc.group (
      Doc.concat [
        if uncurried then Doc.text "(." else Doc.lparen;
        Doc.indent (
          Doc.concat [
            if uncurried then Doc.line else Doc.softLine;
            Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (
              List.map (fun arg -> printArgument arg cmtTbl) args
            )
          ]
        );
        (* Only emitted when the group actually breaks. *)
        Doc.trailingComma;
        Doc.softLine;
        Doc.rparen;
      ]
    )
+
+(*
+ * argument ::=
+ * | _ (* syntax sugar *)
+ * | expr
+ * | expr : type
+ * | ~ label-name
+ * | ~ label-name
+ * | ~ label-name ?
+ * | ~ label-name = expr
+ * | ~ label-name = _ (* syntax sugar *)
+ * | ~ label-name = expr : type
+ * | ~ label-name = ? expr
+ * | ~ label-name = ? _ (* syntax sugar *)
+ * | ~ label-name = ? expr : type *)
+ and printArgument (argLbl, arg) cmtTbl =
+ match (argLbl, arg) with
+ (* ~a (punned)*)
+ | (
+ (Asttypes.Labelled lbl),
+ ({pexp_desc=Pexp_ident {txt = Longident.Lident name};
+ pexp_attributes = ([] | [({Location.txt = "ns.namedArgLoc";}, _)])
+ } as argExpr)
+ ) when lbl = name && not (ParsetreeViewer.isBracedExpr argExpr) ->
+ let loc = match arg.pexp_attributes with
+ | ({Location.txt = "ns.namedArgLoc"; loc}, _)::_ -> loc
+ | _ -> arg.pexp_loc
+ in
+ let doc = Doc.concat [
+ Doc.tilde;
+ printIdentLike lbl
+ ] in
+ printComments doc cmtTbl loc
+
+ (* ~a: int (punned)*)
+ | (
+ (Asttypes.Labelled lbl),
+ {pexp_desc = Pexp_constraint (
+ {pexp_desc = Pexp_ident {txt = Longident.Lident name}} as argExpr,
+ typ
+ );
+ pexp_loc;
+ pexp_attributes = ([] | [({Location.txt = "ns.namedArgLoc";}, _)]) as attrs
+ }
+ ) when lbl = name && not (ParsetreeViewer.isBracedExpr argExpr) ->
+ let loc = match attrs with
+ | ({Location.txt = "ns.namedArgLoc"; loc}, _)::_ ->
+ {loc with loc_end = pexp_loc.loc_end}
+ | _ -> arg.pexp_loc
+ in
+ let doc = Doc.concat [
+ Doc.tilde;
+ printIdentLike lbl;
+ Doc.text ": ";
+ printTypExpr typ cmtTbl;
+ ] in
+ printComments doc cmtTbl loc
+ (* ~a? (optional lbl punned)*)
+ | (
+ (Asttypes.Optional lbl),
+ {pexp_desc=Pexp_ident {txt = Longident.Lident name};
+ pexp_attributes = ([] | [({Location.txt = "ns.namedArgLoc";}, _)])
+ }
+ ) when lbl = name ->
+ let loc = match arg.pexp_attributes with
+ | ({Location.txt = "ns.namedArgLoc"; loc}, _)::_ -> loc
+ | _ -> arg.pexp_loc
+ in
+ let doc = Doc.concat [
+ Doc.tilde;
+ printIdentLike lbl;
+ Doc.question;
+ ] in
+ printComments doc cmtTbl loc
+ | (_lbl, expr) ->
+ let (argLoc, expr) = match expr.pexp_attributes with
+ | ({Location.txt = "ns.namedArgLoc"; loc}, _)::attrs ->
+ (loc, {expr with pexp_attributes = attrs})
+ | _ ->
+ expr.pexp_loc, expr
+ in
+ let printedLbl = match argLbl with
+ | Asttypes.Nolabel -> Doc.nil
+ | Asttypes.Labelled lbl ->
+ let doc = Doc.concat [Doc.tilde; printIdentLike lbl; Doc.equal] in
+ printComments doc cmtTbl argLoc
+ | Asttypes.Optional lbl ->
+ let doc = Doc.concat [Doc.tilde; printIdentLike lbl; Doc.equal; Doc.question] in
+ printComments doc cmtTbl argLoc
+ in
+ let printedExpr =
+ let doc = printExpressionWithComments expr cmtTbl in
+ match Parens.expr expr with
+ | Parens.Parenthesized -> addParens doc
+ | Braced braces -> printBraces doc expr braces
+ | Nothing -> doc
+ in
+ let loc = {argLoc with loc_end = expr.pexp_loc.loc_end} in
+ let doc = Doc.concat [
+ printedLbl;
+ printedExpr;
+ ] in
+ printComments doc cmtTbl loc
+
+ (* Prints the cases of a pattern match between braces, forcing every case
+ onto its own line. *)
+ and printCases (cases: Parsetree.case list) cmtTbl =
+ Doc.breakableGroup ~forceBreak:true (
+ Doc.concat [
+ Doc.lbrace;
+ Doc.concat [
+ Doc.line;
+ printList
+ (* a case's comment range runs from its pattern to the end of its rhs;
+ a braces attribute, when present, carries the real end position *)
+ ~getLoc:(fun n -> {n.Parsetree.pc_lhs.ppat_loc with
+ loc_end =
+ match ParsetreeViewer.processBracesAttr n.Parsetree.pc_rhs with
+ | (None, _) -> n.pc_rhs.pexp_loc.loc_end
+ | (Some ({loc}, _), _) -> loc.Location.loc_end
+ })
+ ~print:printCase
+ ~nodes:cases
+ cmtTbl
+ ];
+ Doc.line;
+ Doc.rbrace;
+ ]
+ )
+
+ (* Prints a single `| pattern when guard => rhs` case. *)
+ and printCase (case: Parsetree.case) cmtTbl =
+ let rhs = match case.pc_rhs.pexp_desc with
+ | Pexp_let _
+ | Pexp_letmodule _
+ | Pexp_letexception _
+ | Pexp_open _
+ | Pexp_sequence _ ->
+ printExpressionBlock ~braces:(ParsetreeViewer.isBracedExpr case.pc_rhs) case.pc_rhs cmtTbl
+ | _ ->
+ let doc = printExpressionWithComments case.pc_rhs cmtTbl in
+ begin match Parens.expr case.pc_rhs with
+ | Parenthesized -> addParens doc
+ | _ -> doc
+ end
+
+ in
+ let guard = match case.pc_guard with
+ | None -> Doc.nil
+ | Some expr -> Doc.group (
+ Doc.concat [
+ Doc.line;
+ Doc.text "when ";
+ printExpressionWithComments expr cmtTbl;
+ ]
+ )
+ in
+ (* short rhs's (unit/bool constructors, constants, idents, huggable exprs)
+ stay on the same line as the `=>` *)
+ let shouldInlineRhs = match case.pc_rhs.pexp_desc with
+ | Pexp_construct ({txt = Longident.Lident ("()" | "true" | "false")}, _)
+ | Pexp_constant _
+ | Pexp_ident _ -> true
+ | _ when ParsetreeViewer.isHuggableRhs case.pc_rhs -> true
+ | _ -> false
+ in
+ (* or-patterns are not indented so their alternatives stay aligned *)
+ let shouldIndentPattern = match case.pc_lhs.ppat_desc with
+ | Ppat_or _ -> false
+ | _ -> true
+ in
+ let patternDoc =
+ let doc = printPattern case.pc_lhs cmtTbl in
+ match case.pc_lhs.ppat_desc with
+ | Ppat_constraint _ -> addParens doc
+ | _ -> doc
+ in
+ let content = Doc.concat [
+ if shouldIndentPattern then Doc.indent patternDoc else patternDoc;
+ Doc.indent guard;
+ Doc.text " =>";
+ Doc.indent (
+ Doc.concat [
+ if shouldInlineRhs then Doc.space else Doc.line;
+ rhs;
+ ]
+ )
+ ] in
+ Doc.group (
+ Doc.concat [
+ Doc.text "| ";
+ content;
+ ]
+ )
+
+ (* Prints the parameter list of a function expression. The first three
+ cases are sugared single parameters printed without parens (`_`, `a`,
+ `()`); the general case prints a parenthesized comma-separated list.
+ ~uncurried prints the `(. ` marker; ~hasConstraint forces parens around
+ the sugared forms; ~inCallback keeps parameters on one line. *)
+ and printExprFunParameters ~inCallback ~uncurried ~hasConstraint parameters cmtTbl =
+ match parameters with
+ (* let f = _ => () *)
+ | [ParsetreeViewer.Parameter {
+ attrs = [];
+ lbl = Asttypes.Nolabel;
+ defaultExpr = None;
+ pat = {Parsetree.ppat_desc = Ppat_any}
+ }] when not uncurried ->
+ if hasConstraint then Doc.text "(_)" else Doc.text "_"
+ (* let f = a => () *)
+ | [ParsetreeViewer.Parameter {
+ attrs = [];
+ lbl = Asttypes.Nolabel;
+ defaultExpr = None;
+ pat = {Parsetree.ppat_desc = Ppat_var stringLoc}
+ }] when not uncurried ->
+ let txtDoc =
+ let var = printIdentLike stringLoc.txt in
+ if hasConstraint then addParens var else var
+ in
+ printComments txtDoc cmtTbl stringLoc.loc
+ (* let f = () => () *)
+ | [ParsetreeViewer.Parameter {
+ attrs = [];
+ lbl = Asttypes.Nolabel;
+ defaultExpr = None;
+ pat = {ppat_desc = Ppat_construct({txt = Longident.Lident "()"}, None)}
+ }] when not uncurried ->
+ Doc.text "()"
+ (* let f = (~greeting, ~from as hometown, ~x=?) => () *)
+ | parameters ->
+ let lparen = if uncurried then Doc.text "(. " else Doc.lparen in
+ let shouldHug = ParsetreeViewer.parametersShouldHug parameters in
+ let printedParamaters = Doc.concat [
+ if shouldHug || inCallback then Doc.nil else Doc.softLine;
+ Doc.join ~sep:(Doc.concat [Doc.comma; if inCallback then Doc.space else Doc.line])
+ (List.map (fun p -> printExpFunParameter p cmtTbl) parameters)
+ ] in
+ Doc.group (
+ Doc.concat [
+ lparen;
+ if shouldHug || inCallback then
+ printedParamaters
+ else Doc.indent printedParamaters;
+ if shouldHug || inCallback then
+ Doc.nil
+ else
+ Doc.concat [Doc.trailingComma; Doc.softLine];
+ Doc.rparen;
+ ]
+ )
+
+ (* Prints one function parameter: either a `type a b` newtype group, or a
+ (possibly labelled, possibly defaulted) pattern parameter. Punned labels
+ (~d, ~d: t) are detected by comparing the label with the bound name. *)
+ and printExpFunParameter parameter cmtTbl =
+ match parameter with
+ | ParsetreeViewer.NewTypes {attrs; locs = lbls} ->
+ Doc.group (
+ Doc.concat [
+ printAttributes attrs;
+ Doc.text "type ";
+ Doc.join ~sep:Doc.space (List.map (fun lbl ->
+ printComments (printIdentLike lbl.Asttypes.txt) cmtTbl lbl.Asttypes.loc
+ ) lbls)
+ ]
+ )
+ | Parameter {attrs; lbl; defaultExpr; pat = pattern} ->
+ let (isUncurried, attrs) = ParsetreeViewer.processUncurriedAttribute attrs in
+ let uncurried = if isUncurried then Doc.concat [Doc.dot; Doc.space] else Doc.nil in
+ let attrs = printAttributes attrs in
+ (* =defaultValue *)
+ let defaultExprDoc = match defaultExpr with
+ | Some expr -> Doc.concat [
+ Doc.text "=";
+ printExpressionWithComments expr cmtTbl
+ ]
+ | None -> Doc.nil
+ in
+ (* ~from as hometown
+ * ~from -> punning *)
+ let labelWithPattern = match (lbl, pattern) with
+ | (Asttypes.Nolabel, pattern) -> printPattern pattern cmtTbl
+ | (
+ (Asttypes.Labelled lbl | Optional lbl),
+ {ppat_desc = Ppat_var stringLoc;
+ ppat_attributes = ([] | [({Location.txt = "ns.namedArgLoc";}, _)])
+ }
+ ) when lbl = stringLoc.txt ->
+ (* ~d *)
+ Doc.concat [
+ Doc.text "~";
+ printIdentLike lbl;
+ ]
+ | (
+ (Asttypes.Labelled lbl | Optional lbl),
+ ({ppat_desc = Ppat_constraint ({ ppat_desc = Ppat_var { txt } }, typ);
+ ppat_attributes = ([] | [({Location.txt = "ns.namedArgLoc";}, _)])
+ })
+ ) when lbl = txt ->
+ (* ~d: e *)
+ Doc.concat [
+ Doc.text "~";
+ printIdentLike lbl;
+ Doc.text ": ";
+ printTypExpr typ cmtTbl;
+ ]
+ | ((Asttypes.Labelled lbl | Optional lbl), pattern) ->
+ (* ~b as c *)
+ Doc.concat [
+ Doc.text "~";
+ printIdentLike lbl;
+ Doc.text " as ";
+ printPattern pattern cmtTbl
+ ]
+ in
+ (* `=?` only when optional with no explicit default *)
+ let optionalLabelSuffix = match (lbl, defaultExpr) with
+ | (Asttypes.Optional _, None) -> Doc.text "=?"
+ | _ -> Doc.nil
+ in
+ let doc = Doc.group (
+ Doc.concat [
+ uncurried;
+ attrs;
+ labelWithPattern;
+ defaultExprDoc;
+ optionalLabelSuffix;
+ ]
+ ) in
+ (* comment range starts at a ns.namedArgLoc attribute when present, and
+ extends to the default expression when there is one *)
+ let cmtLoc = match defaultExpr with
+ | None ->
+ begin match pattern.ppat_attributes with
+ | ({Location.txt = "ns.namedArgLoc"; loc}, _)::_ ->
+ {loc with loc_end = pattern.ppat_loc.loc_end}
+ | _ -> pattern.ppat_loc
+ end
+ | Some expr ->
+ let startPos = match pattern.ppat_attributes with
+ | ({Location.txt = "ns.namedArgLoc"; loc}, _)::_ ->
+ loc.loc_start
+ | _ -> pattern.ppat_loc.loc_start
+ in {
+ pattern.ppat_loc with
+ loc_start = startPos;
+ loc_end = expr.pexp_loc.loc_end
+ }
+ in
+ printComments doc cmtTbl cmtLoc
+
+ (* Prints a block of statements (let/let module/let exception/open/sequence
+ chains) one row per statement; ~braces wraps the rows in `{ ... }`.
+ collectRows walks the chain, pairing each statement's doc with the
+ location used for comment attachment, and returns rows in source order. *)
+ and printExpressionBlock ~braces expr cmtTbl =
+ let rec collectRows acc expr = match expr.Parsetree.pexp_desc with
+ | Parsetree.Pexp_letmodule (modName, modExpr, expr2) ->
+ let name =
+ let doc = Doc.text modName.txt in
+ printComments doc cmtTbl modName.loc
+ in
+ let letModuleDoc = Doc.concat [
+ Doc.text "module ";
+ name;
+ Doc.text " = ";
+ printModExpr modExpr cmtTbl;
+ ] in
+ let loc = {expr.pexp_loc with loc_end = modExpr.pmod_loc.loc_end} in
+ collectRows ((loc, letModuleDoc)::acc) expr2
+ | Pexp_letexception (extensionConstructor, expr2) ->
+ (* widen the row's loc to cover a leading comment, so it stays attached *)
+ let loc =
+ let loc = {expr.pexp_loc with loc_end = extensionConstructor.pext_loc.loc_end} in
+ match getFirstLeadingComment cmtTbl loc with
+ | None -> loc
+ | Some comment ->
+ let cmtLoc = Comment.loc comment in
+ {cmtLoc with loc_end = loc.loc_end}
+ in
+ let letExceptionDoc = printExceptionDef extensionConstructor cmtTbl in
+ collectRows ((loc, letExceptionDoc)::acc) expr2
+ | Pexp_open (overrideFlag, longidentLoc, expr2) ->
+ let openDoc = Doc.concat [
+ Doc.text "open";
+ printOverrideFlag overrideFlag;
+ Doc.space;
+ printLongidentLocation longidentLoc cmtTbl;
+ ] in
+ let loc = {expr.pexp_loc with loc_end = longidentLoc.loc.loc_end} in
+ collectRows ((loc, openDoc)::acc) expr2
+ | Pexp_sequence (expr1, expr2) ->
+ let exprDoc =
+ let doc = printExpression expr1 cmtTbl in
+ match Parens.expr expr1 with
+ | Parens.Parenthesized -> addParens doc
+ | Braced braces -> printBraces doc expr1 braces
+ | Nothing -> doc
+ in
+ let loc = expr1.pexp_loc in
+ collectRows ((loc, exprDoc)::acc) expr2
+ | Pexp_let (recFlag, valueBindings, expr2) ->
+ (* loc spans from the first to the last value binding, widened to any
+ leading comment *)
+ let loc =
+ let loc = match (valueBindings, List.rev valueBindings) with
+ | (vb::_, lastVb::_) -> {vb.pvb_loc with loc_end = lastVb.pvb_loc.loc_end}
+ | _ -> Location.none
+ in
+ match getFirstLeadingComment cmtTbl loc with
+ | None -> loc
+ | Some comment ->
+ let cmtLoc = Comment.loc comment in
+ {cmtLoc with loc_end = loc.loc_end}
+ in
+ let recFlag = match recFlag with
+ | Asttypes.Nonrecursive -> Doc.nil
+ | Asttypes.Recursive -> Doc.text "rec "
+ in
+ let letDoc = printValueBindings ~recFlag valueBindings cmtTbl in
+ (* let () = {
+ * let () = foo()
+ * ()
+ * }
+ * We don't need to print the () on the last line of the block
+ *)
+ begin match expr2.pexp_desc with
+ | Pexp_construct ({txt = Longident.Lident "()"}, _) ->
+ List.rev ((loc, letDoc)::acc)
+ | _ ->
+ collectRows ((loc, letDoc)::acc) expr2
+ end
+ | _ ->
+ (* last expression of the block *)
+ let exprDoc =
+ let doc = printExpression expr cmtTbl in
+ match Parens.expr expr with
+ | Parens.Parenthesized -> addParens doc
+ | Braced braces -> printBraces doc expr braces
+ | Nothing -> doc
+ in
+ List.rev ((expr.pexp_loc, exprDoc)::acc)
+ in
+ let rows = collectRows [] expr in
+ let block =
+ printList
+ ~getLoc:fst
+ ~nodes:rows
+ ~print:(fun (_, doc) _ -> doc)
+ ~forceBreak:true
+ cmtTbl
+ in
+ Doc.breakableGroup ~forceBreak:true (
+ if braces then
+ Doc.concat [
+ Doc.lbrace;
+ Doc.indent (
+ Doc.concat [
+ Doc.line;
+ block;
+ ]
+ );
+ Doc.line;
+ Doc.rbrace;
+ ]
+ else block
+ )
+
+ (*
+ * // user types:
+ * let f = (a, b) => { a + b }
+ *
+ * // printer: everything is on one line
+ * let f = (a, b) => { a + b }
+ *
+ * // user types: over multiple lines
+ * let f = (a, b) => {
+ * a + b
+ * }
+ *
+ * // printer: over multiple lines
+ * let f = (a, b) => {
+ * a + b
+ * }
+ *)
+ and printBraces doc expr bracesLoc =
+ (* the break is forced iff the user's braces spanned multiple lines *)
+ let overMultipleLines =
+ let open Location in
+ bracesLoc.loc_end.pos_lnum > bracesLoc.loc_start.pos_lnum
+ in
+ match expr.Parsetree.pexp_desc with
+ | Pexp_letmodule _
+ | Pexp_letexception _
+ | Pexp_let _
+ | Pexp_open _
+ | Pexp_sequence _ ->
+ (* already has braces *)
+ doc
+ | _ ->
+ Doc.breakableGroup ~forceBreak:overMultipleLines (
+ Doc.concat [
+ Doc.lbrace;
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ if Parens.bracedExpr expr then addParens doc else doc;
+ ]
+ );
+ Doc.softLine;
+ Doc.rbrace;
+ ]
+ )
+
+ (* "!" marks an overriding open; a fresh open prints nothing extra *)
+ and printOverrideFlag = function
+ | Asttypes.Override -> Doc.text "!"
+ | Asttypes.Fresh -> Doc.nil
+
+ (* keyword separating the bounds of a for-loop, with surrounding spaces *)
+ and printDirectionFlag = function
+ | Asttypes.Upto -> Doc.text " to "
+ | Asttypes.Downto -> Doc.text " downto "
+
+ (* Prints one `field: value` row of a record literal. *)
+ and printRecordRow (lbl, expr) cmtTbl =
+ let cmtLoc = {lbl.loc with loc_end = expr.pexp_loc.loc_end} in
+ let doc = Doc.group (Doc.concat [
+ printLidentPath lbl cmtTbl;
+ Doc.text ": ";
+ (let doc = printExpressionWithComments expr cmtTbl in
+ match Parens.expr expr with
+ | Parens.Parenthesized -> addParens doc
+ | Braced braces -> printBraces doc expr braces
+ | Nothing -> doc);
+ ]) in
+ printComments doc cmtTbl cmtLoc
+
+ (* Prints one `"field": value` row of a bs-object literal; the label is
+ always double-quoted. *)
+ and printBsObjectRow (lbl, expr) cmtTbl =
+ let cmtLoc = {lbl.loc with loc_end = expr.pexp_loc.loc_end} in
+ let lblDoc =
+ let doc = Doc.concat [
+ Doc.text "\"";
+ printLongident lbl.txt;
+ Doc.text "\"";
+ ] in
+ printComments doc cmtTbl lbl.loc
+ in
+ let doc = Doc.concat [
+ lblDoc;
+ Doc.text ": ";
+ printExpressionWithComments expr cmtTbl
+ ] in
+ printComments doc cmtTbl cmtLoc
+
+ (* The optional loc indicates whether we need to print the attributes in
+ * relation to some location. In practise this means the following:
+ * `@attr type t = string` -> on the same line, print on the same line
+ * `@attr
+ * type t = string` -> attr is on prev line, print the attributes
+ * with a line break between, we respect the users' original layout *)
+ and printAttributes ?loc (attrs: Parsetree.attributes) =
+ (* internal parsing attrs (filtered by ParsetreeViewer) are never printed *)
+ match ParsetreeViewer.filterParsingAttrs attrs with
+ | [] -> Doc.nil
+ | attrs ->
+ let lineBreak = match loc with
+ | None -> Doc.line
+ | Some loc -> begin match List.rev attrs with
+ | ({loc = firstLoc}, _)::_ when loc.loc_start.pos_lnum > firstLoc.loc_end.pos_lnum ->
+ Doc.hardLine;
+ | _ -> Doc.line
+ end
+ in
+ Doc.concat [
+ Doc.group (Doc.join ~sep:Doc.line (List.map printAttribute attrs));
+ lineBreak;
+ ]
+
+ (* Prints `@name` optionally followed by a parenthesized payload; this
+ variant does not attach comments (uses an empty comment table). *)
+ and printAttribute ((id, payload) : Parsetree.attribute) =
+ let attrName = Doc.concat [
+ Doc.text "@";
+ Doc.text id.txt
+ ] in
+ match payload with
+ | PStr [{pstr_desc = Pstr_eval (expr, attrs)}] ->
+ let exprDoc = printExpression expr CommentTable.empty in
+ let needsParens = match attrs with | [] -> false | _ -> true in
+ Doc.group (
+ Doc.concat [
+ attrName;
+ addParens (
+ Doc.concat [
+ printAttributes attrs;
+ if needsParens then addParens exprDoc else exprDoc;
+ ]
+ )
+ ]
+ )
+ | PTyp typ ->
+ Doc.group (
+ Doc.concat [
+ attrName;
+ Doc.lparen;
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ Doc.text ": ";
+ printTypExpr typ CommentTable.empty;
+ ]
+ );
+ Doc.softLine;
+ Doc.rparen;
+ ]
+ )
+ | _ -> attrName
+
+ (* Same as printAttribute but threads the real comment table through the
+ payload expression; only handles the PStr-eval payload shape. *)
+ and printAttributeWithComments ((id, payload) : Parsetree.attribute) cmtTbl =
+ let attrName = Doc.text ("@" ^ id.txt) in
+ match payload with
+ | PStr [{pstr_desc = Pstr_eval (expr, attrs)}] ->
+ let exprDoc = printExpressionWithComments expr cmtTbl in
+ let needsParens = match attrs with | [] -> false | _ -> true in
+ Doc.group (
+ Doc.concat [
+ attrName;
+ addParens (
+ Doc.concat [
+ printAttributes attrs;
+ if needsParens then addParens exprDoc else exprDoc;
+ ]
+ )
+ ]
+ )
+ | _ -> attrName
+
+ (* Prints a module expression: path, structure, unpack(...), extension,
+ functor application, constraint, or functor definition. *)
+ and printModExpr modExpr cmtTbl =
+ let doc = match modExpr.pmod_desc with
+ | Pmod_ident longidentLoc ->
+ printLongidentLocation longidentLoc cmtTbl
+ | Pmod_structure structure ->
+ Doc.breakableGroup ~forceBreak:true (
+ Doc.concat [
+ Doc.lbrace;
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ printStructure structure cmtTbl;
+ ];
+ );
+ Doc.softLine;
+ Doc.rbrace;
+ ]
+ )
+ | Pmod_unpack expr ->
+ (* hug the content of unpack(...) when it is a let-block *)
+ let shouldHug = match expr.pexp_desc with
+ | Pexp_let _ -> true
+ | Pexp_constraint (
+ {pexp_desc = Pexp_let _ },
+ {ptyp_desc = Ptyp_package _packageType}
+ ) -> true
+ | _ -> false
+ in
+ (* split off a package-type constraint so it prints as `: S with ...` *)
+ let (expr, moduleConstraint) = match expr.pexp_desc with
+ | Pexp_constraint (
+ expr,
+ {ptyp_desc = Ptyp_package packageType; ptyp_loc}
+ ) ->
+ let packageDoc =
+ let doc = printPackageType ~printModuleKeywordAndParens:false packageType cmtTbl in
+ printComments doc cmtTbl ptyp_loc
+ in
+ let typeDoc = Doc.group (Doc.concat [
+ Doc.text ":";
+ Doc.indent (
+ Doc.concat [
+ Doc.line;
+ packageDoc
+ ]
+ )
+ ]) in
+ (expr, typeDoc)
+ | _ -> (expr, Doc.nil)
+ in
+ let unpackDoc = Doc.group(Doc.concat [
+ printExpressionWithComments expr cmtTbl;
+ moduleConstraint;
+ ]) in
+ Doc.group (
+ Doc.concat [
+ Doc.text "unpack(";
+ if shouldHug then unpackDoc
+ else
+ Doc.concat [
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ unpackDoc;
+ ]
+ );
+ Doc.softLine;
+ ];
+ Doc.rparen;
+ ]
+ )
+ | Pmod_extension extension ->
+ printExtensionWithComments ~atModuleLvl:false extension cmtTbl
+ | Pmod_apply _ ->
+ let (args, callExpr) = ParsetreeViewer.modExprApply modExpr in
+ (* F() sugar: a single empty-structure argument prints as `()` *)
+ let isUnitSugar = match args with
+ | [{pmod_desc = Pmod_structure []}] -> true
+ | _ -> false
+ in
+ (* a single structure argument hugs the parens without indenting *)
+ let shouldHug = match args with
+ | [{pmod_desc = Pmod_structure _}] -> true
+ | _ -> false
+ in
+ Doc.group (
+ Doc.concat [
+ printModExpr callExpr cmtTbl;
+ if isUnitSugar then
+ printModApplyArg (List.hd args [@doesNotRaise]) cmtTbl
+ else
+ Doc.concat [
+ Doc.lparen;
+ if shouldHug then
+ printModApplyArg (List.hd args [@doesNotRaise]) cmtTbl
+ else
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (
+ List.map (fun modArg -> printModApplyArg modArg cmtTbl) args
+ )
+ ]
+ );
+ if not shouldHug then
+ Doc.concat [
+ Doc.trailingComma;
+ Doc.softLine;
+ ]
+ else Doc.nil;
+ Doc.rparen;
+ ]
+ ]
+ )
+ | Pmod_constraint (modExpr, modType) ->
+ Doc.concat [
+ printModExpr modExpr cmtTbl;
+ Doc.text ": ";
+ printModType modType cmtTbl;
+ ]
+ | Pmod_functor _ ->
+ printModFunctor modExpr cmtTbl
+ in
+ printComments doc cmtTbl modExpr.pmod_loc
+
+ (* Prints a functor definition `(params): Constraint => body`. *)
+ and printModFunctor modExpr cmtTbl =
+ let (parameters, returnModExpr) = ParsetreeViewer.modExprFunctor modExpr in
+ (* let shouldInline = match returnModExpr.pmod_desc with *)
+ (* | Pmod_structure _ | Pmod_ident _ -> true *)
+ (* | Pmod_constraint ({pmod_desc = Pmod_structure _}, _) -> true *)
+ (* | _ -> false *)
+ (* in *)
+ let (returnConstraint, returnModExpr) = match returnModExpr.pmod_desc with
+ | Pmod_constraint (modExpr, modType) ->
+ let constraintDoc =
+ let doc = printModType modType cmtTbl in
+ if Parens.modExprFunctorConstraint modType then addParens doc else doc
+ in
+ let modConstraint = Doc.concat [
+ Doc.text ": ";
+ constraintDoc;
+ ] in
+ (modConstraint, printModExpr modExpr cmtTbl)
+ | _ -> (Doc.nil, printModExpr returnModExpr cmtTbl)
+ in
+ let parametersDoc = match parameters with
+ (* a single "*" parameter represents a generative functor: `()` *)
+ | [(attrs, {txt = "*"}, None)] ->
+ let attrs = match attrs with
+ | [] -> Doc.nil
+ | attrs -> Doc.concat [
+ Doc.join ~sep:Doc.line (List.map printAttribute attrs);
+ Doc.line;
+ ] in
+ Doc.group (Doc.concat [
+ attrs;
+ Doc.text "()"
+ ])
+ (* a single unconstrained, unattributed parameter prints bare *)
+ | [([], {txt = lbl}, None)] -> Doc.text lbl
+ | parameters ->
+ Doc.group (
+ Doc.concat [
+ Doc.lparen;
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (
+ List.map (fun param -> printModFunctorParam param cmtTbl) parameters
+ )
+ ]
+ );
+ Doc.trailingComma;
+ Doc.softLine;
+ Doc.rparen;
+ ]
+ )
+ in
+ Doc.group (
+ Doc.concat [
+ parametersDoc;
+ returnConstraint;
+ Doc.text " => ";
+ returnModExpr
+ ]
+ )
+
+ (* Prints a single functor parameter `@attrs Name: ModType`. *)
+ and printModFunctorParam (attrs, lbl, optModType) cmtTbl =
+ let cmtLoc = match optModType with
+ | None -> lbl.Asttypes.loc
+ | Some modType -> {lbl.loc with loc_end =
+ modType.Parsetree.pmty_loc.loc_end
+ }
+ in
+ let attrs = match attrs with
+ | [] -> Doc.nil
+ | attrs -> Doc.concat [
+ Doc.join ~sep:Doc.line (List.map printAttribute attrs);
+ Doc.line;
+ ] in
+ let lblDoc =
+ let doc = Doc.text lbl.txt in
+ printComments doc cmtTbl lbl.loc
+ in
+ let doc = Doc.group (
+ Doc.concat [
+ attrs;
+ lblDoc;
+ (match optModType with
+ | None -> Doc.nil
+ | Some modType ->
+ Doc.concat [
+ Doc.text ": ";
+ printModType modType cmtTbl
+ ]);
+ ]
+ ) in
+ printComments doc cmtTbl cmtLoc
+
+ (* Prints a functor-application argument; an empty structure is `()`. *)
+ and printModApplyArg modExpr cmtTbl =
+ match modExpr.pmod_desc with
+ | Pmod_structure [] -> Doc.text "()"
+ | _ -> printModExpr modExpr cmtTbl
+
+
+ (* Prints `exception Name(args): gadt` or `exception Name = Rebind`. *)
+ and printExceptionDef (constr : Parsetree.extension_constructor) cmtTbl =
+ let kind = match constr.pext_kind with
+ | Pext_rebind longident -> Doc.indent (
+ Doc.concat [
+ Doc.text " =";
+ Doc.line;
+ printLongidentLocation longident cmtTbl;
+ ]
+ )
+ | Pext_decl (Pcstr_tuple [], None) -> Doc.nil
+ | Pext_decl (args, gadt) ->
+ let gadtDoc = match gadt with
+ | Some typ -> Doc.concat [
+ Doc.text ": ";
+ printTypExpr typ cmtTbl
+ ]
+ | None -> Doc.nil
+ in
+ Doc.concat [
+ printConstructorArguments ~indent:false args cmtTbl;
+ gadtDoc
+ ]
+ in
+ let name =
+ printComments
+ (Doc.text constr.pext_name.txt)
+ cmtTbl
+ constr.pext_name.loc
+ in
+ let doc = Doc.group (
+ Doc.concat [
+ printAttributes constr.pext_attributes;
+ Doc.text "exception ";
+ name;
+ kind
+ ]
+ ) in
+ printComments doc cmtTbl constr.pext_loc
+
+ (* Prints the i-th constructor of a `type t += ...` extension; the leading
+ bar is unconditional after the first constructor, otherwise only when
+ the group breaks. *)
+ and printExtensionConstructor (constr : Parsetree.extension_constructor) cmtTbl i =
+ let attrs = printAttributes constr.pext_attributes in
+ let bar = if i > 0 then Doc.text "| "
+ else Doc.ifBreaks (Doc.text "| ") Doc.nil
+ in
+ let kind = match constr.pext_kind with
+ | Pext_rebind longident -> Doc.indent (
+ Doc.concat [
+ Doc.text " =";
+ Doc.line;
+ printLongidentLocation longident cmtTbl;
+ ]
+ )
+ | Pext_decl (Pcstr_tuple [], None) -> Doc.nil
+ | Pext_decl (args, gadt) ->
+ let gadtDoc = match gadt with
+ | Some typ -> Doc.concat [
+ Doc.text ": ";
+ printTypExpr typ cmtTbl;
+ ]
+ | None -> Doc.nil
+ in
+ Doc.concat [
+ printConstructorArguments ~indent:false args cmtTbl;
+ gadtDoc
+ ]
+ in
+ let name =
+ printComments (Doc.text constr.pext_name.txt) cmtTbl constr.pext_name.loc
+ in
+ Doc.concat [
+ bar;
+ Doc.group (
+ Doc.concat [
+ attrs;
+ name;
+ kind;
+ ]
+ )
+ ]
+
+ (* Entry point: walks comments into a fresh table, renders the structure
+ at the given width and writes the result to stdout. *)
+ let printImplementation ~width (s: Parsetree.structure) comments =
+ let cmtTbl = CommentTable.make () in
+ CommentTable.walkStructure s cmtTbl comments;
+ (* CommentTable.log cmtTbl; *)
+ let doc = printStructure s cmtTbl in
+ (* Doc.debug doc; *)
+ let stringDoc = Doc.toString ~width doc in
+ print_string stringDoc
+
+ (* Same as printImplementation, but for interface (.resi) signatures. *)
+ let printInterface ~width (s: Parsetree.signature) comments =
+ let cmtTbl = CommentTable.make () in
+ CommentTable.walkSignature s cmtTbl comments;
+ let stringDoc = Doc.toString ~width (printSignature s cmtTbl) in
+ print_string stringDoc
+
+end
+
+ module Scanner = struct
+ (* lexing sub-modes that change token interpretation:
+ Template for `...` strings, Jsx for <...>, Diamond for type args *)
+ type mode = Template | Jsx | Diamond
+
+ (* Mutable scanner state over a byte buffer; `err` is a pluggable
+ diagnostics callback. *)
+ type t = {
+ filename: string;
+ src: bytes;
+ mutable err:
+ startPos: Lexing.position
+ -> endPos: Lexing.position
+ -> Diagnostics.category
+ -> unit;
+ mutable ch: int; (* current character *)
+ mutable offset: int; (* character offset *)
+ mutable rdOffset: int; (* reading offset (position after current character) *)
+ mutable lineOffset: int; (* current line offset *)
+ mutable lnum: int; (* current line number *)
+ mutable mode: mode list; (* stack of active modes, innermost first *)
+ }
+
+ (* push a mode onto the mode stack *)
+ let setDiamondMode scanner =
+ scanner.mode <- Diamond::scanner.mode
+
+ let setTemplateMode scanner =
+ scanner.mode <- Template::scanner.mode
+
+ let setJsxMode scanner =
+ scanner.mode <- Jsx::scanner.mode
+
+ (* pop only if the given mode is on top; otherwise a no-op *)
+ let popMode scanner mode =
+ match scanner.mode with
+ | m::ms when m = mode ->
+ scanner.mode <- ms
+ | _ -> ()
+
+ (* the in*Mode predicates test only the top of the stack *)
+ let inDiamondMode scanner = match scanner.mode with
+ | Diamond::_ -> true
+ | _ -> false
+
+ let inJsxMode scanner = match scanner.mode with
+ | Jsx::_ -> true
+ | _ -> false
+
+ let inTemplateMode scanner = match scanner.mode with
+ | Template::_ -> true
+ | _ -> false
+
+ (* Snapshot the scanner's current location as a Lexing.position. *)
+ let position scanner = Lexing.{
+ pos_fname = scanner.filename;
+ (* line number *)
+ pos_lnum = scanner.lnum;
+ (* offset of the beginning of the line (number
+ of characters between the beginning of the scanner and the beginning
+ of the line) *)
+ pos_bol = scanner.lineOffset;
+ (* [pos_cnum] is the offset of the position (number of
+ characters between the beginning of the scanner and the position). *)
+ pos_cnum = scanner.offset;
+ }
+
+ (* Advance one character: moves offset to rdOffset and loads the next
+ char code into ch; at end of input, ch becomes -1 (EOF sentinel). *)
+ let next scanner =
+ if scanner.rdOffset < (Bytes.length scanner.src) then (
+ scanner.offset <- scanner.rdOffset;
+ let ch = (Bytes.get [@doesNotRaise]) scanner.src scanner.rdOffset in
+ scanner.rdOffset <- scanner.rdOffset + 1;
+ scanner.ch <- int_of_char ch
+ ) else (
+ scanner.offset <- Bytes.length scanner.src;
+ scanner.ch <- -1
+ )
+
+ (* Look at the character after the current one without advancing;
+ -1 at end of input. *)
+ let peek scanner =
+ if scanner.rdOffset < (Bytes.length scanner.src) then
+ int_of_char (Bytes.unsafe_get scanner.src scanner.rdOffset)
+ else
+ -1
+
+ (* Create a scanner over buffer [b]; the initial [next] loads the first
+ character. The default err callback silently ignores diagnostics. *)
+ let make b filename =
+ let scanner = {
+ filename;
+ src = b;
+ err = (fun ~startPos:_ ~endPos:_ _ -> ());
+ ch = CharacterCodes.space;
+ offset = 0;
+ rdOffset = 0;
+ lineOffset = 0;
+ lnum = 1;
+ mode = [];
+ } in
+ next scanner;
+ scanner
+
+ (* Skip spaces, tabs and line breaks, keeping lineOffset/lnum in sync
+ when a line break is crossed. *)
+ let skipWhitespace scanner =
+ let rec scan () =
+ if scanner.ch == CharacterCodes.space || scanner.ch == CharacterCodes.tab then (
+ next scanner;
+ scan()
+ ) else if CharacterCodes.isLineBreak scanner.ch then (
+ scanner.lineOffset <- scanner.offset + 1;
+ scanner.lnum <- scanner.lnum + 1;
+ next scanner;
+ scan()
+ ) else (
+ ()
+ )
+ in
+ scan()
+
+ (* Consume letters/digits/_/' and resolve the lexeme against the keyword
+ table (returns a keyword token or an ident token). *)
+ let scanIdentifier scanner =
+ let startOff = scanner.offset in
+ while (
+ CharacterCodes.isLetter scanner.ch ||
+ CharacterCodes.isDigit scanner.ch ||
+ CharacterCodes.underscore == scanner.ch ||
+ CharacterCodes.singleQuote == scanner.ch
+ ) do
+ next scanner
+ done;
+ let str = Bytes.sub_string scanner.src startOff (scanner.offset - startOff) in
+ Token.lookupKeyword str
+
+ (* Consume a run of digits (hex digits for base > 10); underscores are
+ accepted as separators. *)
+ let scanDigits scanner ~base =
+ if base <= 10 then (
+ while CharacterCodes.isDigit scanner.ch || scanner.ch == CharacterCodes.underscore do
+ next scanner
+ done;
+ ) else (
+ while CharacterCodes.isHex scanner.ch || scanner.ch == CharacterCodes.underscore do
+ next scanner
+ done;
+ )
+
+ (* float: (0…9) { 0…9∣ _ } [. { 0…9∣ _ }] [(e∣ E) [+∣ -] (0…9) { 0…9∣ _ }] *)
+ (* Scans an int or float literal, including 0x/0o/0b prefixes, an optional
+ fractional part, an optional exponent, and a one-letter suffix. *)
+ let scanNumber scanner =
+ let startOff = scanner.offset in
+
+ (* integer part *)
+ let base, _prefix = if scanner.ch != CharacterCodes.dot then (
+ if scanner.ch == CharacterCodes._0 then (
+ next scanner;
+ let ch = CharacterCodes.lower scanner.ch in
+ if ch == CharacterCodes.Lower.x then (
+ next scanner;
+ 16, 'x'
+ ) else if ch == CharacterCodes.Lower.o then (
+ next scanner;
+ 8, 'o'
+ ) else if ch == CharacterCodes.Lower.b then (
+ next scanner;
+ 2, 'b'
+ ) else (
+ 8, '0'
+ )
+ ) else (
+ 10, ' '
+ )
+ ) else (10, ' ')
+ in
+ scanDigits scanner ~base;
+
+ (* *)
+ let isFloat = if CharacterCodes.dot == scanner.ch then (
+ next scanner;
+ scanDigits scanner ~base;
+ true
+ ) else (
+ false
+ ) in
+
+ (* exponent part *)
+ let isFloat =
+ if let exp = CharacterCodes.lower scanner.ch in
+ exp == CharacterCodes.Lower.e || exp == CharacterCodes.Lower.p
+ then (
+ next scanner;
+ if scanner.ch == CharacterCodes.plus || scanner.ch == CharacterCodes.minus then
+ next scanner;
+ scanDigits scanner ~base;
+ true
+ ) else
+ isFloat
+ in
+ let literal =
+ Bytes.sub_string scanner.src startOff (scanner.offset - startOff)
+ in
+
+ (* suffix *)
+ (* any letter g..z / G..Z following the literal is taken as a suffix;
+ a..f are excluded since they are valid hex digits *)
+ let suffix =
+ if scanner.ch >= CharacterCodes.Lower.g && scanner.ch <= CharacterCodes.Lower.z
+ || scanner.ch >= CharacterCodes.Upper.g && scanner.ch <= CharacterCodes.Upper.z
+ then (
+ let ch = scanner.ch in
+ next scanner;
+ Some (Char.unsafe_chr ch)
+ ) else
+ None
+ in
+ if isFloat then
+ Token.Float {f = literal; suffix}
+ else
+ Token.Int {i = literal; suffix}
+
+ (* Scans an exotic identifier \"..." up to the closing double quote,
+ reporting a diagnostic on EOF or an unterminated line. Returns the
+ contents as a Lident token. *)
+ let scanExoticIdentifier scanner =
+ next scanner;
+ let buffer = Buffer.create 20 in
+ let startPos = position scanner in
+
+ let rec scan () =
+ if scanner.ch == CharacterCodes.eof then
+ let endPos = position scanner in
+ scanner.err ~startPos ~endPos (Diagnostics.message "Did you forget a \" here?")
+ else if scanner.ch == CharacterCodes.doubleQuote then (
+ next scanner
+ ) else if CharacterCodes.isLineBreak scanner.ch then (
+ scanner.lineOffset <- scanner.offset + 1;
+ scanner.lnum <- scanner.lnum + 1;
+ let endPos = position scanner in
+ scanner.err ~startPos ~endPos (Diagnostics.message "Did you forget a \" here?");
+ next scanner
+ ) else (
+ Buffer.add_char buffer ((Char.chr [@doesNotRaise]) scanner.ch);
+ next scanner;
+ scan()
+ )
+ in
+ scan();
+ Token.Lident (Buffer.contents buffer)
+
+ (* Validates (but does not decode) an escape sequence inside a string;
+ called after the backslash was consumed. Simple escapes are skipped;
+ decimal/octal/hex escapes are range-checked (max 255) and diagnostics
+ are reported through scanner.err. *)
+ let scanStringEscapeSequence ~startPos scanner =
+ (* \ already consumed *)
+ if CharacterCodes.Lower.n == scanner.ch
+ || CharacterCodes.Lower.t == scanner.ch
+ || CharacterCodes.Lower.b == scanner.ch
+ || CharacterCodes.Lower.r == scanner.ch
+ || CharacterCodes.backslash == scanner.ch
+ || CharacterCodes.space == scanner.ch
+ || CharacterCodes.singleQuote == scanner.ch
+ || CharacterCodes.doubleQuote == scanner.ch
+ then
+ next scanner
+ else
+ (* (n, base, max) = digit count, numeral base, highest legal value *)
+ let (n, base, max) =
+ if CharacterCodes.isDigit scanner.ch then
+ (* decimal *)
+ (3, 10, 255)
+ else if scanner.ch == CharacterCodes.Lower.o then
+ (* octal *)
+ let () = next scanner in
+ (3, 8, 255)
+ else if scanner.ch == CharacterCodes.Lower.x then
+ (* hex *)
+ let () = next scanner in
+ (2, 16, 255)
+ else
+ (* unknown escape sequence
+ * TODO: we should warn the user here. Let's not make it a hard error for now, for reason compat *)
+ (* let pos = position scanner in *)
+ (* let () = *)
+ (* let msg = if scanner.ch == -1 then *)
+ (* "unclosed escape sequence" *)
+ (* else "unknown escape sequence" *)
+ (* in *)
+ (* scanner.err ~startPos ~endPos:pos (Diagnostics.message msg) *)
+ (* in *)
+ (-1, -1, -1)
+ in
+ if n < 0 then ()
+ else
+ (* accumulate n digits in the given base; -1 signals an error already
+ reported *)
+ let rec while_ n x =
+ if n == 0 then x
+ else
+ let d = CharacterCodes.digitValue scanner.ch in
+ if d >= base then
+ let pos = position scanner in
+ let msg = if scanner.ch == -1 then
+ "unclosed escape sequence"
+ else "unknown escape sequence"
+ in
+ scanner.err ~startPos ~endPos:pos (Diagnostics.message msg);
+ -1
+ else
+ let () = next scanner in
+ while_ (n - 1) (x * base + d)
+ in
+ let x = while_ n 0 in
+ if x > max then
+ let pos = position scanner in
+ let msg = "invalid escape sequence (value too high)" in
+ scanner.err ~startPos ~endPos:pos (Diagnostics.message msg);
+ ()
+
+ (* Scans a double-quoted string (opening quote already consumed); escape
+ sequences are validated but kept verbatim in the returned token text.
+ The trailing -1 excludes the closing quote from the substring. *)
+ let scanString scanner =
+ let offs = scanner.offset in
+
+ let startPos = position scanner in
+ let rec scan () =
+ if scanner.ch == CharacterCodes.eof then
+ let endPos = position scanner in
+ scanner.err ~startPos ~endPos Diagnostics.unclosedString
+ else if scanner.ch == CharacterCodes.doubleQuote then (
+ next scanner;
+ ) else if scanner.ch == CharacterCodes.backslash then (
+ let startPos = position scanner in
+ next scanner;
+ scanStringEscapeSequence ~startPos scanner;
+ scan ()
+ ) else if CharacterCodes.isLineBreak scanner.ch then (
+ scanner.lineOffset <- scanner.offset + 1;
+ scanner.lnum <- scanner.lnum + 1;
+ next scanner;
+ scan ()
+ ) else (
+ next scanner;
+ scan ()
+ )
+ in
+ scan ();
+ Token.String (Bytes.sub_string scanner.src offs (scanner.offset - offs - 1))
+
+ (* I wonder if this gets inlined *)
+ (* Decode n digits in the given base into an int, consuming them. *)
+ let convertNumber scanner ~n ~base =
+ let x = ref 0 in
+ for _ = n downto 1 do
+ let d = CharacterCodes.digitValue scanner.ch in
+ x := (!x * base) + d;
+ next scanner
+ done;
+ !x
+
+ (* Decode a character-literal escape (backslash already consumed) into the
+ actual char; consumes the closing single quote before returning. *)
+ let scanEscape scanner =
+ (* let offset = scanner.offset in *)
+ let c = match scanner.ch with
+ | 98 (* b *) -> next scanner; '\008'
+ | 110 (* n *) -> next scanner; '\010'
+ | 114 (* r *) -> next scanner; '\013'
+ | 116 (* t *) -> next scanner; '\009'
+ | ch when CharacterCodes.isDigit ch ->
+ let x = convertNumber scanner ~n:3 ~base:10 in
+ (Char.chr [@doesNotRaise]) x
+ | ch when ch == CharacterCodes.Lower.x ->
+ next scanner;
+ let x = convertNumber scanner ~n:2 ~base:16 in
+ (Char.chr [@doesNotRaise]) x
+ | ch when ch == CharacterCodes.Lower.o ->
+ next scanner;
+ let x = convertNumber scanner ~n:3 ~base:8 in
+ (Char.chr [@doesNotRaise]) x
+ | ch ->
+ next scanner;
+ (Char.chr [@doesNotRaise]) ch
+ in
+ next scanner; (* Consume \' *)
+ Token.Character c
+
+ (* Scans a `//` comment to end of line (or EOF) and wraps it with its
+ source location. *)
+ let scanSingleLineComment scanner =
+ let startOff = scanner.offset in
+ let startPos = position scanner in
+ while not (CharacterCodes.isLineBreak scanner.ch) && scanner.ch >= 0 do
+ next scanner
+ done;
+ let endPos = position scanner in
+ Token.Comment (
+ Comment.makeSingleLineComment
+ ~loc:(Location.{loc_start = startPos; loc_end = endPos; loc_ghost = false})
+ (Bytes.sub_string scanner.src startOff (scanner.offset - startOff))
+ )
+
+ (* Scans a (possibly nested) `/* ... */` comment, tracking nesting depth;
+ reports a diagnostic on EOF. The -2 excludes the closing `*\/` from the
+ stored text. *)
+ let scanMultiLineComment scanner =
+ let startOff = scanner.offset in
+ let startPos = position scanner in
+ let rec scan ~depth () =
+ if scanner.ch == CharacterCodes.asterisk &&
+ peek scanner == CharacterCodes.forwardslash then (
+ next scanner;
+ next scanner;
+ if depth > 0 then scan ~depth:(depth - 1) () else ()
+ ) else if scanner.ch == CharacterCodes.eof then (
+ let endPos = position scanner in
+ scanner.err ~startPos ~endPos Diagnostics.unclosedComment
+ ) else if scanner.ch == CharacterCodes.forwardslash
+ && peek scanner == CharacterCodes. asterisk then (
+ next scanner;
+ next scanner;
+ scan ~depth:(depth + 1) ()
+ ) else (
+ if CharacterCodes.isLineBreak scanner.ch then (
+ scanner.lineOffset <- scanner.offset + 1;
+ scanner.lnum <- scanner.lnum + 1;
+ );
+ next scanner;
+ scan ~depth ()
+ )
+ in
+ scan ~depth:0 ();
+ Token.Comment (
+ Comment.makeMultiLineComment
+ ~loc:(Location.{loc_start = startPos; loc_end = (position scanner); loc_ghost = false})
+ (Bytes.sub_string scanner.src startOff (scanner.offset - 2 - startOff))
+ )
+
+ (* Scans inside a template literal until the closing backtick
+ (TemplateTail) or an interpolation opener `${` (TemplatePart); pops
+ Template mode before returning. Backslash escapes for backtick,
+ backslash and dollar are skipped over. *)
+ let scanTemplate scanner =
+ let startOff = scanner.offset in
+ let startPos = position scanner in
+
+ let rec scan () =
+ if scanner.ch == CharacterCodes.eof then (
+ let endPos = position scanner in
+ scanner.err ~startPos ~endPos Diagnostics.unclosedTemplate;
+ popMode scanner Template;
+ Token.TemplateTail(
+ Bytes.sub_string scanner.src startOff (scanner.offset - 2 - startOff)
+ )
+ )
+ else if scanner.ch == CharacterCodes.backslash then (
+ next scanner;
+ if scanner.ch == CharacterCodes.backtick
+ || scanner.ch == CharacterCodes.backslash
+ || scanner.ch == CharacterCodes.dollar
+ then next scanner;
+ scan()
+ ) else if scanner.ch == CharacterCodes.backtick then (
+ next scanner;
+ let contents =
+ Bytes.sub_string scanner.src startOff (scanner.offset - 1 - startOff)
+ in
+ popMode scanner Template;
+ Token.TemplateTail contents
+ ) else if scanner.ch == CharacterCodes.dollar &&
+ peek scanner == CharacterCodes.lbrace
+ then (
+ next scanner; (* consume $ *)
+ next scanner; (* consume { *)
+ let contents =
+ Bytes.sub_string scanner.src startOff (scanner.offset - 2 - startOff)
+ in
+ popMode scanner Template;
+ Token.TemplatePart contents
+ ) else (
+ if CharacterCodes.isLineBreak scanner.ch then (
+ scanner.lineOffset <- scanner.offset + 1;
+ scanner.lnum <- scanner.lnum + 1;
+ );
+ next scanner;
+ scan()
+ )
+ in
+ scan()
+
+ let rec scan scanner =
+ if not (inTemplateMode scanner) then skipWhitespace scanner;
+ let startPos = position scanner in
+ let ch = scanner.ch in
+ let token = if inTemplateMode scanner then
+ scanTemplate scanner
+ else if ch == CharacterCodes.underscore then (
+ let nextCh = peek scanner in
+ if nextCh == CharacterCodes.underscore || CharacterCodes.isDigit nextCh || CharacterCodes.isLetter nextCh then
+ scanIdentifier scanner
+ else (
+ next scanner;
+ Token.Underscore
+ )
+ ) else if CharacterCodes.isLetter ch then
+ scanIdentifier scanner
+ else if CharacterCodes.isDigit ch then
+ scanNumber scanner
+ else begin
+ next scanner;
+ if ch == CharacterCodes.dot then
+ if scanner.ch == CharacterCodes.dot then (
+ next scanner;
+ if scanner.ch == CharacterCodes.dot then (
+ next scanner;
+ Token.DotDotDot
+ ) else (
+ Token.DotDot
+ )
+ ) else (
+ Token.Dot
+ )
+ else if ch == CharacterCodes.doubleQuote then
+ scanString scanner
+ else if ch == CharacterCodes.singleQuote then (
+ if scanner.ch == CharacterCodes.backslash
+ && not ((peek scanner) == CharacterCodes.doubleQuote) (* start of exotic ident *)
+ then (
+ next scanner;
+ scanEscape scanner
+ ) else if (peek scanner) == CharacterCodes.singleQuote then (
+ let ch = scanner.ch in
+ next scanner;
+ next scanner;
+ Token.Character ((Char.chr [@doesNotRaise]) ch)
+ ) else (
+ SingleQuote
+ )
+ ) else if ch == CharacterCodes.bang then
+ if scanner.ch == CharacterCodes.equal then (
+ next scanner;
+ if scanner.ch == CharacterCodes.equal then (
+ next scanner;
+ Token.BangEqualEqual
+ ) else (
+ Token.BangEqual
+ )
+ ) else (
+ Token.Bang
+ )
+ else if ch == CharacterCodes.semicolon then
+ Token.Semicolon
+ else if ch == CharacterCodes.equal then (
+ if scanner.ch == CharacterCodes.greaterThan then (
+ next scanner;
+ Token.EqualGreater
+ ) else if scanner.ch == CharacterCodes.equal then (
+ next scanner;
+ if scanner.ch == CharacterCodes.equal then (
+ next scanner;
+ Token.EqualEqualEqual
+ ) else (
+ Token.EqualEqual
+ )
+ ) else (
+ Token.Equal
+ )
+ ) else if ch == CharacterCodes.bar then
+ if scanner.ch == CharacterCodes.bar then (
+ next scanner;
+ Token.Lor
+ ) else if scanner.ch == CharacterCodes.greaterThan then (
+ next scanner;
+ Token.BarGreater
+ ) else (
+ Token.Bar
+ )
+ else if ch == CharacterCodes.ampersand then
+ if scanner.ch == CharacterCodes.ampersand then (
+ next scanner;
+ Token.Land
+ ) else (
+ Token.Band
+ )
+ else if ch == CharacterCodes.lparen then
+ Token.Lparen
+ else if ch == CharacterCodes.rparen then
+ Token.Rparen
+ else if ch == CharacterCodes.lbracket then
+ Token.Lbracket
+ else if ch == CharacterCodes.rbracket then
+ Token.Rbracket
+ else if ch == CharacterCodes.lbrace then
+ Token.Lbrace
+ else if ch == CharacterCodes.rbrace then
+ Token.Rbrace
+ else if ch == CharacterCodes.comma then
+ Token.Comma
+ else if ch == CharacterCodes.colon then
+ if scanner.ch == CharacterCodes.equal then(
+ next scanner;
+ Token.ColonEqual
+ ) else if (scanner.ch == CharacterCodes.greaterThan) then (
+ next scanner;
+ Token.ColonGreaterThan
+ ) else (
+ Token.Colon
+ )
+ else if ch == CharacterCodes.backslash then
+ scanExoticIdentifier scanner
+ else if ch == CharacterCodes.forwardslash then
+ if scanner.ch == CharacterCodes.forwardslash then (
+ next scanner;
+ scanSingleLineComment scanner
+ ) else if (scanner.ch == CharacterCodes.asterisk) then (
+ next scanner;
+ scanMultiLineComment scanner
+ ) else if scanner.ch == CharacterCodes.dot then (
+ next scanner;
+ Token.ForwardslashDot
+ ) else (
+ Token.Forwardslash
+ )
+ else if ch == CharacterCodes.minus then
+ if scanner.ch == CharacterCodes.dot then (
+ next scanner;
+ Token.MinusDot
+ ) else if scanner.ch == CharacterCodes.greaterThan then (
+ next scanner;
+ Token.MinusGreater;
+ ) else (
+ Token.Minus
+ )
+ else if ch == CharacterCodes.plus then
+ if scanner.ch == CharacterCodes.dot then (
+ next scanner;
+ Token.PlusDot
+ ) else if scanner.ch == CharacterCodes.plus then (
+ next scanner;
+ Token.PlusPlus
+ ) else if scanner.ch == CharacterCodes.equal then (
+ next scanner;
+ Token.PlusEqual
+ ) else (
+ Token.Plus
+ )
+ else if ch == CharacterCodes.greaterThan then
+ if scanner.ch == CharacterCodes.equal && not (inDiamondMode scanner) then (
+ next scanner;
+ Token.GreaterEqual
+ ) else (
+ Token.GreaterThan
+ )
+ else if ch == CharacterCodes.lessThan then
+      (* Imagine the following: <div><
+       * < indicates the start of a new jsx-element, the parser expects
+       * the name of a new element after the <
+       * Example: <div> <div
+       * But what if we have a / here: example </ in <div></div>
+       * This signals a closing element. To simulate the two-token lookahead,
+       * the </ is emitted as a single new token LessThanSlash *)
+ if inJsxMode scanner then (
+ skipWhitespace scanner;
+ if scanner.ch == CharacterCodes.forwardslash then
+ let () = next scanner in
+ Token.LessThanSlash
+ else
+ Token.LessThan
+ ) else if scanner.ch == CharacterCodes.equal then (
+ next scanner;
+ Token.LessEqual
+ ) else (
+ Token.LessThan
+ )
+ else if ch == CharacterCodes.hash then
+ if scanner.ch == CharacterCodes.hash then(
+ next scanner;
+ Token.HashHash
+ ) else if scanner.ch == CharacterCodes.equal then(
+ next scanner;
+ Token.HashEqual
+ ) else (
+ Token.Hash
+ )
+ else if ch == CharacterCodes.asterisk then
+ if scanner.ch == CharacterCodes.asterisk then (
+ next scanner;
+ Token.Exponentiation;
+ ) else if scanner.ch == CharacterCodes.dot then (
+ next scanner;
+ Token.AsteriskDot
+ ) else (
+ Token.Asterisk
+ )
+ else if ch == CharacterCodes.tilde then
+ Token.Tilde
+ else if ch == CharacterCodes.question then
+ Token.Question
+ else if ch == CharacterCodes.at then
+ if scanner.ch == CharacterCodes.at then (
+ next scanner;
+ Token.AtAt
+ ) else (
+ Token.At
+ )
+ else if ch == CharacterCodes.percent then
+ if scanner.ch == CharacterCodes.percent then (
+ next scanner;
+ Token.PercentPercent
+ ) else (
+ Token.Percent
+ )
+ else if ch == CharacterCodes.backtick then
+ Token.Backtick
+ else if ch == -1 then
+ Token.Eof
+ else (
+        (* if we arrive here, we're dealing with an unknown character,
+ * report the error and continue scanning… *)
+ let endPos = position scanner in
+ scanner.err ~startPos ~endPos (Diagnostics.unknownUchar ch);
+ let (_, _, token) = scan scanner in
+ token
+ )
+ end in
+ let endPos = position scanner in
+ (startPos, endPos, token)
+
+  (* Imagine: <div> <Navbar /> <
+   * is `<` the start of a jsx-child? <div …
+   * or is it the start of a closing tag? </div>
+   * reconsiderLessThan peeks at the next token and
+   * determines the correct token to disambiguate *)
+ let reconsiderLessThan scanner =
+ (* < consumed *)
+ skipWhitespace scanner;
+ if scanner.ch == CharacterCodes.forwardslash then
+ let () = next scanner in
+ Token.LessThanSlash
+ else
+ Token.LessThan
+
+ (* If an operator has whitespace around both sides, it's a binary operator *)
+ let isBinaryOp src startCnum endCnum =
+ if startCnum == 0 then false
+ else
+ let leftOk =
+ let c =
+ (startCnum - 1)
+ |> (Bytes.get [@doesNotRaise]) src
+ |> Char.code
+ in
+ c == CharacterCodes.space ||
+ c == CharacterCodes.tab ||
+ CharacterCodes.isLineBreak c
+ in
+ let rightOk =
+ let c =
+ if endCnum == Bytes.length src then -1
+ else endCnum |> (Bytes.get [@doesNotRaise]) src |> Char.code
+ in
+ c == CharacterCodes.space ||
+ c == CharacterCodes.tab ||
+ CharacterCodes.isLineBreak c ||
+ c == CharacterCodes.eof
+ in
+ leftOk && rightOk
+end
+
+(* AST for js externals *)
+module JsFfi = struct
+ type scope =
+ | Global
+ | Module of string (* bs.module("path") *)
+ | Scope of Longident.t (* bs.scope(/"window", "location"/) *)
+
+ type label_declaration = {
+ jld_attributes: Parsetree.attributes; [@live]
+ jld_name: string;
+ jld_alias: string;
+ jld_type: Parsetree.core_type;
+ jld_loc: Location.t
+ }
+
+ type importSpec =
+ | Default of label_declaration
+ | Spec of label_declaration list
+
+ type import_description = {
+ jid_loc: Location.t;
+ jid_spec: importSpec;
+ jid_scope: scope;
+ jid_attributes: Parsetree.attributes;
+ }
+
+ let decl ~attrs ~loc ~name ~alias ~typ = {
+ jld_loc = loc;
+ jld_attributes = attrs;
+ jld_name = name;
+ jld_alias = alias;
+ jld_type = typ
+ }
+
+ let importDescr ~attrs ~scope ~importSpec ~loc = {
+ jid_loc = loc;
+ jid_spec = importSpec;
+ jid_scope = scope;
+ jid_attributes = attrs;
+ }
+
+ let toParsetree importDescr =
+ let bsVal = (Location.mknoloc "bs.val", Parsetree.PStr []) in
+ let attrs = match importDescr.jid_scope with
+ | Global -> [bsVal]
+ (* @genType.import("./MyMath"),
+ * @genType.import(/"./MyMath", "default"/) *)
+ | Module s ->
+ let structure = [
+ Parsetree.Pconst_string (s, None)
+ |> Ast_helper.Exp.constant
+ |> Ast_helper.Str.eval
+ ] in
+ let genType = (Location.mknoloc "genType.import", Parsetree.PStr structure) in
+ [genType]
+ | Scope longident ->
+ let structureItem =
+ let expr = match Longident.flatten longident |> List.map (fun s ->
+ Ast_helper.Exp.constant (Parsetree.Pconst_string (s, None))
+ ) with
+ | [expr] -> expr
+ | [] as exprs | (_ as exprs) -> exprs |> Ast_helper.Exp.tuple
+ in
+ Ast_helper.Str.eval expr
+ in
+ let bsScope = (
+ Location.mknoloc "bs.scope",
+ Parsetree. PStr [structureItem]
+ ) in
+ [bsVal; bsScope]
+ in
+ let valueDescrs = match importDescr.jid_spec with
+ | Default decl ->
+ let prim = [decl.jld_name] in
+ let allAttrs =
+ List.concat [attrs; importDescr.jid_attributes]
+ |> List.map (fun attr -> match attr with
+ | (
+ {Location.txt = "genType.import"} as id,
+ Parsetree.PStr [{pstr_desc = Parsetree.Pstr_eval (moduleName, _) }]
+ ) ->
+ let default =
+ Parsetree.Pconst_string ("default", None) |> Ast_helper.Exp.constant
+ in
+ let structureItem =
+ [moduleName; default]
+ |> Ast_helper.Exp.tuple
+ |> Ast_helper.Str.eval
+ in
+ (id, Parsetree.PStr [structureItem])
+ | attr -> attr
+ )
+ in
+ [Ast_helper.Val.mk
+ ~loc:importDescr.jid_loc
+ ~prim
+ ~attrs:allAttrs
+ (Location.mknoloc decl.jld_alias)
+ decl.jld_type
+ |> Ast_helper.Str.primitive]
+ | Spec decls ->
+ List.map (fun decl ->
+ let prim = [decl.jld_name] in
+ let allAttrs = List.concat [attrs; decl.jld_attributes] in
+ Ast_helper.Val.mk
+ ~loc:importDescr.jid_loc
+ ~prim
+ ~attrs:allAttrs
+ (Location.mknoloc decl.jld_alias)
+ decl.jld_type
+ |> Ast_helper.Str.primitive ~loc:decl.jld_loc
+ ) decls
+ in
+ let jsFfiAttr = (Location.mknoloc "ns.jsFfi", Parsetree.PStr []) in
+ Ast_helper.Mod.structure ~loc:importDescr.jid_loc valueDescrs
+ |> Ast_helper.Incl.mk ~attrs:[jsFfiAttr] ~loc:importDescr.jid_loc
+ |> Ast_helper.Str.include_ ~loc:importDescr.jid_loc
+end
+
+module ParsetreeCompatibility = struct
+ let concatLongidents l1 l2 =
+ let parts1 = Longident.flatten l1 in
+ let parts2 = Longident.flatten l2 in
+ match List.concat [parts1; parts2] |> Longident.unflatten with
+ | Some longident -> longident
+ | None -> l2
+
+ (* TODO: support nested open's ? *)
+ let rec rewritePpatOpen longidentOpen pat =
+ let open Parsetree in
+ match pat.ppat_desc with
+ | Ppat_array (first::rest) ->
+ (* Color.[Red, Blue, Green] -> [Color.Red, Blue, Green] *)
+ {pat with ppat_desc = Ppat_array ((rewritePpatOpen longidentOpen first)::rest)}
+ | Ppat_tuple (first::rest) ->
+ (* Color.(Red, Blue, Green) -> (Color.Red, Blue, Green) *)
+ {pat with ppat_desc = Ppat_tuple ((rewritePpatOpen longidentOpen first)::rest)}
+ | Ppat_construct(
+ {txt = Longident.Lident "::"} as listConstructor,
+ Some ({ppat_desc=Ppat_tuple (pat::rest)} as element)
+ ) ->
+ (* Color.(list[Red, Blue, Green]) -> list[Color.Red, Blue, Green] *)
+ {pat with ppat_desc =
+ Ppat_construct (
+ listConstructor,
+ Some {element with ppat_desc = Ppat_tuple ((rewritePpatOpen longidentOpen pat)::rest)}
+ )
+ }
+ | Ppat_construct ({txt = constructor} as longidentLoc, optPattern) ->
+ (* Foo.(Bar(a)) -> Foo.Bar(a) *)
+ {pat with ppat_desc =
+ Ppat_construct (
+ {longidentLoc with txt = concatLongidents longidentOpen constructor},
+ optPattern
+ )
+ }
+ | Ppat_record (({txt = lbl} as longidentLoc, firstPat)::rest, flag) ->
+ (* Foo.{x} -> {Foo.x: x} *)
+ let firstRow = (
+ {longidentLoc with txt = concatLongidents longidentOpen lbl},
+ firstPat
+ ) in
+ {pat with ppat_desc = Ppat_record (firstRow::rest, flag)}
+ | Ppat_or (pat1, pat2) ->
+ {pat with ppat_desc = Ppat_or (
+ rewritePpatOpen longidentOpen pat1,
+ rewritePpatOpen longidentOpen pat2
+ )}
+ | Ppat_constraint (pattern, typ) ->
+ {pat with ppat_desc = Ppat_constraint (
+ rewritePpatOpen longidentOpen pattern,
+ typ
+ )}
+ | Ppat_type ({txt = constructor} as longidentLoc) ->
+ {pat with ppat_desc = Ppat_type (
+ {longidentLoc with txt = concatLongidents longidentOpen constructor}
+ )}
+ | Ppat_lazy p ->
+ {pat with ppat_desc = Ppat_lazy (rewritePpatOpen longidentOpen p)}
+ | Ppat_exception p ->
+ {pat with ppat_desc = Ppat_exception (rewritePpatOpen longidentOpen p)}
+ | _ -> pat
+
+ let rec rewriteReasonFastPipe expr =
+ let open Parsetree in
+ match expr.pexp_desc with
+ | Pexp_apply (
+ {pexp_desc = Pexp_apply (
+ {pexp_desc = Pexp_ident {txt = Longident.Lident "|."}} as op,
+ [Asttypes.Nolabel, lhs; Nolabel, rhs]
+ ); pexp_attributes = subAttrs},
+ args
+ ) ->
+ let rhsLoc = {rhs.pexp_loc with loc_end = expr.pexp_loc.loc_end} in
+ let newLhs =
+ let expr = rewriteReasonFastPipe lhs in
+ {expr with pexp_attributes = subAttrs}
+ in
+ let allArgs =
+ (Asttypes.Nolabel, newLhs)::[
+ Asttypes.Nolabel, Ast_helper.Exp.apply ~loc:rhsLoc rhs args
+ ]
+ in
+ Ast_helper.Exp.apply ~attrs:expr.pexp_attributes ~loc:expr.pexp_loc op allArgs
+ | _ -> expr
+
+ let makeReasonArityMapper ~forPrinter =
+ let open Ast_mapper in
+ { default_mapper with
+ expr = begin fun mapper expr ->
+ match expr with
+ (* Don't mind this case, Reason doesn't handle this. *)
+ (* | {pexp_desc = Pexp_variant (lbl, args); pexp_loc; pexp_attributes} -> *)
+ (* let newArgs = match args with *)
+ (* | (Some {pexp_desc = Pexp_tuple [{pexp_desc = Pexp_tuple _ } as sp]}) as args-> *)
+ (* if forPrinter then args else Some sp *)
+ (* | Some {pexp_desc = Pexp_tuple [sp]} -> Some sp *)
+ (* | _ -> args *)
+ (* in *)
+ (* default_mapper.expr mapper {pexp_desc=Pexp_variant(lbl, newArgs); pexp_loc; pexp_attributes} *)
+ | {pexp_desc=Pexp_construct(lid, args); pexp_loc; pexp_attributes} ->
+ let newArgs = match args with
+ | (Some {pexp_desc = Pexp_tuple [{pexp_desc = Pexp_tuple _ } as sp]}) as args ->
+ if forPrinter then args else Some sp
+ | Some {pexp_desc = Pexp_tuple [sp]} -> Some sp
+ | _ -> args
+ in
+ default_mapper.expr mapper { pexp_desc=Pexp_construct(lid, newArgs); pexp_loc; pexp_attributes}
+ | expr ->
+ default_mapper.expr mapper (rewriteReasonFastPipe expr)
+ end;
+ pat = begin fun mapper pattern ->
+ match pattern with
+ (* Don't mind this case, Reason doesn't handle this. *)
+ (* | {ppat_desc = Ppat_variant (lbl, args); ppat_loc; ppat_attributes} -> *)
+ (* let newArgs = match args with *)
+ (* | (Some {ppat_desc = Ppat_tuple [{ppat_desc = Ppat_tuple _} as sp]}) as args -> *)
+ (* if forPrinter then args else Some sp *)
+ (* | Some {ppat_desc = Ppat_tuple [sp]} -> Some sp *)
+ (* | _ -> args *)
+ (* in *)
+ (* default_mapper.pat mapper {ppat_desc = Ppat_variant (lbl, newArgs); ppat_loc; ppat_attributes;} *)
+ | {ppat_desc=Ppat_construct(lid, args);
+ ppat_loc;
+ ppat_attributes} ->
+ let new_args = match args with
+ | (Some {ppat_desc = Ppat_tuple [{ppat_desc = Ppat_tuple _} as sp]}) as args ->
+ if forPrinter then args else Some sp
+ | Some {ppat_desc = Ppat_tuple [sp]} -> Some sp
+ | _ -> args in
+ default_mapper.pat mapper { ppat_desc=Ppat_construct(lid, new_args); ppat_loc; ppat_attributes;}
+ | x -> default_mapper.pat mapper x
+ end;
+ }
+
+ let escapeTemplateLiteral s =
+ let len = String.length s in
+ let b = Buffer.create len in
+ let i = ref 0 in
+ while !i < len do
+ let c = (String.get [@doesNotRaise]) s !i in
+ if c = '`' then (
+ Buffer.add_char b '\\';
+ Buffer.add_char b '`';
+ incr i;
+ ) else if c = '$' then (
+ if !i + 1 < len then (
+ let c2 = (String.get [@doesNotRaise]) s (!i + 1) in
+ if c2 = '{' then (
+ Buffer.add_char b '\\';
+ Buffer.add_char b '$';
+ Buffer.add_char b '{';
+ ) else (
+ Buffer.add_char b c;
+ Buffer.add_char b c2;
+ );
+ i := !i + 2;
+ ) else (
+ Buffer.add_char b c;
+ incr i
+ )
+ ) else if c = '\\' then (
+ Buffer.add_char b '\\';
+ Buffer.add_char b '\\';
+ incr i;
+ ) else (
+ Buffer.add_char b c;
+ incr i
+ )
+ done;
+ Buffer.contents b
+
+  let escapeStringContents s = (* escape `\` and `"` so [s] can be re-printed as a string literal *)
+    let len = String.length s in
+    let b = Buffer.create len in
+
+    let i = ref 0 in
+
+    while !i < len do
+      let c = String.unsafe_get s !i in
+      if c = '\\' then (
+        incr i;
+        Buffer.add_char b c;
+        if !i < len then (* bounds check BEFORE unsafe_get: a trailing `\` must not read past the end *)
+          let c = String.unsafe_get s !i in
+          let () = Buffer.add_char b c in
+          incr i
+        else
+          ()
+      ) else if c = '"' then (
+        Buffer.add_char b '\\';
+        Buffer.add_char b c;
+        incr i;
+      ) else (
+        Buffer.add_char b c;
+        incr i;
+      )
+    done;
+    Buffer.contents b
+
+ let looksLikeRecursiveTypeDeclaration typeDeclaration =
+ let open Parsetree in
+ let name = typeDeclaration.ptype_name.txt in
+ let rec checkKind kind =
+ match kind with
+ | Ptype_abstract | Ptype_open -> false
+ | Ptype_variant constructorDeclarations ->
+ List.exists checkConstructorDeclaration constructorDeclarations
+ | Ptype_record labelDeclarations ->
+ List.exists checkLabelDeclaration labelDeclarations
+
+ and checkConstructorDeclaration constrDecl =
+ checkConstructorArguments constrDecl.pcd_args
+ || (match constrDecl.pcd_res with
+ | Some typexpr ->
+ checkTypExpr typexpr
+ | None -> false
+ )
+
+ and checkLabelDeclaration labelDeclaration =
+ checkTypExpr labelDeclaration.pld_type
+
+ and checkConstructorArguments constrArg =
+ match constrArg with
+ | Pcstr_tuple types ->
+ List.exists checkTypExpr types
+ | Pcstr_record labelDeclarations ->
+ List.exists checkLabelDeclaration labelDeclarations
+
+ and checkTypExpr typ =
+ match typ.ptyp_desc with
+ | Ptyp_any -> false
+ | Ptyp_var _ -> false
+ | Ptyp_object _ -> false
+ | Ptyp_class _ -> false
+ | Ptyp_package _ -> false
+ | Ptyp_extension _ -> false
+ | Ptyp_arrow (_lbl, typ1, typ2) ->
+ checkTypExpr typ1 || checkTypExpr typ2
+ | Ptyp_tuple types ->
+ List.exists checkTypExpr types
+ | Ptyp_constr ({txt = longident}, types) ->
+ (match longident with
+ | Lident ident -> ident = name
+ | _ -> false
+ ) ||
+ List.exists checkTypExpr types
+ | Ptyp_alias (typ, _) -> checkTypExpr typ
+ | Ptyp_variant (rowFields, _, _) ->
+ List.exists checkRowFields rowFields
+ | Ptyp_poly (_, typ) ->
+ checkTypExpr typ
+
+ and checkRowFields rowField =
+ match rowField with
+ | Rtag (_, _, _, types) ->
+ List.exists checkTypExpr types
+ | Rinherit typexpr ->
+ checkTypExpr typexpr
+ in
+ checkKind typeDeclaration.ptype_kind
+
+
+ let filterReasonRawLiteral attrs =
+ List.filter (fun attr ->
+ match attr with
+ | ({Location.txt = ("reason.raw_literal")}, _) -> false
+ | _ -> true
+ ) attrs
+
+ let stringLiteralMapper stringData =
+ let isSameLocation l1 l2 =
+ let open Location in
+ l1.loc_start.pos_cnum == l2.loc_start.pos_cnum
+ in
+ let remainingStringData = stringData in
+ let open Ast_mapper in
+ { default_mapper with
+ expr = (fun mapper expr ->
+ match expr.pexp_desc with
+ | Pexp_constant (Pconst_string (_txt, None)) ->
+ begin match
+ List.find_opt (fun (_stringData, stringLoc) ->
+ isSameLocation stringLoc expr.pexp_loc
+ ) remainingStringData
+ with
+ | Some(stringData, _) ->
+ let stringData =
+ let attr = List.find_opt (fun attr -> match attr with
+ | ({Location.txt = ("reason.raw_literal")}, _) -> true
+ | _ -> false
+ ) expr.pexp_attributes in
+ match attr with
+ | Some (_, PStr [{pstr_desc = Pstr_eval ({pexp_desc = Pexp_constant (Pconst_string (raw, _))}, _)}]) ->
+ raw
+ | _ -> (String.sub [@doesNotRaise]) stringData 1 (String.length stringData - 2)
+ in
+ {expr with
+ pexp_attributes = filterReasonRawLiteral expr.pexp_attributes;
+ pexp_desc = Pexp_constant (Pconst_string (stringData, None))
+ }
+ | None ->
+ default_mapper.expr mapper expr
+ end
+ | _ -> default_mapper.expr mapper expr
+ )
+ }
+
+ let normalize =
+ let open Ast_mapper in
+ { default_mapper with
+ attributes = (fun mapper attrs ->
+ attrs
+ |> List.filter (fun attr ->
+ match attr with
+ | ({Location.txt = (
+ "reason.preserve_braces"
+ | "explicit_arity"
+ | "implicity_arity"
+ )}, _) -> false
+ | _ ->true
+ )
+ |> default_mapper.attributes mapper
+ );
+ pat = begin fun mapper p ->
+ match p.ppat_desc with
+ | Ppat_open ({txt = longidentOpen}, pattern) ->
+ let p = rewritePpatOpen longidentOpen pattern in
+ default_mapper.pat mapper p
+ | _ ->
+ default_mapper.pat mapper p
+ end;
+ expr = (fun mapper expr ->
+ match expr.pexp_desc with
+ | Pexp_constant (Pconst_string (txt, None)) ->
+ let raw = escapeStringContents txt in
+ let s = Parsetree.Pconst_string (raw, None) in
+ let expr = Ast_helper.Exp.constant
+ ~attrs:expr.pexp_attributes
+ ~loc:expr.pexp_loc s
+ in
+ expr
+ | Pexp_constant (Pconst_string (txt, tag)) ->
+ let s = Parsetree.Pconst_string ((escapeTemplateLiteral txt), tag) in
+ Ast_helper.Exp.constant
+ ~attrs:(mapper.attributes mapper expr.pexp_attributes)
+ ~loc:expr.pexp_loc
+ s
+ | Pexp_function cases ->
+ let loc = match (cases, List.rev cases) with
+ | (first::_), (last::_) ->
+ {first.pc_lhs.ppat_loc with loc_end = last.pc_rhs.pexp_loc.loc_end}
+ | _ -> Location.none
+ in
+ Ast_helper.Exp.fun_ ~loc
+ Asttypes.Nolabel None (Ast_helper.Pat.var (Location.mknoloc "x"))
+ (Ast_helper.Exp.match_ ~loc
+ (Ast_helper.Exp.ident (Location.mknoloc (Longident.Lident "x")))
+ (default_mapper.cases mapper cases)
+ )
+ | Pexp_apply (
+ {pexp_desc = Pexp_ident {txt = Longident.Lident "!"}},
+ [Asttypes.Nolabel, operand]
+ ) ->
+ (* turn `!foo` into `foo.contents` *)
+ Ast_helper.Exp.field ~loc:expr.pexp_loc ~attrs:expr.pexp_attributes
+ operand
+ (Location.mknoloc (Longident.Lident "contents"))
+ | Pexp_apply (
+ {pexp_desc = Pexp_ident {txt = Longident.Lident "##"}} as op,
+ [Asttypes.Nolabel, lhs; Nolabel, ({pexp_desc = Pexp_constant (Pconst_string (txt, None))} as stringExpr)]
+ ) ->
+ let ident = Ast_helper.Exp.ident ~loc:stringExpr.pexp_loc
+ (Location.mkloc (Longident.Lident txt) stringExpr.pexp_loc)
+ in
+ Ast_helper.Exp.apply ~loc:expr.pexp_loc ~attrs:expr.pexp_attributes
+ op [Asttypes.Nolabel, lhs; Nolabel, ident]
+ | Pexp_apply (
+ {pexp_desc = Pexp_ident {txt = Longident.Lident "@@"}},
+ [Asttypes.Nolabel, callExpr; Nolabel, argExpr]
+ ) ->
+ Ast_helper.Exp.apply (mapper.expr mapper callExpr) [
+ Asttypes.Nolabel, mapper.expr mapper argExpr
+ ]
+ | Pexp_apply (
+ {pexp_desc = Pexp_ident {txt = Longident.Lident "@"}},
+ [Nolabel, arg1; Nolabel, arg2]
+ ) ->
+ let listConcat = Longident.Ldot (Longident.Lident "List", "append") in
+ Ast_helper.Exp.apply
+ (Ast_helper.Exp.ident (Location.mknoloc listConcat))
+ [Nolabel, mapper.expr mapper arg1; Nolabel, mapper.expr mapper arg2]
+ | Pexp_match (
+ condition,
+ [
+ {pc_lhs = {ppat_desc = Ppat_construct ({txt = Longident.Lident "true"}, None)}; pc_rhs = thenExpr };
+ {pc_lhs = {ppat_desc = Ppat_construct ({txt = Longident.Lident "false"}, None)}; pc_rhs = elseExpr };
+ ]
+ ) ->
+ let ternaryMarker = (Location.mknoloc "ns.ternary", Parsetree.PStr []) in
+ Ast_helper.Exp.ifthenelse
+ ~loc:expr.pexp_loc
+ ~attrs:(ternaryMarker::expr.pexp_attributes)
+ (default_mapper.expr mapper condition)
+ (default_mapper.expr mapper thenExpr)
+ (Some (default_mapper.expr mapper elseExpr))
+ | _ -> default_mapper.expr mapper expr
+ );
+ structure_item = begin fun mapper structureItem ->
+ match structureItem.pstr_desc with
+ (* heuristic: if we have multiple type declarations, mark them recursive *)
+ | Pstr_type (recFlag, typeDeclarations) ->
+ let flag = match typeDeclarations with
+ | [td] ->
+ if looksLikeRecursiveTypeDeclaration td then Asttypes.Recursive
+ else Asttypes.Nonrecursive
+ | _ -> recFlag
+ in
+ {structureItem with pstr_desc = Pstr_type (
+ flag,
+ List.map (fun typeDeclaration ->
+ default_mapper.type_declaration mapper typeDeclaration
+ ) typeDeclarations
+ )}
+ | _ -> default_mapper.structure_item mapper structureItem
+ end;
+ signature_item = begin fun mapper signatureItem ->
+ match signatureItem.psig_desc with
+ (* heuristic: if we have multiple type declarations, mark them recursive *)
+ | Psig_type (recFlag, typeDeclarations) ->
+ let flag = match typeDeclarations with
+ | [td] ->
+ if looksLikeRecursiveTypeDeclaration td then Asttypes.Recursive
+ else Asttypes.Nonrecursive
+ | _ -> recFlag
+ in
+ {signatureItem with psig_desc = Psig_type (
+ flag,
+ List.map (fun typeDeclaration ->
+ default_mapper.type_declaration mapper typeDeclaration
+ ) typeDeclarations
+ )}
+ | _ -> default_mapper.signature_item mapper signatureItem
+ end;
+ value_binding = begin fun mapper vb ->
+ match vb with
+ | {
+ pvb_pat = {ppat_desc = Ppat_var _} as pat;
+ pvb_expr = {pexp_loc = expr_loc; pexp_desc = Pexp_constraint (expr, typ) }
+ } when expr_loc.loc_ghost ->
+ (* let t: t = (expr : t) -> let t: t = expr *)
+ let typ = default_mapper.typ mapper typ in
+ let pat = default_mapper.pat mapper pat in
+ let expr = mapper.expr mapper expr in
+ let newPattern = Ast_helper.Pat.constraint_
+ ~loc:{pat.ppat_loc with loc_end = typ.ptyp_loc.loc_end}
+ pat typ in
+ {vb with
+ pvb_pat = newPattern;
+ pvb_expr = expr;
+ pvb_attributes = default_mapper.attributes mapper vb.pvb_attributes}
+ | {
+ pvb_pat = {ppat_desc = Ppat_constraint (pat, {ptyp_desc = Ptyp_poly ([], _)})} ;
+ pvb_expr = {pexp_loc = expr_loc; pexp_desc = Pexp_constraint (expr, typ) }
+ } when expr_loc.loc_ghost ->
+ (* let t: . t = (expr : t) -> let t: t = expr *)
+ let typ = default_mapper.typ mapper typ in
+ let pat = default_mapper.pat mapper pat in
+ let expr = mapper.expr mapper expr in
+ let newPattern = Ast_helper.Pat.constraint_
+ ~loc:{pat.ppat_loc with loc_end = typ.ptyp_loc.loc_end}
+ pat typ in
+ {vb with
+ pvb_pat = newPattern;
+ pvb_expr = expr;
+ pvb_attributes = default_mapper.attributes mapper vb.pvb_attributes}
+ | _ -> default_mapper.value_binding mapper vb
+ end;
+ }
+
+ let normalizeReasonArityStructure ~forPrinter s =
+ let mapper = makeReasonArityMapper ~forPrinter in
+ mapper.Ast_mapper.structure mapper s
+
+ let normalizeReasonAritySignature ~forPrinter s =
+ let mapper = makeReasonArityMapper ~forPrinter in
+ mapper.Ast_mapper.signature mapper s
+
+ let structure s = normalize.Ast_mapper.structure normalize s
+ let signature s = normalize.Ast_mapper.signature normalize s
+
+ let replaceStringLiteralStructure stringData structure =
+ let mapper = stringLiteralMapper stringData in
+ mapper.Ast_mapper.structure mapper structure
+
+ let replaceStringLiteralSignature stringData signature =
+ let mapper = stringLiteralMapper stringData in
+ mapper.Ast_mapper.signature mapper signature
+end
+
+module OcamlParser = Parser
+
+module Parser = struct
+ type mode = ParseForTypeChecker | Default
+
+ type regionStatus = Report | Silent
+
+ type t = {
+ mode: mode;
+ mutable scanner: Scanner.t;
+ mutable token: Token.t;
+ mutable startPos: Lexing.position;
+ mutable endPos: Lexing.position;
+ mutable prevEndPos: Lexing.position;
+ mutable breadcrumbs: (Grammar.t * Lexing.position) list;
+ mutable errors: Reporting.parseError list;
+ mutable diagnostics: Diagnostics.t list;
+ mutable comments: Comment.t list;
+ mutable regions: regionStatus ref list;
+ }
+
+ let err ?startPos ?endPos p error =
+ let d = Diagnostics.make
+ ~filename:p.scanner.filename
+ ~startPos:(match startPos with | Some pos -> pos | None -> p.startPos)
+ ~endPos:(match endPos with | Some pos -> pos | None -> p.endPos)
+ error
+ in
+ try
+ if (!(List.hd p.regions) = Report) then (
+ p.diagnostics <- d::p.diagnostics;
+ List.hd p.regions := Silent
+ )
+ with Failure _ -> ()
+
+ let beginRegion p =
+ p.regions <- ref Report :: p.regions
+ let endRegion p =
+ try p.regions <- List.tl p.regions with Failure _ -> ()
+
+ (* Advance to the next non-comment token and store any encountered comment
+   * in the parser's state. Every comment contains the end position of its
+   * previous token to facilitate comment interleaving *)
+ let rec next ?prevEndPos p =
+ let prevEndPos = match prevEndPos with Some pos -> pos | None -> p.endPos in
+ let (startPos, endPos, token) = Scanner.scan p.scanner in
+ match token with
+ | Comment c ->
+ Comment.setPrevTokEndPos c p.endPos;
+ p.comments <- c::p.comments;
+ p.prevEndPos <- p.endPos;
+ p.endPos <- endPos;
+ next ~prevEndPos p
+ | _ ->
+ p.token <- token;
+ (* p.prevEndPos <- prevEndPos; *)
+ p.prevEndPos <- prevEndPos;
+ p.startPos <- startPos;
+ p.endPos <- endPos
+
+ let checkProgress ~prevEndPos ~result p =
+ if p.endPos == prevEndPos
+ then None
+ else Some result
+
+ let make ?(mode=ParseForTypeChecker) src filename =
+ let scanner = Scanner.make (Bytes.of_string src) filename in
+ let parserState = {
+ mode;
+ scanner;
+ token = Token.Eof;
+ startPos = Lexing.dummy_pos;
+ prevEndPos = Lexing.dummy_pos;
+ endPos = Lexing.dummy_pos;
+ breadcrumbs = [];
+ errors = [];
+ diagnostics = [];
+ comments = [];
+ regions = [ref Report];
+ } in
+ parserState.scanner.err <- (fun ~startPos ~endPos error ->
+ let diagnostic = Diagnostics.make
+ ~filename
+ ~startPos
+ ~endPos
+ error
+ in
+ parserState.diagnostics <- diagnostic::parserState.diagnostics
+ );
+ next parserState;
+ parserState
+
+ let leaveBreadcrumb p circumstance =
+ let crumb = (circumstance, p.startPos) in
+ p.breadcrumbs <- crumb::p.breadcrumbs
+
+ let eatBreadcrumb p =
+ match p.breadcrumbs with
+ | [] -> ()
+ | _::crumbs -> p.breadcrumbs <- crumbs
+
+ let optional p token =
+ if p.token = token then
+ let () = next p in true
+ else
+ false
+
+ let expect ?grammar token p =
+ if p.token = token then
+ next p
+ else
+ let error = Diagnostics.expected ?grammar p.prevEndPos token in
+ err ~startPos:p.prevEndPos p error
+
+ (* Don't use immutable copies here, it trashes certain heuristics
+ * in the ocaml compiler, resulting in massive slowdowns of the parser *)
+ let lookahead p callback =
+ let err = p.scanner.err in
+ let ch = p.scanner.ch in
+ let offset = p.scanner.offset in
+ let rdOffset = p.scanner.rdOffset in
+ let lineOffset = p.scanner.lineOffset in
+ let lnum = p.scanner.lnum in
+ let mode = p.scanner.mode in
+ let token = p.token in
+ let startPos = p.startPos in
+ let endPos = p.endPos in
+ let prevEndPos = p.prevEndPos in
+ let breadcrumbs = p.breadcrumbs in
+ let errors = p.errors in
+ let diagnostics = p.diagnostics in
+ let comments = p.comments in
+
+ let res = callback p in
+
+ p.scanner.err <- err;
+ p.scanner.ch <- ch;
+ p.scanner.offset <- offset;
+ p.scanner.rdOffset <- rdOffset;
+ p.scanner.lineOffset <- lineOffset;
+ p.scanner.lnum <- lnum;
+ p.scanner.mode <- mode;
+ p.token <- token;
+ p.startPos <- startPos;
+ p.endPos <- endPos;
+ p.prevEndPos <- prevEndPos;
+ p.breadcrumbs <- breadcrumbs;
+ p.errors <- errors;
+ p.diagnostics <- diagnostics;
+ p.comments <- comments;
+
+ res
+end
+
+module NapkinScript = struct
+ let mkLoc startLoc endLoc = Location.{
+ loc_start = startLoc;
+ loc_end = endLoc;
+ loc_ghost = false;
+ }
+
+
+ module Recover = struct
+ type action = unit option (* None is abort, Some () is retry *)
+
+ let defaultExpr () =
+ let id = Location.mknoloc "napkinscript.exprhole" in
+ Ast_helper.Exp.mk (Pexp_extension (id, PStr []))
+
+ let defaultType () =
+ let id = Location.mknoloc "napkinscript.typehole" in
+ Ast_helper.Typ.extension (id, PStr [])
+
+ let defaultPattern () =
+ let id = Location.mknoloc "napkinscript.patternhole" in
+ Ast_helper.Pat.extension (id, PStr [])
+ (* Ast_helper.Pat.any () *)
+
+ let defaultModuleExpr () = Ast_helper.Mod.structure []
+ let defaultModuleType () = Ast_helper.Mty.signature []
+
+ let recoverEqualGreater p =
+ Parser.expect EqualGreater p;
+ match p.Parser.token with
+ | MinusGreater -> Parser.next p
+ | _ -> ()
+
+ let shouldAbortListParse p =
+ let rec check breadcrumbs =
+ match breadcrumbs with
+ | [] -> false
+ | (grammar, _)::rest ->
+ if Grammar.isPartOfList grammar p.Parser.token then
+ true
+ else
+ check rest
+ in
+ check p.breadcrumbs
+ end
+
+ module ErrorMessages = struct
+ let listPatternSpread = "List pattern matches only supports one `...` spread, at the end.
+Explanation: a list spread at the tail is efficient, but a spread in the middle would create new list[s]; out of performance concern, our pattern matching currently guarantees to never create new intermediate data."
+
+ let recordPatternSpread = "Record's `...` spread is not supported in pattern matches.
+Explanation: you can't collect a subset of a record's field into its own record, since a record needs an explicit declaration and that subset wouldn't have one.
+Solution: you need to pull out each field you want explicitly."
+
+ let recordPatternUnderscore = "Record patterns only support one `_`, at the end."
+ [@@live]
+
+ let arrayPatternSpread = "Array's `...` spread is not supported in pattern matches.
+Explanation: such spread would create a subarray; out of performance concern, our pattern matching currently guarantees to never create new intermediate data.
+Solution: if it's to validate the first few elements, use a `when` clause + Array size check + `get` checks on the current pattern. If it's to obtain a subarray, use `Array.sub` or `Belt.Array.slice`."
+
+ let arrayExprSpread = "Arrays can't use the `...` spread currently. Please use `concat` or other Array helpers."
+
+ let recordExprSpread = "Records can only have one `...` spread, at the beginning.
+Explanation: since records have a known, fixed shape, a spread like `{a, ...b}` wouldn't make sense, as `b` would override every field of `a` anyway."
+
+ let listExprSpread = "Lists can only have one `...` spread, and at the end.
+Explanation: lists are singly-linked list, where a node contains a value and points to the next node. `list[a, ...bc]` efficiently creates a new item and links `bc` as its next nodes. `[...bc, a]` would be expensive, as it'd need to traverse `bc` and prepend each item to `a` one by one. We therefore disallow such syntax sugar.
+Solution: directly use `concat`."
+
+ let variantIdent = "A polymorphic variant (e.g. #id) must start with an alphabetical letter."
+end
+
+
+ let jsxAttr = (Location.mknoloc "JSX", Parsetree.PStr [])
+ let uncurryAttr = (Location.mknoloc "bs", Parsetree.PStr [])
+ let ternaryAttr = (Location.mknoloc "ns.ternary", Parsetree.PStr [])
+ let makeBracesAttr loc = (Location.mkloc "ns.braces" loc, Parsetree.PStr [])
+
+ type typDefOrExt =
+ | TypeDef of {recFlag: Asttypes.rec_flag; types: Parsetree.type_declaration list}
+ | TypeExt of Parsetree.type_extension
+
+ type labelledParameter =
+ | TermParameter of
+ {uncurried: bool; attrs: Parsetree.attributes; label: Asttypes.arg_label; expr: Parsetree.expression option;
+ pat: Parsetree.pattern; pos: Lexing.position}
+ | TypeParameter of {uncurried: bool; attrs: Parsetree.attributes; locs: string Location.loc list; pos: Lexing.position}
+
+ type recordPatternItem =
+ | PatUnderscore
+ | PatField of (Ast_helper.lid * Parsetree.pattern)
+
+ type context =
+ | OrdinaryExpr
+ | TernaryTrueBranchExpr
+ | WhenExpr
+
+ let getClosingToken = function
+ | Token.Lparen -> Token.Rparen
+ | Lbrace -> Rbrace
+ | Lbracket -> Rbracket
+ | _ -> assert false
+
+ let rec goToClosing closingToken state =
+ match (state.Parser.token, closingToken) with
+ | (Rparen, Token.Rparen) | (Rbrace, Rbrace) | (Rbracket, Rbracket) ->
+ Parser.next state;
+ ()
+ | (Token.Lbracket | Lparen | Lbrace) as t, _ ->
+ Parser.next state;
+ goToClosing (getClosingToken t) state;
+ goToClosing closingToken state
+ | ((Rparen | Token.Rbrace | Rbracket | Eof), _) ->
+      () (* TODO: how do we report errors here? *)
+ | _ ->
+ Parser.next state;
+ goToClosing closingToken state
+
+ (* Madness *)
+ let isEs6ArrowExpression ~inTernary p =
+ Parser.lookahead p (fun state ->
+ match state.Parser.token with
+ | Lident _ | List | Underscore ->
+ Parser.next state;
+ begin match state.Parser.token with
+      (* Don't think that this is valid
+ * Imagine: let x = (a: int)
+ * This is a parenthesized expression with a type constraint, wait for
+ * the arrow *)
+ (* | Colon when not inTernary -> true *)
+ | EqualGreater -> true
+ | _ -> false
+ end
+ | Lparen ->
+ let prevEndPos = state.prevEndPos in
+ Parser.next state;
+ begin match state.token with
+ | Rparen ->
+ Parser.next state;
+ begin match state.Parser.token with
+ | Colon when not inTernary -> true
+ | EqualGreater -> true
+ | _ -> false
+ end
+ | Dot (* uncurried *) -> true
+ | Tilde -> true
+ | Backtick -> false (* (` always indicates the start of an expr, can't be es6 parameter *)
+ | _ ->
+ goToClosing Rparen state;
+ begin match state.Parser.token with
+ | EqualGreater -> true
+ (* | Lbrace TODO: detect missing =>, is this possible? *)
+ | Colon when not inTernary -> true
+ | Rparen ->
+          (* imagine having something such as:
+ * switch colour {
+ * | Red
+ * when l == l'
+ * || (&Clflags.classic && (l == Nolabel && !is_optional(l'))) => (t1, t2)
+ * We'll arrive at the outer rparen just before the =>.
+ * This is not an es6 arrow.
+ * *)
+ false
+ | _ ->
+ Parser.next state;
+ (* error recovery, peek at the next token,
+ * (elements, providerId] => {
+ * in the example above, we have an unbalanced ] here
+ *)
+ begin match state.Parser.token with
+ | EqualGreater when state.startPos.pos_lnum == prevEndPos.pos_lnum -> true
+ | _ -> false
+ end
+ end
+ end
+ | _ -> false)
+
+
+ let isEs6ArrowFunctor p =
+ Parser.lookahead p (fun state ->
+ match state.Parser.token with
+ (* | Uident _ | Underscore -> *)
+ (* Parser.next state; *)
+ (* begin match state.Parser.token with *)
+ (* | EqualGreater -> true *)
+ (* | _ -> false *)
+ (* end *)
+ | Lparen ->
+ Parser.next state;
+ begin match state.token with
+ | Rparen ->
+ Parser.next state;
+ begin match state.token with
+ | Colon | EqualGreater -> true
+ | _ -> false
+ end
+ | _ ->
+ goToClosing Rparen state;
+ begin match state.Parser.token with
+ | EqualGreater | Lbrace -> true
+ | Colon -> true
+ | _ -> false
+ end
+ end
+ | _ -> false
+ )
+
+ let isEs6ArrowType p =
+ Parser.lookahead p (fun state ->
+ match state.Parser.token with
+ | Lparen ->
+ Parser.next state;
+ begin match state.Parser.token with
+ | Rparen ->
+ Parser.next state;
+ begin match state.Parser.token with
+ | EqualGreater -> true
+ | _ -> false
+ end
+ | Tilde | Dot -> true
+ | _ ->
+ goToClosing Rparen state;
+ begin match state.Parser.token with
+ | EqualGreater -> true
+ | _ -> false
+ end
+ end
+ | Tilde -> true
+ | _ -> false
+ )
+
+ let buildLongident words = match List.rev words with
+ | [] -> assert false
+ | hd::tl -> List.fold_left (fun p s -> Longident.Ldot (p, s)) (Lident hd) tl
+
+ let makeInfixOperator p token startPos endPos =
+ let stringifiedToken =
+ if token = Token.MinusGreater then "|."
+ else if token = Token.PlusPlus then "^"
+ else if token = Token.BangEqual then "<>"
+ else if token = Token.BangEqualEqual then "!="
+ else if token = Token.Equal then (
+      (* TODO: could have a totally different meaning like x->fooSet(y) *)
+ Parser.err ~startPos ~endPos p (
+ Diagnostics.message "Did you mean `==` here?"
+ );
+ "="
+ ) else if token = Token.EqualEqual then "="
+ else if token = Token.EqualEqualEqual then "=="
+ else Token.toString token
+ in
+ let loc = mkLoc startPos endPos in
+ let operator = Location.mkloc
+ (Longident.Lident stringifiedToken) loc
+ in
+ Ast_helper.Exp.ident ~loc operator
+
+ let negateString s =
+ if String.length s > 0 && (s.[0] [@doesNotRaise]) = '-'
+ then (String.sub [@doesNotRaise]) s 1 (String.length s - 1)
+ else "-" ^ s
+
+ let makeUnaryExpr startPos tokenEnd token operand =
+ match token, operand.Parsetree.pexp_desc with
+ | (Token.Plus | PlusDot), Pexp_constant((Pconst_integer _ | Pconst_float _)) ->
+ operand
+ | Minus, Pexp_constant(Pconst_integer (n,m)) ->
+ {operand with pexp_desc = Pexp_constant(Pconst_integer (negateString n,m))}
+ | (Minus | MinusDot), Pexp_constant(Pconst_float (n,m)) ->
+ {operand with pexp_desc = Pexp_constant(Pconst_float (negateString n,m))}
+ | (Token.Plus | PlusDot | Minus | MinusDot ), _ ->
+ let tokenLoc = mkLoc startPos tokenEnd in
+ let operator = "~" ^ Token.toString token in
+ Ast_helper.Exp.apply
+ ~loc:(mkLoc startPos operand.Parsetree.pexp_loc.loc_end)
+ (Ast_helper.Exp.ident ~loc:tokenLoc
+ (Location.mkloc (Longident.Lident operator) tokenLoc))
+ [Nolabel, operand]
+ | Token.Bang, _ ->
+ let tokenLoc = mkLoc startPos tokenEnd in
+ Ast_helper.Exp.apply
+ ~loc:(mkLoc startPos operand.Parsetree.pexp_loc.loc_end)
+ (Ast_helper.Exp.ident ~loc:tokenLoc
+ (Location.mkloc (Longident.Lident "not") tokenLoc))
+ [Nolabel, operand]
+ | _ ->
+ operand
+
+ let makeListExpression loc seq extOpt =
+ let rec handleSeq = function
+ | [] ->
+ begin match extOpt with
+ | Some ext -> ext
+ | None ->
+ let loc = {loc with Location.loc_ghost = true} in
+ let nil = Location.mkloc (Longident.Lident "[]") loc in
+ Ast_helper.Exp.construct ~loc nil None
+ end
+ | e1 :: el ->
+ let exp_el = handleSeq el in
+ let loc = mkLoc
+ e1.Parsetree.pexp_loc.Location.loc_start
+ exp_el.pexp_loc.loc_end
+ in
+ let arg = Ast_helper.Exp.tuple ~loc [e1; exp_el] in
+ Ast_helper.Exp.construct ~loc
+ (Location.mkloc (Longident.Lident "::") loc)
+ (Some arg)
+ in
+ let expr = handleSeq seq in
+ {expr with pexp_loc = loc}
+
+ let makeListPattern loc seq ext_opt =
+ let rec handle_seq = function
+ [] ->
+ let base_case = match ext_opt with
+ | Some ext ->
+ ext
+ | None ->
+ let loc = { loc with Location.loc_ghost = true} in
+ let nil = { Location.txt = Longident.Lident "[]"; loc } in
+ Ast_helper.Pat.construct ~loc nil None
+ in
+ base_case
+ | p1 :: pl ->
+ let pat_pl = handle_seq pl in
+ let loc =
+ mkLoc p1.Parsetree.ppat_loc.loc_start pat_pl.ppat_loc.loc_end in
+ let arg = Ast_helper.Pat.mk ~loc (Ppat_tuple [p1; pat_pl]) in
+ Ast_helper.Pat.mk ~loc (Ppat_construct(Location.mkloc (Longident.Lident "::") loc, Some arg))
+ in
+ handle_seq seq
+
+
+ (* {"foo": bar} -> Js.t({. foo: bar})
+ * {.. "foo": bar} -> Js.t({.. foo: bar})
+ * {..} -> Js.t({..}) *)
+ let makeBsObjType ~attrs ~loc ~closed rows =
+ let obj = Ast_helper.Typ.object_ ~loc rows closed in
+ let jsDotTCtor =
+ Location.mkloc (Longident.Ldot (Longident.Lident "Js", "t")) loc
+ in
+ Ast_helper.Typ.constr ~loc ~attrs jsDotTCtor [obj]
+
+ (* TODO: diagnostic reporting *)
+ let lidentOfPath longident =
+ match Longident.flatten longident |> List.rev with
+ | [] -> ""
+ | ident::_ -> ident
+
+ let makeNewtypes ~attrs ~loc newtypes exp =
+ let expr = List.fold_right (fun newtype exp ->
+ Ast_helper.Exp.mk ~loc (Pexp_newtype (newtype, exp))
+ ) newtypes exp
+ in {expr with pexp_attributes = attrs}
+
+ (* locally abstract types syntax sugar
+ * Transforms
+ * let f: type t u v. = (foo : list) => ...
+ * into
+ * let f = (type t u v. foo : list) => ...
+ *)
+ let wrapTypeAnnotation ~loc newtypes core_type body =
+ let exp = makeNewtypes ~attrs:[] ~loc newtypes
+ (Ast_helper.Exp.constraint_ ~loc body core_type)
+ in
+ let typ = Ast_helper.Typ.poly ~loc newtypes
+ (Ast_helper.Typ.varify_constructors newtypes core_type)
+ in
+ (exp, typ)
+
+ (**
+ * process the occurrence of _ in the arguments of a function application
+ * replace _ with a new variable, currently __x, in the arguments
+ * return a wrapping function that wraps ((__x) => ...) around an expression
+ * e.g. foo(_, 3) becomes (__x) => foo(__x, 3)
+ *)
+ let processUnderscoreApplication args =
+ let open Parsetree in
+ let exp_question = ref None in
+ let hidden_var = "__x" in
+ let check_arg ((lab, exp) as arg) =
+ match exp.pexp_desc with
+ | Pexp_ident ({ txt = Lident "_"} as id) ->
+ let new_id = Location.mkloc (Longident.Lident hidden_var) id.loc in
+ let new_exp = Ast_helper.Exp.mk (Pexp_ident new_id) ~loc:exp.pexp_loc in
+ exp_question := Some new_exp;
+ (lab, new_exp)
+ | _ ->
+ arg
+ in
+ let args = List.map check_arg args in
+ let wrap exp_apply =
+ match !exp_question with
+ | Some {pexp_loc=loc} ->
+ let pattern = Ast_helper.Pat.mk (Ppat_var (Location.mkloc hidden_var loc)) ~loc in
+ Ast_helper.Exp.mk (Pexp_fun (Nolabel, None, pattern, exp_apply)) ~loc
+ | None ->
+ exp_apply
+ in
+ (args, wrap)
+
+ let rec parseLident p =
+ let recoverLident p =
+ if (
+ Token.isKeyword p.Parser.token &&
+ p.Parser.prevEndPos.pos_lnum == p.startPos.pos_lnum
+ )
+ then (
+ Parser.err p (Diagnostics.lident p.Parser.token);
+ Parser.next p;
+ None
+ ) else (
+ let rec loop p =
+ if not (Recover.shouldAbortListParse p)
+ then begin
+ Parser.next p;
+ loop p
+ end
+ in
+ Parser.next p;
+ loop p;
+ match p.Parser.token with
+ | Lident _ -> Some ()
+ | _ -> None
+ )
+ in
+ let startPos = p.Parser.startPos in
+ match p.Parser.token with
+ | Lident ident ->
+ Parser.next p;
+ let loc = mkLoc startPos p.prevEndPos in
+ (ident, loc)
+ | List ->
+ Parser.next p;
+ let loc = mkLoc startPos p.prevEndPos in
+ ("list", loc)
+ | _ ->
+ begin match recoverLident p with
+ | Some () ->
+ parseLident p
+ | None ->
+ ("_", mkLoc startPos p.prevEndPos)
+ end
+
+ let parseIdent ~msg ~startPos p =
+ match p.Parser.token with
+ | Lident ident
+ | Uident ident ->
+ Parser.next p;
+ let loc = mkLoc startPos p.prevEndPos in
+ (ident, loc)
+ | List ->
+ Parser.next p;
+ let loc = mkLoc startPos p.prevEndPos in
+ ("list", loc)
+ | _token ->
+ Parser.err p (Diagnostics.message msg);
+ Parser.next p;
+ ("_", mkLoc startPos p.prevEndPos)
+
+ let parseHashIdent ~startPos p =
+ Parser.expect Hash p;
+ parseIdent ~startPos ~msg:ErrorMessages.variantIdent p
+
+ (* Ldot (Ldot (Lident "Foo", "Bar"), "baz") *)
+ let parseValuePath p =
+ let startPos = p.Parser.startPos in
+ let rec aux p path =
+ match p.Parser.token with
+ | List -> Longident.Ldot(path, "list")
+ | Lident ident -> Longident.Ldot(path, ident)
+ | Uident uident ->
+ Parser.next p;
+ Parser.expect Dot p;
+ aux p (Ldot (path, uident))
+ | token ->
+ Parser.err p (Diagnostics.unexpected token p.breadcrumbs);
+ Longident.Lident "_"
+ in
+ let ident = match p.Parser.token with
+ | List -> Longident.Lident "list"
+ | Lident ident -> Longident.Lident ident
+ | Uident ident ->
+ Parser.next p;
+ Parser.expect Dot p;
+ aux p (Lident ident)
+ | token ->
+ Parser.err p (Diagnostics.unexpected token p.breadcrumbs);
+ Longident.Lident "_"
+ in
+ Parser.next p;
+ Location.mkloc ident (mkLoc startPos p.prevEndPos)
+
+ let parseValuePathTail p startPos ident =
+ let rec loop p path =
+ match p.Parser.token with
+ | Lident ident ->
+ Parser.next p;
+ Location.mkloc (Longident.Ldot(path, ident)) (mkLoc startPos p.prevEndPos)
+ | List ->
+ Parser.next p;
+ Location.mkloc (Longident.Ldot(path, "list")) (mkLoc startPos p.prevEndPos)
+ | Uident ident ->
+ Parser.next p;
+ Parser.expect Dot p;
+ loop p (Longident.Ldot (path, ident))
+ | token ->
+ Parser.err p (Diagnostics.unexpected token p.breadcrumbs);
+ Location.mknoloc path
+ in
+ loop p ident
+
+ let parseModuleLongIdentTail ~lowercase p startPos ident =
+ let rec loop p acc =
+ match p.Parser.token with
+ | List when lowercase ->
+ Parser.next p;
+ let lident = (Longident.Ldot (acc, "list")) in
+ Location.mkloc lident (mkLoc startPos p.prevEndPos)
+ | Lident ident when lowercase ->
+ Parser.next p;
+ let lident = (Longident.Ldot (acc, ident)) in
+ Location.mkloc lident (mkLoc startPos p.prevEndPos)
+ | Uident ident ->
+ Parser.next p;
+ let endPos = p.prevEndPos in
+ let lident = (Longident.Ldot (acc, ident)) in
+ begin match p.Parser.token with
+ | Dot ->
+ Parser.next p;
+ loop p lident
+ | _ -> Location.mkloc lident (mkLoc startPos endPos)
+ end
+ | t ->
+ Parser.err p (Diagnostics.uident t);
+ Location.mkloc acc (mkLoc startPos p.prevEndPos)
+ in
+ loop p ident
+
+ (* Parses module identifiers:
+ Foo
+ Foo.Bar *)
+ let parseModuleLongIdent ~lowercase p =
+ (* Parser.leaveBreadcrumb p Reporting.ModuleLongIdent; *)
+ let startPos = p.Parser.startPos in
+ let moduleIdent = match p.Parser.token with
+ | List when lowercase ->
+ let loc = mkLoc startPos p.endPos in
+ Parser.next p;
+ Location.mkloc (Longident.Lident "list") loc
+ | Lident ident when lowercase ->
+ let loc = mkLoc startPos p.endPos in
+ let lident = Longident.Lident ident in
+ Parser.next p;
+ Location.mkloc lident loc
+ | Uident ident ->
+ let lident = Longident.Lident ident in
+ let endPos = p.endPos in
+ Parser.next p;
+ begin match p.Parser.token with
+ | Dot ->
+ Parser.next p;
+ parseModuleLongIdentTail ~lowercase p startPos lident
+ | _ -> Location.mkloc lident (mkLoc startPos endPos)
+ end
+ | t ->
+ Parser.err p (Diagnostics.uident t);
+ Location.mkloc (Longident.Lident "_") (mkLoc startPos p.prevEndPos)
+ in
+ (* Parser.eatBreadcrumb p; *)
+ moduleIdent
+
+ (* `window.location` or `Math` or `Foo.Bar` *)
+ let parseIdentPath p =
+ let rec loop p acc =
+ match p.Parser.token with
+ | Uident ident | Lident ident ->
+ Parser.next p;
+ let lident = (Longident.Ldot (acc, ident)) in
+ begin match p.Parser.token with
+ | Dot ->
+ Parser.next p;
+ loop p lident
+ | _ -> lident
+ end
+ | _t -> acc
+ in
+ match p.Parser.token with
+ | Lident ident | Uident ident ->
+ Parser.next p;
+ begin match p.Parser.token with
+ | Dot ->
+ Parser.next p;
+ loop p (Longident.Lident ident)
+ | _ -> Longident.Lident ident
+ end
+ | _ ->
+ Longident.Lident "_"
+
+ let verifyJsxOpeningClosingName p nameExpr =
+ let closing = match p.Parser.token with
+ | Lident lident -> Parser.next p; Longident.Lident lident
+ | Uident _ ->
+ (parseModuleLongIdent ~lowercase:false p).txt
+ | _ -> Longident.Lident ""
+ in
+ match nameExpr.Parsetree.pexp_desc with
+ | Pexp_ident openingIdent ->
+ let opening =
+ let withoutCreateElement =
+ Longident.flatten openingIdent.txt
+ |> List.filter (fun s -> s <> "createElement")
+ in
+ match (Longident.unflatten withoutCreateElement) with
+ | Some li -> li
+ | None -> Longident.Lident ""
+ in
+ opening = closing
+ | _ -> assert false
+
+ let string_of_pexp_ident nameExpr =
+ match nameExpr.Parsetree.pexp_desc with
+ | Pexp_ident openingIdent ->
+ Longident.flatten openingIdent.txt
+ |> List.filter (fun s -> s <> "createElement")
+ |> String.concat "."
+ | _ -> ""
+
+ (* open-def ::=
+ * | open module-path
+ * | open! module-path *)
+ let parseOpenDescription ~attrs p =
+ Parser.leaveBreadcrumb p Grammar.OpenDescription;
+ let startPos = p.Parser.startPos in
+ Parser.expect Open p;
+ let override = if Parser.optional p Token.Bang then
+ Asttypes.Override
+ else
+ Asttypes.Fresh
+ in
+ let modident = parseModuleLongIdent ~lowercase:false p in
+ let loc = mkLoc startPos p.prevEndPos in
+ Parser.eatBreadcrumb p;
+ Ast_helper.Opn.mk ~loc ~attrs ~override modident
+
+ let hexValue x =
+ match x with
+ | '0' .. '9' ->
+ (Char.code x) - 48
+ | 'A' .. 'Z' ->
+ (Char.code x) - 55
+ | 'a' .. 'z' ->
+ (Char.code x) - 97
+ | _ -> 16
+
+ let parseStringLiteral s =
+ let len = String.length s in
+ let b = Buffer.create (String.length s) in
+
+ let rec loop i =
+ if i = len then
+ ()
+ else
+ let c = String.unsafe_get s i in
+ match c with
+ | '\\' as c ->
+ let nextIx = i + 1 in
+ if nextIx < len then
+ let nextChar = String.unsafe_get s nextIx in
+ begin match nextChar with
+ | 'n' ->
+ Buffer.add_char b '\010';
+ loop (nextIx + 1)
+ | 'r' ->
+ Buffer.add_char b '\013';
+ loop (nextIx + 1)
+ | 'b' ->
+ Buffer.add_char b '\008';
+ loop (nextIx + 1)
+ | 't' ->
+ Buffer.add_char b '\009';
+ loop (nextIx + 1)
+ | '\\' as c ->
+ Buffer.add_char b c;
+ loop (nextIx + 1)
+ | ' ' as c ->
+ Buffer.add_char b c;
+ loop (nextIx + 1)
+ | '\'' as c ->
+ Buffer.add_char b c;
+ loop (nextIx + 1)
+ | '\"' as c ->
+ Buffer.add_char b c;
+ loop (nextIx + 1)
+ | '0' .. '9' ->
+ if nextIx + 2 < len then
+ let c0 = nextChar in
+ let c1 = (String.unsafe_get s (nextIx + 1)) in
+ let c2 = (String.unsafe_get s (nextIx + 2)) in
+ let c =
+ 100 * (Char.code c0 - 48) +
+ 10 * (Char.code c1 - 48) +
+ (Char.code c2 - 48)
+ in
+ if (c < 0 || c > 255) then (
+ Buffer.add_char b '\\';
+ Buffer.add_char b c0;
+ Buffer.add_char b c1;
+ Buffer.add_char b c2;
+ loop (nextIx + 3)
+ ) else (
+ Buffer.add_char b (Char.unsafe_chr c);
+ loop (nextIx + 3)
+ )
+ else (
+ Buffer.add_char b '\\';
+ Buffer.add_char b nextChar;
+ loop (nextIx + 1)
+ )
+ | 'o' ->
+ if nextIx + 3 < len then
+ let c0 = (String.unsafe_get s (nextIx + 1)) in
+ let c1 = (String.unsafe_get s (nextIx + 2)) in
+ let c2 = (String.unsafe_get s (nextIx + 3)) in
+ let c =
+ 64 * (Char.code c0 - 48) +
+ 8 * (Char.code c1 - 48) +
+ (Char.code c2 - 48)
+ in
+ if (c < 0 || c > 255) then (
+ Buffer.add_char b '\\';
+ Buffer.add_char b '0';
+ Buffer.add_char b c0;
+ Buffer.add_char b c1;
+ Buffer.add_char b c2;
+ loop (nextIx + 4)
+ ) else (
+ Buffer.add_char b (Char.unsafe_chr c);
+ loop (nextIx + 4)
+ )
+ else (
+ Buffer.add_char b '\\';
+ Buffer.add_char b nextChar;
+ loop (nextIx + 1)
+ )
+ | 'x' as c ->
+ if nextIx + 2 < len then
+ let c0 = (String.unsafe_get s (nextIx + 1)) in
+ let c1 = (String.unsafe_get s (nextIx + 2)) in
+ let c = (16 * (hexValue c0)) + (hexValue c1) in
+ if (c < 0 || c > 255) then (
+ Buffer.add_char b '\\';
+ Buffer.add_char b 'x';
+ Buffer.add_char b c0;
+ Buffer.add_char b c1;
+ loop (nextIx + 3)
+ ) else (
+ Buffer.add_char b (Char.unsafe_chr c);
+ loop (nextIx + 3)
+ )
+ else (
+ Buffer.add_char b '\\';
+ Buffer.add_char b c;
+ loop (nextIx + 2)
+ )
+ | _ ->
+ Buffer.add_char b c;
+ Buffer.add_char b nextChar;
+ loop (nextIx + 1)
+ end
+ else (
+ Buffer.add_char b c;
+ ()
+ )
+ | c ->
+ Buffer.add_char b c;
+ loop (i + 1)
+ in
+ loop 0;
+ Buffer.contents b
+
+ let parseTemplateStringLiteral s =
+ let len = String.length s in
+ let b = Buffer.create len in
+
+ let rec loop i =
+ if i < len then
+ let c = String.unsafe_get s i in
+ match c with
+ | '\\' as c ->
+ if i + 1 < len then
+ let nextChar = String.unsafe_get s (i + 1) in
+ begin match nextChar with
+ | '\\' as c ->
+ Buffer.add_char b c;
+ loop (i + 2)
+ | '$' as c ->
+ Buffer.add_char b c;
+ loop (i + 2)
+ | '`' as c ->
+ Buffer.add_char b c;
+ loop (i + 2)
+ | c ->
+ Buffer.add_char b '\\';
+ Buffer.add_char b c;
+ loop (i + 2)
+ end
+ else (
+ Buffer.add_char b c
+ )
+
+ | c ->
+ Buffer.add_char b c;
+ loop (i + 1)
+
+ else
+ ()
+ in
+ loop 0;
+ Buffer.contents b
+
+ (* constant ::= integer-literal *)
+ (* ∣ float-literal *)
+ (* ∣ string-literal *)
+ let parseConstant p =
+ let isNegative = match p.Parser.token with
+ | Token.Minus -> Parser.next p; true
+ | Plus -> Parser.next p; false
+ | _ -> false
+ in
+ let constant = match p.Parser.token with
+ | Int {i; suffix} ->
+ let intTxt = if isNegative then "-" ^ i else i in
+ Parsetree.Pconst_integer (intTxt, suffix)
+ | Float {f; suffix} ->
+ let floatTxt = if isNegative then "-" ^ f else f in
+ Parsetree.Pconst_float (floatTxt, suffix)
+ | String s ->
+ let txt = if p.mode = ParseForTypeChecker then
+ parseStringLiteral s
+ else
+ s
+ in
+ Pconst_string(txt, None)
+ | Character c -> Pconst_char c
+ | token ->
+ Parser.err p (Diagnostics.unexpected token p.breadcrumbs);
+ Pconst_string("", None)
+ in
+ Parser.next p;
+ constant
+
+ let parseCommaDelimitedRegion p ~grammar ~closing ~f =
+ Parser.leaveBreadcrumb p grammar;
+ let rec loop nodes =
+ match f p with
+ | Some node ->
+ begin match p.Parser.token with
+ | Comma ->
+ Parser.next p;
+ loop (node::nodes)
+ | token when token = closing || token = Eof ->
+ List.rev (node::nodes)
+ | _ ->
+ if not (p.token = Eof || p.token = closing || Recover.shouldAbortListParse p) then
+ Parser.expect Comma p;
+ if p.token = Semicolon then Parser.next p;
+ loop (node::nodes)
+ end
+ | None ->
+ if p.token = Eof || p.token = closing || Recover.shouldAbortListParse p then
+ List.rev nodes
+ else (
+ Parser.err p (Diagnostics.unexpected p.token p.breadcrumbs);
+ Parser.next p;
+ loop nodes
+ );
+ in
+ let nodes = loop [] in
+ Parser.eatBreadcrumb p;
+ nodes
+
+ let parseCommaDelimitedReversedList p ~grammar ~closing ~f =
+ Parser.leaveBreadcrumb p grammar;
+ let rec loop nodes =
+ match f p with
+ | Some node ->
+ begin match p.Parser.token with
+ | Comma ->
+ Parser.next p;
+ loop (node::nodes)
+ | token when token = closing || token = Eof ->
+ (node::nodes)
+ | _ ->
+ if not (p.token = Eof || p.token = closing || Recover.shouldAbortListParse p) then
+ Parser.expect Comma p;
+ if p.token = Semicolon then Parser.next p;
+ loop (node::nodes)
+ end
+ | None ->
+ if p.token = Eof || p.token = closing || Recover.shouldAbortListParse p then
+ nodes
+ else (
+ Parser.err p (Diagnostics.unexpected p.token p.breadcrumbs);
+ Parser.next p;
+ loop nodes
+ );
+ in
+ let nodes = loop [] in
+ Parser.eatBreadcrumb p;
+ nodes
+
+ let parseDelimitedRegion p ~grammar ~closing ~f =
+ Parser.leaveBreadcrumb p grammar;
+ let rec loop nodes =
+ match f p with
+ | Some node ->
+ loop (node::nodes)
+ | None ->
+ if (
+ p.Parser.token = Token.Eof ||
+ p.token = closing ||
+ Recover.shouldAbortListParse p
+ ) then
+ List.rev nodes
+ else (
+ Parser.err p (Diagnostics.unexpected p.token p.breadcrumbs);
+ Parser.next p;
+ loop nodes
+ )
+ in
+ let nodes = loop [] in
+ Parser.eatBreadcrumb p;
+ nodes
+
+ let parseRegion p ~grammar ~f =
+ Parser.leaveBreadcrumb p grammar;
+ let rec loop nodes =
+ match f p with
+ | Some node ->
+ loop (node::nodes)
+ | None ->
+ if p.Parser.token = Token.Eof || Recover.shouldAbortListParse p then
+ List.rev nodes
+ else (
+ Parser.err p (Diagnostics.unexpected p.token p.breadcrumbs);
+ Parser.next p;
+ loop nodes
+ )
+ in
+ let nodes = loop [] in
+ Parser.eatBreadcrumb p;
+ nodes
+
+ (* let-binding ::= pattern = expr *)
+ (* ∣ value-name { parameter } [: typexpr] [:> typexpr] = expr *)
+ (* ∣ value-name : poly-typexpr = expr *)
+
+ (* pattern ::= value-name *)
+ (* ∣ _ *)
+ (* ∣ constant *)
+ (* ∣ pattern as value-name *)
+ (* ∣ ( pattern ) *)
+ (* ∣ ( pattern : typexpr ) *)
+ (* ∣ pattern | pattern *)
+ (* ∣ constr pattern *)
+ (* ∣ #variant variant-pattern *)
+ (* ∣ ##type *)
+ (* ∣ / pattern { , pattern }+ / *)
+ (* ∣ { field [: typexpr] [= pattern] { ; field [: typexpr] [= pattern] } [; _ ] [ ; ] } *)
+ (* ∣ [ pattern { ; pattern } [ ; ] ] *)
+ (* ∣ pattern :: pattern *)
+ (* ∣ [| pattern { ; pattern } [ ; ] |] *)
+ (* ∣ char-literal .. char-literal *)
+ (* ∣ exception pattern *)
+ let rec parsePattern ?(alias=true) ?(or_=true) p =
+ let startPos = p.Parser.startPos in
+ let attrs = parseAttributes p in
+ let pat = match p.Parser.token with
+ | (True | False) as token ->
+ let endPos = p.endPos in
+ Parser.next p;
+ let loc = mkLoc startPos endPos in
+ Ast_helper.Pat.construct ~loc
+ (Location.mkloc (Longident.Lident (Token.toString token)) loc) None
+ | Int _ | String _ | Float _ | Character _ | Minus | Plus ->
+ let c = parseConstant p in
+ begin match p.token with
+ | DotDot ->
+ Parser.next p;
+ let c2 = parseConstant p in
+ Ast_helper.Pat.interval ~loc:(mkLoc startPos p.prevEndPos) c c2
+ | _ ->
+ Ast_helper.Pat.constant ~loc:(mkLoc startPos p.prevEndPos) c
+ end
+ | Lparen ->
+ Parser.next p;
+ begin match p.token with
+ | Rparen ->
+ Parser.next p;
+ let loc = mkLoc startPos p.prevEndPos in
+ let lid = Location.mkloc (Longident.Lident "()") loc in
+ Ast_helper.Pat.construct ~loc lid None
+ | _ ->
+ let pat = parseConstrainedPattern p in
+ begin match p.token with
+ | Comma ->
+ Parser.next p;
+ parseTuplePattern ~attrs ~first:pat ~startPos p
+ | _ ->
+ Parser.expect Rparen p;
+ let loc = mkLoc startPos p.prevEndPos in
+ {pat with ppat_loc = loc}
+ end
+ end
+ | Lbracket ->
+ parseArrayPattern ~attrs p
+ | Lbrace ->
+ parseRecordPattern ~attrs p
+ | Underscore ->
+ let endPos = p.endPos in
+ let loc = mkLoc startPos endPos in
+ Parser.next p;
+ Ast_helper.Pat.any ~loc ~attrs ()
+ | Lident ident ->
+ let endPos = p.endPos in
+ let loc = mkLoc startPos endPos in
+ Parser.next p;
+ Ast_helper.Pat.var ~loc ~attrs (Location.mkloc ident loc)
+ | Uident _ ->
+ let constr = parseModuleLongIdent ~lowercase:false p in
+ begin match p.Parser.token with
+ | Lparen ->
+ parseConstructorPatternArgs p constr startPos attrs
+ | _ ->
+ Ast_helper.Pat.construct ~loc:constr.loc ~attrs constr None
+ end
+ | Hash ->
+ let (ident, loc) = parseHashIdent ~startPos p in
+ begin match p.Parser.token with
+ | Lparen ->
+ parseVariantPatternArgs p ident startPos attrs
+ | _ ->
+ Ast_helper.Pat.variant ~loc ~attrs ident None
+ end
+ | HashHash ->
+ Parser.next p;
+ let ident = parseValuePath p in
+ let loc = mkLoc startPos ident.loc.loc_end in
+ Ast_helper.Pat.type_ ~loc ~attrs ident
+ | Exception ->
+ Parser.next p;
+ let pat = parsePattern ~alias:false ~or_:false p in
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Pat.exception_ ~loc ~attrs pat
+ | Lazy ->
+ Parser.next p;
+ let pat = parsePattern ~alias:false ~or_:false p in
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Pat.lazy_ ~loc ~attrs pat
+ | List ->
+ Parser.next p;
+ begin match p.token with
+ | Lbracket ->
+ parseListPattern ~startPos ~attrs p
+ | _ ->
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Pat.var ~loc ~attrs (Location.mkloc "list" loc)
+ end
+ | Module ->
+ parseModulePattern ~attrs p
+ | Percent ->
+ let extension = parseExtension p in
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Pat.extension ~loc ~attrs extension
+ | token ->
+ Parser.err p (Diagnostics.unexpected token p.breadcrumbs);
+ begin match skipTokensAndMaybeRetry p ~isStartOfGrammar:Grammar.isAtomicPatternStart with
+ | None ->
+ Recover.defaultPattern()
+ | Some () ->
+ parsePattern p
+ end
+ in
+ let pat = if alias then parseAliasPattern ~attrs pat p else pat in
+ if or_ then parseOrPattern pat p else pat
+
+ and skipTokensAndMaybeRetry p ~isStartOfGrammar =
+ if Token.isKeyword p.Parser.token
+ && p.Parser.prevEndPos.pos_lnum == p.startPos.pos_lnum
+ then (
+ Parser.next p;
+ None
+ ) else (
+ if Recover.shouldAbortListParse p then
+ begin
+ if isStartOfGrammar p.Parser.token then
+ begin
+ Parser.next p;
+ Some ()
+ end
+ else
+ None
+ end
+ else
+ begin
+ Parser.next p;
+ let rec loop p =
+ if not (Recover.shouldAbortListParse p)
+ then begin
+ Parser.next p;
+ loop p
+ end in
+ loop p;
+ if isStartOfGrammar p.Parser.token then
+ Some ()
+ else
+ None
+ end
+ )
+
+ (* alias ::= pattern as lident *)
+ and parseAliasPattern ~attrs pattern p =
+ match p.Parser.token with
+ | As ->
+ Parser.next p;
+ let (name, loc) = parseLident p in
+ let name = Location.mkloc name loc in
+ (* The alias node spans the aliased pattern up to the just-consumed name. *)
+ Ast_helper.Pat.alias
+ ~loc:({pattern.ppat_loc with loc_end = p.prevEndPos})
+ ~attrs
+ pattern
+ name
+ | _ -> pattern
+
+ (* or ::= pattern | pattern
+ * precedence: Red | Blue | Green is interpreted as (Red | Blue) | Green *)
+ and parseOrPattern pattern1 p =
+ (* Left-associative fold over `|`-separated alternatives. *)
+ let rec loop pattern1 =
+ match p.Parser.token with
+ | Bar ->
+ Parser.next p;
+ (* ~or_:false keeps the right operand from consuming further `|`s,
+  * preserving the left-associative shape documented above. *)
+ let pattern2 = parsePattern ~or_:false p in
+ let loc = { pattern1.Parsetree.ppat_loc with
+ loc_end = pattern2.ppat_loc.loc_end
+ } in
+ loop (Ast_helper.Pat.or_ ~loc pattern1 pattern2)
+ | _ -> pattern1
+ in
+ loop pattern1
+
+ (* Parses a pattern where a `...` spread is illegal; a spread is reported
+  * with [msg] and skipped. Returns None when the current token cannot
+  * start a pattern. An optional `: typ` constraint is accepted. *)
+ and parseNonSpreadPattern ~msg p =
+ let () = match p.Parser.token with
+ | DotDotDot ->
+ Parser.err p (Diagnostics.message msg);
+ Parser.next p;
+ | _ -> ()
+ in
+ match p.Parser.token with
+ | token when Grammar.isPatternStart token ->
+ let pat = parsePattern p in
+ begin match p.Parser.token with
+ | Colon ->
+ Parser.next p;
+ let typ = parseTypExpr p in
+ let loc = mkLoc pat.ppat_loc.loc_start typ.Parsetree.ptyp_loc.loc_end in
+ Some (Ast_helper.Pat.constraint_ ~loc pat typ)
+ | _ -> Some pat
+ end
+ | _ -> None
+
+ (* pattern with optional type constraint: `pat` or `pat : typ` *)
+ and parseConstrainedPattern p =
+ let pat = parsePattern p in
+ match p.Parser.token with
+ | Colon ->
+ Parser.next p;
+ let typ = parseTypExpr p in
+ let loc = mkLoc pat.ppat_loc.loc_start typ.Parsetree.ptyp_loc.loc_end in
+ Ast_helper.Pat.constraint_ ~loc pat typ
+ | _ -> pat
+
+ (* Region form of [parseConstrainedPattern] for comma-delimited lists:
+  * None when the current token cannot start a pattern. *)
+ and parseConstrainedPatternRegion p =
+ match p.Parser.token with
+ | token when Grammar.isPatternStart token ->
+ Some (parseConstrainedPattern p)
+ | _ -> None
+
+ (* field ::=
+ * | longident
+ * | longident : pattern
+ * | longident as lident
+ *
+ * row ::=
+ * | field ,
+ * | field , _
+ * | field , _,
+ *)
+ and parseRecordPatternField p =
+ let startPos = p.Parser.startPos in
+ let label = parseValuePath p in
+ let pattern = match p.Parser.token with
+ | Colon ->
+ Parser.next p;
+ parsePattern p
+ | _ ->
+ (* Punning: `{x}` stands for `{x: x}` — synthesize a var pattern that
+  * reuses the label's own location. *)
+ Ast_helper.Pat.var
+ ~loc:label.loc
+ (Location.mkloc (Longident.last label.txt) label.loc)
+ in
+ match p.token with
+ | As ->
+ Parser.next p;
+ let (name, loc) = parseLident p in
+ let name = Location.mkloc name loc in
+ let aliasPattern = Ast_helper.Pat.alias
+ ~loc:(mkLoc startPos p.prevEndPos)
+ pattern
+ name
+ in
+ (Location.mkloc label.txt (mkLoc startPos aliasPattern.ppat_loc.loc_end), aliasPattern)
+ | _ ->
+ (label, pattern)
+
+ (* TODO: there are better representations than PatField|Underscore ? *)
+ (* One record-pattern item; the returned bool marks a leading `...` spread
+  * (validated later by parseRecordPattern). *)
+ and parseRecordPatternItem p =
+ match p.Parser.token with
+ | DotDotDot ->
+ Parser.next p;
+ Some (true, PatField (parseRecordPatternField p))
+ | Uident _ | Lident _ ->
+ Some (false, PatField (parseRecordPatternField p))
+ | Underscore ->
+ Parser.next p;
+ Some (false, PatUnderscore)
+ | _ ->
+ None
+
+ (* record pattern: `{a, b: pat}`, optionally ended with `_` to make it
+  * open. Spread fields are rejected with a diagnostic. *)
+ and parseRecordPattern ~attrs p =
+ let startPos = p.startPos in
+ Parser.expect Lbrace p;
+ let rawFields =
+ parseCommaDelimitedReversedList p
+ ~grammar:PatternRecord
+ ~closing:Rbrace
+ ~f:parseRecordPatternItem
+ in
+ Parser.expect Rbrace p;
+ let (fields, closedFlag) =
+ (* rawFields is in reverse source order, so a trailing `_` in the source
+  * shows up as the head of the list. *)
+ let (rawFields, flag) = match rawFields with
+ | (_hasSpread, PatUnderscore)::rest ->
+ (rest, Asttypes.Open)
+ | rawFields ->
+ (rawFields, Asttypes.Closed)
+ in
+ (* Folding the reversed list restores source order while dropping
+  * underscores and flagging spreads. *)
+ List.fold_left (fun (fields, flag) curr ->
+ let (hasSpread, field) = curr in
+ match field with
+ | PatField field ->
+ if hasSpread then (
+ let (_, pattern) = field in
+ Parser.err ~startPos:pattern.Parsetree.ppat_loc.loc_start p (Diagnostics.message ErrorMessages.recordPatternSpread)
+ );
+ (field::fields, flag)
+ | PatUnderscore ->
+ (fields, flag)
+ ) ([], flag) rawFields
+ in
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Pat.record ~loc ~attrs fields closedFlag
+
+ (* tuple pattern: caller already consumed `( first ,`; parse the rest. *)
+ and parseTuplePattern ~attrs ~first ~startPos p =
+ let patterns =
+ parseCommaDelimitedRegion p
+ ~grammar:Grammar.PatternList
+ ~closing:Rparen
+ ~f:parseConstrainedPatternRegion
+ in
+ Parser.expect Rparen p;
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Pat.tuple ~loc ~attrs (first::patterns)
+
+ (* list-literal pattern item; the bool marks a `...` spread element. *)
+ and parsePatternRegion p =
+ match p.Parser.token with
+ | DotDotDot ->
+ Parser.next p;
+ Some (true, parseConstrainedPattern p)
+ | token when Grammar.isPatternStart token ->
+ Some (false, parseConstrainedPattern p)
+ | _ -> None
+
+ (* first-class module pattern:
+  * module(M) or module(M : packageType) *)
+ and parseModulePattern ~attrs p =
+ let startPos = p.Parser.startPos in
+ Parser.expect Module p;
+ Parser.expect Lparen p;
+ let uident = match p.token with
+ | Uident uident ->
+ let loc = mkLoc p.startPos p.endPos in
+ Parser.next p;
+ Location.mkloc uident loc
+ | _ -> (* TODO: error recovery *)
+ Location.mknoloc "_"
+ in
+ begin match p.token with
+ | Colon ->
+ let colonStart = p.Parser.startPos in
+ Parser.next p;
+ let packageTypAttrs = parseAttributes p in
+ let packageType = parsePackageType ~startPos:colonStart ~attrs:packageTypAttrs p in
+ Parser.expect Rparen p;
+ let loc = mkLoc startPos p.prevEndPos in
+ (* unpack carries only the ident's loc; the constraint node spans the
+  * whole `module(...)` form. *)
+ let unpack = Ast_helper.Pat.unpack ~loc:uident.loc uident in
+ Ast_helper.Pat.constraint_
+ ~loc
+ ~attrs
+ unpack
+ packageType
+ | _ ->
+ Parser.expect Rparen p;
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Pat.unpack ~loc ~attrs uident
+ end
+
+ (* list pattern: `list[a, b, ...rest]` — only one leading spread is legal;
+  * other spreads are reported and treated as plain elements. *)
+ and parseListPattern ~startPos ~attrs p =
+ Parser.expect Lbracket p;
+ let listPatterns =
+ parseCommaDelimitedReversedList p
+ ~grammar:Grammar.PatternOcamlList
+ ~closing:Rbracket
+ ~f:parsePatternRegion
+ in
+ Parser.expect Rbracket p;
+ let loc = mkLoc startPos p.prevEndPos in
+ let filterSpread (hasSpread, pattern) =
+ if hasSpread then (
+ Parser.err
+ ~startPos:pattern.Parsetree.ppat_loc.loc_start
+ p
+ (Diagnostics.message ErrorMessages.listPatternSpread);
+ pattern
+ ) else
+ pattern
+ in
+ (* listPatterns is reversed, so a spread written last in source is the
+  * head here and becomes the tail of the constructed cons chain. *)
+ match listPatterns with
+ | (true, pattern)::patterns ->
+ let patterns = patterns |> List.map filterSpread |> List.rev in
+ let pat = makeListPattern loc patterns (Some pattern) in
+ {pat with ppat_loc = loc; ppat_attributes = attrs;}
+ | patterns ->
+ let patterns = patterns |> List.map filterSpread |> List.rev in
+ let pat = makeListPattern loc patterns None in
+ {pat with ppat_loc = loc; ppat_attributes = attrs;}
+
+ (* array pattern: `[a, b]` — spreads are illegal and diagnosed. *)
+ and parseArrayPattern ~attrs p =
+ let startPos = p.startPos in
+ Parser.expect Lbracket p;
+ let patterns =
+ parseCommaDelimitedRegion
+ p
+ ~grammar:Grammar.PatternList
+ ~closing:Rbracket
+ ~f:(parseNonSpreadPattern ~msg:ErrorMessages.arrayPatternSpread)
+ in
+ Parser.expect Rbracket p;
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Pat.array ~loc ~attrs patterns
+
+ (* constructor arguments: `C(...)`. `C()` becomes `C(())` (unit arg). *)
+ and parseConstructorPatternArgs p constr startPos attrs =
+ let lparen = p.startPos in
+ Parser.expect Lparen p;
+ let args = parseCommaDelimitedRegion
+ p ~grammar:Grammar.PatternList ~closing:Rparen ~f:parseConstrainedPatternRegion
+ in
+ Parser.expect Rparen p;
+ let args = match args with
+ | [] ->
+ let loc = mkLoc lparen p.prevEndPos in
+ Some (
+ Ast_helper.Pat.construct ~loc (Location.mkloc (Longident.Lident "()") loc) None
+ )
+ | [{ppat_desc = Ppat_tuple _} as pat] as patterns ->
+ (* Disambiguate Some(1, 2) vs Some((1, 2)): the type-checker wants the
+  * flat form, the printer wants the extra tuple wrapper preserved. *)
+ if p.mode = ParseForTypeChecker then
+ (* Some(1, 2) for type-checker *)
+ Some pat
+ else
+ (* Some((1, 2)) for printer *)
+ Some (Ast_helper.Pat.tuple ~loc:(mkLoc lparen p.endPos) patterns)
+ | [pattern] -> Some pattern
+ | patterns ->
+ Some (Ast_helper.Pat.tuple ~loc:(mkLoc lparen p.endPos) patterns)
+ in
+ Ast_helper.Pat.construct ~loc:(mkLoc startPos p.prevEndPos) ~attrs constr args
+
+ (* polymorphic-variant arguments: `#ident(...)`; same tuple
+  * disambiguation as parseConstructorPatternArgs. *)
+ and parseVariantPatternArgs p ident startPos attrs =
+ let lparen = p.startPos in
+ Parser.expect Lparen p;
+ let patterns =
+ parseCommaDelimitedRegion
+ p ~grammar:Grammar.PatternList ~closing:Rparen ~f:parseConstrainedPatternRegion in
+ let args =
+ match patterns with
+ | [{ppat_desc = Ppat_tuple _} as pat] as patterns ->
+ if p.mode = ParseForTypeChecker then
+ (* #ident(1, 2) for type-checker *)
+ Some pat
+ else
+ (* #ident((1, 2)) for printer *)
+ Some (Ast_helper.Pat.tuple ~loc:(mkLoc lparen p.endPos) patterns)
+ | [pattern] -> Some pattern
+ | patterns ->
+ Some (Ast_helper.Pat.tuple ~loc:(mkLoc lparen p.endPos) patterns)
+ in
+ Parser.expect Rparen p;
+ Ast_helper.Pat.variant ~loc:(mkLoc startPos p.prevEndPos) ~attrs ident args
+
+ (* Expression entry point: operand, then binary operators starting at
+  * precedence 1, then an optional trailing ternary. *)
+ and parseExpr ?(context=OrdinaryExpr) p =
+ let expr = parseOperandExpr ~context p in
+ let expr = parseBinaryExpr ~context ~a:expr p 1 in
+ parseTernaryExpr expr p
+
+ (* expr ? expr : expr *)
+ and parseTernaryExpr leftOperand p =
+ match p.Parser.token with
+ | Question ->
+ Parser.leaveBreadcrumb p Grammar.Ternary;
+ Parser.next p;
+ let trueBranch = parseExpr ~context:TernaryTrueBranchExpr p in
+ Parser.expect Colon p;
+ let falseBranch = parseExpr p in
+ Parser.eatBreadcrumb p;
+ let loc = {leftOperand.Parsetree.pexp_loc with
+ loc_start = leftOperand.pexp_loc.loc_start;
+ loc_end = falseBranch.Parsetree.pexp_loc.loc_end;
+ } in
+ (* A ternary is encoded as if-then-else; ternaryAttr marks the node,
+  * presumably so the printer can restore `? :` syntax — note loc_start is
+  * a no-op override of the same field. *)
+ Ast_helper.Exp.ifthenelse
+ ~attrs:[ternaryAttr] ~loc
+ leftOperand trueBranch (Some falseBranch)
+ | _ ->
+ leftOperand
+
+ (* es6 arrow: `(a, b) => body`, with optional `: returnType`.
+  * [?parameters] lets callers supply parameters parsed earlier. *)
+ and parseEs6ArrowExpression ?parameters p =
+ let startPos = p.Parser.startPos in
+ Parser.leaveBreadcrumb p Grammar.Es6ArrowExpr;
+ let parameters = match parameters with
+ | Some params -> params
+ | None -> parseParameters p
+ in
+ let returnType = match p.Parser.token with
+ | Colon ->
+ Parser.next p;
+ (* ~es6Arrow:false — presumably so the annotation's own `=>` cannot
+  * swallow the function's `=>`; confirm against parseTypExpr. *)
+ Some (parseTypExpr ~es6Arrow:false p)
+ | _ ->
+ None
+ in
+ Parser.expect EqualGreater p;
+ let body =
+ let expr = parseExpr p in
+ match returnType with
+ | Some typ ->
+ Ast_helper.Exp.constraint_
+ ~loc:(mkLoc expr.pexp_loc.loc_start typ.Parsetree.ptyp_loc.loc_end) expr typ
+ | None -> expr
+ in
+ Parser.eatBreadcrumb p;
+ let endPos = p.prevEndPos in
+ (* Fold parameters right-to-left into nested fun/newtype nodes, so the
+  * first parameter ends up outermost. *)
+ let arrowExpr =
+ List.fold_right (fun parameter expr ->
+ match parameter with
+ | TermParameter {uncurried; attrs; label = lbl; expr = defaultExpr; pat; pos = startPos} ->
+ let attrs = if uncurried then uncurryAttr::attrs else attrs in
+ Ast_helper.Exp.fun_ ~loc:(mkLoc startPos endPos) ~attrs lbl defaultExpr pat expr
+ | TypeParameter {uncurried; attrs; locs = newtypes; pos = startPos} ->
+ let attrs = if uncurried then uncurryAttr::attrs else attrs in
+ makeNewtypes ~attrs ~loc:(mkLoc startPos endPos) newtypes expr
+ ) parameters body
+ in
+ {arrowExpr with pexp_loc = {arrowExpr.pexp_loc with loc_start = startPos}}
+
+ (*
+ * uncurried_parameter ::=
+ * | . parameter
+ *
+ * parameter ::=
+ * | pattern
+ * | pattern : type
+ * | ~ labelName
+ * | ~ labelName as pattern
+ * | ~ labelName as pattern : type
+ * | ~ labelName = expr
+ * | ~ labelName as pattern = expr
+ * | ~ labelName as pattern : type = expr
+ * | ~ labelName = ?
+ * | ~ labelName as pattern = ?
+ * | ~ labelName as pattern : type = ?
+ *
+ * labelName ::= lident
+ *)
+ (* Returns None when the current token cannot start a parameter. *)
+ and parseParameter p =
+ if (
+ p.Parser.token = Token.Typ ||
+ p.token = Tilde ||
+ p.token = Dot ||
+ Grammar.isPatternStart p.token
+ ) then (
+ let startPos = p.Parser.startPos in
+ let uncurried = Parser.optional p Token.Dot in
+ (* two scenarios:
+ * attrs ~lbl ...
+ * attrs pattern
+ * Attributes before a labelled arg, indicate that it's on the whole arrow expr
+ * Otherwise it's part of the pattern
+ * *)
+ let attrs = parseAttributes p in
+ if p.Parser.token = Typ then (
+ (* `type a b` — locally abstract type parameters. *)
+ Parser.next p;
+ let lidents = parseLidentList p in
+ Some (TypeParameter {uncurried; attrs; locs = lidents; pos = startPos})
+ ) else (
+ let (attrs, lbl, pat) = match p.Parser.token with
+ | Tilde ->
+ Parser.next p;
+ let (lblName, loc) = parseLident p in
+ let propLocAttr = (Location.mkloc "ns.namedArgLoc" loc, Parsetree.PStr []) in
+ begin match p.Parser.token with
+ | Comma | Equal | Rparen ->
+ (* `~lbl` alone: label punned into a var pattern. *)
+ let loc = mkLoc startPos p.prevEndPos in
+ (
+ attrs,
+ Asttypes.Labelled lblName,
+ Ast_helper.Pat.var ~attrs:[propLocAttr] ~loc (Location.mkloc lblName loc)
+ )
+ | Colon ->
+ (* `~lbl: typ` — punned var pattern with a type constraint. *)
+ let lblEnd = p.prevEndPos in
+ Parser.next p;
+ let typ = parseTypExpr p in
+ let loc = mkLoc startPos lblEnd in
+ let pat =
+ let pat = Ast_helper.Pat.var ~loc (Location.mkloc lblName loc) in
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Pat.constraint_ ~attrs:[propLocAttr] ~loc pat typ in
+ (attrs, Asttypes.Labelled lblName, pat)
+ | As ->
+ (* `~lbl as pat` — explicit pattern for the labelled argument. *)
+ Parser.next p;
+ let pat =
+ let pat = parseConstrainedPattern p in
+ {pat with ppat_attributes = propLocAttr::pat.ppat_attributes}
+ in
+ (attrs, Asttypes.Labelled lblName, pat)
+ | t ->
+ (* Recovery: diagnose, then fall back to a punned var pattern. *)
+ Parser.err p (Diagnostics.unexpected t p.breadcrumbs);
+ let loc = mkLoc startPos p.prevEndPos in
+ (
+ attrs,
+ Asttypes.Labelled lblName,
+ Ast_helper.Pat.var ~loc (Location.mkloc lblName loc)
+ )
+ end
+ | _ ->
+ (* Positional parameter: attributes attach to the pattern itself. *)
+ let pattern = parseConstrainedPattern p in
+ let attrs = List.concat [attrs; pattern.ppat_attributes] in
+ ([], Asttypes.Nolabel, {pattern with ppat_attributes = attrs})
+ in
+ match p.Parser.token with
+ | Equal ->
+ Parser.next p;
+ (* `= ?` or `= expr` marks the label optional (with default for expr). *)
+ let lbl = match lbl with
+ | Asttypes.Labelled lblName -> Asttypes.Optional lblName
+ | Asttypes.Optional _ as lbl -> lbl
+ | Asttypes.Nolabel -> Asttypes.Nolabel
+ in
+ begin match p.Parser.token with
+ | Question ->
+ Parser.next p;
+ Some (TermParameter {uncurried; attrs; label = lbl; expr = None; pat; pos = startPos})
+ | _ ->
+ let expr = parseConstrainedOrCoercedExpr p in
+ Some (TermParameter {uncurried; attrs; label = lbl; expr = Some expr; pat; pos = startPos})
+ end
+ | _ ->
+ Some (TermParameter {uncurried; attrs; label = lbl; expr = None; pat; pos = startPos})
+ )
+ ) else None
+
+ (* `( parameter {, parameter} )` — consumes the closing paren. *)
+ and parseParameterList p =
+ let parameters =
+ parseCommaDelimitedRegion
+ ~grammar:Grammar.ParameterList
+ ~f:parseParameter
+ ~closing:Rparen
+ p
+ in
+ Parser.expect Rparen p;
+ parameters
+
+ (* parameters ::=
+ * | _
+ * | lident
+ * | ()
+ * | (.)
+ * | ( parameter {, parameter} [,] )
+ *)
+ and parseParameters p =
+ let startPos = p.Parser.startPos in
+ match p.Parser.token with
+ | Lident ident ->
+ (* Single-identifier shorthand: `x => ...`. *)
+ Parser.next p;
+ let loc = mkLoc startPos p.Parser.prevEndPos in
+ [TermParameter {
+ uncurried = false;
+ attrs = [];
+ label = Asttypes.Nolabel;
+ expr = None;
+ pat = Ast_helper.Pat.var ~loc (Location.mkloc ident loc);
+ pos = startPos;
+ }]
+ | List ->
+ (* `list` is a keyword token but is a legal parameter name here. *)
+ Parser.next p;
+ let loc = mkLoc startPos p.Parser.prevEndPos in
+ [TermParameter {
+ uncurried = false;
+ attrs = [];
+ label = Asttypes.Nolabel;
+ expr = None;
+ pat = Ast_helper.Pat.var ~loc (Location.mkloc "list" loc);
+ pos = startPos;
+ }]
+ | Underscore ->
+ (* `_ => ...` — wildcard parameter. *)
+ Parser.next p;
+ let loc = mkLoc startPos p.Parser.prevEndPos in
+ [TermParameter {uncurried = false; attrs = []; label = Asttypes.Nolabel; expr = None; pat = Ast_helper.Pat.any ~loc (); pos = startPos}]
+ | Lparen ->
+ Parser.next p;
+ begin match p.Parser.token with
+ | Rparen ->
+ (* `()` — single unit parameter. *)
+ Parser.next p;
+ let loc = mkLoc startPos p.Parser.prevEndPos in
+ let unitPattern = Ast_helper.Pat.construct
+ ~loc (Location.mkloc (Longident.Lident "()") loc) None
+ in
+ [TermParameter {uncurried = false; attrs = []; label = Asttypes.Nolabel; expr = None; pat = unitPattern; pos = startPos}]
+ | Dot ->
+ Parser.next p;
+ begin match p.token with
+ | Rparen ->
+ (* `(.)` — uncurried unit parameter. *)
+ Parser.next p;
+ let loc = mkLoc startPos p.Parser.prevEndPos in
+ let unitPattern = Ast_helper.Pat.construct
+ ~loc (Location.mkloc (Longident.Lident "()") loc) None
+ in
+ [TermParameter {uncurried = true; attrs = []; label = Asttypes.Nolabel; expr = None; pat = unitPattern; pos = startPos}]
+ | _ ->
+ (* `(. a, b)` — the dot marks only the first parameter uncurried. *)
+ begin match parseParameterList p with
+ | (TermParameter {attrs; label = lbl; expr = defaultExpr; pat = pattern; pos = startPos})::rest ->
+ (TermParameter {uncurried = true; attrs; label = lbl; expr = defaultExpr; pat = pattern; pos = startPos})::rest
+ | parameters -> parameters
+ end
+ end
+ | _ -> parseParameterList p
+ end
+ | token ->
+ Parser.err p (Diagnostics.unexpected token p.breadcrumbs);
+ []
+
+ (* coercion: `expr :> typ` (no from-type is parsed — hence None). *)
+ and parseCoercedExpr ~(expr: Parsetree.expression) p =
+ Parser.expect ColonGreaterThan p;
+ let typ = parseTypExpr p in
+ let loc = mkLoc expr.pexp_loc.loc_start p.prevEndPos in
+ Ast_helper.Exp.coerce ~loc expr None typ
+
+ (* expr, `expr :> typ`, `expr : typ`, or `expr : typ :> typ2`. *)
+ and parseConstrainedOrCoercedExpr p =
+ let expr = parseExpr p in
+ match p.Parser.token with
+ | ColonGreaterThan ->
+ parseCoercedExpr ~expr p
+ | Colon ->
+ Parser.next p;
+ (* NOTE(review): this single-wildcard match is redundant — the body could
+  * stand on its own. Kept as-is. *)
+ begin match p.token with
+ | _ ->
+ let typ = parseTypExpr p in
+ let loc = mkLoc expr.pexp_loc.loc_start typ.ptyp_loc.loc_end in
+ let expr = Ast_helper.Exp.constraint_ ~loc expr typ in
+ begin match p.token with
+ | ColonGreaterThan ->
+ (* A coercion may follow the constraint: `e : t :> t2`. *)
+ parseCoercedExpr ~expr p
+ | _ ->
+ expr
+ end
+ end
+ | _ -> expr
+
+
+ (* Region form: expression with optional `: typ`, None when the current
+  * token cannot start an expression. *)
+ and parseConstrainedExprRegion p =
+ match p.Parser.token with
+ | token when Grammar.isExprStart token ->
+ let expr = parseExpr p in
+ begin match p.Parser.token with
+ | Colon ->
+ Parser.next p;
+ let typ = parseTypExpr p in
+ let loc = mkLoc expr.pexp_loc.loc_start typ.ptyp_loc.loc_end in
+ Some (Ast_helper.Exp.constraint_ ~loc expr typ)
+ | _ -> Some expr
+ end
+ | _ -> None
+
+ (* Atomic expressions represent unambiguous expressions.
+ * This means that regardless of the context, these expressions
+ * are always interpreted correctly. *)
+ and parseAtomicExpr p =
+ Parser.leaveBreadcrumb p Grammar.ExprOperand;
+ let startPos = p.Parser.startPos in
+ let expr = match p.Parser.token with
+ | (True | False) as token ->
+ Parser.next p;
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Exp.construct ~loc
+ (Location.mkloc (Longident.Lident (Token.toString token)) loc) None
+ | Int _ | String _ | Float _ | Character _ ->
+ let c = parseConstant p in
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Exp.constant ~loc c
+ | Backtick ->
+ let expr = parseTemplateExpr p in
+ {expr with pexp_loc = mkLoc startPos p.prevEndPos}
+ | Uident _ | Lident _ ->
+ parseValueOrConstructor p
+ | Hash ->
+ parsePolyVariantExpr p
+ | Lparen ->
+ Parser.next p;
+ begin match p.Parser.token with
+ | Rparen ->
+ (* `()` — unit. *)
+ Parser.next p;
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Exp.construct
+ ~loc (Location.mkloc (Longident.Lident "()") loc) None
+ | _t ->
+ let expr = parseConstrainedOrCoercedExpr p in
+ begin match p.token with
+ | Comma ->
+ (* `(e, ...` — a tuple, not a parenthesized expression. *)
+ Parser.next p;
+ parseTupleExpr ~startPos ~first:expr p
+ | _ ->
+ Parser.expect Rparen p;
+ expr
+ (* {expr with pexp_loc = mkLoc startPos p.prevEndPos}
+ * What does this location mean here? It means that when there's
+ * a parenthesized we keep the location here for whitespace interleaving.
+ * Without the closing paren in the location there will always be an extra
+ * line. For now we don't include it, because it does weird things
+ * with for comments. *)
+ end
+ end
+ | List ->
+ Parser.next p;
+ begin match p.token with
+ | Lbracket ->
+ (* `list[...]` — list literal. *)
+ parseListExpr ~startPos p
+ | _ ->
+ (* Bare `list` used as an ordinary identifier. *)
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Exp.ident ~loc (Location.mkloc (Longident.Lident "list") loc)
+ end
+ | Module ->
+ Parser.next p;
+ parseFirstClassModuleExpr ~startPos p
+ | Lbracket ->
+ parseArrayExp p
+ | Lbrace ->
+ parseBracedOrRecordExpr p
+ | LessThan ->
+ parseJsx p
+ | Percent ->
+ let extension = parseExtension p in
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Exp.extension ~loc extension
+ | Underscore as token ->
+ (* This case is for error recovery. Not sure if it's the correct place *)
+ Parser.err p (Diagnostics.lident token);
+ Parser.next p;
+ Recover.defaultExpr ()
+ | token ->
+ (* Unknown token: skip ahead and retry once recovery finds a plausible
+  * expression start; otherwise report and yield a placeholder expr. *)
+ let errPos = p.prevEndPos in
+ begin match skipTokensAndMaybeRetry p ~isStartOfGrammar:Grammar.isAtomicExprStart with
+ | None ->
+ Parser.err ~startPos:errPos p (Diagnostics.unexpected token p.breadcrumbs);
+ Recover.defaultExpr ()
+ | Some () -> parseAtomicExpr p
+ end
+ in
+ Parser.eatBreadcrumb p;
+ expr
+
+ (* module(module-expr)
+ * module(module-expr : package-type) *)
+ and parseFirstClassModuleExpr ~startPos p =
+ Parser.expect Lparen p;
+
+ let modExpr = parseModuleExpr p in
+ let modEndLoc = p.prevEndPos in
+ begin match p.Parser.token with
+ | Colon ->
+ let colonStart = p.Parser.startPos in
+ Parser.next p;
+ let attrs = parseAttributes p in
+ let packageType = parsePackageType ~startPos:colonStart ~attrs p in
+ Parser.expect Rparen p;
+ (* The pack node ends at the module expr; the constraint node covers the
+  * whole `module(... : ...)` form. *)
+ let loc = mkLoc startPos modEndLoc in
+ let firstClassModule = Ast_helper.Exp.pack ~loc modExpr in
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Exp.constraint_ ~loc firstClassModule packageType
+ | _ ->
+ Parser.expect Rparen p;
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Exp.pack ~loc modExpr
+ end
+
+ (* `expr[...]` after a primary expression (Lbracket already peeked):
+  * - expr["str"]         => expr##str (JS object access), and with `=`
+  *                          following, expr["str"] = e => #= assignment
+  * - expr[index]         => Array.get, and with `=`, Array.set. *)
+ and parseBracketAccess p expr startPos =
+ Parser.leaveBreadcrumb p Grammar.ExprArrayAccess;
+ let lbracket = p.startPos in
+ Parser.next p;
+ let stringStart = p.startPos in
+ match p.Parser.token with
+ | String s ->
+ Parser.next p;
+ let stringEnd = p.prevEndPos in
+ Parser.expect Rbracket p;
+ let rbracket = p.prevEndPos in
+ let e =
+ let identLoc = mkLoc stringStart stringEnd in
+ let loc = mkLoc lbracket rbracket in
+ Ast_helper.Exp.apply ~loc
+ (Ast_helper.Exp.ident ~loc (Location.mkloc (Longident.Lident "##") loc))
+ [Nolabel, expr; Nolabel, (Ast_helper.Exp.ident ~loc:identLoc (Location.mkloc (Longident.Lident s) identLoc))]
+ in
+ (* Allow chained access/calls on the result before checking for `=`. *)
+ let e = parsePrimaryExpr ~operand:e p in
+ let equalStart = p.startPos in
+ begin match p.token with
+ | Equal ->
+ Parser.next p;
+ let equalEnd = p.prevEndPos in
+ let rhsExpr = parseExpr p in
+ let loc = mkLoc startPos rhsExpr.pexp_loc.loc_end in
+ let operatorLoc = mkLoc equalStart equalEnd in
+ Ast_helper.Exp.apply ~loc
+ (Ast_helper.Exp.ident ~loc:operatorLoc (Location.mkloc (Longident.Lident "#=") operatorLoc))
+ [Nolabel, e; Nolabel, rhsExpr]
+ | _ -> e
+ end
+ | _ ->
+ let accessExpr = parseConstrainedOrCoercedExpr p in
+ Parser.expect Rbracket p;
+ let rbracket = p.prevEndPos in
+ let arrayLoc = mkLoc lbracket rbracket in
+ begin match p.token with
+ | Equal ->
+ Parser.leaveBreadcrumb p ExprArrayMutation;
+ Parser.next p;
+ let rhsExpr = parseExpr p in
+ let arraySet = Location.mkloc
+ (Longident.Ldot(Lident "Array", "set"))
+ arrayLoc
+ in
+ let endPos = p.prevEndPos in
+ let arraySet = Ast_helper.Exp.apply
+ ~loc:(mkLoc startPos endPos)
+ (Ast_helper.Exp.ident ~loc:arrayLoc arraySet)
+ [Nolabel, expr; Nolabel, accessExpr; Nolabel, rhsExpr]
+ in
+ Parser.eatBreadcrumb p;
+ arraySet
+ | _ ->
+ let endPos = p.prevEndPos in
+ let e =
+ Ast_helper.Exp.apply
+ ~loc:(mkLoc startPos endPos)
+ (Ast_helper.Exp.ident
+ ~loc:arrayLoc
+ (Location.mkloc (Longident.Ldot(Lident "Array", "get")) arrayLoc)
+ )
+ [Nolabel, expr; Nolabel, accessExpr]
+ in
+ Parser.eatBreadcrumb p;
+ parsePrimaryExpr ~operand:e p
+ end
+
+ (* * A primary expression represents
+ * - atomic-expr
+ * - john.age
+ * - array[0]
+ * - applyFunctionTo(arg1, arg2)
+ *
+ * The "operand" represents the expression that is operated on
+ *)
+ (* [noCall=true] disables call/index/assignment suffixes (used by callers
+  * that must not consume them). *)
+ and parsePrimaryExpr ~operand ?(noCall=false) p =
+ let startPos = operand.pexp_loc.loc_start in
+ let rec loop p expr =
+ match p.Parser.token with
+ | Dot ->
+ Parser.next p;
+ let lident = parseValuePath p in
+ begin match p.Parser.token with
+ | Equal when noCall = false ->
+ (* `expr.path = e` — record field assignment. *)
+ Parser.leaveBreadcrumb p Grammar.ExprSetField;
+ Parser.next p;
+ let targetExpr = parseExpr p in
+ let loc = mkLoc startPos p.prevEndPos in
+ let setfield = Ast_helper.Exp.setfield ~loc expr lident targetExpr in
+ Parser.eatBreadcrumb p;
+ setfield
+ | _ ->
+ let endPos = p.prevEndPos in
+ let loc = mkLoc startPos endPos in
+ loop p (Ast_helper.Exp.field ~loc expr lident)
+ end
+ (* The same-line checks below stop a newline-separated `[`/`(`/backtick
+  * from being parsed as a suffix of the previous expression. *)
+ | Lbracket when noCall = false && p.prevEndPos.pos_lnum == p.startPos.pos_lnum ->
+ parseBracketAccess p expr startPos
+ | Lparen when noCall = false && p.prevEndPos.pos_lnum == p.startPos.pos_lnum ->
+ loop p (parseCallExpr p expr)
+ | Backtick when noCall = false && p.prevEndPos.pos_lnum == p.startPos.pos_lnum ->
+ begin match expr.pexp_desc with
+ | Pexp_ident {txt = Longident.Lident ident} ->
+ (* Tagged template: ident`...` *)
+ parseTemplateExpr ~prefix:ident p
+ | _ ->
+ Parser.err
+ ~startPos:expr.pexp_loc.loc_start
+ ~endPos:expr.pexp_loc.loc_end
+ p
+ (Diagnostics.message "Tagged template literals are currently restricted to identifiers like: json`null`.");
+ parseTemplateExpr p
+ end
+ | _ -> expr
+ in
+ loop p operand
+
+ (* a unary expression is an expression with only one operand and
+ * unary operator. Examples:
+ * -1
+ * !condition
+ * -. 1.6
+ *)
+ and parseUnaryExpr p =
+ let startPos = p.Parser.startPos in
+ match p.Parser.token with
+ | (Minus | MinusDot | Plus | PlusDot | Bang) as token ->
+ Parser.leaveBreadcrumb p Grammar.ExprUnary;
+ let tokenEnd = p.endPos in
+ Parser.next p;
+ (* Unary operators nest: `--x` parses as -(-x). *)
+ let operand = parseUnaryExpr p in
+ let unaryExpr = makeUnaryExpr startPos tokenEnd token operand in
+ Parser.eatBreadcrumb p;
+ unaryExpr
+ | _ ->
+ parsePrimaryExpr ~operand:(parseAtomicExpr p) p
+
+ (* Represents an "operand" in a binary expression.
+ * If you have `a + b`, `a` and `b` both represent
+ * the operands of the binary expression with operator `+` *)
+ and parseOperandExpr ~context p =
+ let startPos = p.Parser.startPos in
+ let attrs = parseAttributes p in
+ let expr = match p.Parser.token with
+ | Assert ->
+ Parser.next p;
+ let expr = parseUnaryExpr p in
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Exp.assert_ ~loc expr
+ | Lazy ->
+ Parser.next p;
+ let expr = parseUnaryExpr p in
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Exp.lazy_ ~loc expr
+ | Try ->
+ parseTryExpression p
+ | If ->
+ parseIfExpression p
+ | For ->
+ parseForExpression p
+ | While ->
+ parseWhileExpression p
+ | Switch ->
+ parseSwitchExpression p
+ | _ ->
+ (* In a `when` clause an arrow is never a function literal, so skip the
+  * (expensive) es6-arrow lookahead there. *)
+ if (context != WhenExpr) &&
+ isEs6ArrowExpression ~inTernary:(context=TernaryTrueBranchExpr) p
+ then
+ parseEs6ArrowExpression p
+ else
+ parseUnaryExpr p
+ in
+ (* let endPos = p.Parser.prevEndPos in *)
+ {expr with
+ pexp_attributes = List.concat[expr.Parsetree.pexp_attributes; attrs];
+ (* pexp_loc = mkLoc startPos endPos *)
+ }
+
+ (* a binary expression is an expression that combines two expressions with an
+ * operator. Examples:
+ * a + b
+ * f(x) |> g(y)
+ *)
+ (* Precedence-climbing parser: [prec] is the minimum operator precedence
+  * this call may consume; [?a] is an already-parsed left operand. *)
+ and parseBinaryExpr ?(context=OrdinaryExpr) ?a p prec =
+ let a = match a with
+ | Some e -> e
+ | None -> parseOperandExpr ~context p
+ in
+ let rec loop a =
+ let token = p.Parser.token in
+ let tokenPrec =
+ match token with
+ (* Can the minus be interpreted as a binary operator? Or is it a unary?
+ * let w = {
+ * x
+ * -10
+ * }
+ * vs
+ * let w = {
+ * width
+ * - gap
+ * }
+ *
+ * First case is unary, second is a binary operator.
+ * See Scanner.isBinaryOp *)
+ | Minus | MinusDot | LessThan when not (
+ Scanner.isBinaryOp p.scanner.src p.startPos.pos_cnum p.endPos.pos_cnum
+ ) && p.startPos.pos_lnum > p.prevEndPos.pos_lnum -> -1
+ | token -> Token.precedence token
+ in
+ (* -1 sentinel (above) is always < prec, so the loop stops cleanly. *)
+ if tokenPrec < prec then a
+ else begin
+ Parser.leaveBreadcrumb p (Grammar.ExprBinaryAfterOp token);
+ let startPos = p.startPos in
+ Parser.next p;
+ let endPos = p.prevEndPos in
+ (* tokenPrec + 1 for the right operand => left-associative operators. *)
+ let b = parseBinaryExpr ~context p (tokenPrec + 1) in
+ let loc = mkLoc a.Parsetree.pexp_loc.loc_start b.pexp_loc.loc_end in
+ let expr = Ast_helper.Exp.apply
+ ~loc
+ (makeInfixOperator p token startPos endPos)
+ [Nolabel, a; Nolabel, b]
+ in
+ loop expr
+ end
+ in
+ loop a
+
+ (* If we even need this, determines if < might be the start of jsx. Not 100% complete *)
+ (* and isStartOfJsx p = *)
+ (* Parser.lookahead p (fun p -> *)
+ (* match p.Parser.token with *)
+ (* | LessThan -> *)
+ (* Parser.next p; *)
+ (* begin match p.token with *)
+ (* | GreaterThan (* <> *) -> true *)
+ (* | Lident _ | Uident _ | List -> *)
+ (* ignore (parseJsxName p); *)
+ (* begin match p.token with *)
+ (* | GreaterThan (*
*) -> true *)
+ (* | Question (* true *)
+ (* | Lident _ | List -> *)
+ (* Parser.next p; *)
+ (* begin match p.token with *)
+ (* | Equal (* true *)
+ (* | _ -> false (* TODO *) *)
+ (* end *)
+ (* | Forwardslash (* *)
+ (* Parser.next p; *)
+ (* begin match p.token with *)
+ (* | GreaterThan (* *) -> true *)
+ (* | _ -> false *)
+ (* end *)
+ (* | _ -> *)
+ (* false *)
+ (* end *)
+ (* | _ -> false *)
+ (* end *)
+ (* | _ -> false *)
+ (* ) *)
+
+ (* template literal: `a ${b} c`, desugared into string concatenation with
+  * the `^` operator. [?prefix] is the tag of a tagged template (e.g. json)
+  * and is stored as the string constant's delimiter. *)
+ and parseTemplateExpr ?(prefix="") p =
+ let hiddenOperator =
+ let op = Location.mknoloc (Longident.Lident "^") in
+ Ast_helper.Exp.ident op
+ in
+ (* [acc] is the concatenation built so far; each TemplatePart appends its
+  * literal text and the interpolated expression. *)
+ let rec loop acc p =
+ let startPos = p.Parser.startPos in
+ match p.Parser.token with
+ | TemplateTail txt ->
+ Parser.next p;
+ let loc = mkLoc startPos p.prevEndPos in
+ (* Empty literal chunks are skipped so no useless `^ ""` is emitted. *)
+ if String.length txt > 0 then
+ let txt = if p.mode = ParseForTypeChecker then parseTemplateStringLiteral txt else txt in
+ let str = Ast_helper.Exp.constant ~loc (Pconst_string(txt, Some prefix)) in
+ Ast_helper.Exp.apply ~loc hiddenOperator
+ [Nolabel, acc; Nolabel, str]
+ else
+ acc
+ | TemplatePart txt ->
+ Parser.next p;
+ let loc = mkLoc startPos p.prevEndPos in
+ let expr = parseExprBlock p in
+ let fullLoc = mkLoc startPos p.prevEndPos in
+ (* Re-enter template scanning before consuming the `}` that closes the
+  * ${...} interpolation. *)
+ Scanner.setTemplateMode p.scanner;
+ Parser.expect Rbrace p;
+ let txt = if p.mode = ParseForTypeChecker then parseTemplateStringLiteral txt else txt in
+ let str = Ast_helper.Exp.constant ~loc (Pconst_string(txt, Some prefix)) in
+ let next =
+ let a = if String.length txt > 0 then
+ Ast_helper.Exp.apply ~loc:fullLoc hiddenOperator [Nolabel, acc; Nolabel, str]
+ else acc
+ in
+ Ast_helper.Exp.apply ~loc:fullLoc hiddenOperator
+ [Nolabel, a; Nolabel, expr]
+ in
+ loop next p
+ | token ->
+ Parser.err p (Diagnostics.unexpected token p.breadcrumbs);
+ acc
+ in
+ Scanner.setTemplateMode p.scanner;
+ Parser.expect Backtick p;
+ let startPos = p.Parser.startPos in
+ match p.Parser.token with
+ | TemplateTail txt ->
+ (* No interpolation at all: a single string constant. *)
+ let loc = mkLoc startPos p.endPos in
+ Parser.next p;
+ let txt = if p.mode = ParseForTypeChecker then parseTemplateStringLiteral txt else txt in
+ Ast_helper.Exp.constant ~loc (Pconst_string(txt, Some prefix))
+ | TemplatePart txt ->
+ let constantLoc = mkLoc startPos p.endPos in
+ Parser.next p;
+ let expr = parseExprBlock p in
+ let fullLoc = mkLoc startPos p.prevEndPos in
+ Scanner.setTemplateMode p.scanner;
+ Parser.expect Rbrace p;
+ let txt = if p.mode = ParseForTypeChecker then parseTemplateStringLiteral txt else txt in
+ let str = Ast_helper.Exp.constant ~loc:constantLoc (Pconst_string(txt, Some prefix)) in
+ let next =
+ if String.length txt > 0 then
+ Ast_helper.Exp.apply ~loc:fullLoc hiddenOperator [Nolabel, str; Nolabel, expr]
+ else
+ expr
+ in
+ loop next p
+ | token ->
+ Parser.err p (Diagnostics.unexpected token p.breadcrumbs);
+ Ast_helper.Exp.constant (Pconst_string("", None))
+
+ (* Overparse: let f = a : int => a + 1, is it (a : int) => or (a): int =>
+ * Also overparse constraints:
+ * let x = {
+ * let a = 1
+ * a + pi: int
+ * }
+ *
+ * We want to give a nice error message in these cases
+ * *)
+ and overParseConstrainedOrCoercedOrArrowExpression p expr =
+ match p.Parser.token with
+ | ColonGreaterThan ->
+ parseCoercedExpr ~expr p
+ | Colon ->
+ Parser.next p;
+ let typ = parseTypExpr ~es6Arrow:false p in
+ begin match p.Parser.token with
+ | EqualGreater ->
+ (* `expr : typ => body` — ambiguous arrow. Build both readings, show
+  * them in the diagnostic, and continue with the return-type reading. *)
+ Parser.next p;
+ let body = parseExpr p in
+ let pat = match expr.pexp_desc with
+ | Pexp_ident longident ->
+ Ast_helper.Pat.var ~loc:expr.pexp_loc
+ (Location.mkloc
+ (Longident.flatten longident.txt |> String.concat ".")
+ longident.loc)
+ (* TODO: can we convert more expressions to patterns?*)
+ | _ ->
+ (* Non-ident left side: placeholder pattern for the diagnostic. *)
+ Ast_helper.Pat.var ~loc:expr.pexp_loc (Location.mkloc "pattern" expr.pexp_loc)
+ in
+ (* Reading 1: the annotation is the return type. *)
+ let arrow1 = Ast_helper.Exp.fun_
+ ~loc:(mkLoc expr.pexp_loc.loc_start body.pexp_loc.loc_end)
+ Asttypes.Nolabel
+ None
+ pat
+ (Ast_helper.Exp.constraint_ body typ)
+ in
+ (* Reading 2: the annotation is the parameter type. *)
+ let arrow2 = Ast_helper.Exp.fun_
+ ~loc:(mkLoc expr.pexp_loc.loc_start body.pexp_loc.loc_end)
+ Asttypes.Nolabel
+ None
+ (Ast_helper.Pat.constraint_ pat typ)
+ body
+ in
+ let msg =
+ Doc.breakableGroup ~forceBreak:true (
+ Doc.concat [
+ Doc.text "Did you mean to annotate the parameter type or the return type?";
+ Doc.indent (
+ Doc.concat [
+ Doc.line;
+ Doc.text "1) ";
+ Printer.printExpression arrow1 CommentTable.empty;
+ Doc.line;
+ Doc.text "2) ";
+ Printer.printExpression arrow2 CommentTable.empty;
+ ]
+ )
+ ]
+ ) |> Doc.toString ~width:80
+ in
+ Parser.err
+ ~startPos:expr.pexp_loc.loc_start
+ ~endPos:body.pexp_loc.loc_end
+ p
+ (Diagnostics.message msg);
+ arrow1
+ | _ ->
+ (* Plain over-parsed constraint: keep the node but tell the user to
+  * parenthesize. *)
+ let open Parsetree in
+ let loc = mkLoc expr.pexp_loc.loc_start typ.ptyp_loc.loc_end in
+ let expr = Ast_helper.Exp.constraint_ ~loc expr typ in
+ let () = Parser.err
+ ~startPos:expr.pexp_loc.loc_start
+ ~endPos:typ.ptyp_loc.loc_end
+ p
+ (Diagnostics.message
+ (Doc.breakableGroup ~forceBreak:true (Doc.concat [
+ Doc.text "Expressions with type constraints need to be wrapped in parens:";
+ Doc.indent (
+ Doc.concat [
+ Doc.line;
+ Printer.addParens (Printer.printExpression expr CommentTable.empty);
+ ]
+ )
+ ]) |> Doc.toString ~width:80
+ ))
+ in
+ expr
+ end
+ | _ -> expr
+
+ (* One `let` binding body: `pat = expr`, `pat : typ = expr`, or the
+  * locally-abstract-types form `pat : type a b. typ = expr`. *)
+ and parseLetBindingBody ~startPos ~attrs p =
+ Parser.beginRegion p;
+ Parser.leaveBreadcrumb p Grammar.LetBinding;
+ let pat, exp =
+ let pat = parsePattern p in
+ match p.Parser.token with
+ | Colon ->
+ Parser.next p;
+ begin match p.token with
+ | Typ -> (* locally abstract types *)
+ Parser.next p;
+ let newtypes = parseLidentList p in
+ Parser.expect Dot p;
+ let typ = parseTypExpr p in
+ Parser.expect Equal p;
+ let expr = parseExpr p in
+ let loc = mkLoc startPos p.prevEndPos in
+ (* wrapTypeAnnotation turns `type a. t` into the newtype encoding;
+  * [poly] is the polytype used in the pattern constraint. *)
+ let exp, poly = wrapTypeAnnotation ~loc newtypes typ expr in
+ let pat = Ast_helper.Pat.constraint_ ~loc pat poly in
+ (pat, exp)
+ | _ ->
+ let polyType = parsePolyTypeExpr p in
+ let loc = {pat.ppat_loc with loc_end = polyType.Parsetree.ptyp_loc.loc_end} in
+ let pat = Ast_helper.Pat.constraint_ ~loc pat polyType in
+ Parser.expect Token.Equal p;
+ let exp = parseExpr p in
+ (* Over-parse `: typ` / `:> typ` / `=> body` here to produce friendly
+  * diagnostics (see overParseConstrainedOrCoercedOrArrowExpression). *)
+ let exp = overParseConstrainedOrCoercedOrArrowExpression p exp in
+ (pat, exp)
+ end
+ | _ ->
+ Parser.expect Token.Equal p;
+ let exp = overParseConstrainedOrCoercedOrArrowExpression p (parseExpr p) in
+ (pat, exp)
+ in
+ let loc = mkLoc startPos p.prevEndPos in
+ let vb = Ast_helper.Vb.mk ~loc ~attrs pat exp in
+ Parser.eatBreadcrumb p;
+ Parser.endRegion p;
+ vb
+
+ (* TODO: find a better way? Is it possible?
+ * let a = 1
+ * @attr
+ * and b = 2
+ *
+ * The problem is that without semi we need a lookahead to determine
+ * if the attr is on the letbinding or the start of a new thing
+ *
+ * let a = 1
+ * @attr
+ * let b = 1
+ *
+ * Here @attr should attach to something "new": `let b = 1`
+ * The parser state is forked, which is quite expensive…
+ *)
+ and parseAttributesAndBinding (p : Parser.t) = (* snapshot scanner+parser state; keep parsed attributes only if followed by `and`, otherwise roll everything back and return [] *)
+ let err = p.scanner.err in
+ let ch = p.scanner.ch in
+ let offset = p.scanner.offset in
+ let rdOffset = p.scanner.rdOffset in
+ let lineOffset = p.scanner.lineOffset in
+ let lnum = p.scanner.lnum in
+ let mode = p.scanner.mode in
+ let token = p.token in
+ let startPos = p.startPos in
+ let endPos = p.endPos in
+ let prevEndPos = p.prevEndPos in
+ let breadcrumbs = p.breadcrumbs in
+ let errors = p.errors in
+ let diagnostics = p.diagnostics in
+ let comments = p.comments in
+
+ match p.Parser.token with
+ | At ->
+ let attrs = parseAttributes p in
+ begin match p.Parser.token with
+ | And -> (* attrs belong to the next `and` binding *)
+ attrs
+ | _ -> (* not followed by `and`: restore every saved field so the attrs are re-parsed by the caller *)
+ p.scanner.err <- err;
+ p.scanner.ch <- ch;
+ p.scanner.offset <- offset;
+ p.scanner.rdOffset <- rdOffset;
+ p.scanner.lineOffset <- lineOffset;
+ p.scanner.lnum <- lnum;
+ p.scanner.mode <- mode;
+ p.token <- token;
+ p.startPos <- startPos;
+ p.endPos <- endPos;
+ p.prevEndPos <- prevEndPos;
+ p.breadcrumbs <- breadcrumbs;
+ p.errors <- errors;
+ p.diagnostics <- diagnostics;
+ p.comments <- comments;
+ []
+ end
+ | _ -> []
+
+ (* definition ::= let [rec] let-binding { and let-binding } *)
+ and parseLetBindings ~attrs p = (* returns (recFlag, bindings) with bindings in source order *)
+ let startPos = p.Parser.startPos in
+ Parser.optional p Let |> ignore;
+ let recFlag = if Parser.optional p Token.Rec then
+ Asttypes.Recursive
+ else
+ Asttypes.Nonrecursive
+ in
+ let first = parseLetBindingBody ~startPos ~attrs p in
+
+ let rec loop p bindings = (* accumulates `and` bindings in reverse, reversed on exit *)
+ let startPos = p.Parser.startPos in
+ let attrs = parseAttributesAndBinding p in
+ match p.Parser.token with
+ | And ->
+ Parser.next p;
+ let attrs = match p.token with
+ | Export -> (* `and export x` sugar: attach a genType attribute *)
+ let exportLoc = mkLoc p.startPos p.endPos in
+ Parser.next p;
+ let genTypeAttr = (Location.mkloc "genType" exportLoc, Parsetree.PStr []) in
+ genTypeAttr::attrs
+ | _ -> attrs
+ in
+ ignore(Parser.optional p Let); (* overparse for fault tolerance *)
+ let letBinding = parseLetBindingBody ~startPos ~attrs p in
+ loop p (letBinding::bindings)
+ | _ ->
+ List.rev bindings
+ in
+ (recFlag, loop p [first])
+
+ (*
+ * <div -> div
+ * <Foo -> Foo.createElement
+ * <Foo.Bar -> Foo.Bar.createElement
+ *)
+ and parseJsxName p = (* parses the tag name of a jsx element into an ident expression; uppercase names get `.createElement` appended *)
+ let longident = match p.Parser.token with
+ | Lident ident -> (* lowercase dom element: name kept as-is *)
+ let identStart = p.startPos in
+ let identEnd = p.endPos in
+ Parser.next p;
+ let loc = mkLoc identStart identEnd in
+ Location.mkloc (Longident.Lident ident) loc
+ | Uident _ -> (* component module path *)
+ let longident = parseModuleLongIdent ~lowercase:false p in
+ Location.mkloc (Longident.Ldot (longident.txt, "createElement")) longident.loc
+ | _ -> (* recover with a placeholder ident `_` *)
+ let msg = "A jsx name should start with a lowercase or uppercase identifier, like: div in <div /> or Navbar in <Navbar />"
+ in
+ Parser.err p (Diagnostics.message msg);
+ Location.mknoloc (Longident.Lident "_")
+ in
+ Ast_helper.Exp.ident ~loc:longident.loc longident
+
+ and parseJsxOpeningOrSelfClosingElement ~startPos p = (* parses <name props /> or <name props> children </name>; lowers to an application of `name` with ~children and a final unit arg *)
+ let jsxStartPos = p.Parser.startPos in
+ let name = parseJsxName p in
+ let jsxProps = parseJsxProps p in
+ let children = match p.Parser.token with
+ | Forwardslash -> (* <foo /> — self-closing *)
+ let childrenStartPos = p.Parser.startPos in
+ Parser.next p;
+ let childrenEndPos = p.Parser.startPos in
+ Parser.expect GreaterThan p;
+ let loc = mkLoc childrenStartPos childrenEndPos in
+ makeListExpression loc [] None (* no children *)
+ | GreaterThan -> (* <foo> children </foo> *)
+ let childrenStartPos = p.Parser.startPos in
+ Scanner.setJsxMode p.scanner;
+ Parser.next p;
+ let (spread, children) = parseJsxChildren p in
+ let childrenEndPos = p.Parser.startPos in
+ let () = match p.token with
+ | LessThanSlash -> Parser.next p
+ | LessThan -> Parser.next p; Parser.expect Forwardslash p
+ | token when Grammar.isStructureItemStart token -> () (* closing tag missing; recover at next structure item *)
+ | _ -> Parser.expect LessThanSlash p
+ in
+ begin match p.Parser.token with
+ | Lident _ | Uident _ when verifyJsxOpeningClosingName p name ->
+ Parser.expect GreaterThan p;
+ let loc = mkLoc childrenStartPos childrenEndPos in
+ ( match spread, children with
+ | true, child :: _ -> (* {...child} spread: pass the single child through *)
+ child
+ | _ ->
+ makeListExpression loc children None
+ )
+ | token ->
+ let () = if Grammar.isStructureItemStart token then (
+ let closing = "</" ^ (string_of_pexp_ident name) ^ ">" in
+ let msg = Diagnostics.message ("Missing " ^ closing) in
+ Parser.err ~startPos ~endPos:p.prevEndPos p msg;
+ ) else (
+ let opening = "</" ^ (string_of_pexp_ident name) ^ ">" in
+ let msg = "Closing jsx name should be the same as the opening name. Did you mean " ^ opening ^ " ?" in
+ Parser.err ~startPos ~endPos:p.prevEndPos p (Diagnostics.message msg);
+ Parser.expect GreaterThan p
+ )
+ in
+ let loc = mkLoc childrenStartPos childrenEndPos in
+ ( match spread, children with
+ | true, child :: _ ->
+ child
+ | _ ->
+ makeListExpression loc children None
+ )
+ end
+ | token ->
+ Parser.err p (Diagnostics.unexpected token p.breadcrumbs);
+ makeListExpression Location.none [] None
+ in
+ let jsxEndPos = p.prevEndPos in
+ let loc = mkLoc jsxStartPos jsxEndPos in
+ Ast_helper.Exp.apply
+ ~loc
+ name
+ (List.concat [jsxProps; [
+ (Asttypes.Labelled "children", children);
+ (Asttypes.Nolabel, Ast_helper.Exp.construct (Location.mknoloc (Longident.Lident "()")) None)
+ ]])
+
+ (*
+ * jsx ::=
+ * | <> jsx-children </>
+ * | <element-name jsx-props />
+ * | <element-name jsx-props> jsx-children </element-name>
+ *
+ * jsx-children ::= primary-expr* => 0 or more
+ *)
+ and parseJsx p = (* entry point for jsx; result carries the JSX ppx attribute *)
+ Parser.leaveBreadcrumb p Grammar.Jsx;
+ let startPos = p.Parser.startPos in
+ Parser.expect LessThan p;
+ let jsxExpr = match p.Parser.token with
+ | Lident _ | Uident _ ->
+ parseJsxOpeningOrSelfClosingElement ~startPos p
+ | GreaterThan -> (* fragment: <> foo </> *)
+ parseJsxFragment p
+ | _ ->
+ parseJsxName p
+ in
+ {jsxExpr with pexp_attributes = [jsxAttr]}
+
+ (*
+ * jsx-fragment ::=
+ * | <> </>
+ * | <> jsx-children </>
+ *)
+ and parseJsxFragment p = (* parses a fragment body into a list expression of children *)
+ let childrenStartPos = p.Parser.startPos in
+ Scanner.setJsxMode p.scanner;
+ Parser.expect GreaterThan p;
+ let (_spread, children) = parseJsxChildren p in
+ let childrenEndPos = p.Parser.startPos in
+ Parser.expect LessThanSlash p;
+ Parser.expect GreaterThan p;
+ let loc = mkLoc childrenStartPos childrenEndPos in
+ makeListExpression loc children None
+
+
+ (*
+ * jsx-prop ::=
+ * | lident
+ * | ?lident
+ * | lident = jsx_expr
+ * | lident = ?jsx_expr
+ *)
+ and parseJsxProp p = (* parses one prop into a labelled/optional argument; returns None when the token cannot start a prop *)
+ Parser.leaveBreadcrumb p Grammar.JsxAttribute;
+ match p.Parser.token with
+ | Question | Lident _ ->
+ let optional = Parser.optional p Question in
+ let (name, loc) = parseLident p in
+ let propLocAttr = (Location.mkloc "ns.namedArgLoc" loc, Parsetree.PStr []) in
+ (* optional punning: ?name stands for ~name=?name *)
+ if optional then
+ Some (
+ Asttypes.Optional name,
+ Ast_helper.Exp.ident ~attrs:[propLocAttr]
+ ~loc (Location.mkloc (Longident.Lident name) loc)
+ )
+ else begin
+ match p.Parser.token with
+ | Equal ->
+ Parser.next p;
+ (* no punning *)
+ let optional = Parser.optional p Question in
+ let attrExpr =
+ let e = parsePrimaryExpr ~operand:(parseAtomicExpr p) p in
+ {e with pexp_attributes = propLocAttr::e.pexp_attributes}
+ in
+ let label =
+ if optional then Asttypes.Optional name else Asttypes.Labelled name
+ in
+ Some (label, attrExpr)
+ | _ -> (* plain punning: name stands for ~name=name *)
+ let attrExpr =
+ Ast_helper.Exp.ident ~loc ~attrs:[propLocAttr]
+ (Location.mknoloc (Longident.Lident name)) in
+ let label =
+ if optional then Asttypes.Optional name else Asttypes.Labelled name
+ in
+ Some (label, attrExpr)
+ end
+ | _ ->
+ None
+
+ and parseJsxProps p = (* zero or more props, region-parsed until a non-prop token *)
+ parseRegion
+ ~grammar:Grammar.JsxAttribute
+ ~f:parseJsxProp
+ p
+
+ and parseJsxChildren p = (* returns (spread?, children); scanner jsx mode is popped on every exit path *)
+ let rec loop p children =
+ match p.Parser.token with
+ | Token.Eof | LessThanSlash ->
+ Scanner.popMode p.scanner Jsx;
+ List.rev children
+ | LessThan ->
+ (* Imagine: <div> <
+ *
+ * is `<` the start of a jsx-child?
+ * reconsiderLessThan peeks at the next token and
+ * determines the correct token to disambiguate *)
+ let token = Scanner.reconsiderLessThan p.scanner in
+ if token = LessThan then
+ let child = parsePrimaryExpr ~operand:(parseAtomicExpr p) ~noCall:true p in
+ loop p (child::children)
+ else (* LessThanSlash *)
+ let () = p.token <- token in
+ let () = Scanner.popMode p.scanner Jsx in
+ List.rev children
+ | token when Grammar.isJsxChildStart token ->
+ let () = Scanner.popMode p.scanner Jsx in
+ let child = parsePrimaryExpr ~operand:(parseAtomicExpr p) ~noCall:true p in
+ loop p (child::children)
+ | _ ->
+ Scanner.popMode p.scanner Jsx;
+ List.rev children
+ in
+ match p.Parser.token with
+ | DotDotDot -> (* {...children} spread: exactly one child expression *)
+ Parser.next p;
+ (true, [parsePrimaryExpr ~operand:(parseAtomicExpr p) ~noCall:true p])
+ | _ -> (false, loop p [])
+
+ and parseBracedOrRecordExpr p = (* disambiguates `{...}`: empty braces, record spread, string-keyed record, record literal, braced block, or braced arrow fn; braced blocks get a braces attribute *)
+ let startPos = p.Parser.startPos in
+ Parser.expect Lbrace p;
+ match p.Parser.token with
+ | Rbrace -> (* `{}` — reported as an error, recovered as unit *)
+ Parser.err p (Diagnostics.unexpected Rbrace p.breadcrumbs);
+ Parser.next p;
+ let loc = mkLoc startPos p.prevEndPos in
+ let braces = makeBracesAttr loc in
+ Ast_helper.Exp.construct ~attrs:[braces] ~loc
+ (Location.mkloc (Longident.Lident "()") loc) None
+ | DotDotDot ->
+ (* beginning of record spread, parse record *)
+ Parser.next p;
+ let spreadExpr = parseConstrainedOrCoercedExpr p in
+ Parser.expect Comma p;
+ let expr = parseRecordExpr ~startPos ~spread:(Some spreadExpr) [] p in
+ Parser.expect Rbrace p;
+ expr
+ | String s -> (* `{"key" ...}` — either a string-keyed (bs.obj) record or a block starting with a string literal *)
+ let field =
+ let loc = mkLoc p.startPos p.endPos in
+ Parser.next p;
+ Location.mkloc (Longident.Lident s) loc
+ in
+ begin match p.Parser.token with
+ | Colon ->
+ Parser.next p;
+ let fieldExpr = parseExpr p in
+ Parser.optional p Comma |> ignore;
+ let expr = parseRecordExprWithStringKeys ~startPos (field, fieldExpr) p in
+ Parser.expect Rbrace p;
+ expr
+ | _ -> (* not a record row: reinterpret the string as the first expression of a block *)
+ let constant = Ast_helper.Exp.constant ~loc:field.loc (Parsetree.Pconst_string(s, None)) in
+ let a = parsePrimaryExpr ~operand:constant p in
+ let e = parseBinaryExpr ~a p 1 in
+ let e = parseTernaryExpr e p in
+ begin match p.Parser.token with
+ | Semicolon ->
+ Parser.next p;
+ let expr = parseExprBlock ~first:e p in
+ Parser.expect Rbrace p;
+ let loc = mkLoc startPos p.prevEndPos in
+ let braces = makeBracesAttr loc in
+ {expr with pexp_attributes = braces::expr.pexp_attributes}
+ | Rbrace ->
+ Parser.next p;
+ let loc = mkLoc startPos p.prevEndPos in
+ let braces = makeBracesAttr loc in
+ {e with pexp_attributes = braces::e.pexp_attributes}
+ | _ ->
+ let expr = parseExprBlock ~first:e p in
+ Parser.expect Rbrace p;
+ let loc = mkLoc startPos p.prevEndPos in
+ let braces = makeBracesAttr loc in
+ {expr with pexp_attributes = braces::expr.pexp_attributes}
+ end
+ end
+ | Uident _ | Lident _ -> (* ambiguous: record literal vs block starting with an identifier *)
+ let valueOrConstructor = parseValueOrConstructor p in
+ begin match valueOrConstructor.pexp_desc with
+ | Pexp_ident pathIdent ->
+ let identEndPos = p.prevEndPos in
+ begin match p.Parser.token with
+ | Comma -> (* `{x, ...}` — punned record *)
+ Parser.next p;
+ let expr = parseRecordExpr ~startPos [(pathIdent, valueOrConstructor)] p in
+ Parser.expect Rbrace p;
+ expr
+ | Colon -> (* `{x: ...}` — record row *)
+ Parser.next p;
+ let fieldExpr = parseExpr p in
+ begin match p.token with
+ | Rbrace ->
+ Parser.next p;
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Exp.record ~loc [(pathIdent, fieldExpr)] None
+ | _ ->
+ Parser.expect Comma p;
+ let expr = parseRecordExpr ~startPos [(pathIdent, fieldExpr)] p in
+ Parser.expect Rbrace p;
+ expr
+ end
+ (* error case *)
+ | Lident _ ->
+ if p.prevEndPos.pos_lnum < p.startPos.pos_lnum then (
+ Parser.expect Comma p;
+ let expr = parseRecordExpr ~startPos [(pathIdent, valueOrConstructor)] p in
+ Parser.expect Rbrace p;
+ expr
+ ) else (
+ Parser.expect Colon p;
+ let expr = parseRecordExpr ~startPos [(pathIdent, valueOrConstructor)] p in
+ Parser.expect Rbrace p;
+ expr
+ )
+ | Semicolon -> (* `{x; ...}` — block *)
+ Parser.next p;
+ let expr = parseExprBlock ~first:(Ast_helper.Exp.ident pathIdent) p in
+ Parser.expect Rbrace p;
+ let loc = mkLoc startPos p.prevEndPos in
+ let braces = makeBracesAttr loc in
+ {expr with pexp_attributes = braces::expr.pexp_attributes}
+ | Rbrace -> (* `{x}` — braced ident *)
+ Parser.next p;
+ let expr = Ast_helper.Exp.ident ~loc:pathIdent.loc pathIdent in
+ let loc = mkLoc startPos p.prevEndPos in
+ let braces = makeBracesAttr loc in
+ {expr with pexp_attributes = braces::expr.pexp_attributes}
+ | EqualGreater -> (* `{x => ...}` — arrow function inside braces *)
+ let loc = mkLoc startPos identEndPos in
+ let ident = Location.mkloc (Longident.last pathIdent.txt) loc in
+ let a = parseEs6ArrowExpression
+ ~parameters:[TermParameter {
+ uncurried = false;
+ attrs = [];
+ label = Asttypes.Nolabel;
+ expr = None;
+ pat = Ast_helper.Pat.var ident;
+ pos = startPos;
+ }]
+ p
+ in
+ let e = parseBinaryExpr ~a p 1 in
+ let e = parseTernaryExpr e p in
+ begin match p.Parser.token with
+ | Semicolon ->
+ Parser.next p;
+ let expr = parseExprBlock ~first:e p in
+ Parser.expect Rbrace p;
+ let loc = mkLoc startPos p.prevEndPos in
+ let braces = makeBracesAttr loc in
+ {expr with pexp_attributes = braces::expr.pexp_attributes}
+ | Rbrace ->
+ Parser.next p;
+ let loc = mkLoc startPos p.prevEndPos in
+ let braces = makeBracesAttr loc in
+ {e with pexp_attributes = braces::e.pexp_attributes}
+ | _ ->
+ let expr = parseExprBlock ~first:e p in
+ Parser.expect Rbrace p;
+ let loc = mkLoc startPos p.prevEndPos in
+ let braces = makeBracesAttr loc in
+ {expr with pexp_attributes = braces::expr.pexp_attributes}
+ end
+ | _ -> (* ident begins a larger block expression *)
+ Parser.leaveBreadcrumb p Grammar.ExprBlock;
+ let a = parsePrimaryExpr ~operand:(Ast_helper.Exp.ident ~loc:pathIdent.loc pathIdent) p in
+ let e = parseBinaryExpr ~a p 1 in
+ let e = parseTernaryExpr e p in
+ Parser.eatBreadcrumb p;
+ begin match p.Parser.token with
+ | Semicolon ->
+ Parser.next p;
+ let expr = parseExprBlock ~first:e p in
+ Parser.expect Rbrace p;
+ let loc = mkLoc startPos p.prevEndPos in
+ let braces = makeBracesAttr loc in
+ {expr with pexp_attributes = braces::expr.pexp_attributes}
+ | Rbrace ->
+ Parser.next p;
+ let loc = mkLoc startPos p.prevEndPos in
+ let braces = makeBracesAttr loc in
+ {e with pexp_attributes = braces::e.pexp_attributes}
+ | _ ->
+ let expr = parseExprBlock ~first:e p in
+ Parser.expect Rbrace p;
+ let loc = mkLoc startPos p.prevEndPos in
+ let braces = makeBracesAttr loc in
+ {expr with pexp_attributes = braces::expr.pexp_attributes}
+ end
+ end
+ | _ -> (* first token parsed to a constructor: treat as start of a block *)
+ Parser.leaveBreadcrumb p Grammar.ExprBlock;
+ let a = parsePrimaryExpr ~operand:valueOrConstructor p in
+ let e = parseBinaryExpr ~a p 1 in
+ let e = parseTernaryExpr e p in
+ Parser.eatBreadcrumb p;
+ begin match p.Parser.token with
+ | Semicolon ->
+ Parser.next p;
+ let expr = parseExprBlock ~first:e p in
+ Parser.expect Rbrace p;
+ let loc = mkLoc startPos p.prevEndPos in
+ let braces = makeBracesAttr loc in
+ {expr with pexp_attributes = braces::expr.pexp_attributes}
+ | Rbrace ->
+ Parser.next p;
+ let loc = mkLoc startPos p.prevEndPos in
+ let braces = makeBracesAttr loc in
+ {e with pexp_attributes = braces::e.pexp_attributes}
+ | _ ->
+ let expr = parseExprBlock ~first:e p in
+ Parser.expect Rbrace p;
+ let loc = mkLoc startPos p.prevEndPos in
+ let braces = makeBracesAttr loc in
+ {expr with pexp_attributes = braces::expr.pexp_attributes}
+ end
+ end
+ | _ -> (* anything else: plain braced block *)
+ let expr = parseExprBlock p in
+ Parser.expect Rbrace p;
+ let loc = mkLoc startPos p.prevEndPos in
+ let braces = makeBracesAttr loc in
+ {expr with pexp_attributes = braces::expr.pexp_attributes}
+
+ and parseRecordRowWithStringKey p = (* one `"key": expr` row (value punned to the key ident when `:` is absent); None ends the region *)
+ match p.Parser.token with
+ | String s ->
+ let loc = mkLoc p.startPos p.endPos in
+ Parser.next p;
+ let field = Location.mkloc (Longident.Lident s) loc in
+ begin match p.Parser.token with
+ | Colon ->
+ Parser.next p;
+ let fieldExpr = parseExpr p in
+ Some (field, fieldExpr)
+ | _ -> (* punning *)
+ Some (field, Ast_helper.Exp.ident ~loc:field.loc field)
+ end
+ | _ -> None
+
+ and parseRecordRow p = (* one `path: expr` row (punned when `:` is absent); extra `...` spreads are rejected *)
+ let () = match p.Parser.token with
+ | Token.DotDotDot -> (* only one spread is allowed, and only at the front *)
+ Parser.err p (Diagnostics.message ErrorMessages.recordExprSpread);
+ Parser.next p;
+ | _ -> ()
+ in
+ match p.Parser.token with
+ | Lident _ | Uident _ | List ->
+ let field = parseValuePath p in
+ begin match p.Parser.token with
+ | Colon ->
+ Parser.next p;
+ let fieldExpr = parseExpr p in
+ Some (field, fieldExpr)
+ | _ -> (* punning *)
+ Some (field, Ast_helper.Exp.ident ~loc:field.loc field)
+ end
+ | _ -> None
+
+ and parseRecordExprWithStringKeys ~startPos firstRow p = (* string-keyed record becomes a %bs.obj extension wrapping a record expression *)
+ let rows = firstRow::(
+ parseCommaDelimitedRegion ~grammar:Grammar.RecordRowsStringKey ~closing:Rbrace ~f:parseRecordRowWithStringKey p
+ ) in
+ let loc = mkLoc startPos p.endPos in
+ let recordStrExpr = Ast_helper.Str.eval ~loc (
+ Ast_helper.Exp.record ~loc rows None
+ ) in
+ Ast_helper.Exp.extension ~loc
+ (Location.mkloc "bs.obj" loc, Parsetree.PStr [recordStrExpr])
+
+ and parseRecordExpr ~startPos ?(spread=None) rows p = (* remaining rows after any already-parsed ones; errors when a spread has no updated field *)
+ let exprs =
+ parseCommaDelimitedRegion
+ ~grammar:Grammar.RecordRows
+ ~closing:Rbrace
+ ~f:parseRecordRow p
+ in
+ let rows = List.concat [rows; exprs] in
+ let () = match rows with
+ | [] ->
+ let msg = "Record spread needs at least one field that's updated" in
+ Parser.err p (Diagnostics.message msg);
+ | _rows -> ()
+ in
+ let loc = mkLoc startPos p.endPos in
+ Ast_helper.Exp.record ~loc rows spread
+
+ and parseExprBlockItem p = (* one item of a block: let-module/exception/open/let chained onto the rest of the block, or a plain expression (possibly sequenced) *)
+ let startPos = p.Parser.startPos in
+ let attrs = parseAttributes p in
+ match p.Parser.token with
+ | Module ->
+ Parser.next p;
+ begin match p.token with
+ | Lparen -> (* module(...) — first-class module expression *)
+ parseFirstClassModuleExpr ~startPos p
+ | _ -> (* module X = ... ; rest-of-block *)
+ let name = match p.Parser.token with
+ | Uident ident ->
+ let loc = mkLoc p.startPos p.endPos in
+ Parser.next p;
+ Location.mkloc ident loc
+ | t ->
+ Parser.err p (Diagnostics.uident t);
+ Location.mknoloc "_"
+ in
+ let body = parseModuleBindingBody p in
+ Parser.optional p Semicolon |> ignore;
+ let expr = parseExprBlock p in
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Exp.letmodule ~loc name body expr
+ end
+ | Exception -> (* exception E ; rest-of-block *)
+ let extensionConstructor = parseExceptionDef ~attrs p in
+ Parser.optional p Semicolon |> ignore;
+ let blockExpr = parseExprBlock p in
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Exp.letexception ~loc extensionConstructor blockExpr
+ | Open -> (* open M ; rest-of-block *)
+ let od = parseOpenDescription ~attrs p in
+ Parser.optional p Semicolon |> ignore;
+ let blockExpr = parseExprBlock p in
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Exp.open_ ~loc od.popen_override od.popen_lid blockExpr
+ | Let -> (* let bindings; the block continuation defaults to unit when nothing follows *)
+ let (recFlag, letBindings) = parseLetBindings ~attrs p in
+ let next = match p.Parser.token with
+ | Semicolon ->
+ Parser.next p;
+ if Grammar.isBlockExprStart p.Parser.token then
+ parseExprBlock p
+ else
+ let loc = mkLoc p.startPos p.endPos in
+ Ast_helper.Exp.construct ~loc
+ (Location.mkloc (Longident.Lident "()") loc) None
+ | token when Grammar.isBlockExprStart token ->
+ parseExprBlock p
+ | _ ->
+ let loc = mkLoc p.startPos p.endPos in
+ Ast_helper.Exp.construct ~loc (Location.mkloc (Longident.Lident "()") loc) None
+ in
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Exp.let_ ~loc recFlag letBindings next
+ | _ -> (* expression item; sequence it with the rest of the block if one follows *)
+ let e1 =
+ let expr = parseExpr p in
+ {expr with pexp_attributes = List.concat [attrs; expr.pexp_attributes]}
+ in
+ ignore (Parser.optional p Semicolon);
+ if Grammar.isBlockExprStart p.Parser.token then
+ let e2 = parseExprBlock p in
+ let loc = {e1.pexp_loc with loc_end = e2.pexp_loc.loc_end} in
+ Ast_helper.Exp.sequence ~loc e1 e2
+ else e1
+
+ (* blockExpr ::= expr
+ * | expr ;
+ * | expr ; blockExpr
+ * | module ... ; blockExpr
+ * | open ... ; blockExpr
+ * | exception ... ; blockExpr
+ * | let ...
+ * | let ... ;
+ * | let ... ; blockExpr
+ *
+ * note: semi should be made optional
+ * a block of expression is always
+ *)
+ and parseExprBlock ?first p = (* parses a `;`-separated block; ~first allows the caller to supply an already-parsed leading expression *)
+ Parser.leaveBreadcrumb p Grammar.ExprBlock;
+ let item = match first with
+ | Some e -> e
+ | None -> parseExprBlockItem p
+ in
+ let blockExpr = match p.Parser.token with
+ | Semicolon ->
+ Parser.next p;
+ if Grammar.isBlockExprStart p.Parser.token then
+ let next = parseExprBlockItem p in
+ ignore(Parser.optional p Semicolon);
+ let loc = {item.pexp_loc with loc_end = next.pexp_loc.loc_end} in
+ Ast_helper.Exp.sequence ~loc item next
+ else
+ item
+ | token when Grammar.isBlockExprStart token -> (* newline-separated items, no semicolon *)
+ let next = parseExprBlockItem p in
+ ignore(Parser.optional p Semicolon);
+ let loc = {item.pexp_loc with loc_end = next.pexp_loc.loc_end} in
+ Ast_helper.Exp.sequence ~loc item next
+ | _ ->
+ item
+ in
+ Parser.eatBreadcrumb p;
+ overParseConstrainedOrCoercedOrArrowExpression p blockExpr
+
+ and parseTryExpression p = (* try expr catch { cases } *)
+ let startPos = p.Parser.startPos in
+ Parser.expect Try p;
+ let expr = parseExpr ~context:WhenExpr p in
+ Parser.expect Catch p;
+ Parser.expect Lbrace p;
+ let cases = parsePatternMatching p in
+ Parser.expect Rbrace p;
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Exp.try_ ~loc expr cases
+
+ and parseIfExpression p = (* if cond { ... } [else if ... | else { ... }]; else-if recurses *)
+ Parser.beginRegion p;
+ Parser.leaveBreadcrumb p Grammar.ExprIf;
+ let startPos = p.Parser.startPos in
+ Parser.expect If p;
+ Parser.leaveBreadcrumb p Grammar.IfCondition;
+ (* doesn't make sense to try es6 arrow here? *)
+ let conditionExpr = parseExpr ~context:WhenExpr p in
+ Parser.eatBreadcrumb p;
+ Parser.leaveBreadcrumb p IfBranch;
+ Parser.expect Lbrace p;
+ let thenExpr = parseExprBlock p in
+ Parser.expect Rbrace p;
+ Parser.eatBreadcrumb p;
+ let elseExpr = match p.Parser.token with
+ | Else ->
+ Parser.endRegion p;
+ Parser.leaveBreadcrumb p Grammar.ElseBranch;
+ Parser.next p;
+ Parser.beginRegion p;
+ let elseExpr = match p.token with
+ | If -> (* else if: nested if-expression *)
+ parseIfExpression p
+ | _ ->
+ Parser.expect Lbrace p;
+ let blockExpr = parseExprBlock p in
+ Parser.expect Rbrace p;
+ blockExpr
+ in
+ Parser.eatBreadcrumb p;
+ Parser.endRegion p;
+ Some elseExpr
+ | _ ->
+ Parser.endRegion p;
+ None
+ in
+ let loc = mkLoc startPos p.prevEndPos in
+ Parser.eatBreadcrumb p;
+ Ast_helper.Exp.ifthenelse ~loc conditionExpr thenExpr elseExpr
+
+ and parseForRest hasOpeningParen pattern startPos p = (* continues a for-loop after the pattern: in e1 to|downto e2 { body } *)
+ Parser.expect In p;
+ let e1 = parseExpr p in
+ let direction = match p.Parser.token with
+ | To -> Asttypes.Upto
+ | Downto -> Asttypes.Downto
+ | token -> (* recover with Upto *)
+ Parser.err p (Diagnostics.unexpected token p.breadcrumbs);
+ Asttypes.Upto
+ in
+ Parser.next p;
+ let e2 = parseExpr ~context:WhenExpr p in
+ if hasOpeningParen then Parser.expect Rparen p;
+ Parser.expect Lbrace p;
+ let bodyExpr = parseExprBlock p in
+ Parser.expect Rbrace p;
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Exp.for_ ~loc pattern e1 e2 direction bodyExpr
+
+ and parseForExpression p = (* for [(]pattern[)] in ...; parens may wrap unit, a tuple or a single pattern *)
+ let startPos = p.Parser.startPos in
+ Parser.expect For p;
+ match p.token with
+ | Lparen ->
+ let lparen = p.startPos in
+ Parser.next p;
+ begin match p.token with
+ | Rparen -> (* for () in ... — unit pattern *)
+ Parser.next p;
+ let unitPattern =
+ let loc = mkLoc lparen p.prevEndPos in
+ let lid = Location.mkloc (Longident.Lident "()") loc in
+ Ast_helper.Pat.construct lid None
+ in
+ parseForRest false (parseAliasPattern ~attrs:[] unitPattern p) startPos p
+ | _ ->
+ let pat = parsePattern p in
+ begin match p.token with
+ | Comma -> (* tuple pattern; its own parens are consumed here *)
+ Parser.next p;
+ let tuplePattern =
+ parseTuplePattern ~attrs:[] ~startPos:lparen ~first:pat p
+ in
+ let pattern = parseAliasPattern ~attrs:[] tuplePattern p in
+ parseForRest false pattern startPos p
+ | _ -> (* single parenthesized pattern; closing paren still pending *)
+ parseForRest true pat startPos p
+ end
+ end
+ | _ ->
+ parseForRest false (parsePattern p) startPos p
+
+ and parseWhileExpression p = (* while cond { body } *)
+ let startPos = p.Parser.startPos in
+ Parser.expect While p;
+ let expr1 = parseExpr ~context:WhenExpr p in
+ Parser.expect Lbrace p;
+ let expr2 = parseExprBlock p in
+ Parser.expect Rbrace p;
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Exp.while_ ~loc expr1 expr2
+
+ and parsePatternMatchCase p = (* one `| pattern [when guard] => block` case; None when no leading bar *)
+ Parser.beginRegion p;
+ Parser.leaveBreadcrumb p Grammar.PatternMatchCase;
+ match p.Parser.token with
+ | Token.Bar ->
+ Parser.next p;
+ let lhs = parsePattern p in
+ let guard = match p.Parser.token with
+ | When ->
+ Parser.next p;
+ Some (parseExpr ~context:WhenExpr p)
+ | _ ->
+ None
+ in
+ let () = match p.token with
+ | EqualGreater -> Parser.next p
+ | _ -> Recover.recoverEqualGreater p
+ in
+ let rhs = parseExprBlock p in
+ Parser.endRegion p;
+ Parser.eatBreadcrumb p;
+ Some (Ast_helper.Exp.case lhs ?guard rhs)
+ | _ ->
+ Parser.endRegion p;
+ None
+
+ and parsePatternMatching p = (* all cases up to the closing brace; empty case list is a diagnostic *)
+ Parser.leaveBreadcrumb p Grammar.PatternMatching;
+ let cases =
+ parseDelimitedRegion
+ ~grammar:Grammar.PatternMatching
+ ~closing:Rbrace
+ ~f:parsePatternMatchCase
+ p
+ in
+ let () = match cases with
+ | [] -> Parser.err ~startPos:p.prevEndPos p (
+ Diagnostics.message "Pattern matching needs at least one case"
+ )
+ | _ -> ()
+ in
+ cases
+
+ and parseSwitchExpression p = (* switch expr { cases } *)
+ let startPos = p.Parser.startPos in
+ Parser.expect Switch p;
+ let switchExpr = parseExpr ~context:WhenExpr p in
+ Parser.expect Lbrace p;
+ let cases = parsePatternMatching p in
+ Parser.expect Rbrace p;
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Exp.match_ ~loc switchExpr cases
+
+ (*
+ * argument ::=
+ * | _ (* syntax sugar *)
+ * | expr
+ * | expr : type
+ * | ~ label-name
+ * | ~ label-name
+ * | ~ label-name ?
+ * | ~ label-name = expr
+ * | ~ label-name = _ (* syntax sugar *)
+ * | ~ label-name = expr : type
+ * | ~ label-name = ? expr
+ * | ~ label-name = ? _ (* syntax sugar *)
+ * | ~ label-name = ? expr : type
+ *
+ * uncurried_argument ::=
+ * | . argument
+ *)
+ and parseArgument p = (* one call argument as (uncurried, label, expr); None ends the argument region *)
+ if (
+ p.Parser.token = Token.Tilde ||
+ p.token = Dot ||
+ p.token = Underscore ||
+ Grammar.isExprStart p.token
+ ) then (
+ match p.Parser.token with
+ | Dot -> (* leading dot marks an uncurried application *)
+ let uncurried = true in
+ let startPos = p.Parser.startPos in
+ Parser.next(p);
+ begin match p.token with
+ (* apply(.) *)
+ | Rparen ->
+ let loc = mkLoc startPos p.prevEndPos in
+ let unitExpr = Ast_helper.Exp.construct ~loc
+ (Location.mkloc (Longident.Lident "()") loc) None
+ in
+ Some (uncurried, Asttypes.Nolabel, unitExpr)
+ | _ ->
+ parseArgument2 p ~uncurried
+ end
+ | _ ->
+ parseArgument2 p ~uncurried:false
+ ) else
+ None
+
+ and parseArgument2 p ~uncurried = (* the argument proper: `_` placeholder, ~labelled (with punning, ?, = expr, : type), or a plain expression *)
+ match p.Parser.token with
+ (* foo(_), do not confuse with foo(_ => x), TODO: performance *)
+ | Underscore when not (isEs6ArrowExpression ~inTernary:false p) ->
+ let loc = mkLoc p.startPos p.endPos in
+ Parser.next p;
+ let exp = Ast_helper.Exp.ident ~loc (
+ Location.mkloc (Longident.Lident "_") loc
+ ) in
+ Some (uncurried, Asttypes.Nolabel, exp)
+ | Tilde ->
+ Parser.next p;
+ (* TODO: nesting of pattern matches not intuitive for error recovery *)
+ begin match p.Parser.token with
+ | Lident ident ->
+ let startPos = p.startPos in
+ Parser.next p;
+ let endPos = p.prevEndPos in
+ let loc = mkLoc startPos endPos in
+ let propLocAttr = (Location.mkloc "ns.namedArgLoc" loc, Parsetree.PStr []) in
+ let identExpr = Ast_helper.Exp.ident ~attrs:[propLocAttr] ~loc (
+ Location.mkloc (Longident.Lident ident) loc
+ ) in
+ begin match p.Parser.token with
+ | Question -> (* ~label? — optional punning *)
+ Parser.next p;
+ Some (uncurried, Asttypes.Optional ident, identExpr)
+ | Equal ->
+ Parser.next p;
+ let label = match p.Parser.token with
+ | Question -> (* ~label=? expr *)
+ Parser.next p;
+ Asttypes.Optional ident
+ | _ ->
+ Labelled ident
+ in
+ let expr = match p.Parser.token with
+ | Underscore when not (isEs6ArrowExpression ~inTernary:false p) -> (* ~label=_ placeholder *)
+ let loc = mkLoc p.startPos p.endPos in
+ Parser.next p;
+ Ast_helper.Exp.ident ~loc (
+ Location.mkloc (Longident.Lident "_") loc
+ )
+ | _ ->
+ let expr = parseConstrainedOrCoercedExpr p in
+ {expr with pexp_attributes = propLocAttr::expr.pexp_attributes}
+ in
+ Some (uncurried, label, expr)
+ | Colon -> (* ~label: type — punned with a constraint *)
+ Parser.next p;
+ let typ = parseTypExpr p in
+ let loc = mkLoc startPos p.prevEndPos in
+ let expr = Ast_helper.Exp.constraint_ ~attrs:[propLocAttr] ~loc identExpr typ in
+ Some (uncurried, Labelled ident, expr)
+ | _ -> (* plain punning: ~label *)
+ Some (uncurried, Labelled ident, identExpr)
+ end
+ | t ->
+ Parser.err p (Diagnostics.lident t);
+ Some (uncurried, Nolabel, Recover.defaultExpr ())
+ end
+ | _ -> Some (uncurried, Nolabel, parseConstrainedOrCoercedExpr p)
+
+ and parseCallExpr p funExpr = (* parses (args) after funExpr; groups args into curried/uncurried runs and folds them into nested applications *)
+ Parser.expect Lparen p;
+ let startPos = p.Parser.startPos in
+ Parser.leaveBreadcrumb p Grammar.ExprCall;
+ let args =
+ parseCommaDelimitedRegion
+ ~grammar:Grammar.ArgumentList
+ ~closing:Rparen
+ ~f:parseArgument p
+ in
+ Parser.expect Rparen p;
+ let args = match args with
+ | [] ->
+ let loc = mkLoc startPos p.prevEndPos in
+ (* No args -> unit sugar: `foo()` *)
+ [ false,
+ Asttypes.Nolabel,
+ Ast_helper.Exp.construct
+ ~loc (Location.mkloc (Longident.Lident "()") loc) None
+ ]
+ | args -> args
+ in
+ let loc = {funExpr.pexp_loc with loc_end = p.prevEndPos} in
+ let args = match args with
+ | (u, lbl, expr)::args -> (* split args at each uncurried (dotted) boundary *)
+ let group (grp, acc) (uncurried, lbl, expr) =
+ let (_u, grp) = grp in
+ if uncurried == true then
+ ((true, [lbl, expr]), ((_u, (List.rev grp))::acc))
+ else
+ ((_u, ((lbl, expr)::grp)), acc)
+ in
+ let ((_u, grp), acc) = List.fold_left group((u, [lbl, expr]), []) args in
+ List.rev ((_u, (List.rev grp))::acc)
+ | [] -> []
+ in
+ let apply = List.fold_left (fun callBody group -> (* each group becomes one Pexp_apply; uncurried groups get the uncurry attribute *)
+ let (uncurried, args) = group in
+ let (args, wrap) = processUnderscoreApplication args in
+ let exp = if uncurried then
+ let attrs = [uncurryAttr] in
+ Ast_helper.Exp.apply ~loc ~attrs callBody args
+ else
+ Ast_helper.Exp.apply ~loc callBody args
+ in
+ wrap exp
+ ) funExpr args
+ in
+ Parser.eatBreadcrumb p;
+ apply
+
+ and parseValueOrConstructor p = (* parses a (possibly dotted) path ending in a value ident, `list`, or a constructor with optional same-line args *)
+ let startPos = p.Parser.startPos in
+ let rec aux p acc =
+ match p.Parser.token with
+ | Uident ident ->
+ let endPosLident = p.endPos in
+ Parser.next p;
+ begin match p.Parser.token with
+ | Dot ->
+ Parser.next p;
+ aux p (ident::acc)
+ | Lparen when p.prevEndPos.pos_lnum == p.startPos.pos_lnum -> (* constructor args must be on the same line *)
+ let lparen = p.startPos in
+ let args = parseConstructorArgs p in
+ let rparen = p.prevEndPos in
+ let lident = buildLongident (ident::acc) in
+ let tail = match args with
+ | [] -> None
+ | [{Parsetree.pexp_desc = Pexp_tuple _} as arg] as args ->
+ let loc = mkLoc lparen rparen in
+ if p.mode = ParseForTypeChecker then
+ (* Some(1, 2) for type-checker *)
+ Some arg
+ else
+ (* Some((1, 2)) for printer *)
+ Some (Ast_helper.Exp.tuple ~loc args)
+ | [arg] ->
+ Some arg
+ | args ->
+ let loc = mkLoc lparen rparen in
+ Some (Ast_helper.Exp.tuple ~loc args)
+ in
+ let loc = mkLoc startPos p.prevEndPos in
+ let identLoc = mkLoc startPos endPosLident in
+ Ast_helper.Exp.construct ~loc (Location.mkloc lident identLoc) tail
+ | _ -> (* bare constructor, no args *)
+ let loc = mkLoc startPos p.prevEndPos in
+ let lident = buildLongident (ident::acc) in
+ Ast_helper.Exp.construct ~loc (Location.mkloc lident loc) None
+ end
+ | Lident ident -> (* path terminates in a value identifier *)
+ Parser.next p;
+ let loc = mkLoc startPos p.prevEndPos in
+ let lident = buildLongident (ident::acc) in
+ Ast_helper.Exp.ident ~loc (Location.mkloc lident loc)
+ | List -> (* `list` keyword used as an identifier *)
+ Parser.next p;
+ let loc = mkLoc startPos p.prevEndPos in
+ let lident = buildLongident ("list"::acc) in
+ Ast_helper.Exp.ident ~loc (Location.mkloc lident loc)
+ | token ->
+ Parser.next p;
+ Parser.err p (Diagnostics.unexpected token p.breadcrumbs);
+ Recover.defaultExpr()
+ in
+ aux p []
+
+ (* Parses a polymorphic variant expression: #ident or #ident(args).
+ * Argument handling mirrors parseValueOrConstructor above. *)
+ and parsePolyVariantExpr p =
+ let startPos = p.startPos in
+ let (ident, _loc) = parseHashIdent ~startPos p in
+ begin match p.Parser.token with
+ (* `(` only counts as arguments when on the same line as the tag *)
+ | Lparen when p.prevEndPos.pos_lnum == p.startPos.pos_lnum ->
+ let lparen = p.startPos in
+ let args = parseConstructorArgs p in
+ let rparen = p.prevEndPos in
+ let loc_paren = mkLoc lparen rparen in
+ let tail = match args with
+ | [] -> None
+ | [{Parsetree.pexp_desc = Pexp_tuple _} as expr ] as args ->
+ if p.mode = ParseForTypeChecker then
+ (* #a(1, 2) for type-checker *)
+ Some expr
+ else
+ (* #a((1, 2)) for printer *)
+ Some (Ast_helper.Exp.tuple ~loc:loc_paren args)
+ | [arg] -> Some arg
+ | args ->
+ (* #a((1, 2)) for printer *)
+ Some (Ast_helper.Exp.tuple ~loc:loc_paren args)
+ in
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Exp.variant ~loc ident tail
+ | _ ->
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Exp.variant ~loc ident None
+ end
+
+ (* Parses `(expr, expr, ...)` after a constructor/variant tag.
+ * An empty `()` yields a single unit construct so the constructor
+ * still receives one argument. *)
+ and parseConstructorArgs p =
+ let lparen = p.Parser.startPos in
+ Parser.expect Lparen p;
+ let args =
+ parseCommaDelimitedRegion
+ ~grammar:Grammar.ExprList ~f:parseConstrainedExprRegion ~closing:Rparen p
+ in
+ Parser.expect Rparen p;
+ match args with
+ | [] ->
+ let loc = mkLoc lparen p.prevEndPos in
+ [Ast_helper.Exp.construct
+ ~loc (Location.mkloc (Longident.Lident "()") loc) None]
+ | args -> args
+
+ (* Parses the remainder of a tuple expression; `first` was already
+ * consumed by the caller and becomes the head of the tuple. *)
+ and parseTupleExpr ~first ~startPos p =
+ let exprs =
+ parseCommaDelimitedRegion
+ p ~grammar:Grammar.ExprList ~closing:Rparen ~f:parseConstrainedExprRegion
+ in
+ Parser.expect Rparen p;
+ Ast_helper.Exp.tuple ~loc:(mkLoc startPos p.prevEndPos) (first::exprs)
+
+ (* Region parser for list elements: returns (isSpread, expr), where
+ * isSpread is true for `...expr`. None ends the region. *)
+ and parseSpreadExprRegion p =
+ match p.Parser.token with
+ | DotDotDot ->
+ Parser.next p;
+ let expr = parseConstrainedOrCoercedExpr p in
+ Some (true, expr)
+ | token when Grammar.isExprStart token ->
+ Some (false, parseConstrainedOrCoercedExpr p)
+ | _ -> None
+
+ (* Parses `list[...]` contents. The elements come back reversed; a
+ * spread in the LAST source position (head of the reversed list)
+ * becomes the tail of the constructed list, any other spread is an
+ * error (ErrorMessages.listExprSpread). *)
+ and parseListExpr ~startPos p =
+ Parser.expect Lbracket p;
+ let listExprs =
+ parseCommaDelimitedReversedList
+ p ~grammar:Grammar.ListExpr ~closing:Rbracket ~f:parseSpreadExprRegion
+ in
+ Parser.expect Rbracket p;
+ let loc = mkLoc startPos p.prevEndPos in
+ match listExprs with
+ | (true, expr)::exprs ->
+ let exprs = exprs |> List.map snd |> List.rev in
+ makeListExpression loc exprs (Some expr)
+ | exprs ->
+ let exprs =
+ exprs
+ |> List.map (fun (spread, expr) ->
+ if spread then
+ Parser.err p (Diagnostics.message ErrorMessages.listExprSpread);
+ expr)
+ |> List.rev
+ in
+ makeListExpression loc exprs None
+
+ (* Overparse ... and give a nice error message *)
+ (* Region parser for contexts where spread is not allowed (e.g. arrays):
+ * a leading `...` is consumed and reported with `msg`, then the
+ * (optionally type-constrained) expression is parsed normally. *)
+ and parseNonSpreadExp ~msg p =
+ let () = match p.Parser.token with
+ | DotDotDot ->
+ Parser.err p (Diagnostics.message msg);
+ Parser.next p;
+ | _ -> ()
+ in
+ match p.Parser.token with
+ | token when Grammar.isExprStart token ->
+ let expr = parseExpr p in
+ begin match p.Parser.token with
+ | Colon ->
+ Parser.next p;
+ let typ = parseTypExpr p in
+ let loc = mkLoc expr.pexp_loc.loc_start typ.ptyp_loc.loc_end in
+ Some (Ast_helper.Exp.constraint_ ~loc expr typ)
+ | _ -> Some expr
+ end
+ | _ -> None
+
+ (* Parses an array literal `[e1, e2, ...]`; spreads are overparsed and
+ * reported via parseNonSpreadExp. *)
+ and parseArrayExp p =
+ let startPos = p.Parser.startPos in
+ Parser.expect Lbracket p;
+ let exprs =
+ parseCommaDelimitedRegion
+ p
+ ~grammar:Grammar.ExprList
+ ~closing:Rbracket
+ ~f:(parseNonSpreadExp ~msg:ErrorMessages.arrayExprSpread)
+ in
+ Parser.expect Rbracket p;
+ Ast_helper.Exp.array ~loc:(mkLoc startPos p.prevEndPos) exprs
+
+ (* TODO: check attributes in the case of poly type vars,
+ * might be context dependend: parseFieldDeclaration (see ocaml) *)
+ (* Parses a (possibly) polymorphic type: 'a 'b. t, 'a. t, a plain type
+ * var, or — for a single var followed by `=>` — an arrow type.
+ * Falls back to parseTypExpr when there is no leading quote. *)
+ and parsePolyTypeExpr p =
+ let startPos = p.Parser.startPos in
+ match p.Parser.token with
+ | SingleQuote ->
+ let vars = parseTypeVarList p in
+ begin match vars with
+ | _v1::_v2::_ ->
+ (* two or more vars: the explicit-poly dot is mandatory *)
+ Parser.expect Dot p;
+ let typ = parseTypExpr p in
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Typ.poly ~loc vars typ
+ | [var] ->
+ begin match p.Parser.token with
+ | Dot ->
+ Parser.next p;
+ let typ = parseTypExpr p in
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Typ.poly ~loc vars typ
+ | EqualGreater ->
+ (* 'a => t : a plain arrow whose domain is the type var *)
+ Parser.next p;
+ let typ = Ast_helper.Typ.var ~loc:var.loc var.txt in
+ let returnType = parseTypExpr ~alias:false p in
+ let loc = mkLoc typ.Parsetree.ptyp_loc.loc_start p.prevEndPos in
+ Ast_helper.Typ.arrow ~loc Asttypes.Nolabel typ returnType
+ | _ ->
+ Ast_helper.Typ.var ~loc:var.loc var.txt
+ end
+ (* parseTypeVarList returns at least one var after SingleQuote *)
+ | _ -> assert false
+ end
+ | _ ->
+ parseTypExpr p
+
+ (* 'a 'b 'c *)
+ (* Collects consecutive quoted type variables; returns them in source
+ * order. *)
+ and parseTypeVarList p =
+ let rec loop p vars =
+ match p.Parser.token with
+ | SingleQuote ->
+ Parser.next p;
+ let (lident, loc) = parseLident p in
+ let var = Location.mkloc lident loc in
+ loop p (var::vars)
+ | _ ->
+ List.rev vars
+ in
+ loop p []
+
+ (* Collects consecutive lowercase identifiers (no separators); returns
+ * them in source order with their locations. *)
+ and parseLidentList p =
+ let rec loop p ls =
+ match p.Parser.token with
+ | Lident lident ->
+ let loc = mkLoc p.startPos p.endPos in
+ Parser.next p;
+ loop p ((Location.mkloc lident loc)::ls)
+ | _ ->
+ List.rev ls
+ in
+ loop p []
+
+ (* Parses an atomic (non-arrow, non-aliased) type expression:
+ * 'var, _, (t), (t1, t2) tuples, unit `()`, polymorphic variants,
+ * type constructors with args, first-class module types, extensions
+ * `%id`, and `{...}` object types. Unexpected tokens go through
+ * skipTokensAndMaybeRetry for recovery. *)
+ and parseAtomicTypExpr ~attrs p =
+ Parser.leaveBreadcrumb p Grammar.AtomicTypExpr;
+ let startPos = p.Parser.startPos in
+ let typ = match p.Parser.token with
+ | SingleQuote ->
+ Parser.next p;
+ let (ident, loc) = parseLident p in
+ Ast_helper.Typ.var ~loc ~attrs ident
+ | Underscore ->
+ let endPos = p.endPos in
+ Parser.next p;
+ Ast_helper.Typ.any ~loc:(mkLoc startPos endPos) ~attrs ()
+ | Lparen ->
+ Parser.next p;
+ begin match p.Parser.token with
+ | Rparen ->
+ (* `()` is the unit type *)
+ Parser.next p;
+ let loc = mkLoc startPos p.prevEndPos in
+ let unitConstr = Location.mkloc (Longident.Lident "unit") loc in
+ Ast_helper.Typ.constr ~attrs unitConstr []
+ | _ ->
+ let t = parseTypExpr p in
+ begin match p.token with
+ | Comma ->
+ (* (t, ... : a tuple type *)
+ Parser.next p;
+ parseTupleType ~attrs ~first:t ~startPos p
+ | _ ->
+ (* plain parenthesized type: widen loc, merge attributes *)
+ Parser.expect Rparen p;
+ {t with
+ ptyp_loc = mkLoc startPos p.prevEndPos;
+ ptyp_attributes = List.concat [attrs; t.ptyp_attributes]}
+ end
+ end
+ | Lbracket ->
+ parsePolymorphicVariantType ~attrs p
+ | Uident _ | Lident _ | List ->
+ let constr = parseValuePath p in
+ let args = parseTypeConstructorArgs ~constrName:constr p in
+ Ast_helper.Typ.constr ~loc:(mkLoc startPos p.prevEndPos) ~attrs constr args
+ | Module ->
+ Parser.next p;
+ Parser.expect Lparen p;
+ let packageType = parsePackageType ~startPos ~attrs p in
+ Parser.expect Rparen p;
+ {packageType with ptyp_loc = mkLoc startPos p.prevEndPos}
+ | Percent ->
+ let extension = parseExtension p in
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Typ.extension ~attrs ~loc extension
+ | Lbrace ->
+ parseBsObjectType ~attrs p
+ | token ->
+ begin match skipTokensAndMaybeRetry p ~isStartOfGrammar:Grammar.isAtomicTypExprStart with
+ | Some () ->
+ parseAtomicTypExpr ~attrs p
+ | None ->
+ Parser.err ~startPos:p.prevEndPos p (Diagnostics.unexpected token p.breadcrumbs);
+ Recover.defaultType()
+ end
+ in
+ Parser.eatBreadcrumb p;
+ typ
+
+ (* package-type ::=
+ | modtype-path
+ ∣ modtype-path with package-constraint { and package-constraint }
+ *)
+ and parsePackageType ~startPos ~attrs p =
+ let modTypePath = parseModuleLongIdent ~lowercase:true p in
+ begin match p.Parser.token with
+ | With ->
+ Parser.next p;
+ let constraints = parsePackageConstraints p in
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Typ.package ~loc ~attrs modTypePath constraints
+ | _ ->
+ (* no `with`: package type without constraints *)
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Typ.package ~loc ~attrs modTypePath []
+ end
+
+ (* package-constraint { and package-constraint } *)
+ (* First constraint starts with `type`; the rest each start with `and`
+ * and are handled by parsePackageConstraint. *)
+ and parsePackageConstraints p =
+ let first =
+ Parser.expect Typ p;
+ let typeConstr = parseValuePath p in
+ Parser.expect Equal p;
+ let typ = parseTypExpr p in
+ (typeConstr, typ)
+ in
+ let rest = parseRegion
+ ~grammar:Grammar.PackageConstraint
+ ~f:parsePackageConstraint
+ p
+ in
+ first::rest
+
+ (* and type typeconstr = typexpr *)
+ (* Region parser: None when the next token is not `and`. *)
+ and parsePackageConstraint p =
+ match p.Parser.token with
+ | And ->
+ Parser.next p;
+ Parser.expect Typ p;
+ let typeConstr = parseValuePath p in
+ Parser.expect Equal p;
+ let typ = parseTypExpr p in
+ Some (typeConstr, typ)
+ | _ -> None
+
+ (* Parses a BuckleScript object type `{"field": t, ...}`. A leading
+ * `..` makes the row open, a leading `.` (or nothing) keeps it
+ * closed. *)
+ and parseBsObjectType ~attrs p =
+ let startPos = p.Parser.startPos in
+ Parser.expect Lbrace p;
+ let closedFlag = match p.token with
+ | DotDot -> Parser.next p; Asttypes.Open
+ | Dot -> Parser.next p; Asttypes.Closed
+ | _ -> Asttypes.Closed
+ in
+ let fields =
+ parseCommaDelimitedRegion
+ ~grammar:Grammar.StringFieldDeclarations
+ ~closing:Rbrace
+ ~f:parseStringFieldDeclaration
+ p
+ in
+ Parser.expect Rbrace p;
+ let loc = mkLoc startPos p.prevEndPos in
+ makeBsObjType ~attrs ~loc ~closed:closedFlag fields
+
+ (* TODO: check associativity in combination with attributes *)
+ (* Wraps `typ` in an alias node when the input continues with
+ * `as 'ident`; otherwise returns `typ` unchanged. *)
+ and parseTypeAlias p typ =
+ match p.Parser.token with
+ | As ->
+ Parser.next p;
+ Parser.expect SingleQuote p;
+ let (ident, _loc) = parseLident p in
+ (* TODO: how do we parse attributes here? *)
+ Ast_helper.Typ.alias ~loc:(mkLoc typ.Parsetree.ptyp_loc.loc_start p.prevEndPos) typ ident
+ | _ -> typ
+
+
+ (* type_parameter ::=
+ * | type_expr
+ * | ~ident: type_expr
+ * | ~ident: type_expr=?
+ *
+ * note:
+ * | attrs ~ident: type_expr -> attrs are on the arrow
+ * | attrs type_expr -> attrs are here part of the type_expr
+ *
+ * uncurried_type_parameter ::=
+ * | . type_parameter
+ *)
+ (* Returns Some (uncurried, attrs, argLabel, typ, startPos) — the tuple
+ * consumed by parseEs6ArrowType's fold — or None when the tokens don't
+ * start a type parameter. *)
+ and parseTypeParameter p =
+ if (
+ p.Parser.token = Token.Tilde ||
+ p.token = Dot ||
+ Grammar.isTypExprStart p.token
+ ) then (
+ let startPos = p.Parser.startPos in
+ let uncurried = Parser.optional p Dot in
+ let attrs = parseAttributes p in
+ match p.Parser.token with
+ | Tilde ->
+ (* labelled parameter: ~name: typ, optionally `=?` for Optional *)
+ Parser.next p;
+ let (name, _loc) = parseLident p in
+ Parser.expect ~grammar:Grammar.TypeExpression Colon p;
+ let typ = parseTypExpr p in
+ begin match p.Parser.token with
+ | Equal ->
+ Parser.next p;
+ Parser.expect Question p;
+ Some (uncurried, attrs, Asttypes.Optional name, typ, startPos)
+ | _ ->
+ Some (uncurried, attrs, Asttypes.Labelled name, typ, startPos)
+ end
+ | Lident _ | List ->
+ let (name, loc) = parseLident p in
+ begin match p.token with
+ | Colon ->
+ (* `name: typ` without `~` — recover with a hint and parse as
+ * a labelled parameter anyway *)
+ let () =
+ let error = Diagnostics.message
+ ("Parameter names start with a `~`, like: ~" ^ name)
+ in
+ Parser.err ~startPos:loc.loc_start ~endPos:loc.loc_end p error
+ in
+ Parser.next p;
+ let typ = parseTypExpr p in
+ begin match p.Parser.token with
+ | Equal ->
+ Parser.next p;
+ Parser.expect Question p;
+ Some (uncurried, attrs, Asttypes.Optional name, typ, startPos)
+ | _ ->
+ Some (uncurried, attrs, Asttypes.Labelled name, typ, startPos)
+ end
+ | _ ->
+ (* plain type constructor used positionally (Nolabel) *)
+ let constr = Location.mkloc (Longident.Lident name) loc in
+ let args = parseTypeConstructorArgs ~constrName:constr p in
+ let typ = Ast_helper.Typ.constr ~loc:(mkLoc startPos p.prevEndPos) ~attrs constr args
+ in
+
+ let typ = parseArrowTypeRest ~es6Arrow:true ~startPos typ p in
+ let typ = parseTypeAlias p typ in
+ Some (uncurried, [], Asttypes.Nolabel, typ, startPos)
+ end
+ | _ ->
+ (* unlabelled parameter; attrs belong to the type itself *)
+ let typ = parseTypExpr p in
+ let typWithAttributes = {typ with ptyp_attributes = List.concat[attrs; typ.ptyp_attributes]} in
+ Some (uncurried, [], Asttypes.Nolabel, typWithAttributes, startPos)
+ ) else
+ None
+
+ (* (int, ~x:string, float) *)
+ (* Parses the parenthesized parameter list of an es6 arrow type; `()`
+ * yields a single unlabelled unit parameter. *)
+ and parseTypeParameters p =
+ let startPos = p.Parser.startPos in
+ Parser.expect Lparen p;
+ match p.Parser.token with
+ | Rparen ->
+ Parser.next p;
+ let loc = mkLoc startPos p.prevEndPos in
+ let unitConstr = Location.mkloc (Longident.Lident "unit") loc in
+ let typ = Ast_helper.Typ.constr unitConstr [] in
+ [(false, [], Asttypes.Nolabel, typ, startPos)]
+ | _ ->
+ let params =
+ parseCommaDelimitedRegion ~grammar:Grammar.TypeParameters ~closing:Rparen ~f:parseTypeParameter p
+ in
+ Parser.expect Rparen p;
+ params
+
+ (* Parses an es6-style arrow type: either a single labelled parameter
+ * `~name: t => u` or a parenthesized parameter list folded right into
+ * nested Ptyp_arrow nodes. Uncurried parameters get uncurryAttr. *)
+ and parseEs6ArrowType ~attrs p =
+ let startPos = p.Parser.startPos in
+ match p.Parser.token with
+ | Tilde ->
+ Parser.next p;
+ let (name, _loc) = parseLident p in
+ Parser.expect ~grammar:Grammar.TypeExpression Colon p;
+ let typ = parseTypExpr ~alias:false ~es6Arrow:false p in
+ let arg = match p.Parser.token with
+ | Equal ->
+ Parser.next p;
+ Parser.expect Question p;
+ Asttypes.Optional name
+ | _ ->
+ Asttypes.Labelled name
+ in
+ Parser.expect EqualGreater p;
+ let returnType = parseTypExpr ~alias:false p in
+ Ast_helper.Typ.arrow ~attrs arg typ returnType
+ | _ ->
+ let parameters = parseTypeParameters p in
+ Parser.expect EqualGreater p;
+ let returnType = parseTypExpr ~alias:false p in
+ let endPos = p.prevEndPos in
+ (* fold_right so the first parameter ends up outermost *)
+ let typ = List.fold_right (fun (uncurried, attrs, argLbl, typ, startPos) t ->
+ let attrs = if uncurried then uncurryAttr::attrs else attrs in
+ Ast_helper.Typ.arrow ~loc:(mkLoc startPos endPos) ~attrs argLbl typ t
+ ) parameters returnType
+ in
+ {typ with
+ ptyp_attributes = List.concat [typ.ptyp_attributes; attrs];
+ ptyp_loc = mkLoc startPos p.prevEndPos}
+
+ (*
+ * typexpr ::=
+ * | 'ident
+ * | _
+ * | (typexpr)
+ * | typexpr => typexpr --> es6 arrow
+ * | (typexpr, typexpr) => typexpr --> es6 arrow
+ * | /typexpr, typexpr, typexpr/ --> tuple
+ * | typeconstr
+ * | typeconstr
+ * | typeconstr
+ * | typexpr as 'ident
+ * | %attr-id --> extension
+ * | %attr-id(payload) --> extension
+ *
+ * typeconstr ::=
+ * | lident
+ * | uident.lident
+ * | uident.uident.lident --> long module path
+ *)
+ and parseTypExpr ?attrs ?(es6Arrow=true) ?(alias=true) p =
+ (* Parser.leaveBreadcrumb p Grammar.TypeExpression; *)
+ let startPos = p.Parser.startPos in
+ let attrs = match attrs with
+ | Some attrs ->
+ attrs
+ | None ->
+ parseAttributes p in
+ let typ = if es6Arrow && isEs6ArrowType p then
+ parseEs6ArrowType ~attrs p
+ else
+ let typ = parseAtomicTypExpr ~attrs p in
+ parseArrowTypeRest ~es6Arrow ~startPos typ p
+ in
+ let typ = if alias then parseTypeAlias p typ else typ in
+ (* Parser.eatBreadcrumb p; *)
+ typ
+
+ (* After an atomic type, parses an optional `=>` continuation into an
+ * arrow. A `->` is an error recovered as `=>`. *)
+ and parseArrowTypeRest ~es6Arrow ~startPos typ p =
+ match p.Parser.token with
+ | (EqualGreater | MinusGreater) as token when es6Arrow == true ->
+ (* error recovery *)
+ if token = MinusGreater then (
+ Parser.expect EqualGreater p;
+ );
+ Parser.next p;
+ let returnType = parseTypExpr ~alias:false p in
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Typ.arrow ~loc Asttypes.Nolabel typ returnType
+ | _ -> typ
+
+ (* Region parser wrapper around parseTypExpr: None ends the region. *)
+ and parseTypExprRegion p =
+ if Grammar.isTypExprStart p.Parser.token then
+ Some (parseTypExpr p)
+ else
+ None
+
+ (* Parses the remainder of a tuple type; `first` was already consumed
+ * by the caller (parseAtomicTypExpr) and heads the tuple. *)
+ and parseTupleType ~attrs ~first ~startPos p =
+ let typexprs =
+ parseCommaDelimitedRegion
+ ~grammar:Grammar.TypExprList
+ ~closing:Rparen
+ ~f:parseTypExprRegion
+ p
+ in
+ Parser.expect Rparen p;
+ let tupleLoc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Typ.tuple ~attrs ~loc:tupleLoc (first::typexprs)
+
+ (* Region parser for one type-constructor argument. A stray `<` is
+ * skipped and the parse retried — presumably recovery for nested
+ * diamonds; TODO confirm against scanner's diamond mode. *)
+ and parseTypeConstructorArgRegion p =
+ if Grammar.isTypExprStart p.Parser.token then
+ Some (parseTypExpr p)
+ else if p.token = LessThan then (
+ Parser.next p;
+ parseTypeConstructorArgRegion p
+ ) else
+ None
+
+ (* Js.Nullable.value<'a> *)
+ (* Parses `<t1, t2>` type arguments. A `(t1, t2)` form is overparsed
+ * and reported with a printed suggestion using angle brackets.
+ * Scanner diamond mode disambiguates `>` from other uses. *)
+ and parseTypeConstructorArgs ~constrName p =
+ let opening = p.Parser.token in
+ let openingStartPos = p.startPos in
+ match opening with
+ | LessThan | Lparen ->
+ Scanner.setDiamondMode p.scanner;
+ Parser.next p;
+ let typeArgs =
+ (* TODO: change Grammar.TypExprList to TypArgList!!! Why did I wrote this? *)
+ parseCommaDelimitedRegion
+ ~grammar:Grammar.TypExprList
+ ~closing:GreaterThan
+ ~f:parseTypeConstructorArgRegion
+ p
+ in
+ let () = match p.token with
+ | Rparen when opening = Token.Lparen ->
+ let typ = Ast_helper.Typ.constr constrName typeArgs in
+ let msg =
+ Doc.breakableGroup ~forceBreak:true (
+ Doc.concat [
+ Doc.text "Type parameters require angle brackets:";
+ Doc.indent (
+ Doc.concat [
+ Doc.line;
+ Printer.printTypExpr typ CommentTable.empty;
+ ]
+ )
+ ]
+ ) |> Doc.toString ~width:80
+ in
+ Parser.err ~startPos:openingStartPos p (Diagnostics.message msg);
+ Parser.next p
+ | _ ->
+ Parser.expect GreaterThan p
+ in
+ Scanner.popMode p.scanner Diamond;
+ typeArgs
+ | _ -> []
+
+ (* string-field-decl ::=
+ * | string: poly-typexpr
+ * | attributes string-field-decl *)
+ (* Region parser for one `"name": typ` object-type field; None ends
+ * the region (any attrs parsed before a non-String token are
+ * dropped — callers handle the attrs-on-first-field case). *)
+ and parseStringFieldDeclaration p =
+ let attrs = parseAttributes p in
+ match p.Parser.token with
+ | String name ->
+ let nameStartPos = p.startPos in
+ let nameEndPos = p.endPos in
+ Parser.next p;
+ let fieldName = Location.mkloc name (mkLoc nameStartPos nameEndPos) in
+ Parser.expect ~grammar:Grammar.TypeExpression Colon p;
+ let typ = parsePolyTypeExpr p in
+ Some(Parsetree.Otag (fieldName, attrs, typ))
+ | _token ->
+ None
+
+ (* field-decl ::=
+ * | [mutable] field-name : poly-typexpr
+ * | attributes field-decl *)
+ (* Non-region variant: always produces a field. A field without `: typ`
+ * is punned — its type defaults to a constructor named after the
+ * field. *)
+ and parseFieldDeclaration p =
+ let startPos = p.Parser.startPos in
+ let attrs = parseAttributes p in
+ let mut = if Parser.optional p Token.Mutable then
+ Asttypes.Mutable
+ else
+ Asttypes.Immutable
+ in
+ let (lident, loc) = match p.token with
+ | List ->
+ (* `list` keyword token used as a field name *)
+ let loc = mkLoc p.startPos p.endPos in
+ Parser.next p;
+ ("list", loc)
+ | _ -> parseLident p
+ in
+ let name = Location.mkloc lident loc in
+ let typ = match p.Parser.token with
+ | Colon ->
+ Parser.next p;
+ parsePolyTypeExpr p
+ | _ ->
+ Ast_helper.Typ.constr ~loc:name.loc {name with txt = Lident name.txt} []
+ in
+ let loc = mkLoc startPos typ.ptyp_loc.loc_end in
+ Ast_helper.Type.field ~attrs ~loc ~mut name typ
+
+
+ (* Region-parser variant of parseFieldDeclaration: None when the next
+ * token cannot start a field name, Some field otherwise (same punning
+ * behavior for a missing `: typ`). *)
+ and parseFieldDeclarationRegion p =
+ let startPos = p.Parser.startPos in
+ let attrs = parseAttributes p in
+ let mut = if Parser.optional p Token.Mutable then
+ Asttypes.Mutable
+ else
+ Asttypes.Immutable
+ in
+ match p.token with
+ | Lident _ | List ->
+ let (lident, loc) = match p.token with
+ | List ->
+ let loc = mkLoc p.startPos p.endPos in
+ Parser.next p;
+ ("list", loc)
+ | _ -> parseLident p
+ in
+ let name = Location.mkloc lident loc in
+ let typ = match p.Parser.token with
+ | Colon ->
+ Parser.next p;
+ parsePolyTypeExpr p
+ | _ ->
+ Ast_helper.Typ.constr ~loc:name.loc {name with txt = Lident name.txt} []
+ in
+ let loc = mkLoc startPos typ.ptyp_loc.loc_end in
+ Some(Ast_helper.Type.field ~attrs ~loc ~mut name typ)
+ | _ ->
+ None
+
+ (* record-decl ::=
+ * | { field-decl }
+ * | { field-decl, field-decl }
+ * | { field-decl, field-decl, field-decl, }
+ *)
+ (* Returns the list of label declarations between braces. *)
+ and parseRecordDeclaration p =
+ Parser.leaveBreadcrumb p Grammar.RecordDecl;
+ Parser.expect Lbrace p;
+ let rows =
+ parseCommaDelimitedRegion
+ ~grammar:Grammar.RecordDecl
+ ~closing:Rbrace
+ ~f:parseFieldDeclarationRegion
+ p
+ in
+ Parser.expect Rbrace p;
+ Parser.eatBreadcrumb p;
+ rows
+
+ (* constr-args ::=
+ * | (typexpr)
+ * | (typexpr, typexpr)
+ * | (typexpr, typexpr, typexpr,)
+ * | (record-decl)
+ *
+ * TODO: should we overparse inline-records in every position?
+ * Give a good error message afterwards?
+ *)
+ (* Parses constructor arguments plus an optional `: typ` GADT result.
+ * A `{` after `(` is disambiguated into either a bs-object type
+ * (leading `.`/`..` or a String field) used as a tuple argument, or an
+ * inline record (Pcstr_record). Attributes parsed before the first
+ * field are re-attached to that field by hand. *)
+ and parseConstrDeclArgs p =
+ let constrArgs = match p.Parser.token with
+ | Lparen ->
+ Parser.next p;
+ (* TODO: this could use some cleanup/stratification *)
+ begin match p.Parser.token with
+ | Lbrace ->
+ let lbrace = p.startPos in
+ Parser.next p;
+ let startPos = p.Parser.startPos in
+ begin match p.Parser.token with
+ | DotDot | Dot ->
+ (* explicit open/closed marker: bs-object type argument *)
+ let closedFlag = match p.token with
+ | DotDot -> Parser.next p; Asttypes.Open
+ | Dot -> Parser.next p; Asttypes.Closed
+ | _ -> Asttypes.Closed
+ in
+ let fields =
+ parseCommaDelimitedRegion
+ ~grammar:Grammar.StringFieldDeclarations
+ ~closing:Rbrace
+ ~f:parseStringFieldDeclaration
+ p
+ in
+ Parser.expect Rbrace p;
+ let loc = mkLoc startPos p.prevEndPos in
+ let typ = makeBsObjType ~attrs:[] ~loc ~closed:closedFlag fields in
+ Parser.optional p Comma |> ignore;
+ let moreArgs =
+ parseCommaDelimitedRegion
+ ~grammar:Grammar.TypExprList
+ ~closing:Rparen
+ ~f:parseTypExprRegion
+ p
+ in
+ Parser.expect Rparen p;
+ Parsetree.Pcstr_tuple (typ::moreArgs)
+ | _ ->
+ let attrs = parseAttributes p in
+ begin match p.Parser.token with
+ | String _ ->
+ (* string key: bs-object type argument (closed row) *)
+ let closedFlag = Asttypes.Closed in
+ let fields = match attrs with
+ | [] ->
+ parseCommaDelimitedRegion
+ ~grammar:Grammar.StringFieldDeclarations
+ ~closing:Rbrace
+ ~f:parseStringFieldDeclaration
+ p
+ | attrs ->
+ (* attrs were consumed above; splice them back onto the
+ * first field, then parse the rest normally *)
+ let first =
+ Parser.leaveBreadcrumb p Grammar.StringFieldDeclarations;
+ let field = match parseStringFieldDeclaration p with
+ | Some field -> field
+ | None -> assert false
+ in
+ (* parse comma after first *)
+ let () = match p.Parser.token with
+ | Rbrace | Eof -> ()
+ | Comma -> Parser.next p
+ | _ -> Parser.expect Comma p
+ in
+ Parser.eatBreadcrumb p;
+ begin match field with
+ | Parsetree.Otag (label, _, ct) -> Parsetree.Otag (label, attrs, ct)
+ | Oinherit ct -> Oinherit ct
+ end
+ in
+ first::(
+ parseCommaDelimitedRegion
+ ~grammar:Grammar.StringFieldDeclarations
+ ~closing:Rbrace
+ ~f:parseStringFieldDeclaration
+ p
+ ) in
+ Parser.expect Rbrace p;
+ let loc = mkLoc startPos p.prevEndPos in
+ let typ = makeBsObjType ~attrs:[] ~loc ~closed:closedFlag fields in
+ Parser.optional p Comma |> ignore;
+ let moreArgs =
+ parseCommaDelimitedRegion
+ ~grammar:Grammar.TypExprList
+ ~closing:Rparen
+ ~f:parseTypExprRegion p
+ in
+ Parser.expect Rparen p;
+ Parsetree.Pcstr_tuple (typ::moreArgs)
+ | _ ->
+ (* inline record declaration *)
+ let fields = match attrs with
+ | [] ->
+ parseCommaDelimitedRegion
+ ~grammar:Grammar.FieldDeclarations
+ ~closing:Rbrace
+ ~f:parseFieldDeclarationRegion
+ p
+ | attrs ->
+ (* re-attach pre-parsed attrs to the first field *)
+ let first =
+ let field = parseFieldDeclaration p in
+ Parser.expect Comma p;
+ {field with Parsetree.pld_attributes = attrs}
+ in
+ first::(
+ parseCommaDelimitedRegion
+ ~grammar:Grammar.FieldDeclarations
+ ~closing:Rbrace
+ ~f:parseFieldDeclarationRegion
+ p
+ )
+ in
+ let () = match fields with
+ | [] -> Parser.err ~startPos:lbrace p (
+ Diagnostics.message "An inline record declaration needs at least one field"
+ )
+ | _ -> ()
+ in
+ Parser.expect Rbrace p;
+ Parser.optional p Comma |> ignore;
+ Parser.expect Rparen p;
+ Parsetree.Pcstr_record fields
+ end
+ end
+ | _ ->
+ (* ordinary tuple of type arguments *)
+ let args =
+ parseCommaDelimitedRegion
+ ~grammar:Grammar.TypExprList
+ ~closing:Rparen
+ ~f:parseTypExprRegion
+ p
+ in
+ Parser.expect Rparen p;
+ Parsetree.Pcstr_tuple args
+ end
+ | _ -> Pcstr_tuple []
+ in
+ (* optional GADT-style result type: `: typ` *)
+ let res = match p.Parser.token with
+ | Colon ->
+ Parser.next p;
+ Some (parseTypExpr p)
+ | _ -> None
+ in
+ (constrArgs, res)
+
+ (* constr-decl ::=
+ * | constr-name
+ * | attrs constr-name
+ * | constr-name const-args
+ * | attrs constr-name const-args *)
+ (* Region parser: a leading `|` starts another constructor decl. *)
+ and parseTypeConstructorDeclarationWithBar p =
+ match p.Parser.token with
+ | Bar ->
+ let startPos = p.Parser.startPos in
+ Parser.next p;
+ Some (parseTypeConstructorDeclaration ~startPos p)
+ | _ -> None
+
+ (* Parses one variant constructor: attrs, Uident name, args and
+ * optional GADT result. A non-Uident is reported and recovered with a
+ * placeholder `_` constructor. *)
+ and parseTypeConstructorDeclaration ~startPos p =
+ Parser.leaveBreadcrumb p Grammar.ConstructorDeclaration;
+ let attrs = parseAttributes p in
+ match p.Parser.token with
+ | Uident uident ->
+ let uidentLoc = mkLoc p.startPos p.endPos in
+ Parser.next p;
+ let (args, res) = parseConstrDeclArgs p in
+ Parser.eatBreadcrumb p;
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Type.constructor ~loc ~attrs ?res ~args (Location.mkloc uident uidentLoc)
+ | t ->
+ Parser.err p (Diagnostics.uident t);
+ Ast_helper.Type.constructor (Location.mknoloc "_")
+
+ (* [|] constr-decl { | constr-decl } *)
+ (* The caller may hand in an already-parsed first declaration (see
+ * parseTypeEquationOrConstrDecl); otherwise a leading `|` is
+ * optional. *)
+ and parseTypeConstructorDeclarations ?first p =
+ let firstConstrDecl = match first with
+ | None ->
+ let startPos = p.Parser.startPos in
+ ignore (Parser.optional p Token.Bar);
+ parseTypeConstructorDeclaration ~startPos p
+ | Some firstConstrDecl ->
+ firstConstrDecl
+ in
+ firstConstrDecl::(
+ parseRegion
+ ~grammar:Grammar.ConstructorDeclaration
+ ~f:parseTypeConstructorDeclarationWithBar
+ p
+ )
+
+ (*
+ * type-representation ::=
+ * ∣ = [ | ] constr-decl { | constr-decl }
+ * ∣ = private [ | ] constr-decl { | constr-decl }
+ * | = |
+ * ∣ = private |
+ * ∣ = record-decl
+ * ∣ = private record-decl
+ * | = ..
+ *)
+ and parseTypeRepresentation p =
+ Parser.leaveBreadcrumb p Grammar.TypeRepresentation;
+ (* = consumed *)
+ let privateFlag =
+ if Parser.optional p Token.Private
+ then Asttypes.Private
+ else Asttypes.Public
+ in
+ let kind = match p.Parser.token with
+ | Bar | Uident _ ->
+ Parsetree.Ptype_variant (parseTypeConstructorDeclarations p)
+ | Lbrace ->
+ Parsetree.Ptype_record (parseRecordDeclaration p)
+ | DotDot ->
+ Parser.next p;
+ Ptype_open
+ | token ->
+ Parser.err p (Diagnostics.unexpected token p.breadcrumbs);
+ (* TODO: I have no idea if this is even remotely a good idea *)
+ Parsetree.Ptype_variant []
+ in
+ Parser.eatBreadcrumb p;
+ (privateFlag, kind)
+
+ (* type-param ::=
+ * | variance 'lident
+ * | variance _
+ *
+ * variance ::=
+ * | +
+ * | -
+ * | (* empty *)
+ *)
+ (* Region parser for one declaration-site type parameter; returns the
+ * type together with its variance. *)
+ and parseTypeParam p =
+ let variance = match p.Parser.token with
+ | Plus -> Parser.next p; Asttypes.Covariant
+ | Minus -> Parser.next p; Contravariant
+ | _ -> Invariant
+ in
+ match p.Parser.token with
+ | SingleQuote ->
+ Parser.next p;
+ let (ident, loc) = parseLident p in
+ Some (Ast_helper.Typ.var ~loc ident, variance)
+ | Underscore ->
+ let loc = mkLoc p.startPos p.endPos in
+ Parser.next p;
+ Some (Ast_helper.Typ.any ~loc (), variance)
+ (* TODO: should we try parsing lident as 'ident ? *)
+ | _token ->
+ None
+
+ (* type-params ::=
+ * |
+ * ∣
+ * ∣
+ * ∣
+ *
+ * TODO: when we have pretty-printer show an error
+ * with the actual code corrected. *)
+ (* Parses `<...>` declaration-site type params; a `( ... )` form is
+ * overparsed and reported with an angle-bracket suggestion printed
+ * from `parent` and the parsed params. Same-line guard avoids
+ * swallowing an unrelated `<`/`(` on the next line. *)
+ and parseTypeParams ~parent p =
+ let opening = p.Parser.token in
+ match opening with
+ | LessThan | Lparen when p.startPos.pos_lnum == p.prevEndPos.pos_lnum ->
+ Scanner.setDiamondMode p.scanner;
+ let openingStartPos = p.startPos in
+ Parser.leaveBreadcrumb p Grammar.TypeParams;
+ Parser.next p;
+ let params =
+ parseCommaDelimitedRegion
+ ~grammar:Grammar.TypeParams
+ ~closing:GreaterThan
+ ~f:parseTypeParam
+ p
+ in
+ let () = match p.token with
+ | Rparen when opening = Token.Lparen ->
+ let msg =
+ Doc.breakableGroup ~forceBreak:true (
+ Doc.concat [
+ Doc.text "Type parameters require angle brackets:";
+ Doc.indent (
+ Doc.concat [
+ Doc.line;
+ Doc.concat [
+ Printer.printLongident parent.Location.txt;
+ Printer.printTypeParams params CommentTable.empty;
+ ]
+ ]
+ )
+ ]
+ ) |> Doc.toString ~width:80
+ in
+ Parser.err ~startPos:openingStartPos p (Diagnostics.message msg);
+ Parser.next p
+ | _ ->
+ Parser.expect GreaterThan p
+ in
+ Scanner.popMode p.scanner Diamond;
+ Parser.eatBreadcrumb p;
+ params
+ | _ -> []
+
+ (* type-constraint ::= constraint ' ident = typexpr *)
+ (* Region parser for one `constraint 'a = typ` clause; a missing
+ * lowercase ident after the quote is reported and recovered with an
+ * `_` type var. *)
+ and parseTypeConstraint p =
+ let startPos = p.Parser.startPos in
+ match p.Parser.token with
+ | Token.Constraint ->
+ Parser.next p;
+ Parser.expect SingleQuote p;
+ begin match p.Parser.token with
+ | Lident ident ->
+ let identLoc = mkLoc startPos p.endPos in
+ Parser.next p;
+ Parser.expect Equal p;
+ let typ = parseTypExpr p in
+ let loc = mkLoc startPos p.prevEndPos in
+ Some (Ast_helper.Typ.var ~loc:identLoc ident, typ, loc)
+ | t ->
+ Parser.err p (Diagnostics.lident t);
+ let loc = mkLoc startPos p.prevEndPos in
+ Some (Ast_helper.Typ.any (), parseTypExpr p, loc)
+ end
+ | _ -> None
+
+ (* type-constraints ::=
+ * | (* empty *)
+ * | type-constraint
+ * | type-constraint type-constraint
+ * | type-constraint type-constraint type-constraint (* 0 or more *)
+ *)
+ (* Zero or more `constraint` clauses. *)
+ and parseTypeConstraints p =
+ parseRegion
+ ~grammar:Grammar.TypeConstraint
+ ~f:parseTypeConstraint
+ p
+
+ (* After `type t =` with an Uident next, disambiguates between a type
+ * equation (module-qualified path, possibly with args / `=` repr /
+ * `=>` arrow) and a variant whose first constructor is that Uident.
+ * Returns (manifest, privacy, kind). *)
+ and parseTypeEquationOrConstrDecl p =
+ let uidentStartPos = p.Parser.startPos in
+ match p.Parser.token with
+ | Uident uident ->
+ Parser.next p;
+ begin match p.Parser.token with
+ | Dot ->
+ (* qualified path: a type equation, e.g. `type t = Mod.s` *)
+ Parser.next p;
+ let typeConstr =
+ parseValuePathTail p uidentStartPos (Longident.Lident uident)
+ in
+ let loc = mkLoc uidentStartPos p.prevEndPos in
+ let typ = parseTypeAlias p (
+ Ast_helper.Typ.constr ~loc typeConstr (parseTypeConstructorArgs ~constrName:typeConstr p)
+ ) in
+ begin match p.token with
+ | Equal ->
+ (* `type t = Mod.s = <repr>` re-export *)
+ Parser.next p;
+ let (priv, kind) = parseTypeRepresentation p in
+ (Some typ, priv, kind)
+ | EqualGreater ->
+ (* the equation continues as an arrow type *)
+ Parser.next p;
+ let returnType = parseTypExpr ~alias:false p in
+ let loc = mkLoc uidentStartPos p.prevEndPos in
+ let arrowType = Ast_helper.Typ.arrow ~loc Asttypes.Nolabel typ returnType in
+ let typ = parseTypeAlias p arrowType in
+ (Some typ, Asttypes.Public, Parsetree.Ptype_abstract)
+ | _ -> (Some typ, Asttypes.Public, Parsetree.Ptype_abstract)
+ end
+ | _ ->
+ (* bare Uident: first constructor of a variant declaration *)
+ let uidentEndPos = p.endPos in
+ let (args, res) = parseConstrDeclArgs p in
+ let first = Some (
+ let uidentLoc = mkLoc uidentStartPos uidentEndPos in
+ Ast_helper.Type.constructor
+ ~loc:(mkLoc uidentStartPos p.prevEndPos)
+ ?res
+ ~args
+ (Location.mkloc uident uidentLoc)
+ ) in
+ (None, Asttypes.Public, Parsetree.Ptype_variant (parseTypeConstructorDeclarations p ?first))
+ end
+ | t ->
+ Parser.err p (Diagnostics.uident t);
+ (* TODO: is this a good idea? *)
+ (None, Asttypes.Public, Parsetree.Ptype_abstract)
+
+ (* After `type t = {`, disambiguates between a bs-object type used as
+ * a manifest (leading `.`/`..` or a String field) and a record
+ * declaration. Returns (manifest, privacy, kind). Mirrors the `{`
+ * branch of parseConstrDeclArgs. *)
+ and parseRecordOrBsObjectDecl p =
+ let startPos = p.Parser.startPos in
+ Parser.expect Lbrace p;
+ match p.Parser.token with
+ | DotDot | Dot ->
+ (* explicit open/closed marker: bs-object manifest *)
+ let closedFlag = match p.token with
+ | DotDot -> Parser.next p; Asttypes.Open
+ | Dot -> Parser.next p; Asttypes.Closed
+ | _ -> Asttypes.Closed
+ in
+ let fields =
+ parseCommaDelimitedRegion
+ ~grammar:Grammar.StringFieldDeclarations
+ ~closing:Rbrace
+ ~f:parseStringFieldDeclaration
+ p
+ in
+ Parser.expect Rbrace p;
+ let loc = mkLoc startPos p.prevEndPos in
+ let typ =
+ makeBsObjType ~attrs:[] ~loc ~closed:closedFlag fields
+ |> parseTypeAlias p
+ in
+ let typ = parseArrowTypeRest ~es6Arrow:true ~startPos typ p in
+ (Some typ, Asttypes.Public, Parsetree.Ptype_abstract)
+ | _ ->
+ let attrs = parseAttributes p in
+ begin match p.Parser.token with
+ | String _ ->
+ (* string key: bs-object manifest (closed row) *)
+ let closedFlag = Asttypes.Closed in
+ let fields = match attrs with
+ | [] ->
+ parseCommaDelimitedRegion
+ ~grammar:Grammar.StringFieldDeclarations
+ ~closing:Rbrace
+ ~f:parseStringFieldDeclaration
+ p
+ | attrs ->
+ (* splice pre-parsed attrs back onto the first field *)
+ let first =
+ Parser.leaveBreadcrumb p Grammar.StringFieldDeclarations;
+ let field = match parseStringFieldDeclaration p with
+ | Some field -> field
+ | None -> assert false
+ in
+ (* parse comma after first *)
+ let () = match p.Parser.token with
+ | Rbrace | Eof -> ()
+ | Comma -> Parser.next p
+ | _ -> Parser.expect Comma p
+ in
+ Parser.eatBreadcrumb p;
+ begin match field with
+ | Parsetree.Otag (label, _, ct) -> Parsetree.Otag (label, attrs, ct)
+ | Oinherit ct -> Oinherit ct
+ end
+ in
+ first::(
+ parseCommaDelimitedRegion
+ ~grammar:Grammar.StringFieldDeclarations
+ ~closing:Rbrace
+ ~f:parseStringFieldDeclaration
+ p
+ )
+ in
+ Parser.expect Rbrace p;
+ let loc = mkLoc startPos p.prevEndPos in
+ let typ =
+ makeBsObjType ~attrs:[] ~loc ~closed:closedFlag fields |> parseTypeAlias p
+ in
+ let typ = parseArrowTypeRest ~es6Arrow:true ~startPos typ p in
+ (Some typ, Asttypes.Public, Parsetree.Ptype_abstract)
+ | _ ->
+ (* record declaration *)
+ Parser.leaveBreadcrumb p Grammar.RecordDecl;
+ let fields = match attrs with
+ | [] ->
+ parseCommaDelimitedRegion
+ ~grammar:Grammar.FieldDeclarations
+ ~closing:Rbrace
+ ~f:parseFieldDeclarationRegion
+ p
+ | attr::_ as attrs ->
+ (* re-attach attrs to the first field and extend its loc to
+ * cover the attribute *)
+ let first =
+ let field = parseFieldDeclaration p in
+ Parser.optional p Comma |> ignore;
+ {field with
+ Parsetree.pld_attributes = attrs;
+ pld_loc = {
+ field.Parsetree.pld_loc with loc_start =
+ (attr |> fst).loc.loc_start
+ }
+ }
+ in
+ first::(
+ parseCommaDelimitedRegion
+ ~grammar:Grammar.FieldDeclarations
+ ~closing:Rbrace
+ ~f:parseFieldDeclarationRegion
+ p
+ )
+ in
+ let () = match fields with
+ | [] -> Parser.err ~startPos p (
+ Diagnostics.message "A record needs at least one field"
+ )
+ | _ -> ()
+ in
+ Parser.expect Rbrace p;
+ Parser.eatBreadcrumb p;
+ (None, Asttypes.Public, Parsetree.Ptype_record fields)
+ end
+
+ (* After `type t = private`, parses the manifest/representation and
+ * forces the privacy flag to Private regardless of what the
+ * sub-parser reported. *)
+ and parsePrivateEqOrRepr p =
+ Parser.expect Private p;
+ match p.Parser.token with
+ | Lbrace ->
+ let (manifest, _ ,kind) = parseRecordOrBsObjectDecl p in
+ (manifest, Asttypes.Private, kind)
+ | Uident _ ->
+ let (manifest, _, kind) = parseTypeEquationOrConstrDecl p in
+ (manifest, Asttypes.Private, kind)
+ | Bar | DotDot ->
+ let (_, kind) = parseTypeRepresentation p in
+ (None, Asttypes.Private, kind)
+ | t when Grammar.isTypExprStart t ->
+ (Some (parseTypExpr p), Asttypes.Private, Parsetree.Ptype_abstract)
+ | _ ->
+ let (_, kind) = parseTypeRepresentation p in
+ (None, Asttypes.Private, kind)
+
+ (*
+ polymorphic-variant-type ::=
+ | [ tag-spec-first { | tag-spec } ]
+ | [> [ tag-spec ] { | tag-spec } ]
+ | [< [|] tag-spec-full { | tag-spec-full } [ > { `tag-name }+ ] ]
+
+ tag-spec-first ::= `tag-name [ of typexpr ]
+ | [ typexpr ] | tag-spec
+
+ tag-spec ::= `tag-name [ of typexpr ]
+ | typexpr
+
+ tag-spec-full ::= `tag-name [ of [&] typexpr { & typexpr } ]
+ | typexpr
+ *)
+ and parsePolymorphicVariantType ~attrs p =
+ let startPos = p.Parser.startPos in
+ Parser.expect Lbracket p;
+ match p.token with
+ | GreaterThan ->
+ Parser.next p;
+ let rowFields =
+ begin match p.token with
+ | Rbracket ->
+ []
+ | Bar ->
+ parseTagSpecs p
+ | _ ->
+ let rowField = parseTagSpec p in
+ rowField :: parseTagSpecs p
+ end
+ in
+ let variant =
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Typ.variant ~attrs ~loc rowFields Open None in
+ Parser.expect Rbracket p;
+ variant
+ | LessThan ->
+ Parser.next p;
+ Parser.optional p Bar |> ignore;
+ let rowField = parseTagSpecFull p in
+ let rowFields = parseTagSpecFulls p in
+ let tagNames =
+ if p.token == GreaterThan
+ then begin
+ Parser.next p;
+ let rec loop p = match p.Parser.token with
+ | Rbracket -> []
+ | _ ->
+ let (ident, _loc) = parseHashIdent ~startPos:p.startPos p in
+ ident :: loop p
+ in
+ loop p
+ end
+ else [] in
+ let variant =
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Typ.variant ~attrs ~loc (rowField :: rowFields) Closed (Some tagNames) in
+ Parser.expect Rbracket p;
+ variant
+ | _ ->
+ let rowFields1 = parseTagSpecFirst p in
+ let rowFields2 = parseTagSpecs p in
+ let variant =
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Typ.variant ~attrs ~loc (rowFields1 @ rowFields2) Closed None in
+ Parser.expect Rbracket p;
+ variant
+
+ (* Parses zero or more `| tag-spec-full` repetitions (the `[< …]` form).
+  * Stops — returning the accumulated list — at `]`, `>` or any other token. *)
+ and parseTagSpecFulls p =
+ match p.Parser.token with
+ | Rbracket ->
+ []
+ | GreaterThan ->
+ []
+ | Bar ->
+ Parser.next p;
+ let rowField = parseTagSpecFull p in
+ rowField ::parseTagSpecFulls p
+ | _ ->
+ []
+
+ (* tag-spec-full ::= `#tag [ of [&] typexpr { & typexpr } ]` | typexpr.
+  * A `#`-tag becomes an Rtag (with `&`-conjuncts allowed via ~full:true);
+  * anything else is an inherited type (Rinherit). *)
+ and parseTagSpecFull p =
+ let attrs = parseAttributes p in
+ match p.Parser.token with
+ | Hash ->
+ parsePolymorphicVariantTypeSpecHash ~attrs ~full:true p
+ | _ ->
+ let typ = parseTypExpr ~attrs p in
+ Parsetree.Rinherit typ
+
+ (* Parses zero or more `| tag-spec` repetitions. *)
+ and parseTagSpecs p =
+ match p.Parser.token with
+ | Bar ->
+ Parser.next p;
+ let rowField = parseTagSpec p in
+ rowField :: parseTagSpecs p
+ | _ ->
+ []
+
+ (* tag-spec ::= `#tag [ of typexpr ]` | typexpr (no `&`-conjuncts here). *)
+ and parseTagSpec p =
+ let attrs = parseAttributes p in
+ match p.Parser.token with
+ | Hash ->
+ parsePolymorphicVariantTypeSpecHash ~attrs ~full:false p
+ | _ ->
+ let typ = parseTypExpr ~attrs p in
+ Parsetree.Rinherit typ
+
+ (* Parses the first entry of a plain `[ … ]` variant. A bare typexpr must be
+  * followed by `|` and another tag-spec, so that case returns two fields. *)
+ and parseTagSpecFirst p =
+ let attrs = parseAttributes p in
+ match p.Parser.token with
+ | Bar ->
+ Parser.next p;
+ [parseTagSpec p]
+ | Hash ->
+ [parsePolymorphicVariantTypeSpecHash ~attrs ~full:false p]
+ | _ ->
+ let typ = parseTypExpr ~attrs p in
+ Parser.expect Bar p;
+ [Parsetree.Rinherit typ; parseTagSpec p]
+
+ (* Parses a `#tag` row field, optionally followed by a payload. With
+  * ~full:true, `&`-separated conjunct arguments are accepted (the
+  * tag-spec-full grammar). The boolean passed to Rtag records whether the
+  * tag can also occur as a constant (payload-less) constructor. *)
+ and parsePolymorphicVariantTypeSpecHash ~attrs ~full p : Parsetree.row_field =
+ let startPos = p.Parser.startPos in
+ let (ident, loc) = parseHashIdent ~startPos p in
+ (* collects the remaining `& typexpr` conjuncts (full mode only) *)
+ let rec loop p =
+ match p.Parser.token with
+ | Band when full ->
+ Parser.next p;
+ let rowField = parsePolymorphicVariantTypeArgs p in
+ rowField :: loop p
+ | _ ->
+ []
+ in
+ let firstTuple, tagContainsAConstantEmptyConstructor =
+ match p.Parser.token with
+ | Band when full ->
+ (* `#tag & t` — a leading `&` means the empty constructor is included *)
+ Parser.next p;
+ [parsePolymorphicVariantTypeArgs p], true
+ | Lparen ->
+ [parsePolymorphicVariantTypeArgs p], false
+ | _ ->
+ [], true
+ in
+ let tuples = firstTuple @ loop p in
+ Parsetree.Rtag (
+ Location.mkloc ident loc,
+ attrs,
+ tagContainsAConstantEmptyConstructor,
+ tuples
+ )
+
+ (* Parses a parenthesized, comma-delimited payload for a variant tag and
+  * returns it as a single core_type (a tuple when there are several args). *)
+ and parsePolymorphicVariantTypeArgs p =
+ let startPos = p.Parser.startPos in
+ Parser.expect Lparen p;
+ let args = parseCommaDelimitedRegion
+ ~grammar:Grammar.TypExprList
+ ~closing:Rparen
+ ~f:parseTypExprRegion
+ p
+ in
+ Parser.expect Rparen p;
+ let attrs = [] in
+ let loc = mkLoc startPos p.prevEndPos in
+ match args with
+ | [{ptyp_desc = Ptyp_tuple _} as typ] as types ->
+ (* a single tuple argument: the type checker wants the tuple itself,
+  * while the printer-oriented mode re-wraps it to preserve the parens *)
+ if p.mode = ParseForTypeChecker then
+ typ
+ else
+ Ast_helper.Typ.tuple ~loc ~attrs types
+ | [typ] -> typ
+ | types -> Ast_helper.Typ.tuple ~loc ~attrs types
+
+ (* Parses the optional `= …` part of a type definition: an equation, a
+  * representation (record / object / variant), or both (`= manifest = repr`).
+  * Returns (manifest option, privacy, kind); no `=` at all yields an
+  * abstract public type with no manifest. *)
+ and parseTypeEquationAndRepresentation p =
+ match p.Parser.token with
+ | Equal | Bar as token ->
+ (* recovery: `type t | A` is missing its `=` — expect records the
+  * diagnostic, then parsing continues as if `=` were present *)
+ if token = Bar then Parser.expect Equal p;
+ Parser.next p;
+ begin match p.Parser.token with
+ | Uident _ ->
+ parseTypeEquationOrConstrDecl p
+ | Lbrace ->
+ parseRecordOrBsObjectDecl p
+ | Private ->
+ parsePrivateEqOrRepr p
+ | Bar | DotDot ->
+ let (priv, kind) = parseTypeRepresentation p in
+ (None, priv, kind)
+ | _ ->
+ let manifest = Some (parseTypExpr p) in
+ begin match p.Parser.token with
+ | Equal ->
+ (* `type t = manifest = representation` *)
+ Parser.next p;
+ let (priv, kind) = parseTypeRepresentation p in
+ (manifest, priv, kind)
+ | _ ->
+ (manifest, Public, Parsetree.Ptype_abstract)
+ end
+ end
+ | _ -> (None, Public, Parsetree.Ptype_abstract)
+
+ (* type-definition ::= type [rec] typedef { and typedef }
+ * typedef ::= typeconstr-name [type-params] type-information
+ * type-information ::= [type-equation] [type-representation] { type-constraint }
+ * type-equation ::= = typexpr *)
+ (* Parses one typedef (name, params, equation/representation, constraints)
+  * and builds the Parsetree type declaration. Used for the `and`-continued
+  * definitions; the breadcrumbs are for diagnostics. *)
+ and parseTypeDef ~attrs ~startPos p =
+ Parser.leaveBreadcrumb p Grammar.TypeDef;
+ (* let attrs = match attrs with | Some attrs -> attrs | None -> parseAttributes p in *)
+ Parser.leaveBreadcrumb p Grammar.TypeConstrName;
+ let (name, loc) = parseLident p in
+ let typeConstrName = Location.mkloc name loc in
+ Parser.eatBreadcrumb p;
+ let params =
+ let constrName = Location.mkloc (Longident.Lident name) loc in
+ parseTypeParams ~parent:constrName p in
+ let typeDef =
+ let (manifest, priv, kind) = parseTypeEquationAndRepresentation p in
+ let cstrs = parseTypeConstraints p in
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Type.mk
+ ~loc ~attrs ~priv ~kind ~params ~cstrs ?manifest typeConstrName
+ in
+ Parser.eatBreadcrumb p;
+ typeDef
+
+ (* Parses a type extension: `type t += [private] [|] Constr | Constr …`.
+  * The caller has already consumed the name and params; this starts at `+=`. *)
+ and parseTypeExtension ~params ~attrs ~name p =
+ Parser.expect PlusEqual p;
+ let priv =
+ if Parser.optional p Token.Private
+ then Asttypes.Private
+ else Asttypes.Public
+ in
+ let constrStart = p.Parser.startPos in
+ Parser.optional p Bar |> ignore;
+ let first =
+ (* NOTE(review): the leading Bar is already eaten by the `optional`
+  * above, so this Bar arm looks unreachable unless a second `|`
+  * appears — kept as over-parsing for fault tolerance; confirm. *)
+ let (attrs, name, kind) = match p.Parser.token with
+ | Bar ->
+ Parser.next p;
+ parseConstrDef ~parseAttrs:true p
+ | _ ->
+ parseConstrDef ~parseAttrs:true p
+ in
+ let loc = mkLoc constrStart p.prevEndPos in
+ Ast_helper.Te.constructor ~loc ~attrs name kind
+ in
+ (* remaining `| Constr` cases, accumulated in reverse then re-reversed *)
+ let rec loop p cs =
+ match p.Parser.token with
+ | Bar ->
+ let startPos = p.Parser.startPos in
+ Parser.next p;
+ let (attrs, name, kind) = parseConstrDef ~parseAttrs:true p in
+ let extConstr =
+ Ast_helper.Te.constructor ~attrs ~loc:(mkLoc startPos p.prevEndPos) name kind
+ in
+ loop p (extConstr::cs)
+ | _ ->
+ List.rev cs
+ in
+ let constructors = loop p [first] in
+ Ast_helper.Te.mk ~attrs ~params ~priv name constructors
+
+ (* Parses the first typedef (whose name/params the caller already read) and
+  * any `and …` continuations. An `and export …` continuation gets a genType
+  * attribute prepended, mirroring parseJsExport. *)
+ and parseTypeDefinitions ~attrs ~name ~params ~startPos p =
+ let typeDef =
+ let (manifest, priv, kind) = parseTypeEquationAndRepresentation p in
+ let cstrs = parseTypeConstraints p in
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Type.mk
+ ~loc ~attrs ~priv ~kind ~params ~cstrs ?manifest
+ {name with txt = lidentOfPath name.Location.txt}
+ in
+ let rec loop p defs =
+ let startPos = p.Parser.startPos in
+ let attrs = parseAttributesAndBinding p in
+ match p.Parser.token with
+ | And ->
+ Parser.next p;
+ let attrs = match p.token with
+ | Export ->
+ let exportLoc = mkLoc p.startPos p.endPos in
+ Parser.next p;
+ let genTypeAttr = (Location.mkloc "genType" exportLoc, Parsetree.PStr []) in
+ genTypeAttr::attrs
+ | _ -> attrs
+ in
+ let typeDef = parseTypeDef ~attrs ~startPos p in
+ loop p (typeDef::defs)
+ | _ ->
+ List.rev defs
+ in
+ loop p [typeDef]
+
+ (* TODO: decide if we really want type extensions (eg. type x += Blue)
+ * It adds quite a bit of complexity that can be avoided,
+ * implemented for now. Needed to get a feel for the complexities of
+ * this territory of the grammar *)
+ (* Entry point for `type …`: reads the rec flag, name and params, then
+  * decides between a type extension (`+=`) and ordinary definitions.
+  * Note the computed recFlag is only used in the TypeDef result. *)
+ and parseTypeDefinitionOrExtension ~attrs p =
+ let startPos = p.Parser.startPos in
+ Parser.expect Token.Typ p;
+ let recFlag = match p.token with
+ | Rec -> Parser.next p; Asttypes.Recursive
+ | Lident "nonrec" ->
+ (* `nonrec` is not a token of its own; matched as a plain lident *)
+ Parser.next p;
+ Asttypes.Nonrecursive
+ | _ -> Asttypes.Nonrecursive
+ in
+ let name = parseValuePath p in
+ let params = parseTypeParams ~parent:name p in
+ match p.Parser.token with
+ | PlusEqual ->
+ TypeExt(parseTypeExtension ~params ~attrs ~name p)
+ | _ ->
+ let typeDefs = parseTypeDefinitions ~attrs ~name ~params ~startPos p in
+ TypeDef {recFlag; types = typeDefs}
+
+ (* Region parser for one external primitive: a string literal, or None. *)
+ and parsePrimitive p =
+ match p.Parser.token with
+ | String s -> Parser.next p; Some s
+ | _ -> None
+
+ (* Parses the primitive string(s) after `=` in an external; an empty list
+  * is a reported error but still returned so parsing can continue. *)
+ and parsePrimitives p =
+ match (parseRegion ~grammar:Grammar.Primitive ~f:parsePrimitive p) with
+ | [] ->
+ let msg = "An external definition should have at least one primitive. Example: \"setTimeout\"" in
+ Parser.err p (Diagnostics.message msg);
+ []
+ | primitives -> primitives
+
+ (* external value-name : typexp = external-declaration *)
+ (* Parses an `external` declaration into a value description carrying the
+  * primitive strings (~prim). *)
+ and parseExternalDef ~attrs p =
+ let startPos = p.Parser.startPos in
+ Parser.leaveBreadcrumb p Grammar.External;
+ Parser.expect Token.External p;
+ let (name, loc) = parseLident p in
+ let name = Location.mkloc name loc in
+ Parser.expect ~grammar:(Grammar.TypeExpression) Colon p;
+ let typExpr = parseTypExpr p in
+ Parser.expect Equal p;
+ let prim = parsePrimitives p in
+ let loc = mkLoc startPos p.prevEndPos in
+ let vb = Ast_helper.Val.mk ~loc ~attrs ~prim name typExpr in
+ Parser.eatBreadcrumb p;
+ vb
+
+ (* constr-def ::=
+ * | constr-decl
+ * | constr-name = constr
+ *
+ * constr-decl ::= constr-name constr-args
+ * constr-name ::= uident
+ * constr ::= path-uident *)
+ (* Parses a single extension/exception constructor: either a declaration
+  * (`Name` or `Name(args)`) or a rebind (`Name = Other.Path`). On a bad
+  * name, errors and recovers with the placeholder "_". *)
+ and parseConstrDef ~parseAttrs p =
+ let attrs = if parseAttrs then parseAttributes p else [] in
+ let name = match p.Parser.token with
+ | Uident name ->
+ let loc = mkLoc p.startPos p.endPos in
+ Parser.next p;
+ Location.mkloc name loc
+ | t ->
+ Parser.err p (Diagnostics.uident t);
+ Location.mknoloc "_"
+ in
+ let kind = match p.Parser.token with
+ | Lparen ->
+ let (args, res) = parseConstrDeclArgs p in
+ Parsetree.Pext_decl (args, res)
+ | Equal ->
+ Parser.next p;
+ let longident = parseModuleLongIdent ~lowercase:false p in
+ Parsetree.Pext_rebind longident
+ | _ ->
+ (* bare constructor, no payload *)
+ Parsetree.Pext_decl (Pcstr_tuple [], None)
+ in
+ (attrs, name, kind)
+
+ (*
+ * exception-definition ::=
+ * | exception constr-decl
+ * ∣ exception constr-name = constr
+ *
+ * constr-name ::= uident
+ * constr ::= long_uident *)
+ (* Parses `exception Constr…`. Attributes come from the caller
+  * (~parseAttrs:false), so the inner attrs from parseConstrDef are dropped. *)
+ and parseExceptionDef ~attrs p =
+ let startPos = p.Parser.startPos in
+ Parser.expect Token.Exception p;
+ let (_, name, kind) = parseConstrDef ~parseAttrs:false p in
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Te.constructor ~loc ~attrs name kind
+
+ (* module structure on the file level *)
+ (* The [@@progress] attribute lets the termination checker accept this
+  * recursive region parser: the listed functions advance the token stream. *)
+ and parseImplementation p : Parsetree.structure =
+ parseRegion p ~grammar:Grammar.Implementation ~f:parseStructureItemRegion
+ [@@progress (Parser.next, Parser.expect, Parser.checkProgress)]
+
+ (* Region parser for one structure item. Parses leading attributes, then
+  * dispatches on the keyword. Each branch eats an optional trailing `;`
+  * and stamps the item with a location spanning attrs through the item.
+  * Returns None when the token cannot start a structure item. *)
+ and parseStructureItemRegion p =
+ let startPos = p.Parser.startPos in
+ let attrs = parseAttributes p in
+ match p.Parser.token with
+ | Open ->
+ let openDescription = parseOpenDescription ~attrs p in
+ Parser.optional p Semicolon |> ignore;
+ let loc = mkLoc startPos p.prevEndPos in
+ Some (Ast_helper.Str.open_ ~loc openDescription)
+ | Let ->
+ let (recFlag, letBindings) = parseLetBindings ~attrs p in
+ Parser.optional p Semicolon |> ignore;
+ let loc = mkLoc startPos p.prevEndPos in
+ Some (Ast_helper.Str.value ~loc recFlag letBindings)
+ | Typ ->
+ Parser.beginRegion p;
+ begin match parseTypeDefinitionOrExtension ~attrs p with
+ | TypeDef {recFlag; types} ->
+ Parser.optional p Semicolon |> ignore;
+ let loc = mkLoc startPos p.prevEndPos in
+ Parser.endRegion p;
+ Some (Ast_helper.Str.type_ ~loc recFlag types)
+ | TypeExt(ext) ->
+ Parser.optional p Semicolon |> ignore;
+ let loc = mkLoc startPos p.prevEndPos in
+ Parser.endRegion p;
+ Some (Ast_helper.Str.type_extension ~loc ext)
+ end
+ | External ->
+ let externalDef = parseExternalDef ~attrs p in
+ Parser.optional p Semicolon |> ignore;
+ let loc = mkLoc startPos p.prevEndPos in
+ Some (Ast_helper.Str.primitive ~loc externalDef)
+ | Import ->
+ (* JS FFI sugar: lowered to a regular structure item, then re-located *)
+ let importDescr = parseJsImport ~startPos ~attrs p in
+ Parser.optional p Semicolon |> ignore;
+ let loc = mkLoc startPos p.prevEndPos in
+ let structureItem = JsFfi.toParsetree importDescr in
+ Some {structureItem with pstr_loc = loc}
+ | Exception ->
+ let exceptionDef = parseExceptionDef ~attrs p in
+ Parser.optional p Semicolon |> ignore;
+ let loc = mkLoc startPos p.prevEndPos in
+ Some (Ast_helper.Str.exception_ ~loc exceptionDef)
+ | Include ->
+ let includeStatement = parseIncludeStatement ~attrs p in
+ Parser.optional p Semicolon |> ignore;
+ let loc = mkLoc startPos p.prevEndPos in
+ Some (Ast_helper.Str.include_ ~loc includeStatement)
+ | Export ->
+ let structureItem = parseJsExport ~attrs p in
+ Parser.optional p Semicolon |> ignore;
+ let loc = mkLoc startPos p.prevEndPos in
+ Some {structureItem with pstr_loc = loc}
+ | Module ->
+ let structureItem = parseModuleOrModuleTypeImplOrPackExpr ~attrs p in
+ Parser.optional p Semicolon |> ignore;
+ let loc = mkLoc startPos p.prevEndPos in
+ Some {structureItem with pstr_loc = loc}
+ | AtAt ->
+ (* standalone attribute: `@@attr` *)
+ let attr = parseStandaloneAttribute p in
+ Parser.optional p Semicolon |> ignore;
+ let loc = mkLoc startPos p.prevEndPos in
+ Some (Ast_helper.Str.attribute ~loc attr)
+ | PercentPercent ->
+ let extension = parseExtension ~moduleLanguage:true p in
+ Parser.optional p Semicolon |> ignore;
+ let loc = mkLoc startPos p.prevEndPos in
+ Some (Ast_helper.Str.extension ~attrs ~loc extension)
+ | token when Grammar.isExprStart token ->
+ (* top-level expression; checkProgress guards against a stuck parser *)
+ let prevEndPos = p.Parser.endPos in
+ let exp = parseExpr p in
+ Parser.optional p Semicolon |> ignore;
+ let loc = mkLoc startPos p.prevEndPos in
+ Parser.checkProgress ~prevEndPos ~result:(Ast_helper.Str.eval ~loc ~attrs exp) p
+ | _ -> None
+
+ (* Parses `import …` (JS FFI): either a single default declaration
+  * (`import foo: t` / attribute-prefixed) or a braced spec list, followed
+  * by an optional `from …` scope. *)
+ and parseJsImport ~startPos ~attrs p =
+ Parser.expect Token.Import p;
+ let importSpec = match p.Parser.token with
+ | Token.Lident _ | Token.At ->
+ (* parseJsFfiDeclaration only returns None on a non-Lident token,
+  * which this branch has already ruled out *)
+ let decl = match parseJsFfiDeclaration p with
+ | Some decl -> decl
+ | None -> assert false
+ in
+ JsFfi.Default decl
+ | _ -> JsFfi.Spec(parseJsFfiDeclarations p)
+ in
+ let scope = parseJsFfiScope p in
+ let loc = mkLoc startPos p.prevEndPos in
+ JsFfi.importDescr ~attrs ~importSpec ~scope ~loc
+
+ (* Parses `export type …` / `export let …` by prepending a genType
+  * attribute and delegating. The items are built without ~loc here; the
+  * caller (parseStructureItemRegion) overwrites pstr_loc afterwards. *)
+ and parseJsExport ~attrs p =
+ let exportStart = p.Parser.startPos in
+ Parser.expect Token.Export p;
+ let exportLoc = mkLoc exportStart p.prevEndPos in
+ let genTypeAttr = (Location.mkloc "genType" exportLoc, Parsetree.PStr []) in
+ let attrs = genTypeAttr::attrs in
+ match p.Parser.token with
+ | Typ ->
+ begin match parseTypeDefinitionOrExtension ~attrs p with
+ | TypeDef {recFlag; types} ->
+ Ast_helper.Str.type_ recFlag types
+ | TypeExt(ext) ->
+ Ast_helper.Str.type_extension ext
+ end
+ | (* Let *) _ ->
+ let (recFlag, letBindings) = parseLetBindings ~attrs p in
+ Ast_helper.Str.value recFlag letBindings
+
+ (* Parses the optional `from …` scope of a JS import: a string literal is
+  * a module path, an identifier path is a scope, anything else is global. *)
+ and parseJsFfiScope p =
+ match p.Parser.token with
+ | Token.Lident "from" ->
+ (* `from` is not a keyword — matched as a contextual lident *)
+ Parser.next p;
+ begin match p.token with
+ | String s -> Parser.next p; JsFfi.Module s
+ | Uident _ | Lident _ ->
+ let value = parseIdentPath p in
+ JsFfi.Scope value
+ | _ -> JsFfi.Global
+ end
+ | _ -> JsFfi.Global
+
+ (* Parses the braced, comma-separated list of FFI declarations:
+  * `{ name: t, other as alias: t, … }`. *)
+ and parseJsFfiDeclarations p =
+ Parser.expect Token.Lbrace p;
+ let decls = parseCommaDelimitedRegion
+ ~grammar:Grammar.JsFfiImport
+ ~closing:Rbrace
+ ~f:parseJsFfiDeclaration
+ p
+ in
+ Parser.expect Rbrace p;
+ decls
+
+ (* Region parser for one FFI declaration: `name [as alias] : typexpr`.
+  * Without an `as`, the alias defaults to the name itself. Returns None
+  * when the token is not a lowercase identifier. *)
+ and parseJsFfiDeclaration p =
+ let startPos = p.Parser.startPos in
+ let attrs = parseAttributes p in
+ match p.Parser.token with
+ | Lident _ ->
+ let (ident, _) = parseLident p in
+ let alias = match p.token with
+ | As ->
+ Parser.next p;
+ let (ident, _) = parseLident p in
+ ident
+ | _ ->
+ ident
+ in
+ Parser.expect Token.Colon p;
+ let typ = parseTypExpr p in
+ let loc = mkLoc startPos p.prevEndPos in
+ Some (JsFfi.decl ~loc ~alias ~attrs ~name:ident ~typ)
+ | _ -> None
+
+ (* include-statement ::= include module-expr *)
+ (* Parses `include <module-expr>` into an include declaration. *)
+ and parseIncludeStatement ~attrs p =
+ let startPos = p.Parser.startPos in
+ Parser.expect Token.Include p;
+ let modExpr = parseModuleExpr p in
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Incl.mk ~loc ~attrs modExpr
+
+ (* Parses an atomic module expression: a path, a `{…}` structure, a
+  * parenthesized (possibly constrained or empty) module expr, `unpack(e)`
+  * for first-class modules, or an extension. Errors recover with a
+  * default module expression. *)
+ and parseAtomicModuleExpr p =
+ let startPos = p.Parser.startPos in
+ match p.Parser.token with
+ | Uident _ident ->
+ let longident = parseModuleLongIdent ~lowercase:false p in
+ Ast_helper.Mod.ident ~loc:longident.loc longident
+ | Lbrace ->
+ Parser.next p;
+ let structure = Ast_helper.Mod.structure (
+ parseDelimitedRegion
+ ~grammar:Grammar.Structure
+ ~closing:Rbrace
+ ~f:parseStructureItemRegion
+ p
+ ) in
+ Parser.expect Rbrace p;
+ let endPos = p.prevEndPos in
+ {structure with pmod_loc = mkLoc startPos endPos}
+ | Lparen ->
+ Parser.next p;
+ let modExpr = match p.token with
+ | Rparen ->
+ (* `()` — the empty structure *)
+ Ast_helper.Mod.structure ~loc:(mkLoc startPos p.prevEndPos) []
+ | _ ->
+ parseConstrainedModExpr p
+ in
+ Parser.expect Rparen p;
+ modExpr
+ | Lident "unpack" -> (* TODO: should this be made a keyword?? *)
+ Parser.next p;
+ Parser.expect Lparen p;
+ let expr = parseExpr p in
+ begin match p.Parser.token with
+ | Colon ->
+ (* `unpack(e : package-type)` — wrap the expr in a constraint *)
+ let colonStart = p.Parser.startPos in
+ Parser.next p;
+ let attrs = parseAttributes p in
+ let packageType = parsePackageType ~startPos:colonStart ~attrs p in
+ Parser.expect Rparen p;
+ let loc = mkLoc startPos p.prevEndPos in
+ let constraintExpr = Ast_helper.Exp.constraint_
+ ~loc
+ expr packageType
+ in
+ Ast_helper.Mod.unpack ~loc constraintExpr
+ | _ ->
+ Parser.expect Rparen p;
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Mod.unpack ~loc expr
+ end
+ | Percent ->
+ let extension = parseExtension p in
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Mod.extension ~loc extension
+ | token ->
+ Parser.err p (Diagnostics.unexpected token p.breadcrumbs);
+ Recover.defaultModuleExpr()
+
+ (* Parses an atomic module expr plus any functor applications. A `(` only
+  * counts as an application when it starts on the same line — this keeps a
+  * parenthesized expression on the next line from being mis-parsed. *)
+ and parsePrimaryModExpr p =
+ let startPos = p.Parser.startPos in
+ let modExpr = parseAtomicModuleExpr p in
+ let rec loop p modExpr =
+ match p.Parser.token with
+ | Lparen when p.prevEndPos.pos_lnum == p.startPos.pos_lnum ->
+ loop p (parseModuleApplication p modExpr)
+ | _ -> modExpr
+ in
+ let modExpr = loop p modExpr in
+ {modExpr with pmod_loc = mkLoc startPos p.prevEndPos}
+
+ (*
+ * functor-arg ::=
+ * | uident : modtype
+ * | _ : modtype
+ * | modtype --> "punning" for _ : modtype
+ * | attributes functor-arg
+ *)
+ (* Region parser for one functor argument. Handles `X : modtype`,
+  * `X.Path` and bare `X` (both punning for `_ : modtype`), and
+  * `_ : modtype`. Returns (attrs, arg name, modtype option, startPos),
+  * or None when the token can't start an argument. *)
+ and parseFunctorArg p =
+ let startPos = p.Parser.startPos in
+ let attrs = parseAttributes p in
+ match p.Parser.token with
+ | Uident ident ->
+ Parser.next p;
+ let uidentEndPos = p.prevEndPos in
+ begin match p.Parser.token with
+ | Colon ->
+ Parser.next p;
+ let moduleType = parseModuleType p in
+ let loc = mkLoc startPos uidentEndPos in
+ let argName = Location.mkloc ident loc in
+ Some (attrs, argName, Some moduleType, startPos)
+ | Dot ->
+ (* `X.Path` — the whole path is the module type, arg name is `_` *)
+ Parser.next p;
+ let moduleType =
+ let moduleLongIdent =
+ parseModuleLongIdentTail ~lowercase:false p startPos (Longident.Lident ident) in
+ Ast_helper.Mty.ident ~loc:moduleLongIdent.loc moduleLongIdent
+ in
+ let argName = Location.mknoloc "_" in
+ Some (attrs, argName, Some moduleType, startPos)
+ | _ ->
+ (* bare `X` — punning: treated as `_ : X` *)
+ let loc = mkLoc startPos uidentEndPos in
+ let modIdent = Location.mkloc (Longident.Lident ident) loc in
+ let moduleType = Ast_helper.Mty.ident ~loc modIdent in
+ let argName = Location.mknoloc "_" in
+ Some (attrs, argName, Some moduleType, startPos)
+ end
+ | Underscore ->
+ Parser.next p;
+ let argName = Location.mkloc "_" (mkLoc startPos p.prevEndPos) in
+ Parser.expect Colon p;
+ let moduleType = parseModuleType p in
+ Some (attrs, argName, Some moduleType, startPos)
+ | _ ->
+ None
+
+ (* Parses a parenthesized, comma-separated functor argument list. An empty
+  * `()` becomes the single generative argument named "*" with no modtype. *)
+ and parseFunctorArgs p =
+ let startPos = p.Parser.startPos in
+ Parser.expect Lparen p;
+ let args =
+ parseCommaDelimitedRegion
+ ~grammar:Grammar.FunctorArgs
+ ~closing:Rparen
+ ~f:parseFunctorArg
+ p
+ in
+ Parser.expect Rparen p;
+ match args with
+ | [] ->
+ [[], Location.mkloc "*" (mkLoc startPos p.prevEndPos), None, startPos]
+ | args -> args
+
+ (* Parses `functorArgs [: modtype] => module-expr`, folding the argument
+  * list right-to-left into nested Mod.functor_ nodes. The optional return
+  * type becomes a constraint on the body. *)
+ and parseFunctorModuleExpr p =
+ let startPos = p.Parser.startPos in
+ let args = parseFunctorArgs p in
+ let returnType = match p.Parser.token with
+ | Colon ->
+ Parser.next p;
+ (* ~es6Arrow:false so the `=>` after the type is left for us to eat *)
+ Some (parseModuleType ~es6Arrow:false p)
+ | _ -> None
+ in
+ Parser.expect EqualGreater p;
+ let rhsModuleExpr =
+ let modExpr = parseModuleExpr p in
+ match returnType with
+ | Some modType ->
+ Ast_helper.Mod.constraint_
+ ~loc:(mkLoc modExpr.pmod_loc.loc_start modType.Parsetree.pmty_loc.loc_end)
+ modExpr modType
+ | None -> modExpr
+ in
+ let endPos = p.prevEndPos in
+ let modExpr = List.fold_right (fun (attrs, name, moduleType, startPos) acc ->
+ Ast_helper.Mod.functor_
+ ~loc:(mkLoc startPos endPos)
+ ~attrs
+ name moduleType acc
+ ) args rhsModuleExpr
+ in
+ {modExpr with pmod_loc = mkLoc startPos endPos}
+
+ (* module-expr ::=
+ * | module-path
+ * ∣ { structure-items }
+ * ∣ functorArgs => module-expr
+ * ∣ module-expr(module-expr)
+ * ∣ ( module-expr )
+ * ∣ ( module-expr : module-type )
+ * | extension
+ * | attributes module-expr *)
+ (* Top-level module-expr parser: attributes first, then either a functor
+  * (detected by lookahead) or a primary module expression; the parsed
+  * attributes are appended to the result. *)
+ and parseModuleExpr p =
+ let attrs = parseAttributes p in
+ let modExpr = if isEs6ArrowFunctor p then
+ parseFunctorModuleExpr p
+ else
+ parsePrimaryModExpr p
+ in
+ {modExpr with pmod_attributes = List.concat [modExpr.pmod_attributes; attrs]}
+
+ (* Parses `module-expr [: module-type]`, wrapping in a constraint node
+  * when the annotation is present. *)
+ and parseConstrainedModExpr p =
+ let modExpr = parseModuleExpr p in
+ match p.Parser.token with
+ | Colon ->
+ Parser.next p;
+ let modType = parseModuleType p in
+ let loc = mkLoc modExpr.pmod_loc.loc_start modType.pmty_loc.loc_end in
+ Ast_helper.Mod.constraint_ ~loc modExpr modType
+ | _ -> modExpr
+
+ (* Region wrapper: only attempt the above when the token can start a
+  * module expression. *)
+ and parseConstrainedModExprRegion p =
+ if Grammar.isModExprStart p.Parser.token then
+ Some (parseConstrainedModExpr p)
+ else
+ None
+
+ (* Parses `(arg, arg, …)` applied to modExpr, folding into nested
+  * Mod.apply nodes. An empty `()` applies the empty structure
+  * (generative functor application). *)
+ and parseModuleApplication p modExpr =
+ let startPos = p.Parser.startPos in
+ Parser.expect Lparen p;
+ let args =
+ parseCommaDelimitedRegion
+ ~grammar:Grammar.ModExprList
+ ~closing:Rparen
+ ~f:parseConstrainedModExprRegion
+ p
+ in
+ Parser.expect Rparen p;
+ let args = match args with
+ | [] ->
+ let loc = mkLoc startPos p.prevEndPos in
+ [Ast_helper.Mod.structure ~loc []]
+ | args -> args
+ in
+ List.fold_left (fun modExpr arg ->
+ Ast_helper.Mod.apply
+ ~loc:(mkLoc modExpr.Parsetree.pmod_loc.loc_start arg.Parsetree.pmod_loc.loc_end)
+ modExpr arg
+ ) modExpr args
+
+ (* Dispatches after `module`: `module type …` is a module type impl,
+  * `module (…)` is a first-class module expression, otherwise a (possibly
+  * recursive) module binding. *)
+ and parseModuleOrModuleTypeImplOrPackExpr ~attrs p =
+ let startPos = p.Parser.startPos in
+ Parser.expect Module p;
+ match p.Parser.token with
+ | Typ -> parseModuleTypeImpl ~attrs startPos p
+ | Lparen ->
+ let expr = parseFirstClassModuleExpr ~startPos p in
+ Ast_helper.Str.eval ~attrs expr
+ | _ -> parseMaybeRecModuleBinding ~attrs ~startPos p
+
+ (* Parses `module type Name = module-type` into a modtype declaration.
+  * `list` and lowercase idents are tolerated as names (with the usual
+  * recovery to "_" on anything else). *)
+ and parseModuleTypeImpl ~attrs startPos p =
+ Parser.expect Typ p;
+ let nameStart = p.Parser.startPos in
+ let name = match p.Parser.token with
+ | List ->
+ (* `list` is its own token; accept it as the name "list" *)
+ Parser.next p;
+ let loc = mkLoc nameStart p.prevEndPos in
+ Location.mkloc "list" loc
+ | Lident ident ->
+ Parser.next p;
+ let loc = mkLoc nameStart p.prevEndPos in
+ Location.mkloc ident loc
+ | Uident ident ->
+ Parser.next p;
+ let loc = mkLoc nameStart p.prevEndPos in
+ Location.mkloc ident loc
+ | t ->
+ Parser.err p (Diagnostics.uident t);
+ Location.mknoloc "_"
+ in
+ Parser.expect Equal p;
+ let moduleType = parseModuleType p in
+ let moduleTypeDeclaration =
+ Ast_helper.Mtd.mk
+ ~attrs
+ ~loc:(mkLoc nameStart p.prevEndPos)
+ ~typ:moduleType
+ name
+ in
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Str.modtype ~loc moduleTypeDeclaration
+
+ (* definition ::=
+ ∣ module rec module-name : module-type = module-expr { and module-name
+ : module-type = module-expr } *)
+ (* After `module`: a `rec` keyword switches to the mutually recursive
+  * binding group; otherwise parse a single binding. *)
+ and parseMaybeRecModuleBinding ~attrs ~startPos p =
+ match p.Parser.token with
+ | Token.Rec ->
+ Parser.next p;
+ Ast_helper.Str.rec_module (parseModuleBindings ~startPos ~attrs p)
+ | _ ->
+ Ast_helper.Str.module_ (parseModuleBinding ~attrs ~startPos:p.Parser.startPos p)
+
+ (* Parses one `Name [: modtype] = module-expr` binding; recovers with the
+  * name "_" when the token is not an uppercase ident. *)
+ and parseModuleBinding ~attrs ~startPos p =
+ let name = match p.Parser.token with
+ | Uident ident ->
+ let startPos = p.Parser.startPos in
+ Parser.next p;
+ let loc = mkLoc startPos p.prevEndPos in
+ Location.mkloc ident loc
+ | t ->
+ Parser.err p (Diagnostics.uident t);
+ Location.mknoloc "_"
+ in
+ let body = parseModuleBindingBody p in
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Mb.mk ~attrs ~loc name body
+
+ (* Parses `[: modtype] = module-expr`; an annotation is folded into a
+  * constraint around the module expression. *)
+ and parseModuleBindingBody p =
+ (* TODO: make required with good error message when rec module binding *)
+ let returnModType = match p.Parser.token with
+ | Colon ->
+ Parser.next p;
+ Some (parseModuleType p)
+ | _ -> None
+ in
+ Parser.expect Equal p;
+ let modExpr = parseModuleExpr p in
+ match returnModType with
+ | Some modType ->
+ Ast_helper.Mod.constraint_
+ ~loc:(mkLoc modType.pmty_loc.loc_start modExpr.pmod_loc.loc_end)
+ modExpr modType
+ | None -> modExpr
+
+
+ (* module-name : module-type = module-expr
+ * { and module-name : module-type = module-expr } *)
+ (* Parses a `module rec` binding group: the first binding, then each
+  * `and …` continuation (an extra `module` keyword after `and` is
+  * tolerated). Accumulates in reverse and re-reverses at the end. *)
+ and parseModuleBindings ~attrs ~startPos p =
+ let rec loop p acc =
+ let startPos = p.Parser.startPos in
+ let attrs = parseAttributesAndBinding p in
+ match p.Parser.token with
+ | And ->
+ Parser.next p;
+ ignore(Parser.optional p Module); (* over-parse for fault-tolerance *)
+ let modBinding = parseModuleBinding ~attrs ~startPos p in
+ loop p (modBinding::acc)
+ | _ -> List.rev acc
+ in
+ let first = parseModuleBinding ~attrs ~startPos p in
+ loop p [first]
+
+ (* Parses an atomic module type: a (possibly lowercase) path, a
+  * parenthesized module type, a `{…}` signature, `module type of …`, or
+  * an extension; errors recover with a default. The result is re-stamped
+  * with the full span. *)
+ and parseAtomicModuleType p =
+ let startPos = p.Parser.startPos in
+ let moduleType = match p.Parser.token with
+ | Uident _ | Lident _ | List ->
+ (* Ocaml allows module types to end with lowercase: module Foo : bar = { ... }
+ * lets go with uppercase terminal for now *)
+ let moduleLongIdent = parseModuleLongIdent ~lowercase:true p in
+ Ast_helper.Mty.ident ~loc:moduleLongIdent.loc moduleLongIdent
+ | Lparen ->
+ Parser.next p;
+ let mty = parseModuleType p in
+ Parser.expect Rparen p;
+ {mty with pmty_loc = mkLoc startPos p.prevEndPos}
+ | Lbrace ->
+ Parser.next p;
+ let spec =
+ parseDelimitedRegion
+ ~grammar:Grammar.Signature
+ ~closing:Rbrace
+ ~f:parseSignatureItemRegion
+ p
+ in
+ Parser.expect Rbrace p;
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Mty.signature ~loc spec
+ | Module -> (* TODO: check if this is still atomic when implementing first class modules*)
+ parseModuleTypeOf p
+ | Percent ->
+ let extension = parseExtension p in
+ let loc = mkLoc startPos p.prevEndPos in
+ Ast_helper.Mty.extension ~loc extension
+ | token ->
+ Parser.err p (Diagnostics.unexpected token p.breadcrumbs);
+ Recover.defaultModuleType()
+ in
+ let moduleTypeLoc = mkLoc startPos p.prevEndPos in
+ {moduleType with pmty_loc = moduleTypeLoc}
+
+ (* Parses `functorArgs => module-type`, folding the argument list
+  * right-to-left into nested Mty.functor_ nodes (module-type analogue of
+  * parseFunctorModuleExpr). *)
+ and parseFunctorModuleType p =
+ let startPos = p.Parser.startPos in
+ let args = parseFunctorArgs p in
+ Parser.expect EqualGreater p;
+ let rhs = parseModuleType p in
+ let endPos = p.prevEndPos in
+ let modType = List.fold_right (fun (attrs, name, moduleType, startPos) acc ->
+ Ast_helper.Mty.functor_
+ ~loc:(mkLoc startPos endPos)
+ ~attrs
+ name moduleType acc
+ ) args rhs
+ in
+ {modType with pmty_loc = mkLoc startPos endPos}
+
+ (* Module types are the module-level equivalent of type expressions: they
+ * specify the general shape and type properties of modules.
+ *
+ * module-type ::=
+ * | modtype-path
+ * | { signature }
+ * | ( module-type ) --> parenthesized module-type
+ * | functor-args => module-type --> functor
+ * | module-type => module-type --> functor
+ * | module type of module-expr
+ * | attributes module-type
+ * | module-type with-mod-constraints
+ * | extension
+ *)
+ (* Top-level module-type parser. ~es6Arrow controls whether `modtype =>
+  * modtype` functor sugar is accepted; ~with_ controls whether trailing
+  * `with` constraints are parsed (disabled on the rhs of `=>` so the
+  * constraint binds to the whole functor). *)
+ and parseModuleType ?(es6Arrow=true) ?(with_=true) p =
+ let attrs = parseAttributes p in
+ let modty = if es6Arrow && isEs6ArrowFunctor p then
+ parseFunctorModuleType p
+ else
+ let modty = parseAtomicModuleType p in
+ match p.Parser.token with
+ | EqualGreater when es6Arrow == true ->
+ (* `modtype => modtype` — sugar for a single-arg functor named `_` *)
+ Parser.next p;
+ let rhs = parseModuleType ~with_:false p in
+ let str = Location.mknoloc "_" in
+ let loc = mkLoc modty.pmty_loc.loc_start p.prevEndPos in
+ Ast_helper.Mty.functor_ ~loc str (Some modty) rhs
+ | _ -> modty
+ in
+ let moduleType = { modty with
+ pmty_attributes = List.concat [modty.pmty_attributes; attrs]
+ } in
+ if with_ then
+ parseWithConstraints moduleType p
+ else moduleType
+
+
+ (* Parses `with constraint { and constraint }` after a module type,
+  * wrapping it in Mty.with_; without a `with` the type passes through
+  * unchanged. *)
+ and parseWithConstraints moduleType p =
+ match p.Parser.token with
+ | With ->
+ Parser.next p;
+ let first = parseWithConstraint p in
+ let rec loop p acc =
+ match p.Parser.token with
+ | And ->
+ Parser.next p;
+ loop p ((parseWithConstraint p)::acc)
+ | _ ->
+ List.rev acc
+ in
+ let constraints = loop p [first] in
+ let loc = mkLoc moduleType.pmty_loc.loc_start p.prevEndPos in
+ Ast_helper.Mty.with_ ~loc moduleType constraints
+ | _ ->
+ moduleType
+
+ (* mod-constraint ::=
+ * | type typeconstr type-equation type-constraints?
+ * ∣ type typeconstr-name := typexpr
+ * ∣ module module-path = extended-module-path
+ * ∣ module module-path := extended-module-path
+ *
+ * TODO: split this up into multiple functions, better errors *)
+ (* Parses one `with` constraint: `module P = Q` / `module P := Q` /
+  * `type t = …` / `type t := …`. The unexpected-token arms inside each
+  * branch report an error, then keep parsing as if the expected token had
+  * been there. *)
+ and parseWithConstraint p =
+ match p.Parser.token with
+ | Module ->
+ Parser.next p;
+ let modulePath = parseModuleLongIdent ~lowercase:false p in
+ begin match p.Parser.token with
+ | ColonEqual ->
+ Parser.next p;
+ let lident = parseModuleLongIdent ~lowercase:false p in
+ Parsetree.Pwith_modsubst (modulePath, lident)
+ | Equal ->
+ Parser.next p;
+ let lident = parseModuleLongIdent ~lowercase:false p in
+ Parsetree.Pwith_module (modulePath, lident)
+ | token ->
+ (* TODO: revisit *)
+ Parser.err p (Diagnostics.unexpected token p.breadcrumbs);
+ let lident = parseModuleLongIdent ~lowercase:false p in
+ Parsetree.Pwith_modsubst (modulePath, lident)
+ end
+ | Typ ->
+ Parser.next p;
+ let typeConstr = parseValuePath p in
+ let params = parseTypeParams ~parent:typeConstr p in
+ begin match p.Parser.token with
+ | ColonEqual ->
+ Parser.next p;
+ let typExpr = parseTypExpr p in
+ Parsetree.Pwith_typesubst (
+ typeConstr,
+ Ast_helper.Type.mk
+ ~loc:typeConstr.loc
+ ~params
+ ~manifest:typExpr
+ (Location.mkloc (Longident.last typeConstr.txt) typeConstr.loc)
+ )
+ | Equal ->
+ Parser.next p;
+ let typExpr = parseTypExpr p in
+ let typeConstraints = parseTypeConstraints p in
+ Parsetree.Pwith_type (
+ typeConstr,
+ Ast_helper.Type.mk
+ ~loc:typeConstr.loc
+ ~params
+ ~manifest:typExpr
+ ~cstrs:typeConstraints
+ (Location.mkloc (Longident.last typeConstr.txt) typeConstr.loc)
+ )
+ | token ->
+ (* TODO: revisit *)
+ Parser.err p (Diagnostics.unexpected token p.breadcrumbs);
+ let typExpr = parseTypExpr p in
+ let typeConstraints = parseTypeConstraints p in
+ Parsetree.Pwith_type (
+ typeConstr,
+ Ast_helper.Type.mk
+ ~loc:typeConstr.loc
+ ~params
+ ~manifest:typExpr
+ ~cstrs:typeConstraints
+ (Location.mkloc (Longident.last typeConstr.txt) typeConstr.loc)
+ )
+ end
+ | token ->
+ Parser.err p (Diagnostics.unexpected token p.breadcrumbs);
+ (* NOTE(review): exit terminates the whole process on a parse error —
+  * every other error path in this file recovers instead. Replace with
+  * a recovery constraint when revisiting the TODO below. *)
+ exit (-1) (* TODO: handle this case *)
+
+ (* Parses `module type of module-expr` into an Mty.typeof_ node. *)
+ and parseModuleTypeOf p =
+ let startPos = p.Parser.startPos in
+ Parser.expect Module p;
+ Parser.expect Typ p;
+ Parser.expect Of p;
+ let moduleExpr = parseModuleExpr p in
+ Ast_helper.Mty.typeof_ ~loc:(mkLoc startPos p.prevEndPos) moduleExpr
+
+ (* module signature on the file level *)
+ (* Interface-file analogue of parseImplementation; [@@progress] satisfies
+  * the termination checker for this recursive region parser. *)
+ and parseSpecification p =
+ parseRegion ~grammar:Grammar.Specification ~f:parseSignatureItemRegion p
+ [@@progress (Parser.next, Parser.expect, Parser.checkProgress)]
+
+  (* Parses a single signature item: let-desc, type definition/extension,
+   * external, exception, open, include, module (decl/alias/rec/module type),
+   * standalone attribute (@@) or item extension (%%).
+   * Returns None when the current token starts no signature item, which is
+   * how parseRegion detects the end of the specification. *)
+  and parseSignatureItemRegion p =
+    let startPos = p.Parser.startPos in
+    (* attributes are collected up front and attached to whatever item follows *)
+    let attrs = parseAttributes p in
+    match p.Parser.token with
+    | Let ->
+      Parser.beginRegion p;
+      let valueDesc = parseSignLetDesc ~attrs p in
+      Parser.optional p Semicolon |> ignore;
+      let loc = mkLoc startPos p.prevEndPos in
+      Parser.endRegion p;
+      Some (Ast_helper.Sig.value ~loc valueDesc)
+    | Typ ->
+      Parser.beginRegion p;
+      begin match parseTypeDefinitionOrExtension ~attrs p with
+      | TypeDef {recFlag; types} ->
+        Parser.optional p Semicolon |> ignore;
+        let loc = mkLoc startPos p.prevEndPos in
+        Parser.endRegion p;
+        Some (Ast_helper.Sig.type_ ~loc recFlag types)
+      | TypeExt(ext) ->
+        Parser.optional p Semicolon |> ignore;
+        let loc = mkLoc startPos p.prevEndPos in
+        Parser.endRegion p;
+        Some (Ast_helper.Sig.type_extension ~loc ext)
+      end
+    | External ->
+      let externalDef = parseExternalDef ~attrs p in
+      Parser.optional p Semicolon |> ignore;
+      let loc = mkLoc startPos p.prevEndPos in
+      Some (Ast_helper.Sig.value ~loc externalDef)
+    | Exception ->
+      let exceptionDef = parseExceptionDef ~attrs p in
+      Parser.optional p Semicolon |> ignore;
+      let loc = mkLoc startPos p.prevEndPos in
+      Some (Ast_helper.Sig.exception_ ~loc exceptionDef)
+    | Open ->
+      let openDescription = parseOpenDescription ~attrs p in
+      Parser.optional p Semicolon |> ignore;
+      let loc = mkLoc startPos p.prevEndPos in
+      Some (Ast_helper.Sig.open_ ~loc openDescription)
+    | Include ->
+      Parser.next p;
+      let moduleType = parseModuleType p in
+      let includeDescription = Ast_helper.Incl.mk
+        ~loc:(mkLoc startPos p.prevEndPos)
+        ~attrs
+        moduleType
+      in
+      Parser.optional p Semicolon |> ignore;
+      let loc = mkLoc startPos p.prevEndPos in
+      Some (Ast_helper.Sig.include_ ~loc includeDescription)
+    | Module ->
+      (* `module X ...`, `module rec ...`, `module type ...`; anything else
+       * after `module` is recovered as a declaration/alias *)
+      Parser.next p;
+      begin match p.Parser.token with
+      | Uident _ ->
+        let modDecl = parseModuleDeclarationOrAlias ~attrs p in
+        Parser.optional p Semicolon |> ignore;
+        let loc = mkLoc startPos p.prevEndPos in
+        Some (Ast_helper.Sig.module_ ~loc modDecl)
+      | Rec ->
+        let recModule = parseRecModuleSpec ~attrs ~startPos p in
+        Parser.optional p Semicolon |> ignore;
+        let loc = mkLoc startPos p.prevEndPos in
+        Some (Ast_helper.Sig.rec_module ~loc recModule)
+      | Typ ->
+        Some (parseModuleTypeDeclaration ~attrs ~startPos p)
+      | _t ->
+        let modDecl = parseModuleDeclarationOrAlias ~attrs p in
+        Parser.optional p Semicolon |> ignore;
+        let loc = mkLoc startPos p.prevEndPos in
+        Some (Ast_helper.Sig.module_ ~loc modDecl)
+      end
+    | AtAt ->
+      let attr = parseStandaloneAttribute p in
+      Parser.optional p Semicolon |> ignore;
+      let loc = mkLoc startPos p.prevEndPos in
+      Some (Ast_helper.Sig.attribute ~loc attr)
+    | PercentPercent ->
+      let extension = parseExtension ~moduleLanguage:true p in
+      Parser.optional p Semicolon |> ignore;
+      let loc = mkLoc startPos p.prevEndPos in
+      Some (Ast_helper.Sig.extension ~attrs ~loc extension)
+    | Import ->
+      (* `import` is skipped here and the following item is parsed instead *)
+      Parser.next p;
+      parseSignatureItemRegion p
+    | _ ->
+      None
+
+  (* module rec module-name : module-type { and module-name: module-type } *)
+  and parseRecModuleSpec ~attrs ~startPos p =
+    Parser.expect Rec p;
+    (* collect `and`-separated declarations; spec is accumulated in reverse *)
+    let rec loop p spec =
+      let startPos = p.Parser.startPos in
+      let attrs = parseAttributesAndBinding p in
+      match p.Parser.token with
+      | And ->
+        (* TODO: give a good error message when with constraint, no parens
+         * and ASet: (Set.S with type elt = A.t)
+         * and BTree: (Btree.S with type elt = A.t)
+         * Without parens, the `and` signals the start of another
+         * `with-constraint`
+         *)
+        Parser.expect And p;
+        let decl = parseRecModuleDeclaration ~attrs ~startPos p in
+        loop p (decl::spec)
+      | _ ->
+        List.rev spec
+    in
+    let first = parseRecModuleDeclaration ~attrs ~startPos p in
+    loop p [first]
+
+  (* module-name : module-type *)
+  and parseRecModuleDeclaration ~attrs ~startPos p =
+    let name = match p.Parser.token with
+    | Uident modName ->
+      let loc = mkLoc p.startPos p.endPos in
+      Parser.next p;
+      Location.mkloc modName loc
+    | t ->
+      (* recovery: report missing uppercase ident, continue with "_" *)
+      Parser.err p (Diagnostics.uident t);
+      Location.mknoloc "_"
+    in
+    Parser.expect Colon p;
+    let modType = parseModuleType p in
+    Ast_helper.Md.mk ~loc:(mkLoc startPos p.prevEndPos) ~attrs name modType
+
+  (* `module X: module-type` (declaration) or `module X = Path` (alias).
+   * Any other token after the name is an error, recovered with a default
+   * module type. *)
+  and parseModuleDeclarationOrAlias ~attrs p =
+    let startPos = p.Parser.startPos in
+    let moduleName = match p.Parser.token with
+    | Uident ident ->
+      let loc = mkLoc p.Parser.startPos p.endPos in
+      Parser.next p;
+      Location.mkloc ident loc
+    | t ->
+      Parser.err p (Diagnostics.uident t);
+      Location.mknoloc "_"
+    in
+    let body = match p.Parser.token with
+    | Colon ->
+      Parser.next p;
+      parseModuleType p
+    | Equal ->
+      Parser.next p;
+      let lident = parseModuleLongIdent ~lowercase:false p in
+      Ast_helper.Mty.alias lident
+    | token ->
+      Parser.err p (Diagnostics.unexpected token p.breadcrumbs);
+      Recover.defaultModuleType()
+    in
+    let loc = mkLoc startPos p.prevEndPos in
+    Ast_helper.Md.mk ~loc ~attrs moduleName body
+
+  (* `module type Name` (abstract) or `module type Name = module-type`.
+   * Note: a lowercase name is also accepted here (see Lident case). *)
+  and parseModuleTypeDeclaration ~attrs ~startPos p =
+    Parser.expect Typ p;
+    let moduleName = match p.Parser.token with
+    | Uident ident ->
+      let loc = mkLoc p.startPos p.endPos in
+      Parser.next p;
+      Location.mkloc ident loc
+    | Lident ident ->
+      let loc = mkLoc p.startPos p.endPos in
+      Parser.next p;
+      Location.mkloc ident loc
+    | t ->
+      Parser.err p (Diagnostics.uident t);
+      Location.mknoloc "_"
+    in
+    (* without `=`, the module type stays abstract (typ = None) *)
+    let typ = match p.Parser.token with
+    | Equal ->
+      Parser.next p;
+      Some (parseModuleType p)
+    | _ -> None
+    in
+    let moduleDecl = Ast_helper.Mtd.mk ~attrs ?typ moduleName in
+    Ast_helper.Sig.modtype ~loc:(mkLoc startPos p.prevEndPos) moduleDecl
+
+  (* `let name: poly-type-expr` inside a signature; yields a value
+   * description. *)
+  and parseSignLetDesc ~attrs p =
+    let startPos = p.Parser.startPos in
+    Parser.expect Let p;
+    let (name, loc) = parseLident p in
+    let name = Location.mkloc name loc in
+    Parser.expect Colon p;
+    let typExpr = parsePolyTypeExpr p in
+    let loc = mkLoc startPos p.prevEndPos in
+    Ast_helper.Val.mk ~loc ~attrs name typExpr
+
+  (* attr-id ::= lowercase-ident
+   *          ∣ capitalized-ident
+   *          ∣ attr-id . attr-id
+   * Segments are concatenated into one dotted string; keywords are accepted
+   * as segments (e.g. @as, @module). *)
+  and parseAttributeId p =
+    let startPos = p.Parser.startPos in
+    let rec loop p acc =
+      match p.Parser.token with
+      | Lident ident | Uident ident ->
+        Parser.next p;
+        let id = acc ^ ident in
+        begin match p.Parser.token with
+        | Dot -> Parser.next p; loop p (id ^ ".")
+        | _ -> id
+        end
+      | token when Token.isKeyword token ->
+        Parser.next p;
+        let id = acc ^ (Token.toString token) in
+        begin match p.Parser.token with
+        | Dot -> Parser.next p; loop p (id ^ ".")
+        | _ -> id
+        end
+      | token ->
+        (* recovery: keep whatever was accumulated so far *)
+        Parser.err p (Diagnostics.unexpected token p.breadcrumbs);
+        acc
+    in
+    let id = loop p "" in
+    let endPos = p.prevEndPos in
+    Location.mkloc id (mkLoc startPos endPos)
+
+  (*
+   * payload ::=  empty
+   *          |  ( structure-item )
+   *
+   * TODO: what about multiple structure items?
+   * @attr({let x = 1; let x = 2})
+   *
+   * Also what about type-expressions and specifications?
+   * @attr(:myType) ???
+   *)
+  and parsePayload p =
+    match p.Parser.token with
+    (* the `(` only counts as a payload when it touches the attribute id:
+     * no whitespace between prevEndPos and startPos *)
+    | Lparen when p.startPos.pos_cnum = p.prevEndPos.pos_cnum ->
+      Parser.next p;
+      begin match p.token with
+      | Colon ->
+        (* `@attr(: typ)` — type payload *)
+        Parser.next p;
+        let typ = parseTypExpr p in
+        Parser.expect Rparen p;
+        Parsetree.PTyp typ
+      | _ ->
+        let items = parseDelimitedRegion
+          ~grammar:Grammar.Structure
+          ~closing:Rparen
+          ~f:parseStructureItemRegion
+          p
+        in
+        Parser.expect Rparen p;
+        Parsetree.PStr items
+      end
+    | _ -> Parsetree.PStr []
+
+  (* type attribute = string loc * payload *)
+  (* Returns None when the current token is not `@`, so this can drive
+   * parseRegion. *)
+  and parseAttribute p =
+    match p.Parser.token with
+    | At ->
+      Parser.next p;
+      let attrId = parseAttributeId p in
+      let payload = parsePayload p in
+      Some(attrId, payload)
+    | _ -> None
+
+  (* Zero or more `@attr(payload)` occurrences, in source order. *)
+  and parseAttributes p =
+    parseRegion p
+      ~grammar:Grammar.Attribute
+      ~f:parseAttribute
+
+  (*
+   * standalone-attribute ::=
+   *  | @@ attribute-id
+   *  | @@ attribute-id ( structure-item )
+   *)
+  and parseStandaloneAttribute p =
+    Parser.expect AtAt p;
+    let attrId = parseAttributeId p in
+    let payload = parsePayload p in
+    (attrId, payload)
+
+  (* extension ::= % attr-id  attr-payload
+   *            | %% attr-id(
+   *  expr ::= ...
+   *    ∣ extension
+   *
+   *  typexpr ::= ...
+   *    ∣ extension
+   *
+   *  pattern ::= ...
+   *    ∣ extension
+   *
+   *  module-expr ::= ...
+   *    ∣ extension
+   *
+   *  module-type ::= ...
+   *    ∣ extension
+   *
+   *  class-expr ::= ...
+   *    ∣ extension
+   *
+   *  class-type ::= ...
+   *    ∣ extension
+   *
+   *
+   * item extension nodes usable in structures and signature
+   *
+   * item-extension ::= %% attr-id
+   *                  | %% attr-id(structure-item)
+   *
+   * attr-payload ::= structure-item
+   *
+   * ~moduleLanguage represents whether we're on the module level or not
+   *)
+  and parseExtension ?(moduleLanguage=false) p =
+    (* `%%` on the module level, `%` elsewhere *)
+    if moduleLanguage then
+      Parser.expect PercentPercent p
+    else
+      Parser.expect Percent p;
+    let attrId = parseAttributeId p in
+    let payload = parsePayload p in
+    (attrId, payload)
+end
+
+module OutcomePrinter: sig
+ open Format
+ open Outcometree
+
+ val out_value : (formatter -> out_value -> unit) ref [@@live]
+ val out_type : (formatter -> out_type -> unit) ref [@@live]
+ val out_class_type : (formatter -> out_class_type -> unit) ref [@@live]
+ val out_module_type : (formatter -> out_module_type -> unit) ref [@@live]
+ val out_sig_item : (formatter -> out_sig_item -> unit) ref [@@live]
+ val out_signature : (formatter -> out_sig_item list -> unit) ref [@@live]
+ val out_type_extension : (formatter -> out_type_extension -> unit) ref [@@live]
+ val out_phrase : (formatter -> out_phrase -> unit) ref [@@live]
+
+ val parenthesized_ident : string -> bool [@@live]
+end = struct
+  (* Napkin doesn't have parenthesized identifiers.
+   * We don't support custom operators. *)
+  (* NOTE(review): unconditionally returns true for every name; presumably
+   * callers use this to skip operator-escaping entirely — confirm intent. *)
+  let parenthesized_ident _name = true
+
+  (* TODO: better allocation strategy for the buffer *)
+  (* Escapes backspace, tab, newline, carriage return, double quote and
+   * backslash so [s] can be embedded in a double-quoted string literal;
+   * all other bytes are copied through unchanged. *)
+  let escapeStringContents s =
+    let buf = Buffer.create (String.length s) in
+    String.iter
+      (fun ch ->
+        match ch with
+        | '\008' -> Buffer.add_string buf "\\b"
+        | '\009' -> Buffer.add_string buf "\\t"
+        | '\010' -> Buffer.add_string buf "\\n"
+        | '\013' -> Buffer.add_string buf "\\r"
+        | '\034' -> Buffer.add_string buf "\\\""
+        | '\092' -> Buffer.add_string buf "\\\\"
+        | other -> Buffer.add_char buf other)
+      s;
+    Buffer.contents buf
+
+ (* let rec print_ident fmt ident = match ident with
+ | Outcometree.Oide_ident s -> Format.pp_print_string fmt s
+ | Oide_dot (id, s) ->
+ print_ident fmt id;
+ Format.pp_print_char fmt '.';
+ Format.pp_print_string fmt s
+ | Oide_apply (id1, id2) ->
+ print_ident fmt id1;
+ Format.pp_print_char fmt '(';
+ print_ident fmt id2;
+ Format.pp_print_char fmt ')' *)
+
+  (* Renders an out_ident: plain name, dotted path, or functor
+   * application `F(Arg)`. *)
+  let rec printOutIdentDoc (ident : Outcometree.out_ident) =
+    match ident with
+    | Oide_ident name -> Doc.text name
+    | Oide_dot (prefix, name) ->
+      Doc.concat [printOutIdentDoc prefix; Doc.dot; Doc.text name]
+    | Oide_apply (functorIdent, argIdent) ->
+      Doc.concat [
+        printOutIdentDoc functorIdent;
+        Doc.lparen;
+        printOutIdentDoc argIdent;
+        Doc.rparen;
+      ]
+
+  (* Renders one attribute as `@name`; payloads are not printed. *)
+  let printOutAttributeDoc (outAttribute: Outcometree.out_attribute) =
+    Doc.concat [
+      Doc.text "@";
+      Doc.text outAttribute.oattr_name;
+    ]
+
+  (* Renders a line-separated group of attributes followed by a trailing
+   * line break; nil when there are none. *)
+  let printOutAttributesDoc (attrs: Outcometree.out_attribute list) =
+    match attrs with
+    | [] -> Doc.nil
+    | attrs ->
+      Doc.concat [
+        Doc.group (
+          Doc.join ~sep:Doc.line (List.map printOutAttributeDoc attrs)
+        );
+        Doc.line;
+      ]
+
+  (* Flattens nested Otyp_arrow nodes into ((label, argType) list, returnType).
+   * Args are accumulated in reverse and reversed once at the base case. *)
+  let rec collectArrowArgs (outType: Outcometree.out_type) args =
+    match outType with
+    | Otyp_arrow (label, argType, returnType) ->
+      let arg = (label, argType) in
+      collectArrowArgs returnType (arg::args)
+    | _ as returnType ->
+      (List.rev args, returnType)
+
+  (* Flattens nested Omty_functor nodes into ((label, module-type option) list,
+   * returnModuleType), mirroring collectArrowArgs for module types. *)
+  let rec collectFunctorArgs (outModuleType: Outcometree.out_module_type) args =
+    match outModuleType with
+    | Omty_functor (lbl, optModType, returnModType) ->
+      let arg = (lbl, optModType) in
+      collectFunctorArgs returnModType (arg::args)
+    | _ ->
+      (List.rev args, outModuleType)
+
+ let rec printOutTypeDoc (outType: Outcometree.out_type) =
+ match outType with
+ | Otyp_abstract | Otyp_variant _ (* don't support poly-variants atm *) | Otyp_open -> Doc.nil
+ | Otyp_alias (typ, aliasTxt) ->
+ Doc.concat [
+ printOutTypeDoc typ;
+ Doc.text " as '";
+ Doc.text aliasTxt
+ ]
+ | Otyp_constr (outIdent, []) ->
+ printOutIdentDoc outIdent
+ | Otyp_manifest (typ1, typ2) ->
+ Doc.concat [
+ printOutTypeDoc typ1;
+ Doc.text " = ";
+ printOutTypeDoc typ2;
+ ]
+ | Otyp_record record ->
+ printRecordDeclarationDoc ~inline:true record
+ | Otyp_stuff txt -> Doc.text txt
+ | Otyp_var (ng, s) -> Doc.concat [
+ Doc.text ("'" ^ (if ng then "_" else ""));
+ Doc.text s
+ ]
+ | Otyp_object (fields, rest) -> printObjectFields fields rest
+ | Otyp_class _ -> Doc.nil
+ | Otyp_attribute (typ, attribute) ->
+ Doc.group (
+ Doc.concat [
+ printOutAttributeDoc attribute;
+ Doc.line;
+ printOutTypeDoc typ;
+ ]
+ )
+ (* example: Red | Blue | Green | CustomColour(float, float, float) *)
+ | Otyp_sum constructors ->
+ printOutConstructorsDoc constructors
+
+ (* example: {"name": string, "age": int} *)
+ | Otyp_constr (
+ (Oide_dot ((Oide_ident "Js"), "t")),
+ [Otyp_object (fields, rest)]
+ ) -> printObjectFields fields rest
+
+ (* example: node *)
+ | Otyp_constr (outIdent, args) ->
+ let argsDoc = match args with
+ | [] -> Doc.nil
+ | args ->
+ Doc.concat [
+ Doc.lessThan;
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (
+ List.map printOutTypeDoc args
+ )
+ ]
+ );
+ Doc.trailingComma;
+ Doc.softLine;
+ Doc.greaterThan;
+ ]
+ in
+ Doc.group (
+ Doc.concat [
+ printOutIdentDoc outIdent;
+ argsDoc;
+ ]
+ )
+ | Otyp_tuple tupleArgs ->
+ Doc.group (
+ Doc.concat [
+ Doc.lparen;
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (
+ List.map printOutTypeDoc tupleArgs
+ )
+ ]
+ );
+ Doc.trailingComma;
+ Doc.softLine;
+ Doc.rparen;
+ ]
+ )
+ | Otyp_poly (vars, outType) ->
+ Doc.group (
+ Doc.concat [
+ Doc.join ~sep:Doc.space (
+ List.map (fun var -> Doc.text ("'" ^ var)) vars
+ );
+ printOutTypeDoc outType;
+ ]
+ )
+ | Otyp_arrow _ as typ ->
+ let (typArgs, typ) = collectArrowArgs typ [] in
+ let args = Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (
+ List.map (fun (lbl, typ) ->
+ if lbl = "" then
+ printOutTypeDoc typ
+ else
+ Doc.group (
+ Doc.concat [
+ Doc.text ("~" ^ lbl ^ ": ");
+ printOutTypeDoc typ
+ ]
+ )
+ ) typArgs
+ ) in
+ let argsDoc =
+ let needsParens = match typArgs with
+ | [_, (Otyp_tuple _ | Otyp_arrow _)] -> true
+ (* single argument should not be wrapped *)
+ | ["", _] -> false
+ | _ -> true
+ in
+ if needsParens then
+ Doc.group (
+ Doc.concat [
+ Doc.lparen;
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ args;
+ ]
+ );
+ Doc.trailingComma;
+ Doc.softLine;
+ Doc.rparen;
+ ]
+ )
+ else args
+ in
+ Doc.concat [
+ argsDoc;
+ Doc.text " => ";
+ printOutTypeDoc typ;
+ ]
+ | Otyp_module (_modName, _stringList, _outTypes) ->
+ Doc.nil
+
+ and printObjectFields fields rest =
+ let dots = match rest with
+ | Some non_gen -> Doc.text ((if non_gen then "_" else "") ^ "..")
+ | None -> Doc.nil
+ in
+ Doc.group (
+ Doc.concat [
+ Doc.lbrace;
+ dots;
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (
+ List.map (fun (lbl, outType) -> Doc.group (
+ Doc.concat [
+ Doc.text ("\"" ^ lbl ^ "\": ");
+ printOutTypeDoc outType;
+ ]
+ )) fields
+ )
+ ]
+ );
+ Doc.softLine;
+ Doc.trailingComma;
+ Doc.rbrace;
+ ]
+ )
+
+
+ and printOutConstructorsDoc constructors =
+ Doc.group (
+ Doc.indent (
+ Doc.concat [
+ Doc.line;
+ Doc.join ~sep:Doc.line (
+ List.mapi (fun i constructor ->
+ Doc.concat [
+ if i > 0 then Doc.text "| " else Doc.ifBreaks (Doc.text "| ") Doc.nil;
+ printOutConstructorDoc constructor;
+ ]
+ ) constructors
+ )
+ ]
+ )
+ )
+
+ and printOutConstructorDoc (name, args, gadt) =
+ let gadtDoc = match gadt with
+ | Some outType ->
+ Doc.concat [
+ Doc.text ": ";
+ printOutTypeDoc outType
+ ]
+ | None -> Doc.nil
+ in
+ let argsDoc = match args with
+ | [] -> Doc.nil
+ | [Otyp_record record] ->
+ (* inline records
+ * | Root({
+ * mutable value: 'value,
+ * mutable updatedTime: float,
+ * })
+ *)
+ Doc.concat [
+ Doc.lparen;
+ Doc.indent (
+ printRecordDeclarationDoc ~inline:true record;
+ );
+ Doc.rparen;
+ ]
+ | _types ->
+ Doc.indent (
+ Doc.concat [
+ Doc.lparen;
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (
+ List.map printOutTypeDoc args
+ )
+ ]
+ );
+ Doc.trailingComma;
+ Doc.softLine;
+ Doc.rparen;
+ ]
+ )
+ in
+ Doc.group (
+ Doc.concat [
+ Doc.text name;
+ argsDoc;
+ gadtDoc
+ ]
+ )
+
+ and printRecordDeclRowDoc (name, mut, arg) =
+ Doc.group (
+ Doc.concat [
+ if mut then Doc.text "mutable " else Doc.nil;
+ Doc.text name;
+ Doc.text ": ";
+ printOutTypeDoc arg;
+ ]
+ )
+
+ and printRecordDeclarationDoc ~inline rows =
+ let content = Doc.concat [
+ Doc.lbrace;
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (
+ List.map printRecordDeclRowDoc rows
+ )
+ ]
+ );
+ Doc.trailingComma;
+ Doc.softLine;
+ Doc.rbrace;
+ ] in
+ if not inline then
+ Doc.group content
+ else content
+
+  (* Format-based entry point: renders an out_type at width 80. *)
+  let printOutType fmt outType =
+    Format.pp_print_string fmt
+      (Doc.toString ~width:80 (printOutTypeDoc outType))
+
+  (* Type parameter with variance marker: "+" when cn is false, "-" when co
+   * is false, nothing otherwise; `_` is printed bare, others as 'name. *)
+  let printTypeParameterDoc (typ, (co, cn)) =
+    Doc.concat [
+      if not cn then Doc.text "+" else if not co then Doc.text "-" else Doc.nil;
+      if typ = "_" then Doc.text "_" else Doc.text ("'" ^ typ)
+    ]
+
+
+  (* Prints one signature item as a Doc.t: values/externals, extension
+   * constructors, module types, modules (incl. aliases and rec groups) and
+   * type declarations. Classes are not supported (Doc.nil). *)
+  let rec printOutSigItemDoc (outSigItem : Outcometree.out_sig_item) =
+    match outSigItem with
+    | Osig_class _ | Osig_class_type _ -> Doc.nil
+    | Osig_ellipsis -> Doc.dotdotdot
+    | Osig_value valueDecl ->
+      (* `let name: typ` — or `external name: typ = "prim" ...` when the
+       * declaration carries primitives *)
+      Doc.group (
+        Doc.concat [
+          printOutAttributesDoc valueDecl.oval_attributes;
+          Doc.text (
+            match valueDecl.oval_prims with | [] -> "let " | _ -> "external "
+          );
+          Doc.text valueDecl.oval_name;
+          Doc.text ":";
+          Doc.space;
+          printOutTypeDoc valueDecl.oval_type;
+          match valueDecl.oval_prims with
+          | [] -> Doc.nil
+          | primitives -> Doc.indent (
+              Doc.concat [
+                Doc.text " =";
+                Doc.line;
+                Doc.group (
+                  Doc.join ~sep:Doc.line (List.map (fun prim -> Doc.text ("\"" ^ prim ^ "\"")) primitives)
+                )
+              ]
+            )
+        ]
+      )
+    | Osig_typext (outExtensionConstructor, _outExtStatus) ->
+      printOutExtensionConstructorDoc outExtensionConstructor
+    | Osig_modtype (modName, Omty_signature []) ->
+      (* abstract module type: no `= ...` part *)
+      Doc.concat [
+        Doc.text "module type ";
+        Doc.text modName;
+      ]
+    | Osig_modtype (modName, outModuleType) ->
+      Doc.group (
+        Doc.concat [
+          Doc.text "module type ";
+          Doc.text modName;
+          Doc.text " = ";
+          printOutModuleTypeDoc outModuleType;
+        ]
+      )
+    | Osig_module (modName, Omty_alias ident, _) ->
+      Doc.group (
+        Doc.concat [
+          Doc.text "module ";
+          Doc.text modName;
+          Doc.text " =";
+          Doc.line;
+          printOutIdentDoc ident;
+        ]
+      )
+    | Osig_module (modName, outModType, outRecStatus) ->
+      Doc.group (
+        Doc.concat [
+          Doc.text (
+            match outRecStatus with
+            | Orec_not -> "module "
+            | Orec_first -> "module rec "
+            (* fix: trailing space was missing, which printed `andModName` *)
+            | Orec_next -> "and "
+          );
+          Doc.text modName;
+          Doc.text " = ";
+          printOutModuleTypeDoc outModType;
+        ]
+      )
+    | Osig_type (outTypeDecl, outRecStatus) ->
+      (* TODO: manifest ? *)
+      let attrs = match outTypeDecl.otype_immediate, outTypeDecl.otype_unboxed with
+      | false, false -> Doc.nil
+      | true, false ->
+        Doc.concat [Doc.text "@immediate"; Doc.line]
+      | false, true ->
+        Doc.concat [Doc.text "@unboxed"; Doc.line]
+      | true, true ->
+        Doc.concat [Doc.text "@immediate @unboxed"; Doc.line]
+      in
+      let kw = Doc.text (
+        match outRecStatus with
+        | Orec_not -> "type "
+        | Orec_first -> "type rec "
+        | Orec_next -> "and "
+      ) in
+      let typeParams = match outTypeDecl.otype_params with
+      | [] -> Doc.nil
+      | _params -> Doc.group (
+          Doc.concat [
+            Doc.lessThan;
+            Doc.indent (
+              Doc.concat [
+                Doc.softLine;
+                Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (
+                  List.map printTypeParameterDoc outTypeDecl.otype_params
+                )
+              ]
+            );
+            Doc.trailingComma;
+            Doc.softLine;
+            Doc.greaterThan;
+          ]
+        )
+      in
+      let privateDoc = match outTypeDecl.otype_private with
+      | Asttypes.Private -> Doc.text "private "
+      | Public -> Doc.nil
+      in
+      let kind = match outTypeDecl.otype_type with
+      | Otyp_open -> Doc.concat [
+          Doc.text " = ";
+          privateDoc;
+          Doc.text "..";
+        ]
+      | Otyp_abstract -> Doc.nil
+      | Otyp_record record -> Doc.concat [
+          Doc.text " = ";
+          privateDoc;
+          printRecordDeclarationDoc ~inline:false record;
+        ]
+      | typ -> Doc.concat [
+          Doc.text " = ";
+          printOutTypeDoc typ
+        ]
+      in
+      let constraints = match outTypeDecl.otype_cstrs with
+      | [] -> Doc.nil
+      | _ -> Doc.group (
+          Doc.concat [
+            Doc.line;
+            Doc.indent (
+              Doc.concat [
+                Doc.hardLine;
+                Doc.join ~sep:Doc.line (List.map (fun (typ1, typ2) ->
+                  Doc.group (
+                    Doc.concat [
+                      Doc.text "constraint ";
+                      printOutTypeDoc typ1;
+                      Doc.text " =";
+                      Doc.indent (
+                        Doc.concat [
+                          Doc.line;
+                          printOutTypeDoc typ2;
+                        ]
+                      )
+                    ]
+                  )
+                ) outTypeDecl.otype_cstrs)
+              ]
+            )
+          ]
+        ) in
+      Doc.group (
+        Doc.concat [
+          (* fix: attrs was emitted twice (once here and once again inside the
+           * inner group), duplicating @immediate/@unboxed in the output *)
+          attrs;
+          Doc.group (
+            Doc.concat [
+              kw;
+              Doc.text outTypeDecl.otype_name;
+              typeParams;
+              kind
+            ]
+          );
+          constraints
+        ]
+      )
+
+ and printOutModuleTypeDoc (outModType : Outcometree.out_module_type) =
+ match outModType with
+ | Omty_abstract -> Doc.nil
+ | Omty_ident ident -> printOutIdentDoc ident
+ (* example: module Increment = (M: X_int) => X_int *)
+ | Omty_functor _ ->
+ let (args, returnModType) = collectFunctorArgs outModType [] in
+ let argsDoc = match args with
+ | [_, None] -> Doc.text "()"
+ | args ->
+ Doc.group (
+ Doc.concat [
+ Doc.lparen;
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (
+ List.map (fun (lbl, optModType) -> Doc.group (
+ Doc.concat [
+ Doc.text lbl;
+ match optModType with
+ | None -> Doc.nil
+ | Some modType -> Doc.concat [
+ Doc.text ": ";
+ printOutModuleTypeDoc modType;
+ ]
+ ]
+ )) args
+ )
+ ]
+ );
+ Doc.trailingComma;
+ Doc.softLine;
+ Doc.rparen;
+ ]
+ )
+ in
+ Doc.group (
+ Doc.concat [
+ argsDoc;
+ Doc.text " => ";
+ printOutModuleTypeDoc returnModType
+ ]
+ )
+ | Omty_signature [] -> Doc.nil
+ | Omty_signature signature ->
+ Doc.breakableGroup ~forceBreak:true (
+ Doc.concat [
+ Doc.lbrace;
+ Doc.indent (
+ Doc.concat [
+ Doc.line;
+ printOutSignatureDoc signature;
+ ]
+ );
+ Doc.softLine;
+ Doc.rbrace;
+ ]
+ )
+ | Omty_alias _ident -> Doc.nil
+
+ and printOutSignatureDoc (signature : Outcometree.out_sig_item list) =
+ let rec loop signature acc =
+ match signature with
+ | [] -> List.rev acc
+ | Outcometree.Osig_typext(ext, Oext_first) :: items ->
+ (* Gather together the extension constructors *)
+ let rec gather_extensions acc items =
+ match items with
+ Outcometree.Osig_typext(ext, Oext_next) :: items ->
+ gather_extensions
+ ((ext.oext_name, ext.oext_args, ext.oext_ret_type) :: acc)
+ items
+ | _ -> (List.rev acc, items)
+ in
+ let exts, items =
+ gather_extensions
+ [(ext.oext_name, ext.oext_args, ext.oext_ret_type)]
+ items
+ in
+ let te =
+ { Outcometree.otyext_name = ext.oext_type_name;
+ otyext_params = ext.oext_type_params;
+ otyext_constructors = exts;
+ otyext_private = ext.oext_private }
+ in
+ let doc = printOutTypeExtensionDoc te in
+ loop items (doc::acc)
+ | item::items ->
+ let doc = printOutSigItemDoc item in
+ loop items (doc::acc)
+ in
+ match loop signature [] with
+ | [doc] -> doc
+ | docs ->
+ Doc.breakableGroup ~forceBreak:true (
+ Doc.join ~sep:Doc.line docs
+ )
+
+ and printOutExtensionConstructorDoc (outExt : Outcometree.out_extension_constructor) =
+ let typeParams = match outExt.oext_type_params with
+ | [] -> Doc.nil
+ | params ->
+ Doc.group(
+ Doc.concat [
+ Doc.lessThan;
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (List.map
+ (fun ty -> Doc.text (if ty = "_" then ty else "'" ^ ty))
+ params
+
+ )
+ ]
+ );
+ Doc.softLine;
+ Doc.greaterThan;
+ ]
+ )
+
+ in
+ Doc.group (
+ Doc.concat [
+ Doc.text "type ";
+ Doc.text outExt.oext_type_name;
+ typeParams;
+ Doc.text " +=";
+ Doc.line;
+ if outExt.oext_private = Asttypes.Private then
+ Doc.text "private "
+ else
+ Doc.nil;
+ printOutConstructorDoc
+ (outExt.oext_name, outExt.oext_args, outExt.oext_ret_type)
+ ]
+ )
+
+  (* Prints a gathered type extension as `type name<params> += constructors`. *)
+  and printOutTypeExtensionDoc (typeExtension : Outcometree.out_type_extension) =
+    let typeParams = match typeExtension.otyext_params with
+    | [] -> Doc.nil
+    | params ->
+      Doc.group(
+        Doc.concat [
+          Doc.lessThan;
+          Doc.indent (
+            Doc.concat [
+              Doc.softLine;
+              Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (List.map
+                (fun ty -> Doc.text (if ty = "_" then ty else "'" ^ ty))
+                params
+              )
+            ]
+          );
+          Doc.softLine;
+          Doc.greaterThan;
+        ]
+      )
+    in
+    Doc.group (
+      Doc.concat [
+        Doc.text "type ";
+        Doc.text typeExtension.otyext_name;
+        typeParams;
+        Doc.text " +=";
+        (* NOTE(review): unlike printOutExtensionConstructorDoc there is no
+         * Doc.line after " +=", so a private extension renders as
+         * `+=private ` with no separator — confirm whether that's intended *)
+        if typeExtension.otyext_private = Asttypes.Private then
+          Doc.text "private "
+        else
+          Doc.nil;
+        printOutConstructorsDoc typeExtension.otyext_constructors;
+      ]
+    )
+
+ let printOutSigItem fmt outSigItem =
+ Format.pp_print_string fmt
+ (Doc.toString ~width:80 (printOutSigItemDoc outSigItem))
+
+ let printOutSignature fmt signature =
+ Format.pp_print_string fmt
+ (Doc.toString ~width:80 (printOutSignatureDoc signature))
+
+  (* Appends a trailing "." when [s] consists only of digits and minus signs,
+   * so the lexeme reads back as a float instead of an int; otherwise [s] is
+   * returned unchanged. *)
+  let validFloatLexeme s =
+    let intLike = ref true in
+    String.iter
+      (fun c ->
+        match c with
+        | '0' .. '9' | '-' -> ()
+        | _ -> intLike := false)
+      s;
+    if !intLike then s ^ "." else s
+
+  (* Shortest decimal representation that round-trips through
+   * float_of_string: tries 12, then 15, then 18 significant digits.
+   * nan/±infinity map to their OCaml literal names. *)
+  let floatRepres f =
+    match classify_float f with
+    | FP_nan -> "nan"
+    | FP_infinite ->
+      if f < 0.0 then "neg_infinity" else "infinity"
+    | _ ->
+      (* true when [repr] parses back to exactly [f] *)
+      let roundTrips repr = f = (float_of_string [@doesNotRaise]) repr in
+      let r12 = Printf.sprintf "%.12g" f in
+      let float_val =
+        if roundTrips r12 then r12
+        else
+          let r15 = Printf.sprintf "%.15g" f in
+          if roundTrips r15 then r15
+          else Printf.sprintf "%.18g" f
+      in
+      validFloatLexeme float_val
+
+ let rec printOutValueDoc (outValue : Outcometree.out_value) =
+ match outValue with
+ | Oval_array outValues ->
+ Doc.group (
+ Doc.concat [
+ Doc.lbracket;
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (
+ List.map printOutValueDoc outValues
+ )
+ ]
+ );
+ Doc.trailingComma;
+ Doc.softLine;
+ Doc.rbracket;
+ ]
+ )
+ | Oval_char c -> Doc.text ("'" ^ (Char.escaped c) ^ "'")
+ | Oval_constr (outIdent, outValues) ->
+ Doc.group (
+ Doc.concat [
+ printOutIdentDoc outIdent;
+ Doc.lparen;
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (
+ List.map printOutValueDoc outValues
+ )
+ ]
+ );
+ Doc.trailingComma;
+ Doc.softLine;
+ Doc.rparen;
+ ]
+ )
+ | Oval_ellipsis -> Doc.text "..."
+ | Oval_int i -> Doc.text (Format.sprintf "%i" i)
+ | Oval_int32 i -> Doc.text (Format.sprintf "%lil" i)
+ | Oval_int64 i -> Doc.text (Format.sprintf "%LiL" i)
+ | Oval_nativeint i -> Doc.text (Format.sprintf "%nin" i)
+ | Oval_float f -> Doc.text (floatRepres f)
+ | Oval_list outValues ->
+ Doc.group (
+ Doc.concat [
+ Doc.text "list[";
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (
+ List.map printOutValueDoc outValues
+ )
+ ]
+ );
+ Doc.trailingComma;
+ Doc.softLine;
+ Doc.rbracket;
+ ]
+ )
+ | Oval_printer fn ->
+ let fmt = Format.str_formatter in
+ fn fmt;
+ let str = Format.flush_str_formatter () in
+ Doc.text str
+ | Oval_record rows ->
+ Doc.group (
+ Doc.concat [
+ Doc.lparen;
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (
+ List.map (fun (outIdent, outValue) -> Doc.group (
+ Doc.concat [
+ printOutIdentDoc outIdent;
+ Doc.text ": ";
+ printOutValueDoc outValue;
+ ]
+ )
+ ) rows
+ );
+ ]
+ );
+ Doc.trailingComma;
+ Doc.softLine;
+ Doc.rparen;
+ ]
+ )
+ | Oval_string (txt, _sizeToPrint, _kind) ->
+ Doc.text (escapeStringContents txt)
+ | Oval_stuff txt -> Doc.text txt
+ | Oval_tuple outValues ->
+ Doc.group (
+ Doc.concat [
+ Doc.lparen;
+ Doc.indent (
+ Doc.concat [
+ Doc.softLine;
+ Doc.join ~sep:(Doc.concat [Doc.comma; Doc.line]) (
+ List.map printOutValueDoc outValues
+ )
+ ]
+ );
+ Doc.trailingComma;
+ Doc.softLine;
+ Doc.rparen;
+ ]
+ )
+ (* Not supported by NapkinScript *)
+ | Oval_variant _ -> Doc.nil
+
+  (* Friendly messages for well-known runtime exceptions; anything else is
+   * printed as `Exception: <value>` using the out_value printer. *)
+  let printOutExceptionDoc exc outValue =
+    match exc with
+    | Sys.Break -> Doc.text "Interrupted."
+    | Out_of_memory -> Doc.text "Out of memory during evaluation."
+    | Stack_overflow ->
+      Doc.text "Stack overflow during evaluation (looping recursion?)."
+    | _ ->
+      Doc.group (
+        Doc.indent(
+          Doc.concat [
+            Doc.text "Exception:";
+            Doc.line;
+            printOutValueDoc outValue;
+          ]
+        )
+      )
+
+ let printOutPhraseSignature signature =
+ let rec loop signature acc =
+ match signature with
+ | [] -> List.rev acc
+ | (Outcometree.Osig_typext(ext, Oext_first), None)::signature ->
+ (* Gather together extension constructors *)
+ let rec gather_extensions acc items =
+ match items with
+ | (Outcometree.Osig_typext(ext, Oext_next), None)::items ->
+ gather_extensions
+ ((ext.oext_name, ext.oext_args, ext.oext_ret_type)::acc)
+ items
+ | _ -> (List.rev acc, items)
+ in
+ let exts, signature =
+ gather_extensions
+ [(ext.oext_name, ext.oext_args, ext.oext_ret_type)]
+ signature
+ in
+ let te =
+ { Outcometree.otyext_name = ext.oext_type_name;
+ otyext_params = ext.oext_type_params;
+ otyext_constructors = exts;
+ otyext_private = ext.oext_private }
+ in
+ let doc = printOutTypeExtensionDoc te in
+ loop signature (doc::acc)
+ | (sigItem, optOutValue)::signature ->
+ let doc = match optOutValue with
+ | None ->
+ printOutSigItemDoc sigItem
+ | Some outValue ->
+ Doc.group (
+ Doc.concat [
+ printOutSigItemDoc sigItem;
+ Doc.text " = ";
+ printOutValueDoc outValue;
+ ]
+ )
+ in
+ loop signature (doc::acc)
+ in
+ Doc.breakableGroup ~forceBreak:true (
+ Doc.join ~sep:Doc.line (loop signature [])
+ )
+
+  (* Prints a toplevel phrase result: `- : typ = value` for evaluations,
+   * the signature for definitions, or an exception report. *)
+  let printOutPhraseDoc (outPhrase : Outcometree.out_phrase) =
+    match outPhrase with
+    | Ophr_eval (outValue, outType) ->
+      Doc.group (
+        Doc.concat [
+          Doc.text "- : ";
+          printOutTypeDoc outType;
+          Doc.text " =";
+          Doc.indent (
+            Doc.concat [
+              Doc.line;
+              printOutValueDoc outValue;
+            ]
+          )
+        ]
+      )
+    | Ophr_signature [] -> Doc.nil
+    | Ophr_signature signature -> printOutPhraseSignature signature
+    | Ophr_exception (exc, outValue) ->
+      printOutExceptionDoc exc outValue
+
+  (* Format-based entry points: each renders its Doc at width 80. *)
+  let printOutPhase fmt outPhrase =
+    Format.pp_print_string fmt
+      (Doc.toString ~width:80 (printOutPhraseDoc outPhrase))
+
+  let printOutModuleType fmt outModuleType =
+    Format.pp_print_string fmt
+      (Doc.toString ~width:80 (printOutModuleTypeDoc outModuleType))
+
+  let printOutTypeExtension fmt typeExtension =
+    Format.pp_print_string fmt
+      (Doc.toString ~width:80 (printOutTypeExtensionDoc typeExtension))
+
+  let printOutValue fmt outValue =
+    Format.pp_print_string fmt
+      (Doc.toString ~width:80 (printOutValueDoc outValue))
+
+  (* Not supported in Napkin *)
+  let printOutClassType _fmt _ = ()
+
+ let out_value = ref printOutValue
+ let out_type = ref printOutType
+ let out_module_type = ref printOutModuleType
+ let out_sig_item = ref printOutSigItem
+ let out_signature = ref printOutSignature
+ let out_type_extension = ref printOutTypeExtension
+ let out_phrase = ref printOutPhase [@live]
+ let out_class_type = ref printOutClassType
+end
+
+module Repl = struct
+  (* Reads [filename], parses it as a napkin implementation and wraps the
+   * structure as a toplevel phrase. *)
+  let parseToplevelPhrase filename =
+    let src = IO.readFile filename in
+    let p = Parser.make src filename in
+    Parsetree.Ptop_def (NapkinScript.parseImplementation p)
+
+  (* Type-checks [filename] and prints its inferred signature (as an
+   * outcometree) to stdout. Typetexp errors are reported via
+   * Typetexp.report_error; every other exception falls through to a
+   * catch-all message. *)
+  let typeAndPrintOutcome filename =
+    Compmisc.init_path false;
+    let env = Compmisc.initial_env () in
+    try
+      let sstr = match parseToplevelPhrase filename with
+      | Parsetree.Ptop_def sstr -> sstr
+      | _ -> assert false
+      in
+      let (_str, signature, _newenv) = Typemod.type_toplevel_phrase env sstr in
+      let outSigItems = Printtyp.tree_of_signature signature in
+      let fmt = Format.str_formatter in
+      !OutcomePrinter.out_signature fmt outSigItems;
+      let result = Format.flush_str_formatter () in
+      print_string result
+    with
+    | Typetexp.Error (_, _, err) ->
+      let fmt = Format.str_formatter in
+      Typetexp.report_error env fmt err;
+      let result = Format.flush_str_formatter () in
+      let () = print_endline result in
+      ()
+    | _ -> print_endline "catch all"
+end
+
+(* command line flags *)
+module Clflags: sig
+  val recover: bool ref
+  val print: string ref
+  val width: int ref
+  val origin: string ref
+  val files: string list ref
+  val interface: bool ref
+  val report: string ref
+
+  val parse: unit -> unit
+  val outcome: bool ref
+end = struct
+  (* Emit a partial AST on parse errors instead of failing. *)
+  let recover = ref false
+  (* Line length the printer wraps on. *)
+  let width = ref 100
+
+  (* Input files, accumulated in reverse order of appearance. *)
+  let files = ref []
+  let addFilename filename = files := filename::(!files)
+
+  let print = ref ""
+  let outcome = ref false
+  let origin = ref ""
+  let interface = ref false
+  (* Error-report style; defaults to "pretty". *)
+  let report = ref "pretty"
+
+  let usage = "Usage: napkinscript \nOptions are:"
+
+  let spec = [
+    ("-recover", Arg.Unit (fun () -> recover := true), "Emit partial ast");
+    ("-print", Arg.String (fun txt -> print := txt), "Print either binary, ocaml or ast");
+    ("-parse", Arg.String (fun txt -> origin := txt), "Parse ocaml or napkinscript");
+    ("-outcome", Arg.Bool (fun printOutcomeTree -> outcome := printOutcomeTree), "print outcometree");
+    ("-width", Arg.Int (fun w -> width := w), "Specify the line length that the printer will wrap on");
+    ("-interface", Arg.Unit (fun () -> interface := true), "Parse as interface");
+    (* fix: the help text claimed "Default `Plain`" but [report] above is
+     * initialized to "pretty" *)
+    ("-report", Arg.String (fun txt -> report := txt), "Stylize errors and messages using color and context. Accepts `Pretty` and `Plain`. Default `Pretty`")
+  ]
+
+  let parse () = Arg.parse spec addFilename usage
+end
+
+(* Driver: parse a file from one of several origins (napkin source, ocaml
+   source, marshalled reason AST) and print it to one of several targets
+   (napkin, ocaml, ast dump, sexp, or the compiler's binary AST). *)
+module Driver: sig
+ val processFile:
+ isInterface: bool
+ -> width: int
+ -> recover: bool
+ -> origin:string
+ -> target:string
+ -> report:string
+ -> string
+ -> unit
+end = struct
+ (* GADT tying the parse-result type to implementation vs interface. *)
+ type 'a file_kind =
+ | Structure: Parsetree.structure file_kind
+ | Signature: Parsetree.signature file_kind
+
+ let parseNapkin (type a) (kind : a file_kind) p : a =
+ match kind with
+ | Structure -> NapkinScript.parseImplementation p
+ | Signature -> NapkinScript.parseSpecification p
+
+ (* Lex an OCaml source file ("" means stdin) and collect every plain
+    string literal together with its location, so the original escape
+    spelling can be restored in the printed output. *)
+ let extractOcamlStringData filename =
+ let lexbuf = if String.length filename > 0 then
+ IO.readFile filename |> Lexing.from_string
+ else
+ Lexing.from_channel stdin
+ in
+ let stringLocs = ref [] in
+ let rec next () =
+ let token = Lexer.token_with_comments lexbuf in
+ match token with
+ | OcamlParser.STRING (_txt, None) ->
+ let open Location in
+ let loc = {
+ loc_start = lexbuf.lex_start_p;
+ loc_end = lexbuf.Lexing.lex_curr_p;
+ loc_ghost = false;
+ } in
+ (* Slice the raw bytes out of the lexer buffer rather than using the
+    already-unescaped token text. *)
+ let len = loc.loc_end.pos_cnum - loc.loc_start.pos_cnum in
+ let txt = Bytes.to_string (
+ (Bytes.sub [@doesNotRaise]) lexbuf.Lexing.lex_buffer loc.loc_start.pos_cnum len
+ ) in
+ stringLocs := (txt, loc)::(!stringLocs);
+ next();
+ | OcamlParser.EOF -> ()
+ | _ -> next()
+ in
+ next();
+ List.rev !stringLocs
+
+ (* Parse OCaml source ("" means stdin) and convert the AST into the
+    shape the Napkin printer expects. *)
+ let parseOcaml (type a) (kind : a file_kind) filename : a =
+ let lexbuf = if String.length filename > 0 then
+ IO.readFile filename |> Lexing.from_string
+ else
+ Lexing.from_channel stdin
+ in
+ let stringData = extractOcamlStringData filename in
+ match kind with
+ | Structure ->
+ Parse.implementation lexbuf
+ |> ParsetreeCompatibility.replaceStringLiteralStructure stringData
+ |> ParsetreeCompatibility.structure
+ | Signature ->
+ Parse.interface lexbuf
+ |> ParsetreeCompatibility.replaceStringLiteralSignature stringData
+ |> ParsetreeCompatibility.signature
+
+ (* Parse Napkin source ("" means stdin). When printing back to a
+    napkin-family target the default parse mode is kept; otherwise the
+    source is parsed for the type checker. Returns (ast, diagnostics
+    if any, parser state — which carries the comments). *)
+ let parseNapkinFile ~destination kind filename =
+ let src = if String.length filename > 0 then
+ IO.readFile filename
+ else
+ IO.readStdin ()
+ in
+ let p =
+ let mode = match destination with
+ | "napkinscript" | "ns" | "sexp" -> Parser.Default
+ | _ -> Parser.ParseForTypeChecker
+ in
+ Parser.make ~mode src filename in
+ let ast = parseNapkin kind p in
+ let report = match p.diagnostics with
+ | [] -> None
+ | diagnostics -> Some(diagnostics)
+ in
+ (ast, report, p)
+
+ (* Parse OCaml source, then re-lex it a second time purely to collect
+    comments, which the OCaml parser itself discards. *)
+ let parseOcamlFile kind filename =
+ let ast = parseOcaml kind filename in
+ let lexbuf2 = if String.length filename > 0 then
+ IO.readFile filename |> Lexing.from_string
+ else
+ Lexing.from_channel stdin
+ in
+ let comments =
+ let rec next (prevTokEndPos : Lexing.position) comments lb =
+ let token = Lexer.token_with_comments lb in
+ match token with
+ | OcamlParser.EOF -> comments
+ | OcamlParser.COMMENT (txt, loc) ->
+ let comment = Comment.fromOcamlComment
+ ~loc
+ ~prevTokEndPos
+ ~txt
+ in
+ next loc.Location.loc_end (comment::comments) lb
+ | _ ->
+ next lb.Lexing.lex_curr_p comments lb
+ in
+ let cmts = next lexbuf2.Lexing.lex_start_p [] lexbuf2 in
+ cmts
+ in
+ let p = Parser.make "" filename in
+ p.comments <- comments;
+ (ast, None, p)
+
+ (* NOTE(review): module-level mutable state shared across calls; never
+    reset, so processing several reason-binary files in one run would
+    accumulate comments/strings — confirm single-file usage is intended. *)
+ let reasonFilename = ref ""
+ let commentData = ref []
+ let stringData = ref []
+
+ (* Read a marshalled Reason AST ("" means stdin): check the magic number,
+    read the original filename and AST, then scan the original source to
+    collect comments and raw string literals for faithful printing. *)
+ let parseReasonBinaryFromStdin (type a) (kind : a file_kind) filename :a =
+ let chan, close =
+ match String.length filename == 0 with
+ | true -> stdin, (fun _ -> ())
+ | false ->
+ let file_chan = open_in_bin filename in
+ seek_in file_chan 0;
+ file_chan, close_in_noerr
+ in
+ let ic = chan in
+ let magic = match kind with
+ | Structure -> Config.ast_impl_magic_number
+ | Signature -> Config.ast_intf_magic_number
+ in
+ let buffer = (really_input_string [@doesNotRaise]) ic (String.length magic) in
+ assert(buffer = magic);
+ let filename = input_value ic in
+ reasonFilename := filename;
+ let ast = input_value ic in
+ close chan;
+
+ let src =
+ if String.length filename > 0 then IO.readFile filename
+ else IO.readStdin ()
+ in
+
+ let scanner = Scanner.make (Bytes.of_string src) filename in
+
+ (* Scan the original source: record comments (with the previous token's
+    end position for placement) and raw string literal slices. *)
+ let rec next prevEndPos scanner =
+ let (startPos, endPos, token) = Scanner.scan scanner in
+ match token with
+ | Eof -> ()
+ | Comment c ->
+ Comment.setPrevTokEndPos c prevEndPos;
+ commentData := c::(!commentData);
+ next endPos scanner
+ | String _ ->
+ let loc = {Location.loc_start = startPos; loc_end = endPos; loc_ghost = false} in
+ let len = endPos.pos_cnum - startPos.pos_cnum in
+ let txt = (String.sub [@doesNotRaise]) src startPos.pos_cnum len in
+ stringData := (txt, loc)::(!stringData);
+ next endPos scanner
+ | _ ->
+ next endPos scanner
+ in
+
+ next Lexing.dummy_pos scanner;
+
+ match kind with
+ | Structure ->
+ ast
+ |> ParsetreeCompatibility.replaceStringLiteralStructure !stringData
+ |> ParsetreeCompatibility.normalizeReasonArityStructure ~forPrinter:true
+ |> ParsetreeCompatibility.structure
+ | Signature ->
+ ast
+ |> ParsetreeCompatibility.replaceStringLiteralSignature !stringData
+ |> ParsetreeCompatibility.normalizeReasonAritySignature ~forPrinter:true
+ |> ParsetreeCompatibility.signature
+
+ (* Reason doc comments are "*..." (but not "**...") or empty; these are
+    filtered out before printing. *)
+ let isReasonDocComment (comment: Comment.t) =
+ let content = Comment.txt comment in
+ let len = String.length content in
+ if len = 0 then true
+ else if len >= 2 && (String.unsafe_get content 0 = '*' && String.unsafe_get content 1 = '*') then false
+ else if len >= 1 && (String.unsafe_get content 0 = '*') then true
+ else false
+
+
+ let parseReasonBinary kind filename =
+ let ast = parseReasonBinaryFromStdin kind filename in
+ let p = Parser.make "" !reasonFilename in
+ p.comments <- List.filter (fun c -> not (isReasonDocComment c)) !commentData;
+ (ast, None, p)
+
+ (* Dispatch on the -parse origin; anything unrecognized is napkin. *)
+ let parseImplementation ~origin ~destination filename =
+ match origin with
+ | "ml" | "ocaml" ->
+ parseOcamlFile Structure filename
+ | "reasonBinary" ->
+ parseReasonBinary Structure filename
+ | _ ->
+ parseNapkinFile ~destination Structure filename
+
+ let parseInterface ~destination ~origin filename =
+ match origin with
+ | "ml" | "ocaml" ->
+ parseOcamlFile Signature filename
+ | "reasonBinary" ->
+ parseReasonBinary Signature filename
+ | _ ->
+ parseNapkinFile ~destination Signature filename
+
+ (* Parse then print. With diagnostics: in recover mode print the partial
+    AST anyway and report on stderr; otherwise report and exit 1. *)
+ let process ~reportStyle parseFn printFn recover filename =
+ let (ast, report, parserState) = parseFn filename in
+ match report with
+ | Some report when recover = true ->
+ printFn ast parserState;
+ prerr_string (
+ Diagnostics.stringOfReport
+ ~style:(Diagnostics.parseReportStyle reportStyle)
+ report (Bytes.to_string parserState.Parser.scanner.src)
+ );
+ | Some report ->
+ prerr_string (
+ Diagnostics.stringOfReport
+ ~style:(Diagnostics.parseReportStyle reportStyle)
+ report (Bytes.to_string parserState.Parser.scanner.src)
+ );
+ exit 1
+ | None ->
+ printFn ast parserState
+
+ type action =
+ | ProcessImplementation
+ | ProcessInterface
+
+ (* Print a structure to the requested target; unknown targets fall back
+    to the binary AST format consumed by the compiler. *)
+ let printImplementation ~target ~width filename ast _parserState =
+ match target with
+ | "ml" | "ocaml" ->
+ Pprintast.structure Format.std_formatter ast
+ | "ns" | "napkinscript" ->
+ Printer.printImplementation ~width ast (List.rev _parserState.Parser.comments)
+ | "ast" ->
+ Printast.implementation Format.std_formatter ast
+ | "sexp" ->
+ ast |> SexpAst.implementation |> Sexp.toString |> print_string
+ | _ -> (* default binary *)
+ output_string stdout Config.ast_impl_magic_number;
+ output_value stdout filename;
+ output_value stdout ast
+
+ let printInterface ~target ~width filename ast _parserState =
+ match target with
+ | "ml" | "ocaml" -> Pprintast.signature Format.std_formatter ast
+ | "ns" | "napkinscript" ->
+ Printer.printInterface ~width ast (List.rev _parserState.Parser.comments)
+ | "ast" -> Printast.interface Format.std_formatter ast
+ | "sexp" ->
+ ast |> SexpAst.interface |> Sexp.toString |> print_string
+ | _ -> (* default binary *)
+ output_string stdout Config.ast_intf_magic_number;
+ output_value stdout filename;
+ output_value stdout ast
+
+ (* Interface detection: explicit flag, or a filename ending in 'i'
+    (.mli / .resi style). Any failure exits the process with code 1. *)
+ let processFile ~isInterface ~width ~recover ~origin ~target ~report filename =
+ try
+ let len = String.length filename in
+ let action =
+ if isInterface || len > 0 && (String.get [@doesNotRaise]) filename (len - 1) = 'i' then
+ ProcessInterface
+ else ProcessImplementation
+ in
+ match action with
+ | ProcessImplementation ->
+ process
+ ~reportStyle:report
+ (parseImplementation ~origin ~destination:target)
+ (printImplementation ~target ~width filename) recover filename
+ | ProcessInterface ->
+ process
+ ~reportStyle:report
+ (parseInterface ~origin ~destination:target)
+ (printInterface ~target ~width filename) recover filename
+ with
+ | Failure txt ->
+ prerr_string txt;
+ prerr_newline();
+ exit 1
+ | _ -> exit 1
+end
+
+(* Entry point: parse flags, then either type-and-print the outcome of the
+   first file (-outcome) or parse/print every input file. An empty file
+   list means "read from stdin" (empty filename). *)
+let () =
+ Clflags.parse ();
+ if !Clflags.outcome then (
+ Repl.typeAndPrintOutcome (List.hd !Clflags.files)
+ ) else (
+ let () = match !Clflags.files with
+ | (_file::_) as files ->
+ List.iter (fun filename ->
+ Driver.processFile
+ ~isInterface:!Clflags.interface
+ ~width:!Clflags.width
+ ~recover:!Clflags.recover
+ ~target:!Clflags.print
+ ~origin:!Clflags.origin
+ ~report:!Clflags.report
+ filename
+ ) files;
+ | [] ->
+ (* No files on the command line: process stdin. *)
+ Driver.processFile
+ ~isInterface:!Clflags.interface
+ ~width:!Clflags.width
+ ~recover:!Clflags.recover
+ ~target:!Clflags.print
+ ~origin:!Clflags.origin
+ ~report:!Clflags.report
+ ""
+ in
+ exit 0
+ )
\ No newline at end of file
diff --git a/res_syntax/benchmarks/data/Napkinscript.res b/res_syntax/benchmarks/data/Napkinscript.res
new file mode 100644
index 0000000000..f1f9658043
--- /dev/null
+++ b/res_syntax/benchmarks/data/Napkinscript.res
@@ -0,0 +1,20680 @@
+module MiniBuffer: {
+ type t
+ let add_char: (t, char) => unit
+ let add_string: (t, string) => unit
+ let contents: t => string
+ let create: int => t
+ let flush_newline: t => unit
+ let length: t => int
+ let unsafe_get: (t, int) => char
+} = {
+ /* Growable byte buffer (a trimmed-down Buffer) used by the Doc printer. */
+ type t = {
+ mutable buffer: bytes,
+ mutable position: int,
+ mutable length: int,
+ }
+
+ /* Allocate a buffer with capacity max(n, 1). */
+ let create = n => {
+ let n = if n < 1 {
+ 1
+ } else {
+ n
+ }
+ let s = (@doesNotRaise Bytes.create)(n)
+ {buffer: s, position: 0, length: n}
+ }
+
+ let contents = b => Bytes.sub_string(b.buffer, 0, b.position)
+
+ let unsafe_get = (b, ofs) => Bytes.unsafe_get(b.buffer, ofs)
+
+ let length = b => b.position
+
+ /* Can't be called directly, don't add to the interface */
+ /* Grow by doubling until [position + more] fits, capped at
+    Sys.max_string_length. */
+ let resize_internal = (b, more) => {
+ let len = b.length
+ let new_len = ref(len)
+ while b.position + more > new_len.contents {
+ new_len := 2 * new_len.contents
+ }
+ if new_len.contents > Sys.max_string_length {
+ if b.position + more <= Sys.max_string_length {
+ new_len := Sys.max_string_length
+ }
+ }
+ let new_buffer = (@doesNotRaise Bytes.create)(new_len.contents)
+ /* PR#6148: let's keep using [blit] rather than [unsafe_blit] in
+ this tricky function that is slow anyway. */
+ @doesNotRaise
+ Bytes.blit(b.buffer, 0, new_buffer, 0, b.position)
+ b.buffer = new_buffer
+ b.length = new_len.contents
+ }
+
+ let add_char = (b, c) => {
+ let pos = b.position
+ if pos >= b.length {
+ resize_internal(b, 1)
+ }
+ Bytes.unsafe_set(b.buffer, pos, c)
+ b.position = pos + 1
+ }
+
+ let add_string = (b, s) => {
+ let len = String.length(s)
+ let new_position = b.position + len
+ if new_position > b.length {
+ resize_internal(b, len)
+ }
+ @doesNotRaise
+ Bytes.blit_string(s, 0, b.buffer, b.position, len)
+ b.position = new_position
+ }
+
+ /* adds newline and trims all preceding whitespace */
+ let flush_newline = b => {
+ let position = ref(b.position)
+ /* Fix: test the bound *before* the unsafe read, and stop at 0. The
+    previous order (`unsafe_get(..., pos - 1) == ' ' && pos >= 0`) read
+    index -1 when the buffer contained only spaces. */
+ while position.contents > 0 && Bytes.unsafe_get(b.buffer, position.contents - 1) == ' ' {
+ position := position.contents - 1
+ }
+ b.position = position.contents
+ add_char(b, '\n')
+ }
+}
+
+/* Wadler-style pretty-printing document algebra plus a width-aware
+   renderer (toString). */
+module Doc = {
+ type mode = Break | Flat
+
+ type lineStyle =
+ | Classic /* fits? -> replace with space */
+ | Soft /* fits? -> replaced with nothing */
+ | Hard /* always included, forces breaks in parents */
+
+ /* NOTE(review): the `list` payloads of Concat and CustomLayout appear to
+    have lost their type parameter (likely `list<t>`) in transit — confirm
+    against the upstream source. */
+ type rec t =
+ | Nil
+ | Text(string)
+ | Concat(list)
+ | Indent(t)
+ | IfBreaks({yes: t, no: t})
+ | LineSuffix(t)
+ | LineBreak(lineStyle)
+ | Group({shouldBreak: bool, doc: t})
+ | CustomLayout(list)
+ | BreakParent
+ /* | Cursor */
+
+ /* Smart constructors. */
+ let nil = Nil
+ let line = LineBreak(Classic)
+ let hardLine = LineBreak(Hard)
+ let softLine = LineBreak(Soft)
+ let text = s => Text(s)
+ let concat = l => Concat(l)
+ let indent = d => Indent(d)
+ let ifBreaks = (t, f) => IfBreaks({yes: t, no: f})
+ let lineSuffix = d => LineSuffix(d)
+ let group = d => Group({shouldBreak: false, doc: d})
+ let breakableGroup = (~forceBreak, d) => Group({shouldBreak: forceBreak, doc: d})
+ let customLayout = gs => CustomLayout(gs)
+ let breakParent = BreakParent
+ /* let cursor = Cursor */
+
+ /* Common punctuation docs, allocated once. */
+ let space = Text(" ")
+ let comma = Text(",")
+ let dot = Text(".")
+ let dotdot = Text("..")
+ let dotdotdot = Text("...")
+ let lessThan = Text("<")
+ let greaterThan = Text(">")
+ let lbrace = Text("{")
+ let rbrace = Text("}")
+ let lparen = Text("(")
+ let rparen = Text(")")
+ let lbracket = Text("[")
+ let rbracket = Text("]")
+ let question = Text("?")
+ let tilde = Text("~")
+ let equal = Text("=")
+ let trailingComma = IfBreaks({yes: comma, no: nil})
+ let doubleQuote = Text("\"")
+
+ /* One pass over the doc propagating Hard breaks / BreakParent upward so
+    enclosing groups know they must break. Returns the rewritten doc. */
+ let propagateForcedBreaks = doc => {
+ let rec walk = doc =>
+ switch doc {
+ | Text(_) | Nil | LineSuffix(_) => (false, doc)
+ | BreakParent => (true, Nil)
+ | LineBreak(Hard) => (true, doc)
+ | LineBreak(Classic | Soft) => (false, doc)
+ | Indent(children) =>
+ let (childForcesBreak, newChildren) = walk(children)
+ (childForcesBreak, Indent(newChildren))
+ | IfBreaks({yes: trueDoc, no: falseDoc}) =>
+ let (falseForceBreak, falseDoc) = walk(falseDoc)
+ if falseForceBreak {
+ let (_, trueDoc) = walk(trueDoc)
+ (true, trueDoc)
+ } else {
+ let (forceBreak, trueDoc) = walk(trueDoc)
+ (forceBreak, IfBreaks({yes: trueDoc, no: falseDoc}))
+ }
+ | Group({shouldBreak: forceBreak, doc: children}) =>
+ let (childForcesBreak, newChildren) = walk(children)
+ let shouldBreak = forceBreak || childForcesBreak
+ (shouldBreak, Group({shouldBreak: shouldBreak, doc: newChildren}))
+ | Concat(children) =>
+ let (forceBreak, newChildren) = List.fold_left(((forceBreak, newChildren), child) => {
+ let (childForcesBreak, newChild) = walk(child)
+ (forceBreak || childForcesBreak, list{newChild, ...newChildren})
+ }, (false, list{}), children)
+
+ (forceBreak, Concat(List.rev(newChildren)))
+ | CustomLayout(children) =>
+ /* When using CustomLayout, we don't want to propagate forced breaks
+ * from the children up. By definition it picks the first layout that fits
+ * otherwise it takes the last of the list.
+ * However we do want to propagate forced breaks in the sublayouts. They
+ * might need to be broken. We just don't propagate them any higher here */
+ let children = switch walk(Concat(children)) {
+ | (_, Concat(children)) => children
+ | _ => assert false
+ }
+
+ (false, CustomLayout(children))
+ }
+
+ let (_, processedDoc) = walk(doc)
+ processedDoc
+ }
+
+ /* Interleave [sep] between the given docs. */
+ let join = (~sep, docs) => {
+ let rec loop = (acc, sep, docs) =>
+ switch docs {
+ | list{} => List.rev(acc)
+ | list{x} => List.rev(list{x, ...acc})
+ | list{x, ...xs} => loop(list{sep, x, ...acc}, sep, xs)
+ }
+
+ Concat(loop(list{}, sep, docs))
+ }
+
+ /* Would the work list (indent, mode, doc) render within [w] columns?
+    A Break-mode line break means "fits" because the line ends there. */
+ let rec fits = (w, doc) =>
+ switch doc {
+ | _ when w < 0 => false
+ | list{} => true
+ | list{(_ind, _mode, Text(txt)), ...rest} => fits(w - String.length(txt), rest)
+ | list{(ind, mode, Indent(doc)), ...rest} => fits(w, list{(ind + 2, mode, doc), ...rest})
+ | list{(_ind, Flat, LineBreak(break)), ...rest} =>
+ if break == Hard {
+ true
+ } else {
+ let w = if break == Classic {
+ w - 1
+ } else {
+ w
+ }
+ fits(w, rest)
+ }
+ | list{(_ind, _mode, Nil), ...rest} => fits(w, rest)
+ | list{(_ind, Break, LineBreak(_break)), ..._rest} => true
+ | list{(ind, mode, Group({shouldBreak: forceBreak, doc})), ...rest} =>
+ let mode = if forceBreak {
+ Break
+ } else {
+ mode
+ }
+ fits(w, list{(ind, mode, doc), ...rest})
+ | list{(ind, mode, IfBreaks({yes: breakDoc, no: flatDoc})), ...rest} =>
+ if mode == Break {
+ fits(w, list{(ind, mode, breakDoc), ...rest})
+ } else {
+ fits(w, list{(ind, mode, flatDoc), ...rest})
+ }
+ | list{(ind, mode, Concat(docs)), ...rest} =>
+ let ops = List.map(doc => (ind, mode, doc), docs)
+ fits(w, List.append(ops, rest))
+ /* | (_ind, _mode, Cursor)::rest -> fits w rest */
+ | list{(_ind, _mode, LineSuffix(_)), ...rest} => fits(w, rest)
+ | list{(_ind, _mode, BreakParent), ...rest} => fits(w, rest)
+ | list{(ind, mode, CustomLayout(list{hd, ..._})), ...rest} =>
+ /* TODO: if we have nested custom layouts, what we should do here? */
+ fits(w, list{(ind, mode, hd), ...rest})
+ | list{(_ind, _mode, CustomLayout(_)), ...rest} => fits(w, rest)
+ }
+
+ /* Render [doc] to a string, breaking groups that don't fit in [width].
+    LineSuffix docs are deferred until the next emitted line break; a
+    trailing newline is appended if the output doesn't end with one. */
+ let toString = (~width, doc) => {
+ let doc = propagateForcedBreaks(doc)
+ let buffer = MiniBuffer.create(1000)
+
+ let rec process = (~pos, lineSuffices, stack) =>
+ switch stack {
+ | list{(ind, mode, doc) as cmd, ...rest} =>
+ switch doc {
+ | Nil | BreakParent => process(~pos, lineSuffices, rest)
+ | Text(txt) =>
+ MiniBuffer.add_string(buffer, txt)
+ process(~pos=String.length(txt) + pos, lineSuffices, rest)
+ | LineSuffix(doc) => process(~pos, list{(ind, mode, doc), ...lineSuffices}, rest)
+ | Concat(docs) =>
+ let ops = List.map(doc => (ind, mode, doc), docs)
+ process(~pos, lineSuffices, List.append(ops, rest))
+ | Indent(doc) => process(~pos, lineSuffices, list{(ind + 2, mode, doc), ...rest})
+ | IfBreaks({yes: breakDoc, no: flatDoc}) =>
+ if mode == Break {
+ process(~pos, lineSuffices, list{(ind, mode, breakDoc), ...rest})
+ } else {
+ process(~pos, lineSuffices, list{(ind, mode, flatDoc), ...rest})
+ }
+ | LineBreak(lineStyle) =>
+ if mode == Break {
+ switch lineSuffices {
+ | list{} =>
+ MiniBuffer.flush_newline(buffer)
+ MiniBuffer.add_string(buffer, @doesNotRaise String.make(ind, ' '))
+ process(~pos=ind, list{}, rest)
+ | _docs =>
+ /* Flush pending line suffices first, then retry this break. */
+ process(
+ ~pos=ind,
+ list{},
+ List.concat(list{List.rev(lineSuffices), list{cmd, ...rest}}),
+ )
+ }
+ } else {
+ /* mode = Flat */
+ let pos = switch lineStyle {
+ | Classic =>
+ MiniBuffer.add_string(buffer, " ")
+ pos + 1
+ | Hard =>
+ MiniBuffer.flush_newline(buffer)
+ 0
+ | Soft => pos
+ }
+
+ process(~pos, lineSuffices, rest)
+ }
+ | Group({shouldBreak, doc}) =>
+ if shouldBreak || !fits(width - pos, list{(ind, Flat, doc), ...rest}) {
+ process(~pos, lineSuffices, list{(ind, Break, doc), ...rest})
+ } else {
+ process(~pos, lineSuffices, list{(ind, Flat, doc), ...rest})
+ }
+ | CustomLayout(docs) =>
+ /* Pick the first layout that fits; fall back to the last one. */
+ let rec findGroupThatFits = groups =>
+ switch groups {
+ | list{} => Nil
+ | list{lastGroup} => lastGroup
+ | list{doc, ...docs} =>
+ if fits(width - pos, list{(ind, Flat, doc), ...rest}) {
+ doc
+ } else {
+ findGroupThatFits(docs)
+ }
+ }
+
+ let doc = findGroupThatFits(docs)
+ process(~pos, lineSuffices, list{(ind, Flat, doc), ...rest})
+ }
+ | list{} =>
+ switch lineSuffices {
+ | list{} => ()
+ | suffices => process(~pos=0, list{}, List.rev(suffices))
+ }
+ }
+
+ process(~pos=0, list{}, list{(0, Flat, doc)})
+
+ let len = MiniBuffer.length(buffer)
+ if len > 0 && MiniBuffer.unsafe_get(buffer, len - 1) !== '\n' {
+ MiniBuffer.add_char(buffer, '\n')
+ }
+ MiniBuffer.contents(buffer)
+ }
+
+ /* Debug helper: print the doc tree itself, rendered at width 10. */
+ @live
+ let debug = t => {
+ let rec toDoc = x =>
+ switch x {
+ | Nil => text("nil")
+ | BreakParent => text("breakparent")
+ | Text(txt) => text("text(" ++ (txt ++ ")"))
+ | LineSuffix(doc) =>
+ group(
+ concat(list{
+ text("linesuffix("),
+ indent(concat(list{line, toDoc(doc)})),
+ line,
+ text(")"),
+ }),
+ )
+ | Concat(docs) =>
+ group(
+ concat(list{
+ text("concat("),
+ indent(
+ concat(list{line, join(~sep=concat(list{text(","), line}), List.map(toDoc, docs))}),
+ ),
+ line,
+ text(")"),
+ }),
+ )
+ | CustomLayout(docs) =>
+ group(
+ concat(list{
+ text("customLayout("),
+ indent(
+ concat(list{line, join(~sep=concat(list{text(","), line}), List.map(toDoc, docs))}),
+ ),
+ line,
+ text(")"),
+ }),
+ )
+ | Indent(doc) => concat(list{text("indent("), softLine, toDoc(doc), softLine, text(")")})
+ | IfBreaks({yes: trueDoc, no: falseDoc}) =>
+ group(
+ concat(list{
+ text("ifBreaks("),
+ indent(
+ concat(list{line, toDoc(trueDoc), concat(list{text(","), line}), toDoc(falseDoc)}),
+ ),
+ line,
+ text(")"),
+ }),
+ )
+ | LineBreak(break) =>
+ let breakTxt = switch break {
+ | Classic => "Classic"
+ | Soft => "Soft"
+ | Hard => "Hard"
+ }
+
+ text("LineBreak(" ++ (breakTxt ++ ")"))
+ | Group({shouldBreak, doc}) =>
+ group(
+ concat(list{
+ text("Group("),
+ indent(
+ concat(list{
+ line,
+ text("shouldBreak: " ++ string_of_bool(shouldBreak)),
+ concat(list{text(","), line}),
+ toDoc(doc),
+ }),
+ ),
+ line,
+ text(")"),
+ }),
+ )
+ }
+
+ let doc = toDoc(t)
+ toString(~width=10, doc) |> print_endline
+ }
+}
+
+/* Minimal s-expression type with an 80-column pretty renderer. */
+module Sexp: {
+ type t
+
+ let atom: string => t
+ let list: list => t
+ let toString: t => string
+} = {
+ /* NOTE(review): `list` in the signature above and in `List(list)` below
+    looks like it lost its type parameter (likely `list<t>`) in transit —
+    confirm against the upstream source. */
+ type rec t =
+ | Atom(string)
+ | List(list)
+
+ let atom = s => Atom(s)
+ let list = l => List(l)
+
+ /* Atoms render as-is, the empty list as "()", and non-empty lists as a
+    grouped, indented paren form. */
+ let rec toDoc = t =>
+ switch t {
+ | Atom(s) => Doc.text(s)
+ | List(list{}) => Doc.text("()")
+ | List(list{sexpr}) => Doc.concat(list{Doc.lparen, toDoc(sexpr), Doc.rparen})
+ | List(list{hd, ...tail}) =>
+ Doc.group(
+ Doc.concat(list{
+ Doc.lparen,
+ toDoc(hd),
+ Doc.indent(Doc.concat(list{Doc.line, Doc.join(~sep=Doc.line, List.map(toDoc, tail))})),
+ Doc.rparen,
+ }),
+ )
+ }
+
+ let toString = sexpr => {
+ let doc = toDoc(sexpr)
+ Doc.toString(~width=80, doc)
+ }
+}
+
+module SexpAst: {
+ let implementation: Parsetree.structure => Sexp.t
+ let interface: Parsetree.signature => Sexp.t
+} = {
+ open Parsetree
+
+ /* Map [f] over [items], but represent the empty list as a single empty
+    sexp so the slot is still visible in the output. */
+ let mapEmpty = (~f, items) =>
+ switch items {
+ | list{} => list{Sexp.list(list{})}
+ | items => List.map(f, items)
+ }
+
+ /* Wrap a string in literal double quotes. */
+ let string = txt => Sexp.atom("\"" ++ (txt ++ "\""))
+
+ /* Render a char atom as 'c' using OCaml-style escaping. */
+ let char = c => Sexp.atom("'" ++ (Char.escaped(c) ++ "'"))
+
+ let optChar = oc =>
+ switch oc {
+ | None => Sexp.atom("None")
+ | Some(c) => Sexp.list(list{Sexp.atom("Some"), char(c)})
+ }
+
+ /* Longidents are rendered structurally: (longident (Ldot (Lident a) b)). */
+ let longident = l => {
+ let rec loop = l =>
+ switch l {
+ | Longident.Lident(ident) => Sexp.list(list{Sexp.atom("Lident"), string(ident)})
+ | Longident.Ldot(lident, txt) => Sexp.list(list{Sexp.atom("Ldot"), loop(lident), string(txt)})
+ | Longident.Lapply(l1, l2) => Sexp.list(list{Sexp.atom("Lapply"), loop(l1), loop(l2)})
+ }
+
+ Sexp.list(list{Sexp.atom("longident"), loop(l)})
+ }
+
+ /* The following converters render Asttypes flags as bare atoms. */
+ let closedFlag = flag =>
+ switch flag {
+ | Asttypes.Closed => Sexp.atom("Closed")
+ | Open => Sexp.atom("Open")
+ }
+
+ let directionFlag = flag =>
+ switch flag {
+ | Asttypes.Upto => Sexp.atom("Upto")
+ | Downto => Sexp.atom("Downto")
+ }
+
+ let recFlag = flag =>
+ switch flag {
+ | Asttypes.Recursive => Sexp.atom("Recursive")
+ | Nonrecursive => Sexp.atom("Nonrecursive")
+ }
+
+ let overrideFlag = flag =>
+ switch flag {
+ | Asttypes.Override => Sexp.atom("Override")
+ | Fresh => Sexp.atom("Fresh")
+ }
+
+ let privateFlag = flag =>
+ switch flag {
+ | Asttypes.Public => Sexp.atom("Public")
+ | Private => Sexp.atom("Private")
+ }
+
+ let mutableFlag = flag =>
+ switch flag {
+ | Asttypes.Immutable => Sexp.atom("Immutable")
+ | Mutable => Sexp.atom("Mutable")
+ }
+
+ let variance = v =>
+ switch v {
+ | Asttypes.Covariant => Sexp.atom("Covariant")
+ | Contravariant => Sexp.atom("Contravariant")
+ | Invariant => Sexp.atom("Invariant")
+ }
+
+ let argLabel = lbl =>
+ switch lbl {
+ | Asttypes.Nolabel => Sexp.atom("Nolabel")
+ | Labelled(txt) => Sexp.list(list{Sexp.atom("Labelled"), string(txt)})
+ | Optional(txt) => Sexp.list(list{Sexp.atom("Optional"), string(txt)})
+ }
+
+ /* Parsetree constants, tagged with their constructor name; the optional
+    suffix/delimiter tag is rendered as Some/None. */
+ let constant = c => {
+ let sexpr = switch c {
+ | Pconst_integer(txt, tag) =>
+ Sexp.list(list{Sexp.atom("Pconst_integer"), string(txt), optChar(tag)})
+ | Pconst_char(c) => Sexp.list(list{Sexp.atom("Pconst_char"), Sexp.atom(Char.escaped(c))})
+ | Pconst_string(txt, tag) =>
+ Sexp.list(list{
+ Sexp.atom("Pconst_string"),
+ string(txt),
+ switch tag {
+ | Some(txt) => Sexp.list(list{Sexp.atom("Some"), string(txt)})
+ | None => Sexp.atom("None")
+ },
+ })
+ | Pconst_float(txt, tag) =>
+ Sexp.list(list{Sexp.atom("Pconst_float"), string(txt), optChar(tag)})
+ }
+
+ Sexp.list(list{Sexp.atom("constant"), sexpr})
+ }
+
+ let rec structure = s => Sexp.list(list{Sexp.atom("structure"), ...List.map(structureItem, s)})
+
+ and structureItem = si => {
+ let desc = switch si.pstr_desc {
+ | Pstr_eval(expr, attrs) =>
+ Sexp.list(list{Sexp.atom("Pstr_eval"), expression(expr), attributes(attrs)})
+ | Pstr_value(flag, vbs) =>
+ Sexp.list(list{
+ Sexp.atom("Pstr_value"),
+ recFlag(flag),
+ Sexp.list(mapEmpty(~f=valueBinding, vbs)),
+ })
+ | Pstr_primitive(vd) => Sexp.list(list{Sexp.atom("Pstr_primitive"), valueDescription(vd)})
+ | Pstr_type(flag, tds) =>
+ Sexp.list(list{
+ Sexp.atom("Pstr_type"),
+ recFlag(flag),
+ Sexp.list(mapEmpty(~f=typeDeclaration, tds)),
+ })
+ | Pstr_typext(typext) => Sexp.list(list{Sexp.atom("Pstr_type"), typeExtension(typext)})
+ | Pstr_exception(ec) => Sexp.list(list{Sexp.atom("Pstr_exception"), extensionConstructor(ec)})
+ | Pstr_module(mb) => Sexp.list(list{Sexp.atom("Pstr_module"), moduleBinding(mb)})
+ | Pstr_recmodule(mbs) =>
+ Sexp.list(list{Sexp.atom("Pstr_recmodule"), Sexp.list(mapEmpty(~f=moduleBinding, mbs))})
+ | Pstr_modtype(modTypDecl) =>
+ Sexp.list(list{Sexp.atom("Pstr_modtype"), moduleTypeDeclaration(modTypDecl)})
+ | Pstr_open(openDesc) => Sexp.list(list{Sexp.atom("Pstr_open"), openDescription(openDesc)})
+ | Pstr_class(_) => Sexp.atom("Pstr_class")
+ | Pstr_class_type(_) => Sexp.atom("Pstr_class_type")
+ | Pstr_include(id) => Sexp.list(list{Sexp.atom("Pstr_include"), includeDeclaration(id)})
+ | Pstr_attribute(attr) => Sexp.list(list{Sexp.atom("Pstr_attribute"), attribute(attr)})
+ | Pstr_extension(ext, attrs) =>
+ Sexp.list(list{Sexp.atom("Pstr_extension"), extension(ext), attributes(attrs)})
+ }
+
+ Sexp.list(list{Sexp.atom("structure_item"), desc})
+ }
+
+ and includeDeclaration = id =>
+ Sexp.list(list{
+ Sexp.atom("include_declaration"),
+ moduleExpression(id.pincl_mod),
+ attributes(id.pincl_attributes),
+ })
+
+ and openDescription = od =>
+ Sexp.list(list{
+ Sexp.atom("open_description"),
+ longident(od.popen_lid.Asttypes.txt),
+ attributes(od.popen_attributes),
+ })
+
+ and moduleTypeDeclaration = mtd =>
+ Sexp.list(list{
+ Sexp.atom("module_type_declaration"),
+ string(mtd.pmtd_name.Asttypes.txt),
+ switch mtd.pmtd_type {
+ | None => Sexp.atom("None")
+ | Some(modType) => Sexp.list(list{Sexp.atom("Some"), moduleType(modType)})
+ },
+ attributes(mtd.pmtd_attributes),
+ })
+
+ and moduleBinding = mb =>
+ Sexp.list(list{
+ Sexp.atom("module_binding"),
+ string(mb.pmb_name.Asttypes.txt),
+ moduleExpression(mb.pmb_expr),
+ attributes(mb.pmb_attributes),
+ })
+
+ and moduleExpression = me => {
+ let desc = switch me.pmod_desc {
+ | Pmod_ident(modName) =>
+ Sexp.list(list{Sexp.atom("Pmod_ident"), longident(modName.Asttypes.txt)})
+ | Pmod_structure(s) => Sexp.list(list{Sexp.atom("Pmod_structure"), structure(s)})
+ | Pmod_functor(lbl, optModType, modExpr) =>
+ Sexp.list(list{
+ Sexp.atom("Pmod_functor"),
+ string(lbl.Asttypes.txt),
+ switch optModType {
+ | None => Sexp.atom("None")
+ | Some(modType) => Sexp.list(list{Sexp.atom("Some"), moduleType(modType)})
+ },
+ moduleExpression(modExpr),
+ })
+ | Pmod_apply(callModExpr, modExprArg) =>
+ Sexp.list(list{
+ Sexp.atom("Pmod_apply"),
+ moduleExpression(callModExpr),
+ moduleExpression(modExprArg),
+ })
+ | Pmod_constraint(modExpr, modType) =>
+ Sexp.list(list{Sexp.atom("Pmod_constraint"), moduleExpression(modExpr), moduleType(modType)})
+ | Pmod_unpack(expr) => Sexp.list(list{Sexp.atom("Pmod_unpack"), expression(expr)})
+ | Pmod_extension(ext) => Sexp.list(list{Sexp.atom("Pmod_extension"), extension(ext)})
+ }
+
+ Sexp.list(list{Sexp.atom("module_expr"), desc, attributes(me.pmod_attributes)})
+ }
+
+ and moduleType = mt => {
+ let desc = switch mt.pmty_desc {
+ | Pmty_ident(longidentLoc) =>
+ Sexp.list(list{Sexp.atom("Pmty_ident"), longident(longidentLoc.Asttypes.txt)})
+ | Pmty_signature(s) => Sexp.list(list{Sexp.atom("Pmty_signature"), signature(s)})
+ | Pmty_functor(lbl, optModType, modType) =>
+ Sexp.list(list{
+ Sexp.atom("Pmty_functor"),
+ string(lbl.Asttypes.txt),
+ switch optModType {
+ | None => Sexp.atom("None")
+ | Some(modType) => Sexp.list(list{Sexp.atom("Some"), moduleType(modType)})
+ },
+ moduleType(modType),
+ })
+ | Pmty_alias(longidentLoc) =>
+ Sexp.list(list{Sexp.atom("Pmty_alias"), longident(longidentLoc.Asttypes.txt)})
+ | Pmty_extension(ext) => Sexp.list(list{Sexp.atom("Pmty_extension"), extension(ext)})
+ | Pmty_typeof(modExpr) => Sexp.list(list{Sexp.atom("Pmty_typeof"), moduleExpression(modExpr)})
+ | Pmty_with(modType, withConstraints) =>
+ Sexp.list(list{
+ Sexp.atom("Pmty_with"),
+ moduleType(modType),
+ Sexp.list(mapEmpty(~f=withConstraint, withConstraints)),
+ })
+ }
+
+ Sexp.list(list{Sexp.atom("module_type"), desc, attributes(mt.pmty_attributes)})
+ }
+
+ and withConstraint = wc =>
+ switch wc {
+ | Pwith_type(longidentLoc, td) =>
+ Sexp.list(list{
+ Sexp.atom("Pmty_with"),
+ longident(longidentLoc.Asttypes.txt),
+ typeDeclaration(td),
+ })
+ | Pwith_module(l1, l2) =>
+ Sexp.list(list{
+ Sexp.atom("Pwith_module"),
+ longident(l1.Asttypes.txt),
+ longident(l2.Asttypes.txt),
+ })
+ | Pwith_typesubst(longidentLoc, td) =>
+ Sexp.list(list{
+ Sexp.atom("Pwith_typesubst"),
+ longident(longidentLoc.Asttypes.txt),
+ typeDeclaration(td),
+ })
+ | Pwith_modsubst(l1, l2) =>
+ Sexp.list(list{
+ Sexp.atom("Pwith_modsubst"),
+ longident(l1.Asttypes.txt),
+ longident(l2.Asttypes.txt),
+ })
+ }
+
+ and signature = s => Sexp.list(list{Sexp.atom("signature"), ...List.map(signatureItem, s)})
+
+ and signatureItem = si => {
+ let descr = switch si.psig_desc {
+ | Psig_value(vd) => Sexp.list(list{Sexp.atom("Psig_value"), valueDescription(vd)})
+ | Psig_type(flag, typeDeclarations) =>
+ Sexp.list(list{
+ Sexp.atom("Psig_type"),
+ recFlag(flag),
+ Sexp.list(mapEmpty(~f=typeDeclaration, typeDeclarations)),
+ })
+ | Psig_typext(typExt) => Sexp.list(list{Sexp.atom("Psig_typext"), typeExtension(typExt)})
+ | Psig_exception(extConstr) =>
+ Sexp.list(list{Sexp.atom("Psig_exception"), extensionConstructor(extConstr)})
+ | Psig_module(modDecl) => Sexp.list(list{Sexp.atom("Psig_module"), moduleDeclaration(modDecl)})
+ | Psig_recmodule(modDecls) =>
+ Sexp.list(list{
+ Sexp.atom("Psig_recmodule"),
+ Sexp.list(mapEmpty(~f=moduleDeclaration, modDecls)),
+ })
+ | Psig_modtype(modTypDecl) =>
+ Sexp.list(list{Sexp.atom("Psig_modtype"), moduleTypeDeclaration(modTypDecl)})
+ | Psig_open(openDesc) => Sexp.list(list{Sexp.atom("Psig_open"), openDescription(openDesc)})
+ | Psig_include(inclDecl) =>
+ Sexp.list(list{Sexp.atom("Psig_include"), includeDescription(inclDecl)})
+ | Psig_class(_) => Sexp.list(list{Sexp.atom("Psig_class")})
+ | Psig_class_type(_) => Sexp.list(list{Sexp.atom("Psig_class_type")})
+ | Psig_attribute(attr) => Sexp.list(list{Sexp.atom("Psig_attribute"), attribute(attr)})
+ | Psig_extension(ext, attrs) =>
+ Sexp.list(list{Sexp.atom("Psig_extension"), extension(ext), attributes(attrs)})
+ }
+
+ Sexp.list(list{Sexp.atom("signature_item"), descr})
+ }
+
+ and includeDescription = id =>
+ Sexp.list(list{
+ Sexp.atom("include_description"),
+ moduleType(id.pincl_mod),
+ attributes(id.pincl_attributes),
+ })
+
+ and moduleDeclaration = md =>
+ Sexp.list(list{
+ Sexp.atom("module_declaration"),
+ string(md.pmd_name.Asttypes.txt),
+ moduleType(md.pmd_type),
+ attributes(md.pmd_attributes),
+ })
+
+ and valueBinding = vb =>
+ Sexp.list(list{
+ Sexp.atom("value_binding"),
+ pattern(vb.pvb_pat),
+ expression(vb.pvb_expr),
+ attributes(vb.pvb_attributes),
+ })
+
+ and valueDescription = vd =>
+ Sexp.list(list{
+ Sexp.atom("value_description"),
+ string(vd.pval_name.Asttypes.txt),
+ coreType(vd.pval_type),
+ Sexp.list(mapEmpty(~f=string, vd.pval_prim)),
+ attributes(vd.pval_attributes),
+ })
+
+ /* Serialize a type_declaration to an s-expression: name, type parameters,
+  * constraints, kind (abstract/variant/record/open), optional manifest,
+  * privacy flag, and attributes — labels mirror the Parsetree field names. */
+ and typeDeclaration = td =>
+ Sexp.list(list{
+ Sexp.atom("type_declaration"),
+ string(td.ptype_name.Asttypes.txt),
+ Sexp.list(list{
+ Sexp.atom("ptype_params"),
+ Sexp.list(
+ mapEmpty(
+ ~f=((typexpr, var)) => Sexp.list(list{coreType(typexpr), variance(var)}),
+ td.ptype_params,
+ ),
+ ),
+ }),
+ Sexp.list(list{
+ Sexp.atom("ptype_cstrs"),
+ Sexp.list(
+ mapEmpty(
+ /* the constraint's location is dropped from the debug output */
+ ~f=((typ1, typ2, _loc)) => Sexp.list(list{coreType(typ1), coreType(typ2)}),
+ td.ptype_cstrs,
+ ),
+ ),
+ }),
+ Sexp.list(list{Sexp.atom("ptype_kind"), typeKind(td.ptype_kind)}),
+ Sexp.list(list{
+ Sexp.atom("ptype_manifest"),
+ switch td.ptype_manifest {
+ | None => Sexp.atom("None")
+ | Some(typ) => Sexp.list(list{Sexp.atom("Some"), coreType(typ)})
+ },
+ }),
+ Sexp.list(list{Sexp.atom("ptype_private"), privateFlag(td.ptype_private)}),
+ attributes(td.ptype_attributes),
+ })
+
+ and extensionConstructor = ec =>
+ Sexp.list(list{
+ Sexp.atom("extension_constructor"),
+ string(ec.pext_name.Asttypes.txt),
+ extensionConstructorKind(ec.pext_kind),
+ attributes(ec.pext_attributes),
+ })
+
+ and extensionConstructorKind = kind =>
+ switch kind {
+ | Pext_decl(args, optTypExpr) =>
+ Sexp.list(list{
+ Sexp.atom("Pext_decl"),
+ constructorArguments(args),
+ switch optTypExpr {
+ | None => Sexp.atom("None")
+ | Some(typ) => Sexp.list(list{Sexp.atom("Some"), coreType(typ)})
+ },
+ })
+ | Pext_rebind(longidentLoc) =>
+ Sexp.list(list{Sexp.atom("Pext_rebind"), longident(longidentLoc.Asttypes.txt)})
+ }
+
+ /* Serialize a type_extension node (`type t += ...`) to an s-expression.
+  * Fix: the params label atom was misspelled "ptyext_parms"; every other
+  * label atom in this printer matches its Parsetree field name exactly
+  * (ptyext_path, ptyext_constructors, ptyext_private, ptype_params, ...). */
+ and typeExtension = te =>
+ Sexp.list(list{
+ Sexp.atom("type_extension"),
+ Sexp.list(list{Sexp.atom("ptyext_path"), longident(te.ptyext_path.Asttypes.txt)}),
+ Sexp.list(list{
+ Sexp.atom("ptyext_params"),
+ Sexp.list(
+ mapEmpty(
+ ~f=((typexpr, var)) => Sexp.list(list{coreType(typexpr), variance(var)}),
+ te.ptyext_params,
+ ),
+ ),
+ }),
+ Sexp.list(list{
+ Sexp.atom("ptyext_constructors"),
+ Sexp.list(mapEmpty(~f=extensionConstructor, te.ptyext_constructors)),
+ }),
+ Sexp.list(list{Sexp.atom("ptyext_private"), privateFlag(te.ptyext_private)}),
+ attributes(te.ptyext_attributes),
+ })
+
+ and typeKind = kind =>
+ switch kind {
+ | Ptype_abstract => Sexp.atom("Ptype_abstract")
+ | Ptype_variant(constrDecls) =>
+ Sexp.list(list{
+ Sexp.atom("Ptype_variant"),
+ Sexp.list(mapEmpty(~f=constructorDeclaration, constrDecls)),
+ })
+ | Ptype_record(lblDecls) =>
+ Sexp.list(list{Sexp.atom("Ptype_record"), Sexp.list(mapEmpty(~f=labelDeclaration, lblDecls))})
+ | Ptype_open => Sexp.atom("Ptype_open")
+ }
+
+ and constructorDeclaration = cd =>
+ Sexp.list(list{
+ Sexp.atom("constructor_declaration"),
+ string(cd.pcd_name.Asttypes.txt),
+ Sexp.list(list{Sexp.atom("pcd_args"), constructorArguments(cd.pcd_args)}),
+ Sexp.list(list{
+ Sexp.atom("pcd_res"),
+ switch cd.pcd_res {
+ | None => Sexp.atom("None")
+ | Some(typ) => Sexp.list(list{Sexp.atom("Some"), coreType(typ)})
+ },
+ }),
+ attributes(cd.pcd_attributes),
+ })
+
+ and constructorArguments = args =>
+ switch args {
+ | Pcstr_tuple(types) =>
+ Sexp.list(list{Sexp.atom("Pcstr_tuple"), Sexp.list(mapEmpty(~f=coreType, types))})
+ | Pcstr_record(lds) =>
+ Sexp.list(list{Sexp.atom("Pcstr_record"), Sexp.list(mapEmpty(~f=labelDeclaration, lds))})
+ }
+
+ and labelDeclaration = ld =>
+ Sexp.list(list{
+ Sexp.atom("label_declaration"),
+ string(ld.pld_name.Asttypes.txt),
+ mutableFlag(ld.pld_mutable),
+ coreType(ld.pld_type),
+ attributes(ld.pld_attributes),
+ })
+
+ and expression = expr => {
+ let desc = switch expr.pexp_desc {
+ | Pexp_ident(longidentLoc) =>
+ Sexp.list(list{Sexp.atom("Pexp_ident"), longident(longidentLoc.Asttypes.txt)})
+ | Pexp_constant(c) => Sexp.list(list{Sexp.atom("Pexp_constant"), constant(c)})
+ | Pexp_let(flag, vbs, expr) =>
+ Sexp.list(list{
+ Sexp.atom("Pexp_let"),
+ recFlag(flag),
+ Sexp.list(mapEmpty(~f=valueBinding, vbs)),
+ expression(expr),
+ })
+ | Pexp_function(cases) =>
+ Sexp.list(list{Sexp.atom("Pexp_function"), Sexp.list(mapEmpty(~f=case, cases))})
+ | Pexp_fun(argLbl, exprOpt, pat, expr) =>
+ Sexp.list(list{
+ Sexp.atom("Pexp_fun"),
+ argLabel(argLbl),
+ switch exprOpt {
+ | None => Sexp.atom("None")
+ | Some(expr) => Sexp.list(list{Sexp.atom("Some"), expression(expr)})
+ },
+ pattern(pat),
+ expression(expr),
+ })
+ | Pexp_apply(expr, args) =>
+ Sexp.list(list{
+ Sexp.atom("Pexp_apply"),
+ expression(expr),
+ Sexp.list(
+ mapEmpty(
+ ~f=((argLbl, expr)) => Sexp.list(list{argLabel(argLbl), expression(expr)}),
+ args,
+ ),
+ ),
+ })
+ | Pexp_match(expr, cases) =>
+ Sexp.list(list{
+ Sexp.atom("Pexp_match"),
+ expression(expr),
+ Sexp.list(mapEmpty(~f=case, cases)),
+ })
+ | Pexp_try(expr, cases) =>
+ Sexp.list(list{Sexp.atom("Pexp_try"), expression(expr), Sexp.list(mapEmpty(~f=case, cases))})
+ | Pexp_tuple(exprs) =>
+ Sexp.list(list{Sexp.atom("Pexp_tuple"), Sexp.list(mapEmpty(~f=expression, exprs))})
+ | Pexp_construct(longidentLoc, exprOpt) =>
+ Sexp.list(list{
+ Sexp.atom("Pexp_construct"),
+ longident(longidentLoc.Asttypes.txt),
+ switch exprOpt {
+ | None => Sexp.atom("None")
+ | Some(expr) => Sexp.list(list{Sexp.atom("Some"), expression(expr)})
+ },
+ })
+ | Pexp_variant(lbl, exprOpt) =>
+ Sexp.list(list{
+ Sexp.atom("Pexp_variant"),
+ string(lbl),
+ switch exprOpt {
+ | None => Sexp.atom("None")
+ | Some(expr) => Sexp.list(list{Sexp.atom("Some"), expression(expr)})
+ },
+ })
+ | Pexp_record(rows, optExpr) =>
+ Sexp.list(list{
+ Sexp.atom("Pexp_record"),
+ Sexp.list(
+ mapEmpty(
+ ~f=((longidentLoc, expr)) =>
+ Sexp.list(list{longident(longidentLoc.Asttypes.txt), expression(expr)}),
+ rows,
+ ),
+ ),
+ switch optExpr {
+ | None => Sexp.atom("None")
+ | Some(expr) => Sexp.list(list{Sexp.atom("Some"), expression(expr)})
+ },
+ })
+ | Pexp_field(expr, longidentLoc) =>
+ Sexp.list(list{
+ Sexp.atom("Pexp_field"),
+ expression(expr),
+ longident(longidentLoc.Asttypes.txt),
+ })
+ | Pexp_setfield(expr1, longidentLoc, expr2) =>
+ Sexp.list(list{
+ Sexp.atom("Pexp_setfield"),
+ expression(expr1),
+ longident(longidentLoc.Asttypes.txt),
+ expression(expr2),
+ })
+ | Pexp_array(exprs) =>
+ Sexp.list(list{Sexp.atom("Pexp_array"), Sexp.list(mapEmpty(~f=expression, exprs))})
+ | Pexp_ifthenelse(expr1, expr2, optExpr) =>
+ Sexp.list(list{
+ Sexp.atom("Pexp_ifthenelse"),
+ expression(expr1),
+ expression(expr2),
+ switch optExpr {
+ | None => Sexp.atom("None")
+ | Some(expr) => Sexp.list(list{Sexp.atom("Some"), expression(expr)})
+ },
+ })
+ | Pexp_sequence(expr1, expr2) =>
+ Sexp.list(list{Sexp.atom("Pexp_sequence"), expression(expr1), expression(expr2)})
+ | Pexp_while(expr1, expr2) =>
+ Sexp.list(list{Sexp.atom("Pexp_while"), expression(expr1), expression(expr2)})
+ | Pexp_for(pat, e1, e2, flag, e3) =>
+ Sexp.list(list{
+ Sexp.atom("Pexp_for"),
+ pattern(pat),
+ expression(e1),
+ expression(e2),
+ directionFlag(flag),
+ expression(e3),
+ })
+ | Pexp_constraint(expr, typexpr) =>
+ Sexp.list(list{Sexp.atom("Pexp_constraint"), expression(expr), coreType(typexpr)})
+ | Pexp_coerce(expr, optTyp, typexpr) =>
+ Sexp.list(list{
+ Sexp.atom("Pexp_coerce"),
+ expression(expr),
+ switch optTyp {
+ | None => Sexp.atom("None")
+ | Some(typ) => Sexp.list(list{Sexp.atom("Some"), coreType(typ)})
+ },
+ coreType(typexpr),
+ })
+ | Pexp_send(_) => Sexp.list(list{Sexp.atom("Pexp_send")})
+ | Pexp_new(_) => Sexp.list(list{Sexp.atom("Pexp_new")})
+ | Pexp_setinstvar(_) => Sexp.list(list{Sexp.atom("Pexp_setinstvar")})
+ | Pexp_override(_) => Sexp.list(list{Sexp.atom("Pexp_override")})
+ | Pexp_letmodule(modName, modExpr, expr) =>
+ Sexp.list(list{
+ Sexp.atom("Pexp_letmodule"),
+ string(modName.Asttypes.txt),
+ moduleExpression(modExpr),
+ expression(expr),
+ })
+ | Pexp_letexception(extConstr, expr) =>
+ Sexp.list(list{
+ Sexp.atom("Pexp_letexception"),
+ extensionConstructor(extConstr),
+ expression(expr),
+ })
+ | Pexp_assert(expr) => Sexp.list(list{Sexp.atom("Pexp_assert"), expression(expr)})
+ | Pexp_lazy(expr) => Sexp.list(list{Sexp.atom("Pexp_lazy"), expression(expr)})
+ | Pexp_poly(_) => Sexp.list(list{Sexp.atom("Pexp_poly")})
+ | Pexp_object(_) => Sexp.list(list{Sexp.atom("Pexp_object")})
+ | Pexp_newtype(lbl, expr) =>
+ Sexp.list(list{Sexp.atom("Pexp_newtype"), string(lbl.Asttypes.txt), expression(expr)})
+ | Pexp_pack(modExpr) => Sexp.list(list{Sexp.atom("Pexp_pack"), moduleExpression(modExpr)})
+ | Pexp_open(flag, longidentLoc, expr) =>
+ Sexp.list(list{
+ Sexp.atom("Pexp_open"),
+ overrideFlag(flag),
+ longident(longidentLoc.Asttypes.txt),
+ expression(expr),
+ })
+ | Pexp_extension(ext) => Sexp.list(list{Sexp.atom("Pexp_extension"), extension(ext)})
+ | Pexp_unreachable => Sexp.atom("Pexp_unreachable")
+ }
+
+ Sexp.list(list{Sexp.atom("expression"), desc})
+ }
+
+ /* Serialize one match case: lhs pattern, optional `when` guard, rhs expression. */
+ and case = c =>
+ Sexp.list(list{
+ Sexp.atom("case"),
+ Sexp.list(list{Sexp.atom("pc_lhs"), pattern(c.pc_lhs)}),
+ Sexp.list(list{
+ Sexp.atom("pc_guard"),
+ switch c.pc_guard {
+ | None => Sexp.atom("None")
+ | Some(expr) => Sexp.list(list{Sexp.atom("Some"), expression(expr)})
+ },
+ }),
+ Sexp.list(list{Sexp.atom("pc_rhs"), expression(c.pc_rhs)}),
+ })
+
+ /* Serialize a pattern node to an s-expression, one atom per Parsetree
+  * constructor.
+  * Fix: Ppat_construct printed its Some payload tag as lowercase "some",
+  * unlike every other option serialization in this file (which prints
+  * "Some"); normalized for consistency. */
+ and pattern = p => {
+ let descr = switch p.ppat_desc {
+ | Ppat_any => Sexp.atom("Ppat_any")
+ | Ppat_var(var) => Sexp.list(list{Sexp.atom("Ppat_var"), string(var.Location.txt)})
+ | Ppat_alias(p, alias) =>
+ Sexp.list(list{Sexp.atom("Ppat_alias"), pattern(p), string(alias.txt)})
+ | Ppat_constant(c) => Sexp.list(list{Sexp.atom("Ppat_constant"), constant(c)})
+ | Ppat_interval(lo, hi) =>
+ Sexp.list(list{Sexp.atom("Ppat_interval"), constant(lo), constant(hi)})
+ | Ppat_tuple(patterns) =>
+ Sexp.list(list{Sexp.atom("Ppat_tuple"), Sexp.list(mapEmpty(~f=pattern, patterns))})
+ | Ppat_construct(longidentLoc, optPattern) =>
+ Sexp.list(list{
+ Sexp.atom("Ppat_construct"),
+ longident(longidentLoc.Location.txt),
+ switch optPattern {
+ | None => Sexp.atom("None")
+ | Some(p) => Sexp.list(list{Sexp.atom("Some"), pattern(p)})
+ },
+ })
+ | Ppat_variant(lbl, optPattern) =>
+ Sexp.list(list{
+ Sexp.atom("Ppat_variant"),
+ string(lbl),
+ switch optPattern {
+ | None => Sexp.atom("None")
+ | Some(p) => Sexp.list(list{Sexp.atom("Some"), pattern(p)})
+ },
+ })
+ | Ppat_record(rows, flag) =>
+ Sexp.list(list{
+ Sexp.atom("Ppat_record"),
+ closedFlag(flag),
+ Sexp.list(
+ mapEmpty(
+ ~f=((longidentLoc, p)) =>
+ Sexp.list(list{longident(longidentLoc.Location.txt), pattern(p)}),
+ rows,
+ ),
+ ),
+ })
+ | Ppat_array(patterns) =>
+ Sexp.list(list{Sexp.atom("Ppat_array"), Sexp.list(mapEmpty(~f=pattern, patterns))})
+ | Ppat_or(p1, p2) => Sexp.list(list{Sexp.atom("Ppat_or"), pattern(p1), pattern(p2)})
+ | Ppat_constraint(p, typexpr) =>
+ Sexp.list(list{Sexp.atom("Ppat_constraint"), pattern(p), coreType(typexpr)})
+ | Ppat_type(longidentLoc) =>
+ Sexp.list(list{Sexp.atom("Ppat_type"), longident(longidentLoc.Location.txt)})
+ | Ppat_lazy(p) => Sexp.list(list{Sexp.atom("Ppat_lazy"), pattern(p)})
+ | Ppat_unpack(stringLoc) =>
+ Sexp.list(list{Sexp.atom("Ppat_unpack"), string(stringLoc.Location.txt)})
+ | Ppat_exception(p) => Sexp.list(list{Sexp.atom("Ppat_exception"), pattern(p)})
+ | Ppat_extension(ext) => Sexp.list(list{Sexp.atom("Ppat_extension"), extension(ext)})
+ | Ppat_open(longidentLoc, p) =>
+ Sexp.list(list{Sexp.atom("Ppat_open"), longident(longidentLoc.Location.txt), pattern(p)})
+ }
+
+ Sexp.list(list{Sexp.atom("pattern"), descr})
+ }
+
+ and objectField = field =>
+ switch field {
+ | Otag(lblLoc, attrs, typexpr) =>
+ Sexp.list(list{Sexp.atom("Otag"), string(lblLoc.txt), attributes(attrs), coreType(typexpr)})
+ | Oinherit(typexpr) => Sexp.list(list{Sexp.atom("Oinherit"), coreType(typexpr)})
+ }
+
+ and rowField = field =>
+ switch field {
+ | Rtag(labelLoc, attrs, truth, types) =>
+ Sexp.list(list{
+ Sexp.atom("Rtag"),
+ string(labelLoc.txt),
+ attributes(attrs),
+ Sexp.atom(
+ if truth {
+ "true"
+ } else {
+ "false"
+ },
+ ),
+ Sexp.list(mapEmpty(~f=coreType, types)),
+ })
+ | Rinherit(typexpr) => Sexp.list(list{Sexp.atom("Rinherit"), coreType(typexpr)})
+ }
+
+ and packageType = ((modNameLoc, packageConstraints)) =>
+ Sexp.list(list{
+ Sexp.atom("package_type"),
+ longident(modNameLoc.Asttypes.txt),
+ Sexp.list(
+ mapEmpty(
+ ~f=((modNameLoc, typexpr)) =>
+ Sexp.list(list{longident(modNameLoc.Asttypes.txt), coreType(typexpr)}),
+ packageConstraints,
+ ),
+ ),
+ })
+
+ and coreType = typexpr => {
+ let desc = switch typexpr.ptyp_desc {
+ | Ptyp_any => Sexp.atom("Ptyp_any")
+ | Ptyp_var(var) => Sexp.list(list{Sexp.atom("Ptyp_var"), string(var)})
+ | Ptyp_arrow(argLbl, typ1, typ2) =>
+ Sexp.list(list{Sexp.atom("Ptyp_arrow"), argLabel(argLbl), coreType(typ1), coreType(typ2)})
+ | Ptyp_tuple(types) =>
+ Sexp.list(list{Sexp.atom("Ptyp_tuple"), Sexp.list(mapEmpty(~f=coreType, types))})
+ | Ptyp_constr(longidentLoc, types) =>
+ Sexp.list(list{
+ Sexp.atom("Ptyp_constr"),
+ longident(longidentLoc.txt),
+ Sexp.list(mapEmpty(~f=coreType, types)),
+ })
+ | Ptyp_alias(typexpr, alias) =>
+ Sexp.list(list{Sexp.atom("Ptyp_alias"), coreType(typexpr), string(alias)})
+ | Ptyp_object(fields, flag) =>
+ Sexp.list(list{
+ Sexp.atom("Ptyp_object"),
+ closedFlag(flag),
+ Sexp.list(mapEmpty(~f=objectField, fields)),
+ })
+ | Ptyp_class(longidentLoc, types) =>
+ Sexp.list(list{
+ Sexp.atom("Ptyp_class"),
+ longident(longidentLoc.Location.txt),
+ Sexp.list(mapEmpty(~f=coreType, types)),
+ })
+ | Ptyp_variant(fields, flag, optLabels) =>
+ Sexp.list(list{
+ Sexp.atom("Ptyp_variant"),
+ Sexp.list(mapEmpty(~f=rowField, fields)),
+ closedFlag(flag),
+ switch optLabels {
+ | None => Sexp.atom("None")
+ | Some(lbls) => Sexp.list(mapEmpty(~f=string, lbls))
+ },
+ })
+ | Ptyp_poly(lbls, typexpr) =>
+ Sexp.list(list{
+ Sexp.atom("Ptyp_poly"),
+ Sexp.list(mapEmpty(~f=lbl => string(lbl.Asttypes.txt), lbls)),
+ coreType(typexpr),
+ })
+ | Ptyp_package(package) => Sexp.list(list{Sexp.atom("Ptyp_package"), packageType(package)})
+ | Ptyp_extension(ext) => Sexp.list(list{Sexp.atom("Ptyp_extension"), extension(ext)})
+ }
+
+ Sexp.list(list{Sexp.atom("core_type"), desc})
+ }
+
+ /* Serialize an attribute/extension payload.
+  * NOTE(review): PStr splices its structure items directly into the list,
+  * while PSig wraps everything in a single (signature ...) node — presumably
+  * intentional, but the asymmetry is worth confirming. */
+ and payload = p =>
+ switch p {
+ | PStr(s) => Sexp.list(list{Sexp.atom("PStr"), ...mapEmpty(~f=structureItem, s)})
+ | PSig(s) => Sexp.list(list{Sexp.atom("PSig"), signature(s)})
+ | PTyp(ct) => Sexp.list(list{Sexp.atom("PTyp"), coreType(ct)})
+ | PPat(pat, optExpr) =>
+ Sexp.list(list{
+ Sexp.atom("PPat"),
+ pattern(pat),
+ switch optExpr {
+ | Some(expr) => Sexp.list(list{Sexp.atom("Some"), expression(expr)})
+ | None => Sexp.atom("None")
+ },
+ })
+ }
+
+ and attribute = ((stringLoc, p)) =>
+ Sexp.list(list{Sexp.atom("attribute"), Sexp.atom(stringLoc.Asttypes.txt), payload(p)})
+
+ and extension = ((stringLoc, p)) =>
+ Sexp.list(list{Sexp.atom("extension"), Sexp.atom(stringLoc.Asttypes.txt), payload(p)})
+
+ and attributes = attrs => {
+ let sexprs = mapEmpty(~f=attribute, attrs)
+ Sexp.list(list{Sexp.atom("attributes"), ...sexprs})
+ }
+
+ let implementation = structure
+ let interface = signature
+}
+
+module IO: {
+  let readFile: string => string
+  let readStdin: unit => string
+} = {
+  /* Buffered-read chunk size: 2^15 bytes. (Original TODO asked why this
+   * particular size was picked — it is an arbitrary but reasonable choice.) */
+  let chunkSize = 32768
+
+  /* Read everything from `chan` until EOF, then close it (ignoring close
+   * errors). Factored out of readFile/readStdin, whose loops were identical;
+   * hidden from callers by the module signature. */
+  let readChannel = chan => {
+    let buffer = Buffer.create(chunkSize)
+    let chunk = (@doesNotRaise Bytes.create)(chunkSize)
+    let rec loop = () => {
+      /* input can raise Invalid_argument; treat that like EOF, as before */
+      let len = try input(chan, chunk, 0, chunkSize) catch {
+      | Invalid_argument(_) => 0
+      }
+      if len === 0 {
+        close_in_noerr(chan)
+        Buffer.contents(buffer)
+      } else {
+        Buffer.add_subbytes(buffer, chunk, 0, len)
+        loop()
+      }
+    }
+
+    loop()
+  }
+
+  /* Read the whole contents of the file at `filename` into a string. */
+  let readFile = filename => readChannel(open_in(filename))
+
+  /* Read all of stdin until EOF into a string. */
+  let readStdin = () => readChannel(stdin)
+}
+
+module CharacterCodes = {
+ let eof = -1
+
+ let space = 0x0020
+ @live let newline = 0x0A /* \n */
+ let lineFeed = 0x0A /* \n */
+ let carriageReturn = 0x0D /* \r */
+ let lineSeparator = 0x2028
+ let paragraphSeparator = 0x2029
+
+ let tab = 0x09
+
+ let bang = 0x21
+ let dot = 0x2E
+ let colon = 0x3A
+ let comma = 0x2C
+ let backtick = 0x60
+ /* let question = 0x3F */
+ let semicolon = 0x3B
+ let underscore = 0x5F
+ let singleQuote = 0x27
+ let doubleQuote = 0x22
+ let equal = 0x3D
+ let bar = 0x7C
+ let tilde = 0x7E
+ let question = 0x3F
+ let ampersand = 0x26
+ let at = 0x40
+ let dollar = 0x24
+ let percent = 0x25
+
+ let lparen = 0x28
+ let rparen = 0x29
+ let lbracket = 0x5B
+ let rbracket = 0x5D
+ let lbrace = 0x7B
+ let rbrace = 0x7D
+
+ let forwardslash = 0x2F /* / */
+ let backslash = 0x5C /* \ */
+
+ let greaterThan = 0x3E
+ let hash = 0x23
+ let lessThan = 0x3C
+
+ let minus = 0x2D
+ let plus = 0x2B
+ let asterisk = 0x2A
+
+ let _0 = 0x30
+ @live let _1 = 0x31
+ @live let _2 = 0x32
+ @live let _3 = 0x33
+ @live let _4 = 0x34
+ @live let _5 = 0x35
+ @live let _6 = 0x36
+ @live let _7 = 0x37
+ @live let _8 = 0x38
+ let _9 = 0x39
+
+ module Lower = {
+ let a = 0x61
+ let b = 0x62
+ @live let c = 0x63
+ @live let d = 0x64
+ let e = 0x65
+ let f = 0x66
+ let g = 0x67
+ @live let h = 0x68
+ @live let i = 0x69
+ @live let j = 0x6A
+ @live let k = 0x6B
+ @live let l = 0x6C
+ @live let m = 0x6D
+ let n = 0x6E
+ let o = 0x6F
+ let p = 0x70
+ @live let q = 0x71
+ let r = 0x72
+ @live let s = 0x73
+ let t = 0x74
+ @live let u = 0x75
+ @live let v = 0x76
+ @live let w = 0x77
+ let x = 0x78
+ @live let y = 0x79
+ let z = 0x7A
+ }
+
+ /* ASCII codes of upper-case letters (gaps are commented out or unused). */
+ module Upper = {
+ let a = 0x41
+ /* let b = 0x42 */
+ @live let c = 0x43
+ @live let d = 0x44
+ @live let e = 0x45
+ @live let f = 0x46
+ let g = 0x47
+ @live let h = 0x48
+ @live let i = 0x49
+ @live let j = 0x4A
+ @live let k = 0x4B
+ @live let l = 0x4C
+ @live let m = 0x4D
+ @live let b = 0x4E /* NOTE(review): 0x4E is 'N', not 'B' (real B=0x42 is commented out above) — binding looks misnamed; should presumably be `n`. Confirm nothing references Upper.b before renaming */
+ @live let o = 0x4F
+ @live let p = 0x50
+ @live let q = 0x51
+ @live let r = 0x52
+ @live let s = 0x53
+ @live let t = 0x54
+ @live let u = 0x55
+ @live let v = 0x56
+ @live let w = 0x57
+ @live let x = 0x58
+ @live let y = 0x59
+ let z = 0x5a
+ }
+
+ /* returns lower-case ch, ch should be ascii */
+ let lower = ch =>
+ /* if ch >= Lower.a && ch <= Lower.z then ch else ch + 32 */
+ lor(32, ch)
+
+ let isLetter = ch => (Lower.a <= ch && ch <= Lower.z) || (Upper.a <= ch && ch <= Upper.z)
+
+ let isUpperCase = ch => Upper.a <= ch && ch <= Upper.z
+
+ let isDigit = ch => _0 <= ch && ch <= _9
+
+ let isHex = ch => (_0 <= ch && ch <= _9) || (Lower.a <= lower(ch) && lower(ch) <= Lower.f)
+
+ /*
+ // ES5 7.3:
+ // The ECMAScript line terminator characters are listed in Table 3.
+ // Table 3: Line Terminator Characters
+ // Code Unit Value Name Formal Name
+ // \u000A Line Feed
+ // \u000D Carriage Return
+ // \u2028 Line separator
+ // \u2029 Paragraph separator
+ // Only the characters in Table 3 are treated as line terminators. Other new line or line
+ // breaking characters are treated as white space but not as line terminators.
+ */
+ let isLineBreak = ch =>
+ ch === lineFeed ||
+ (ch === carriageReturn ||
+ (ch === lineSeparator || ch === paragraphSeparator))
+
+ let digitValue = ch =>
+ if _0 <= ch && ch <= _9 {
+ ch - 48
+ } else if Lower.a <= lower(ch) && lower(ch) <= Lower.f {
+ lower(ch) - Lower.a + 10
+ } else {
+ 16
+ } /* larger than any legal value */
+}
+
+module Comment: {
+ type t
+
+ let toString: t => string
+
+ let loc: t => Location.t
+ let txt: t => string
+ let prevTokEndPos: t => Lexing.position
+
+ let setPrevTokEndPos: (t, Lexing.position) => unit
+
+ let isSingleLineComment: t => bool
+
+ let makeSingleLineComment: (~loc: Location.t, string) => t
+ let makeMultiLineComment: (~loc: Location.t, string) => t
+ let fromOcamlComment: (~loc: Location.t, ~txt: string, ~prevTokEndPos: Lexing.position) => t
+ let trimSpaces: string => string
+} = {
+ type style =
+ | SingleLine
+ | MultiLine
+
+ let styleToString = s =>
+ switch s {
+ | SingleLine => "SingleLine"
+ | MultiLine => "MultiLine"
+ }
+
+ type t = {
+ txt: string,
+ style: style,
+ loc: Location.t,
+ mutable prevTokEndPos: Lexing.position,
+ }
+
+ let loc = t => t.loc
+ let txt = t => t.txt
+ let prevTokEndPos = t => t.prevTokEndPos
+
+ let setPrevTokEndPos = (t, pos) => t.prevTokEndPos = pos
+
+ let isSingleLineComment = t =>
+ switch t.style {
+ | SingleLine => true
+ | MultiLine => false
+ }
+
+ let toString = t =>
+ Format.sprintf(
+ "(txt: %s\nstyle: %s\nlines: %d-%d)",
+ t.txt,
+ styleToString(t.style),
+ t.loc.loc_start.pos_lnum,
+ t.loc.loc_end.pos_lnum,
+ )
+
+ let makeSingleLineComment = (~loc, txt) => {
+ txt: txt,
+ loc: loc,
+ style: SingleLine,
+ prevTokEndPos: Lexing.dummy_pos,
+ }
+
+ let makeMultiLineComment = (~loc, txt) => {
+ txt: txt,
+ loc: loc,
+ style: MultiLine,
+ prevTokEndPos: Lexing.dummy_pos,
+ }
+
+ let fromOcamlComment = (~loc, ~txt, ~prevTokEndPos) => {
+ txt: txt,
+ loc: loc,
+ style: MultiLine,
+ prevTokEndPos: prevTokEndPos,
+ }
+
+ /* Strip leading and trailing ASCII spaces (0x20 only — tabs and newlines
+  * are preserved) from `s`. Returns `s` itself when no trimming is needed,
+  * so the common case allocates nothing. */
+ let trimSpaces = s => {
+ let len = String.length(s)
+ if len == 0 {
+ s
+ } else if String.unsafe_get(s, 0) == ' ' || String.unsafe_get(s, len - 1) == ' ' {
+ let b = Bytes.of_string(s)
+ let i = ref(0)
+ /* advance i past leading spaces */
+ while i.contents < len && Bytes.unsafe_get(b, i.contents) == ' ' {
+ incr(i)
+ }
+ let j = ref(len - 1)
+ /* retreat j past trailing spaces */
+ while j.contents >= i.contents && Bytes.unsafe_get(b, j.contents) == ' ' {
+ decr(j)
+ }
+ if j.contents >= i.contents {
+ (@doesNotRaise Bytes.sub)(b, i.contents, j.contents - i.contents + 1) |> Bytes.to_string
+ } else {
+ /* the whole string was spaces */
+ ""
+ }
+ } else {
+ s
+ }
+ }
+}
+
+module Token = {
+ type t =
+ | Open
+ | True
+ | False
+ | Character(char)
+ | Int({i: string, suffix: option})
+ | Float({f: string, suffix: option})
+ | String(string)
+ | Lident(string)
+ | Uident(string)
+ | As
+ | Dot
+ | DotDot
+ | DotDotDot
+ | Bang
+ | Semicolon
+ | Let
+ | And
+ | Rec
+ | Underscore
+ | SingleQuote
+ | Equal
+ | EqualEqual
+ | EqualEqualEqual
+ | Bar
+ | Lparen
+ | Rparen
+ | Lbracket
+ | Rbracket
+ | Lbrace
+ | Rbrace
+ | Colon
+ | Comma
+ | Eof
+ | Exception
+ | @live Backslash
+ | Forwardslash
+ | ForwardslashDot
+ | Asterisk
+ | AsteriskDot
+ | Exponentiation
+ | Minus
+ | MinusDot
+ | Plus
+ | PlusDot
+ | PlusPlus
+ | PlusEqual
+ | ColonGreaterThan
+ | GreaterThan
+ | LessThan
+ | LessThanSlash
+ | Hash
+ | HashEqual
+ | HashHash
+ | Assert
+ | Lazy
+ | Tilde
+ | Question
+ | If
+ | Else
+ | For
+ | In
+ | To
+ | Downto
+ | While
+ | Switch
+ | When
+ | EqualGreater
+ | MinusGreater
+ | External
+ | Typ
+ | Private
+ | Mutable
+ | Constraint
+ | Include
+ | Module
+ | Of
+ | With
+ | Land
+ | Lor
+ | Band /* Bitwise and: & */
+ | BangEqual
+ | BangEqualEqual
+ | LessEqual
+ | GreaterEqual
+ | ColonEqual
+ | At
+ | AtAt
+ | Percent
+ | PercentPercent
+ | Comment(Comment.t)
+ | List
+ | TemplateTail(string)
+ | TemplatePart(string)
+ | Backtick
+ | BarGreater
+ | Try
+ | Catch
+ | Import
+ | Export
+
+ let precedence = x =>
+ switch x {
+ | HashEqual | ColonEqual => 1
+ | Lor => 2
+ | Land => 3
+ | Equal
+ | EqualEqual
+ | EqualEqualEqual
+ | LessThan
+ | GreaterThan
+ | BangEqual
+ | BangEqualEqual
+ | LessEqual
+ | GreaterEqual
+ | BarGreater => 4
+ | Plus | PlusDot | Minus | MinusDot | PlusPlus => 5
+ | Asterisk | AsteriskDot | Forwardslash | ForwardslashDot => 6
+ | Exponentiation => 7
+ | MinusGreater => 8
+ | Dot => 9
+ | _ => 0
+ }
+
+ /* Human-readable spelling of each token, used in diagnostics. Most cases
+  * return the token's concrete source text; String/Lident/Uident return the
+  * raw payload unquoted. */
+ let toString = x =>
+ switch x {
+ | Open => "open"
+ | True => "true"
+ | False => "false"
+ | Character(c) => "'" ++ (Char.escaped(c) ++ "'")
+ | String(s) => s
+ | Lident(str) => str
+ | Uident(str) => str
+ | Dot => "."
+ | DotDot => ".."
+ | DotDotDot => "..."
+ | Int({i}) => "int " ++ i
+ | Float({f}) => "Float: " ++ f
+ | Bang => "!"
+ | Semicolon => ";"
+ | Let => "let"
+ | And => "and"
+ | Rec => "rec"
+ | Underscore => "_"
+ | SingleQuote => "'"
+ | Equal => "="
+ | EqualEqual => "=="
+ | EqualEqualEqual => "==="
+ | Eof => "eof"
+ | Bar => "|"
+ | As => "as"
+ | Lparen => "("
+ | Rparen => ")"
+ | Lbracket => "["
+ | Rbracket => "]"
+ | Lbrace => "{"
+ | Rbrace => "}"
+ | ColonGreaterThan => ":>"
+ | Colon => ":"
+ | Comma => ","
+ | Minus => "-"
+ | MinusDot => "-."
+ | Plus => "+"
+ | PlusDot => "+."
+ | PlusPlus => "++"
+ | PlusEqual => "+="
+ | Backslash => "\\"
+ | Forwardslash => "/"
+ | ForwardslashDot => "/."
+ | Exception => "exception"
+ | Hash => "#"
+ | HashHash => "##"
+ | HashEqual => "#="
+ | GreaterThan => ">"
+ | LessThan => "<"
+ | LessThanSlash => "" /* NOTE(review): empty while LessThan => "<" — presumably intentional for JSX printing; confirm */
+ | Asterisk => "*"
+ | AsteriskDot => "*."
+ | Exponentiation => "**"
+ | Assert => "assert"
+ | Lazy => "lazy"
+ | Tilde => "tilde" /* NOTE(review): spelled out, unlike other operator tokens which return the symbol ("~") — confirm intended */
+ | Question => "?"
+ | If => "if"
+ | Else => "else"
+ | For => "for"
+ | In => "in"
+ | To => "to"
+ | Downto => "downto"
+ | While => "while"
+ | Switch => "switch"
+ | When => "when"
+ | EqualGreater => "=>"
+ | MinusGreater => "->"
+ | External => "external"
+ | Typ => "type"
+ | Private => "private"
+ | Constraint => "constraint"
+ | Mutable => "mutable"
+ | Include => "include"
+ | Module => "module"
+ | Of => "of"
+ | With => "with"
+ | Lor => "||"
+ | Band => "&"
+ | Land => "&&"
+ | BangEqual => "!="
+ | BangEqualEqual => "!=="
+ | GreaterEqual => ">="
+ | LessEqual => "<="
+ | ColonEqual => ":="
+ | At => "@"
+ | AtAt => "@@"
+ | Percent => "%"
+ | PercentPercent => "%%"
+ | Comment(c) => "Comment(" ++ (Comment.toString(c) ++ ")")
+ | List => "list"
+ | TemplatePart(text) => text ++ "${"
+ | TemplateTail(text) => "TemplateTail(" ++ (text ++ ")")
+ | Backtick => "`"
+ | BarGreater => "|>"
+ | Try => "try"
+ | Catch => "catch"
+ | Import => "import"
+ | Export => "export"
+ }
+
+ @raises(Not_found)
+ let keywordTable = x =>
+ switch x {
+ | "true" => True
+ | "false" => False
+ | "open" => Open
+ | "let" => Let
+ | "rec" => Rec
+ | "and" => And
+ | "as" => As
+ | "exception" => Exception
+ | "assert" => Assert
+ | "lazy" => Lazy
+ | "if" => If
+ | "else" => Else
+ | "for" => For
+ | "in" => In
+ | "to" => To
+ | "downto" => Downto
+ | "while" => While
+ | "switch" => Switch
+ | "when" => When
+ | "external" => External
+ | "type" => Typ
+ | "private" => Private
+ | "mutable" => Mutable
+ | "constraint" => Constraint
+ | "include" => Include
+ | "module" => Module
+ | "of" => Of
+ | "list" => List
+ | "with" => With
+ | "try" => Try
+ | "catch" => Catch
+ | "import" => Import
+ | "export" => Export
+ | _ => raise(Not_found)
+ }
+
+ let isKeyword = x =>
+ switch x {
+ | True
+ | False
+ | Open
+ | Let
+ | Rec
+ | And
+ | As
+ | Exception
+ | Assert
+ | Lazy
+ | If
+ | Else
+ | For
+ | In
+ | To
+ | Downto
+ | While
+ | Switch
+ | When
+ | External
+ | Typ
+ | Private
+ | Mutable
+ | Constraint
+ | Include
+ | Module
+ | Of
+ | Land
+ | Lor
+ | List
+ | With
+ | Try
+ | Catch
+ | Import
+ | Export => true
+ | _ => false
+ }
+
+ let lookupKeyword = str =>
+ try keywordTable(str) catch {
+ | Not_found =>
+ if CharacterCodes.isUpperCase(int_of_char(@doesNotRaise String.get(str, 0))) {
+ Uident(str)
+ } else {
+ Lident(str)
+ }
+ }
+
+ let isKeywordTxt = str =>
+ try {
+ let _ = keywordTable(str)
+ true
+ } catch {
+ | Not_found => false
+ }
+}
+
+module Grammar = {
+ type t =
+ | OpenDescription /* open Belt */
+ | @live ModuleLongIdent /* Foo or Foo.Bar */
+ | Ternary /* condExpr ? trueExpr : falseExpr */
+ | Es6ArrowExpr
+ | Jsx
+ | JsxAttribute
+ | @live JsxChild
+ | ExprOperand
+ | ExprUnary
+ | ExprSetField
+ | ExprBinaryAfterOp(Token.t)
+ | ExprBlock
+ | ExprCall
+ | ExprList
+ | ExprArrayAccess
+ | ExprArrayMutation
+ | ExprIf
+ | IfCondition
+ | IfBranch
+ | ElseBranch
+ | TypeExpression
+ | External
+ | PatternMatching
+ | PatternMatchCase
+ | LetBinding
+ | PatternList
+ | PatternOcamlList
+ | PatternRecord
+
+ | TypeDef
+ | TypeConstrName
+ | TypeParams
+ | @live TypeParam
+ | PackageConstraint
+
+ | TypeRepresentation
+
+ | RecordDecl
+ | ConstructorDeclaration
+ | ParameterList
+ | StringFieldDeclarations
+ | FieldDeclarations
+ | TypExprList
+ | FunctorArgs
+ | ModExprList
+ | TypeParameters
+ | RecordRows
+ | RecordRowsStringKey
+ | ArgumentList
+ | Signature
+ | Specification
+ | Structure
+ | Implementation
+ | Attribute
+ | TypeConstraint
+ | Primitive
+ | AtomicTypExpr
+ | ListExpr
+ | JsFfiImport
+
+ let toString = x =>
+ switch x {
+ | OpenDescription => "an open description"
+ | ModuleLongIdent => "a module identifier"
+ | Ternary => "a ternary expression"
+ | Es6ArrowExpr => "an es6 arrow function"
+ | Jsx => "a jsx expression"
+ | JsxAttribute => "a jsx attribute"
+ | ExprOperand => "a basic expression"
+ | ExprUnary => "a unary expression"
+ | ExprBinaryAfterOp(op) => "an expression after the operator \"" ++ (Token.toString(op) ++ "\"")
+ | ExprIf => "an if expression"
+ | IfCondition => "the condition of an if expression"
+ | IfBranch => "the true-branch of an if expression"
+ | ElseBranch => "the else-branch of an if expression"
+ | TypeExpression => "a type"
+ | External => "an external"
+ | PatternMatching => "the cases of a pattern match"
+ | ExprBlock => "a block with expressions"
+ | ExprSetField => "a record field mutation"
+ | ExprCall => "a function application"
+ | ExprArrayAccess => "an array access expression"
+ | ExprArrayMutation => "an array mutation"
+ | LetBinding => "a let binding"
+ | TypeDef => "a type definition"
+ | TypeParams => "type parameters"
+ | TypeParam => "a type parameter"
+ | TypeConstrName => "a type-constructor name"
+ | TypeRepresentation => "a type representation"
+ | RecordDecl => "a record declaration"
+ | PatternMatchCase => "a pattern match case"
+ | ConstructorDeclaration => "a constructor declaration"
+ | ExprList => "multiple expressions"
+ | PatternList => "multiple patterns"
+ | PatternOcamlList => "a list pattern"
+ | PatternRecord => "a record pattern"
+ | ParameterList => "parameters"
+ | StringFieldDeclarations => "string field declarations"
+ | FieldDeclarations => "field declarations"
+ | TypExprList => "list of types"
+ | FunctorArgs => "functor arguments"
+ | ModExprList => "list of module expressions"
+ | TypeParameters => "list of type parameters"
+ | RecordRows => "rows of a record"
+ | RecordRowsStringKey => "rows of a record with string keys"
+ | ArgumentList => "arguments"
+ | Signature => "signature"
+ | Specification => "specification"
+ | Structure => "structure"
+ | Implementation => "implementation"
+ | Attribute => "an attribute"
+ | TypeConstraint => "constraints on a type"
+ | Primitive => "an external primitive"
+ | AtomicTypExpr => "a type"
+ | ListExpr => "an ocaml list expr"
+ | PackageConstraint => "a package constraint"
+ | JsFfiImport => "js ffi import"
+ | JsxChild => "jsx child"
+ }
+
+ let isSignatureItemStart = x =>
+ switch x {
+ | Token.At
+ | Let
+ | Typ
+ | External
+ | Exception
+ | Open
+ | Include
+ | Module
+ | AtAt
+ | PercentPercent => true
+ | _ => false
+ }
+
+ let isAtomicPatternStart = x =>
+ switch x {
+ | Token.Int(_)
+ | String(_)
+ | Character(_)
+ | Lparen
+ | Lbracket
+ | Lbrace
+ | Underscore
+ | Lident(_)
+ | Uident(_)
+ | List
+ | Exception
+ | Lazy
+ | Percent => true
+ | _ => false
+ }
+
+ let isAtomicExprStart = x =>
+ switch x {
+ | Token.True
+ | False
+ | Int(_)
+ | String(_)
+ | Float(_)
+ | Character(_)
+ | Backtick
+ | Uident(_)
+ | Lident(_)
+ | Hash
+ | Lparen
+ | List
+ | Lbracket
+ | Lbrace
+ | LessThan
+ | Module
+ | Percent => true
+ | _ => false
+ }
+
+ let isAtomicTypExprStart = x =>
+ switch x {
+ | Token.SingleQuote
+ | Underscore
+ | Lparen
+ | Lbrace
+ | Uident(_)
+ | Lident(_)
+ | List
+ | Percent => true
+ | _ => false
+ }
+
+ let isExprStart = x =>
+ switch x {
+ | Token.True
+ | False
+ | Int(_)
+ | String(_)
+ | Float(_)
+ | Character(_)
+ | Backtick
+ | Underscore
+ | Uident(_)
+ | Lident(_)
+ | Hash
+ | Lparen
+ | List
+ | Module
+ | Lbracket
+ | Lbrace
+ | LessThan
+ | Minus
+ | MinusDot
+ | Plus
+ | PlusDot
+ | Bang
+ | Percent
+ | At
+ | If
+ | Switch
+ | While
+ | For
+ | Assert
+ | Lazy
+ | Try => true
+ | _ => false
+ }
+
+ let isJsxAttributeStart = x =>
+ switch x {
+ | Token.Lident(_) | Question => true
+ | _ => false
+ }
+
+ let isStructureItemStart = x =>
+ switch x {
+ | Token.Open
+ | Let
+ | Typ
+ | External
+ | Import
+ | Export
+ | Exception
+ | Include
+ | Module
+ | AtAt
+ | PercentPercent
+ | At => true
+ | t when isExprStart(t) => true
+ | _ => false
+ }
+
+ let isPatternStart = x =>
+ switch x {
+ | Token.Int(_)
+ | Float(_)
+ | String(_)
+ | Character(_)
+ | True
+ | False
+ | Minus
+ | Plus
+ | Lparen
+ | Lbracket
+ | Lbrace
+ | List
+ | Underscore
+ | Lident(_)
+ | Uident(_)
+ | Hash
+ | HashHash
+ | Exception
+ | Lazy
+ | Percent
+ | Module
+ | At => true
+ | _ => false
+ }
+
+ let isParameterStart = x =>
+ switch x {
+ | Token.Typ | Tilde | Dot => true
+ | token when isPatternStart(token) => true
+ | _ => false
+ }
+
+ /* TODO: overparse Uident ? */
+ let isStringFieldDeclStart = x =>
+ switch x {
+ | Token.String(_) | At => true
+ | _ => false
+ }
+
+ /* TODO: overparse Uident ? */
+ let isFieldDeclStart = x =>
+ switch x {
+ | Token.At | Mutable | Lident(_) | List => true
+ /* recovery, TODO: this is not ideal… */
+ | Uident(_) => true
+ | t when Token.isKeyword(t) => true
+ | _ => false
+ }
+
+ let isRecordDeclStart = x =>
+ switch x {
+ | Token.At | Mutable | Lident(_) | List => true
+ | _ => false
+ }
+
+ let isTypExprStart = x =>
+ switch x {
+ | Token.At
+ | SingleQuote
+ | Underscore
+ | Lparen
+ | Lbracket
+ | Uident(_)
+ | Lident(_)
+ | List
+ | Module
+ | Percent
+ | Lbrace => true
+ | _ => false
+ }
+
+ let isTypeParameterStart = x =>
+ switch x {
+ | Token.Tilde | Dot => true
+ | token when isTypExprStart(token) => true
+ | _ => false
+ }
+
+  let isTypeParamStart = x => // variance markers (+/-) or a type variable ('a, _) start a type param
+    switch x {
+    | Token.Plus | Minus | SingleQuote | Underscore => true
+    | _ => false
+    }
+
+ let isFunctorArgStart = x =>
+ switch x {
+ | Token.At | Uident(_) | Underscore | Percent | Lbrace | Lparen => true
+ | _ => false
+ }
+
+ let isModExprStart = x =>
+ switch x {
+ | Token.At | Percent | Uident(_) | Lbrace | Lparen => true
+ | _ => false
+ }
+
+ let isRecordRowStart = x =>
+ switch x {
+ | Token.DotDotDot => true
+ | Token.Uident(_) | Lident(_) | List => true
+ /* TODO */
+ | t when Token.isKeyword(t) => true
+ | _ => false
+ }
+
+  let isRecordRowStringKeyStart = x => // string-keyed rows: {"key": value}
+    switch x {
+    | Token.String(_) => true
+    | _ => false
+    }
+
+ let isArgumentStart = x =>
+ switch x {
+ | Token.Tilde | Dot | Underscore => true
+ | t when isExprStart(t) => true
+ | _ => false
+ }
+
+ let isPatternMatchStart = x =>
+ switch x {
+ | Token.Bar => true
+ | t when isPatternStart(t) => true
+ | _ => false
+ }
+
+ let isPatternOcamlListStart = x =>
+ switch x {
+ | Token.DotDotDot => true
+ | t when isPatternStart(t) => true
+ | _ => false
+ }
+
+ let isPatternRecordItemStart = x =>
+ switch x {
+ | Token.DotDotDot | Uident(_) | Lident(_) | List | Underscore => true
+ | _ => false
+ }
+
+  let isAttributeStart = x => // attributes always begin with `@`
+    switch x {
+    | Token.At => true
+    | _ => false
+    }
+
+ let isJsFfiImportStart = x =>
+ switch x {
+ | Token.Lident(_) | At => true
+ | _ => false
+ }
+
+  let isJsxChildStart = isAtomicExprStart // a jsx child starts exactly like an atomic expression
+
+ let isBlockExprStart = x =>
+ switch x {
+ | Token.At
+ | Hash
+ | Percent
+ | Minus
+ | MinusDot
+ | Plus
+ | PlusDot
+ | Bang
+ | True
+ | False
+ | Int(_)
+ | String(_)
+ | Character(_)
+ | Lident(_)
+ | Uident(_)
+ | Lparen
+ | List
+ | Lbracket
+ | Lbrace
+ | Forwardslash
+ | Assert
+ | Lazy
+ | If
+ | For
+ | While
+ | Switch
+ | Open
+ | Module
+ | Exception
+ | Let
+ | LessThan
+ | Backtick
+ | Try
+ | Underscore => true
+ | _ => false
+ }
+
+ let isListElement = (grammar, token) =>
+ switch grammar {
+ | ExprList => token == Token.DotDotDot || isExprStart(token)
+ | ListExpr => token == DotDotDot || isExprStart(token)
+ | PatternList => token == DotDotDot || isPatternStart(token)
+ | ParameterList => isParameterStart(token)
+ | StringFieldDeclarations => isStringFieldDeclStart(token)
+ | FieldDeclarations => isFieldDeclStart(token)
+ | RecordDecl => isRecordDeclStart(token)
+ | TypExprList => isTypExprStart(token) || token == Token.LessThan
+ | TypeParams => isTypeParamStart(token)
+ | FunctorArgs => isFunctorArgStart(token)
+ | ModExprList => isModExprStart(token)
+ | TypeParameters => isTypeParameterStart(token)
+ | RecordRows => isRecordRowStart(token)
+ | RecordRowsStringKey => isRecordRowStringKeyStart(token)
+ | ArgumentList => isArgumentStart(token)
+ | Signature | Specification => isSignatureItemStart(token)
+ | Structure | Implementation => isStructureItemStart(token)
+ | PatternMatching => isPatternMatchStart(token)
+ | PatternOcamlList => isPatternOcamlListStart(token)
+ | PatternRecord => isPatternRecordItemStart(token)
+ | Attribute => isAttributeStart(token)
+ | TypeConstraint => token == Constraint
+ | PackageConstraint => token == And
+ | ConstructorDeclaration => token == Bar
+ | Primitive =>
+ switch token {
+ | Token.String(_) => true
+ | _ => false
+ }
+ | JsxAttribute => isJsxAttributeStart(token)
+ | JsFfiImport => isJsFfiImportStart(token)
+ | _ => false
+ }
+
+ let isListTerminator = (grammar, token) =>
+ switch (grammar, token) {
+ | (_, Token.Eof)
+ | (ExprList, Rparen | Forwardslash | Rbracket)
+ | (ListExpr, Rparen)
+ | (ArgumentList, Rparen)
+ | (TypExprList, Rparen | Forwardslash | GreaterThan | Equal)
+ | (ModExprList, Rparen)
+ | (
+ PatternList | PatternOcamlList | PatternRecord,
+ Forwardslash | Rbracket | Rparen | EqualGreater | In | Equal /* let {x} = foo */,
+ )
+ | (ExprBlock, Rbrace)
+ | (Structure | Signature, Rbrace)
+ | (TypeParams, Rparen)
+ | (ParameterList, EqualGreater | Lbrace)
+ | (JsxAttribute, Forwardslash | GreaterThan)
+ | (JsFfiImport, Rbrace)
+ | (StringFieldDeclarations, Rbrace) => true
+
+ | (Attribute, token) when token != At => true
+ | (TypeConstraint, token) when token != Constraint => true
+ | (PackageConstraint, token) when token != And => true
+ | (ConstructorDeclaration, token) when token != Bar => true
+ | (Primitive, Semicolon) => true
+ | (Primitive, token) when isStructureItemStart(token) => true
+
+ | _ => false
+ }
+
+ let isPartOfList = (grammar, token) =>
+ isListElement(grammar, token) || isListTerminator(grammar, token)
+}
+
+module Reporting = {
+ module TerminalDoc = {
+ type break =
+ | Never
+ | Always
+
+ type rec document =
+ | Nil
+ | Group({break: break, doc: document})
+ | Text(string)
+ | Indent({amount: int, doc: document})
+ | Append({doc1: document, doc2: document})
+
+ let group = (~break, doc) => Group({break: break, doc: doc})
+ let text = txt => Text(txt)
+ let indent = (i, d) => Indent({amount: i, doc: d})
+ let append = (d1, d2) => Append({doc1: d1, doc2: d2})
+ let nil = Nil
+
+ type rec stack =
+ | Empty
+ | Cons({doc: document, stack: stack})
+
+ let push = (stack, doc) => Cons({doc: doc, stack: stack})
+
+ type mode =
+ | Flat
+ | Break
+
+ let toString /* ~width */ = (doc: document) => {
+ let buffer = Buffer.create(100)
+ let rec loop = (stack, mode, offset) =>
+ switch stack {
+ | Empty => ()
+ | Cons({doc, stack: rest}) =>
+ switch doc {
+ | Nil => loop(rest, mode, offset)
+ | Text(txt) =>
+ Buffer.add_string(buffer, txt)
+ loop(rest, mode, offset + String.length(txt))
+ | Indent({amount: i, doc}) =>
+ let indentation = (@doesNotRaise String.make)(i, ' ')
+ Buffer.add_string(buffer, indentation)
+ loop(push(rest, doc), mode, offset + i)
+ | Append({doc1, doc2}) =>
+ let rest = push(rest, doc2)
+ let rest = push(rest, mode == Flat ? Nil : text("\n"))
+
+ let rest = push(rest, doc1)
+ loop(rest, mode, offset)
+ | Group({break, doc}) =>
+ let rest = push(rest, doc)
+ switch break {
+ | Always => loop(rest, Break, offset)
+ | Never => loop(rest, Flat, offset)
+ }
+ }
+ }
+
+ loop(push(Empty, doc), Flat, 0)
+ Buffer.contents(buffer)
+ }
+ }
+
+ type color =
+ | @live NoColor
+ | @live Red
+
+ type style = {
+ @live underline: bool,
+ @live color: color,
+ }
+
+ let highlight = (~from, ~len, txt) =>
+ if from < 0 || (String.length(txt) === 0 || from >= String.length(txt)) {
+ txt
+ } else {
+ let before = try String.sub(txt, 0, from) catch {
+ | Invalid_argument(_) => ""
+ }
+ let content =
+ "\027[31m" ++
+ (try String.sub(txt, from, len) catch {
+ | Invalid_argument(_) => ""
+ } ++
+ "\027[0m")
+
+ let after = try String.sub(txt, from + len, String.length(txt) - (from + len)) catch {
+ | Invalid_argument(_) => ""
+ }
+ before ++ (content ++ after)
+ }
+
+ let underline = (~from, ~len, txt) => {
+ open TerminalDoc
+ let indent = (@doesNotRaise String.make)(from, ' ')
+ let underline = (@doesNotRaise String.make)(len, '^')
+ let line = highlight(~from=0, ~len, underline)
+ group(~break=Always, append(text(txt), text(indent ++ line)))
+ }
+
+ let rec drop = (n, l) =>
+ if n === 1 {
+ l
+ } else {
+ drop(
+ n - 1,
+ switch l {
+ | list{_x, ...xs} => xs
+ | _ => l
+ },
+ )
+ }
+
+ let rec take = (n, l) =>
+ switch l {
+ | _ when n === 0 => list{}
+ | list{} => list{}
+ | list{x, ...xs} => list{x, ...take(n - 1, xs)}
+ }
+
+ /* TODO: cleanup */
+ let renderCodeContext = (~missing, src: string, startPos, endPos) => {
+ open Lexing
+ let startCol = startPos.pos_cnum - startPos.pos_bol
+ let endCol = endPos.pos_cnum - startPos.pos_cnum + startCol
+ let startLine = max(1, startPos.pos_lnum - 2) /* 2 lines before */
+ let lines = String.split_on_char('\n', src)
+ let endLine = {
+ let len = List.length(lines)
+ min(len, startPos.pos_lnum + 3)
+ } /* 2 lines after */
+
+ let lines = lines |> drop(startLine) |> take(endLine - startLine) |> Array.of_list
+
+ let renderLine = (x, ix) => {
+ let x = if ix == startPos.pos_lnum {
+ switch missing {
+ | Some(_len) => x ++ @doesNotRaise String.make(10, ' ')
+ | None => x
+ }
+ } else {
+ x
+ }
+
+ open TerminalDoc
+ let rowNr = {
+ let txt = string_of_int(ix)
+ let len = String.length(txt)
+ if ix == startPos.pos_lnum {
+ highlight(~from=0, ~len, txt)
+ } else {
+ txt
+ }
+ }
+
+ let len = {
+ let len = if endCol >= 0 {
+ endCol - startCol
+ } else {
+ 1
+ }
+
+ if startCol + len > String.length(x) {
+ String.length(x) - startCol - 1
+ } else {
+ len
+ }
+ }
+
+ let line = if ix == startPos.pos_lnum {
+ switch missing {
+ | Some(len) =>
+ underline(
+ ~from=startCol + String.length(String.length(string_of_int(ix)) |> string_of_int) + 5,
+ ~len,
+ x,
+ )
+ | None =>
+ let len = if startCol + len > String.length(x) {
+ String.length(x) - startCol
+ } else {
+ len
+ }
+
+ text(highlight(~from=startCol, ~len, x))
+ }
+ } else {
+ text(x)
+ }
+
+ group(~break=Never, append(append(text(rowNr), text(" │")), indent(2, line)))
+ }
+
+ let reportDoc = ref(TerminalDoc.nil)
+
+ let linesLen = Array.length(lines)
+ for i in 0 to linesLen - 1 {
+ let line = try lines[i] catch {
+ | Invalid_argument(_) => ""
+ }
+ reportDoc := {
+ open TerminalDoc
+ let ix = startLine + i
+ group(~break=Always, append(reportDoc.contents, renderLine(line, ix)))
+ }
+ }
+
+ TerminalDoc.toString(reportDoc.contents)
+ }
+
+  type problem = // a single parse problem surfaced to the user
+    | @live Unexpected(Token.t)
+    | @live Expected({token: Token.t, pos: Lexing.position, context: option<Grammar.t>}) // context: the grammar being parsed when the token was expected
+    | @live Message(string)
+    | @live Uident
+    | @live Lident
+    | @live Unbalanced(Token.t)
+
+  type parseError = (Lexing.position, problem)
+}
+
+module Diagnostics: {
+ type t
+ type category
+ type report
+
+ type reportStyle
+ let parseReportStyle: string => reportStyle
+
+ let unexpected: (Token.t, list<(Grammar.t, Lexing.position)>) => category
+ let expected: (~grammar: Grammar.t=?, Lexing.position, Token.t) => category
+ let uident: Token.t => category
+ let lident: Token.t => category
+ let unclosedString: category
+ let unclosedTemplate: category
+ let unclosedComment: category
+ let unknownUchar: int => category
+ let message: string => category
+
+ let make: (~filename: string, ~startPos: Lexing.position, ~endPos: Lexing.position, category) => t
+
+  let stringOfReport: (~style: reportStyle, list<t>, string) => string
+} = {
+  type category = // kind of diagnostic; mirrors the constructors exposed in the signature above
+    | Unexpected({token: Token.t, context: list<(Grammar.t, Lexing.position)>})
+    | Expected({
+        context: option<Grammar.t>,
+        pos: Lexing.position /* prev token end */,
+        token: Token.t,
+      })
+    | Message(string)
+    | Uident(Token.t)
+    | Lident(Token.t)
+    | UnclosedString
+    | UnclosedTemplate
+    | UnclosedComment
+    | UnknownUchar(int)
+
+ type t = {
+ filename: string,
+ startPos: Lexing.position,
+ endPos: Lexing.position,
+ category: category,
+ }
+
+  type report = list<t> // a report is simply the list of collected diagnostics
+
+ /* TODO: add json here */
+ type reportStyle =
+ | Pretty
+ | Plain
+
+ let parseReportStyle = txt =>
+ switch String.lowercase_ascii(txt) {
+ | "plain" => Plain
+ | _ => Pretty
+ }
+
+ let defaultUnexpected = token =>
+ "I'm not sure what to parse here when looking at \"" ++ (Token.toString(token) ++ "\".")
+
+ let explain = t =>
+ switch t.category {
+ | Uident(currentToken) =>
+ switch currentToken {
+ | Lident(lident) =>
+ let guess = String.capitalize_ascii(lident)
+ "Did you mean `" ++ (guess ++ ("` instead of `" ++ (lident ++ "`?")))
+ | t when Token.isKeyword(t) =>
+ let token = Token.toString(t)
+ "`" ++ (token ++ "` is a reserved keyword.")
+ | _ => "At this point, I'm looking for an uppercased identifier like `Belt` or `Array`"
+ }
+ | Lident(currentToken) =>
+ switch currentToken {
+ | Uident(uident) =>
+ let guess = String.uncapitalize_ascii(uident)
+ "Did you mean `" ++ (guess ++ ("` instead of `" ++ (uident ++ "`?")))
+ | t when Token.isKeyword(t) =>
+ let token = Token.toString(t)
+ "`" ++
+ (token ++
+ ("` is a reserved keyword. Keywords need to be escaped: \\\"" ++ (token ++ "\"")))
+ | Underscore => "`_` isn't a valid name."
+ | _ => "I'm expecting an lowercased identifier like `name` or `age`"
+ }
+ | Message(txt) => txt
+ | UnclosedString => "This string is missing a double quote at the end"
+ | UnclosedTemplate => "Did you forget to close this template expression with a backtick?"
+ | UnclosedComment => "This comment seems to be missing a closing `*/`"
+ | UnknownUchar(uchar) =>
+ switch uchar {
+ | 94 /* ^ */ => "Hmm, not sure what I should do here with this character.\nIf you're trying to deref an expression, use `foo.contents` instead."
+ | _ => "Hmm, I have no idea what this character means…"
+ }
+ | Expected({context, token: t}) =>
+ let hint = switch context {
+ | Some(grammar) => "It signals the start of " ++ Grammar.toString(grammar)
+ | None => ""
+ }
+
+ "Did you forget a `" ++ (Token.toString(t) ++ ("` here? " ++ hint))
+ | Unexpected({token: t, context: breadcrumbs}) =>
+ let name = Token.toString(t)
+ switch breadcrumbs {
+ | list{(AtomicTypExpr, _), ...breadcrumbs} =>
+ switch (breadcrumbs, t) {
+ | (
+ list{(StringFieldDeclarations | FieldDeclarations, _), ..._},
+ String(_) | At | Rbrace | Comma | Eof,
+ ) => "I'm missing a type here"
+ | (_, t) when Grammar.isStructureItemStart(t) || t == Eof => "Missing a type here"
+ | _ => defaultUnexpected(t)
+ }
+ | list{(ExprOperand, _), ...breadcrumbs} =>
+ switch (breadcrumbs, t) {
+ | (list{(ExprBlock, _), ..._}, Rbrace) => "It seems that this expression block is empty"
+ | (list{(ExprBlock, _), ..._}, Bar) => /* Pattern matching */
+ "Looks like there might be an expression missing here"
+ | (
+ list{(ExprSetField, _), ..._},
+ _,
+ ) => "It seems that this record field mutation misses an expression"
+ | (
+ list{(ExprArrayMutation, _), ..._},
+ _,
+ ) => "Seems that an expression is missing, with what do I mutate the array?"
+ | (
+ list{(ExprBinaryAfterOp(_) | ExprUnary, _), ..._},
+ _,
+ ) => "Did you forget to write an expression here?"
+ | (list{(Grammar.LetBinding, _), ..._}, _) => "This let-binding misses an expression"
+ | (list{_, ..._}, Rbracket | Rbrace) => "Missing expression"
+ | _ => "I'm not sure what to parse here when looking at \"" ++ (name ++ "\".")
+ }
+ | list{(TypeParam, _), ..._} =>
+ switch t {
+ | Lident(ident) => "Did you mean '" ++ (ident ++ "? A Type parameter starts with a quote.")
+ | _ => "I'm not sure what to parse here when looking at \"" ++ (name ++ "\".")
+ }
+ | _ =>
+ /* TODO: match on circumstance to verify Lident needed ? */
+ if Token.isKeyword(t) {
+ "`" ++
+ (name ++
+ ("` is a reserved keyword. Keywords need to be escaped: \\\"" ++
+ (Token.toString(t) ++
+ "\"")))
+ } else {
+ "I'm not sure what to parse here when looking at \"" ++ (name ++ "\".")
+ }
+ }
+ }
+
+ let toPlainString = (t, buffer) => {
+ Buffer.add_string(buffer, t.filename)
+ Buffer.add_char(buffer, '(')
+ Buffer.add_string(buffer, string_of_int(t.startPos.pos_cnum))
+ Buffer.add_char(buffer, ',')
+ Buffer.add_string(buffer, string_of_int(t.endPos.pos_cnum))
+ Buffer.add_char(buffer, ')')
+ Buffer.add_char(buffer, ':')
+ Buffer.add_string(buffer, explain(t))
+ }
+
+ let toString = (t, src) => {
+ open Lexing
+ let startchar = t.startPos.pos_cnum - t.startPos.pos_bol
+ let endchar = t.endPos.pos_cnum - t.startPos.pos_cnum + startchar
+ let locationInfo = Printf.sprintf /* ReasonLanguageServer requires the following format */(
+ "File \"%s\", line %d, characters %d-%d:",
+ t.filename,
+ t.startPos.pos_lnum,
+ startchar,
+ endchar,
+ )
+
+ let code = {
+ let missing = switch t.category {
+ | Expected({token: t}) => Some(String.length(Token.toString(t)))
+ | _ => None
+ }
+
+ Reporting.renderCodeContext(~missing, src, t.startPos, t.endPos)
+ }
+
+ let explanation = explain(t)
+ Printf.sprintf("%s\n\n%s\n\n%s\n\n", locationInfo, code, explanation)
+ }
+
+ let make = (~filename, ~startPos, ~endPos, category) => {
+ filename: filename,
+ startPos: startPos,
+ endPos: endPos,
+ category: category,
+ }
+
+ let stringOfReport = (~style, diagnostics, src) =>
+ switch style {
+ | Pretty =>
+ List.fold_left(
+ (report, diagnostic) => report ++ (toString(diagnostic, src) ++ "\n"),
+ "\n",
+ List.rev(diagnostics),
+ )
+ | Plain =>
+ let buffer = Buffer.create(100)
+ List.iter(diagnostic => {
+ toPlainString(diagnostic, buffer)
+ Buffer.add_char(buffer, '\n')
+ }, diagnostics)
+ Buffer.contents(buffer)
+ }
+
+ let unexpected = (token, context) => Unexpected({token: token, context: context})
+
+ let expected = (~grammar=?, pos, token) => Expected({context: grammar, pos: pos, token: token})
+
+ let uident = currentToken => Uident(currentToken)
+ let lident = currentToken => Lident(currentToken)
+ let unclosedString = UnclosedString
+ let unclosedComment = UnclosedComment
+ let unclosedTemplate = UnclosedTemplate
+ let unknownUchar = code => UnknownUchar(code)
+ let message = txt => Message(txt)
+}
+
+/* Collection of utilities to view the ast in a more a convenient form,
+ * allowing for easier processing.
+ * Example: given a ptyp_arrow type, what are its arguments and what is the
+ * returnType? */
+module ParsetreeViewer: {
+ /* Restructures a nested tree of arrow types into its args & returnType
+ * The parsetree contains: a => b => c => d, for printing purposes
+ * we restructure the tree into (a, b, c) and its returnType d */
+ let arrowType: Parsetree.core_type => (
+ Parsetree.attributes,
+ list<(Parsetree.attributes, Asttypes.arg_label, Parsetree.core_type)>,
+ Parsetree.core_type,
+ )
+
+  let functorType: Parsetree.module_type => (
+    list<(Parsetree.attributes, Asttypes.loc<string>, option<Parsetree.module_type>)>,
+    Parsetree.module_type,
+  )
+
+ /* filters @bs out of the provided attributes */
+ let processUncurriedAttribute: Parsetree.attributes => (bool, Parsetree.attributes)
+
+ /* if ... else if ... else ... is represented as nested expressions: if ... else { if ... }
+ * The purpose of this function is to flatten nested ifs into one sequence.
+ * Basically compute: ([if, else if, else if, else if], else) */
+  let collectIfExpressions: Parsetree.expression => (
+    list<(Parsetree.expression, Parsetree.expression)>,
+    option<Parsetree.expression>,
+  )
+
+  let collectListExpressions: Parsetree.expression => (
+    list<Parsetree.expression>,
+    option<Parsetree.expression>,
+  )
+
+  type funParamKind =
+    | Parameter({
+        attrs: Parsetree.attributes,
+        lbl: Asttypes.arg_label,
+        defaultExpr: option<Parsetree.expression>,
+        pat: Parsetree.pattern,
+      })
+    | NewTypes({attrs: Parsetree.attributes, locs: list<Asttypes.loc<string>>})
+
+  let funExpr: Parsetree.expression => (
+    Parsetree.attributes,
+    list<funParamKind>,
+    Parsetree.expression,
+  )
+
+  /* example:
+   * `makeCoordinate({
+   *   x: 1,
+   *   y: 2,
+   * })`
+   * Notice how `({` and `})` "hug" or stick to each other */
+  let isHuggableExpression: Parsetree.expression => bool
+
+ let isHuggablePattern: Parsetree.pattern => bool
+
+ let isHuggableRhs: Parsetree.expression => bool
+
+ let operatorPrecedence: string => int
+
+ let isUnaryExpression: Parsetree.expression => bool
+ let isBinaryOperator: string => bool
+ let isBinaryExpression: Parsetree.expression => bool
+
+ let flattenableOperators: (string, string) => bool
+
+ let hasAttributes: Parsetree.attributes => bool
+
+ let isArrayAccess: Parsetree.expression => bool
+ let isTernaryExpr: Parsetree.expression => bool
+
+ let collectTernaryParts: Parsetree.expression => (
+ list<(Parsetree.expression, Parsetree.expression)>,
+ Parsetree.expression,
+ )
+
+  let parametersShouldHug: list<funParamKind> => bool
+
+ let filterTernaryAttributes: Parsetree.attributes => Parsetree.attributes
+
+ let isJsxExpression: Parsetree.expression => bool
+ let hasJsxAttribute: Parsetree.attributes => bool
+
+ let shouldIndentBinaryExpr: Parsetree.expression => bool
+ let shouldInlineRhsBinaryExpr: Parsetree.expression => bool
+ let filterPrinteableAttributes: Parsetree.attributes => Parsetree.attributes
+ let partitionPrinteableAttributes: Parsetree.attributes => (
+ Parsetree.attributes,
+ Parsetree.attributes,
+ )
+
+ let requiresSpecialCallbackPrintingLastArg: list<(
+ Asttypes.arg_label,
+ Parsetree.expression,
+ )> => bool
+ let requiresSpecialCallbackPrintingFirstArg: list<(
+ Asttypes.arg_label,
+ Parsetree.expression,
+ )> => bool
+
+  let modExprApply: Parsetree.module_expr => (list<Parsetree.module_expr>, Parsetree.module_expr)
+
+  let modExprFunctor: Parsetree.module_expr => (
+    list<(Parsetree.attributes, Asttypes.loc<string>, option<Parsetree.module_type>)>,
+    Parsetree.module_expr,
+  )
+
+ let splitGenTypeAttr: Parsetree.attributes => (bool, Parsetree.attributes)
+
+  let collectPatternsFromListConstruct: (
+    list<Parsetree.pattern>,
+    Parsetree.pattern,
+  ) => (list<Parsetree.pattern>, Parsetree.pattern)
+
+ let isBlockExpr: Parsetree.expression => bool
+
+ let isTemplateLiteral: Parsetree.expression => bool
+
+  let collectOrPatternChain: Parsetree.pattern => list<Parsetree.pattern>
+
+  let processBracesAttr: Parsetree.expression => (option<Parsetree.attribute>, Parsetree.expression)
+
+ let filterParsingAttrs: Parsetree.attributes => Parsetree.attributes
+
+ let isBracedExpr: Parsetree.expression => bool
+
+ let isPipeExpr: Parsetree.expression => bool
+
+  let extractValueDescriptionFromModExpr: Parsetree.module_expr => list<Parsetree.value_description>
+
+  type jsImportScope =
+    | JsGlobalImport /* nothing */
+    | JsModuleImport(string) /* from "path" */
+    | JsScopedImport(list<string>) /* window.location */
+
+ let classifyJsImport: Parsetree.value_description => jsImportScope
+
+ /* (__x) => f(a, __x, c) -----> f(a, _, c) */
+ let rewriteUnderscoreApply: Parsetree.expression => Parsetree.expression
+
+ /* (__x) => f(a, __x, c) -----> f(a, _, c) */
+ let isUnderscoreApplySugar: Parsetree.expression => bool
+} = {
+ open Parsetree
+
+ let arrowType = ct => {
+ let rec process = (attrsBefore, acc, typ) =>
+ switch typ {
+ | {ptyp_desc: Ptyp_arrow(Nolabel as lbl, typ1, typ2), ptyp_attributes: list{}} =>
+ let arg = (list{}, lbl, typ1)
+ process(attrsBefore, list{arg, ...acc}, typ2)
+ | {
+ ptyp_desc: Ptyp_arrow(Nolabel as lbl, typ1, typ2),
+ ptyp_attributes: list{({txt: "bs"}, _)} as attrs,
+ } =>
+ let arg = (attrs, lbl, typ1)
+ process(attrsBefore, list{arg, ...acc}, typ2)
+ | {ptyp_desc: Ptyp_arrow(Nolabel, _typ1, _typ2), ptyp_attributes: _attrs} as returnType =>
+ let args = List.rev(acc)
+ (attrsBefore, args, returnType)
+ | {
+ ptyp_desc: Ptyp_arrow((Labelled(_) | Optional(_)) as lbl, typ1, typ2),
+ ptyp_attributes: attrs,
+ } =>
+ let arg = (attrs, lbl, typ1)
+ process(attrsBefore, list{arg, ...acc}, typ2)
+ | typ => (attrsBefore, List.rev(acc), typ)
+ }
+
+ switch ct {
+ | {ptyp_desc: Ptyp_arrow(Nolabel, _typ1, _typ2), ptyp_attributes: attrs} as typ =>
+ process(attrs, list{}, {...typ, ptyp_attributes: list{}})
+ | typ => process(list{}, list{}, typ)
+ }
+ }
+
+ let functorType = modtype => {
+ let rec process = (acc, modtype) =>
+ switch modtype {
+ | {pmty_desc: Pmty_functor(lbl, argType, returnType), pmty_attributes: attrs} =>
+ let arg = (attrs, lbl, argType)
+ process(list{arg, ...acc}, returnType)
+ | modType => (List.rev(acc), modType)
+ }
+
+ process(list{}, modtype)
+ }
+
+ let processUncurriedAttribute = attrs => {
+ let rec process = (uncurriedSpotted, acc, attrs) =>
+ switch attrs {
+ | list{} => (uncurriedSpotted, List.rev(acc))
+ | list{({Location.txt: "bs"}, _), ...rest} => process(true, acc, rest)
+ | list{attr, ...rest} => process(uncurriedSpotted, list{attr, ...acc}, rest)
+ }
+
+ process(false, list{}, attrs)
+ }
+
+ let collectIfExpressions = expr => {
+ let rec collect = (acc, expr) =>
+ switch expr.pexp_desc {
+ | Pexp_ifthenelse(ifExpr, thenExpr, Some(elseExpr)) =>
+ collect(list{(ifExpr, thenExpr), ...acc}, elseExpr)
+ | Pexp_ifthenelse(ifExpr, thenExpr, None as elseExpr) =>
+ let ifs = List.rev(list{(ifExpr, thenExpr), ...acc})
+ (ifs, elseExpr)
+ | _ => (List.rev(acc), Some(expr))
+ }
+
+ collect(list{}, expr)
+ }
+
+ let collectListExpressions = expr => {
+ let rec collect = (acc, expr) =>
+ switch expr.pexp_desc {
+ | Pexp_construct({txt: Longident.Lident("[]")}, _) => (List.rev(acc), None)
+ | Pexp_construct(
+ {txt: Longident.Lident("::")},
+ Some({pexp_desc: Pexp_tuple(list{hd, tail})}),
+ ) =>
+ collect(list{hd, ...acc}, tail)
+ | _ => (List.rev(acc), Some(expr))
+ }
+
+ collect(list{}, expr)
+ }
+
+ /* (__x) => f(a, __x, c) -----> f(a, _, c) */
+ let rewriteUnderscoreApply = expr =>
+ switch expr.pexp_desc {
+ | Pexp_fun(
+ Nolabel,
+ None,
+ {ppat_desc: Ppat_var({txt: "__x"})},
+ {pexp_desc: Pexp_apply(callExpr, args)} as e,
+ ) =>
+ let newArgs = List.map(arg =>
+ switch arg {
+ | (lbl, {pexp_desc: Pexp_ident({txt: Longident.Lident("__x")} as lid)} as argExpr) => (
+ lbl,
+ {...argExpr, pexp_desc: Pexp_ident({...lid, txt: Longident.Lident("_")})},
+ )
+ | arg => arg
+ }
+ , args)
+ {...e, pexp_desc: Pexp_apply(callExpr, newArgs)}
+ | _ => expr
+ }
+
+  type funParamKind = // must stay in sync with the declaration in the module signature
+    | Parameter({
+        attrs: Parsetree.attributes,
+        lbl: Asttypes.arg_label,
+        defaultExpr: option<Parsetree.expression>,
+        pat: Parsetree.pattern,
+      })
+    | NewTypes({attrs: Parsetree.attributes, locs: list<Asttypes.loc<string>>})
+
+ let funExpr = expr => {
+ /* Turns (type t, type u, type z) into "type t u z" */
+ let rec collectNewTypes = (acc, returnExpr) =>
+ switch returnExpr {
+ | {pexp_desc: Pexp_newtype(stringLoc, returnExpr), pexp_attributes: list{}} =>
+ collectNewTypes(list{stringLoc, ...acc}, returnExpr)
+ | returnExpr => (List.rev(acc), returnExpr)
+ }
+
+ let rec collect = (attrsBefore, acc, expr) =>
+ switch expr {
+ | {
+ pexp_desc:
+ Pexp_fun(
+ Nolabel,
+ None,
+ {ppat_desc: Ppat_var({txt: "__x"})},
+ {pexp_desc: Pexp_apply(_)},
+ ),
+ } => (attrsBefore, List.rev(acc), rewriteUnderscoreApply(expr))
+ | {pexp_desc: Pexp_fun(lbl, defaultExpr, pattern, returnExpr), pexp_attributes: list{}} =>
+ let parameter = Parameter({
+ attrs: list{},
+ lbl: lbl,
+ defaultExpr: defaultExpr,
+ pat: pattern,
+ })
+ collect(attrsBefore, list{parameter, ...acc}, returnExpr)
+ | {pexp_desc: Pexp_newtype(stringLoc, rest), pexp_attributes: attrs} =>
+ let (stringLocs, returnExpr) = collectNewTypes(list{stringLoc}, rest)
+ let param = NewTypes({attrs: attrs, locs: stringLocs})
+ collect(attrsBefore, list{param, ...acc}, returnExpr)
+ | {
+ pexp_desc: Pexp_fun(lbl, defaultExpr, pattern, returnExpr),
+ pexp_attributes: list{({txt: "bs"}, _)} as attrs,
+ } =>
+ let parameter = Parameter({
+ attrs: attrs,
+ lbl: lbl,
+ defaultExpr: defaultExpr,
+ pat: pattern,
+ })
+ collect(attrsBefore, list{parameter, ...acc}, returnExpr)
+ | {
+ pexp_desc: Pexp_fun((Labelled(_) | Optional(_)) as lbl, defaultExpr, pattern, returnExpr),
+ pexp_attributes: attrs,
+ } =>
+ let parameter = Parameter({
+ attrs: attrs,
+ lbl: lbl,
+ defaultExpr: defaultExpr,
+ pat: pattern,
+ })
+ collect(attrsBefore, list{parameter, ...acc}, returnExpr)
+ | expr => (attrsBefore, List.rev(acc), expr)
+ }
+
+ switch expr {
+ | {
+ pexp_desc: Pexp_fun(Nolabel, _defaultExpr, _pattern, _returnExpr),
+ pexp_attributes: attrs,
+ } as expr =>
+ collect(attrs, list{}, {...expr, pexp_attributes: list{}})
+ | expr => collect(list{}, list{}, expr)
+ }
+ }
+
+ let processBracesAttr = expr =>
+ switch expr.pexp_attributes {
+ | list{({txt: "ns.braces"}, _) as attr, ...attrs} => (
+ Some(attr),
+ {...expr, pexp_attributes: attrs},
+ )
+ | _ => (None, expr)
+ }
+
+ let filterParsingAttrs = attrs => List.filter(attr =>
+ switch attr {
+ | ({Location.txt: "ns.ternary" | "ns.braces" | "bs" | "ns.namedArgLoc"}, _) => false
+ | _ => true
+ }
+ , attrs)
+
+ let isBlockExpr = expr =>
+ switch expr.pexp_desc {
+ | Pexp_letmodule(_)
+ | Pexp_letexception(_)
+ | Pexp_let(_)
+ | Pexp_open(_)
+ | Pexp_sequence(_) => true
+ | _ => false
+ }
+
+ let isBracedExpr = expr =>
+ switch processBracesAttr(expr) {
+ | (Some(_), _) => true
+ | _ => false
+ }
+
+ let isHuggableExpression = expr =>
+ switch expr.pexp_desc {
+ | Pexp_array(_)
+ | Pexp_tuple(_)
+ | Pexp_construct({txt: Longident.Lident("::" | "[]")}, _)
+ | Pexp_extension({txt: "bs.obj"}, _)
+ | Pexp_record(_) => true
+ | _ when isBlockExpr(expr) => true
+ | _ when isBracedExpr(expr) => true
+ | _ => false
+ }
+
+ let isHuggableRhs = expr =>
+ switch expr.pexp_desc {
+ | Pexp_array(_)
+ | Pexp_tuple(_)
+ | Pexp_construct({txt: Longident.Lident("::" | "[]")}, _)
+ | Pexp_extension({txt: "bs.obj"}, _)
+ | Pexp_record(_) => true
+ | _ when isBracedExpr(expr) => true
+ | _ => false
+ }
+
+ let isHuggablePattern = pattern =>
+ switch pattern.ppat_desc {
+ | Ppat_array(_) | Ppat_tuple(_) | Ppat_record(_) | Ppat_construct(_) => true
+ | _ => false
+ }
+
+ let operatorPrecedence = operator =>
+ switch operator {
+ | ":=" => 1
+ | "||" => 2
+ | "&&" => 3
+ | "=" | "==" | "<" | ">" | "!=" | "<>" | "!==" | "<=" | ">=" | "|>" => 4
+ | "+" | "+." | "-" | "-." | "^" => 5
+ | "*" | "*." | "/" | "/." => 6
+ | "**" => 7
+ | "#" | "##" | "|." => 8
+ | _ => 0
+ }
+
+ let isUnaryOperator = operator =>
+ switch operator {
+ | "~+" | "~+." | "~-" | "~-." | "not" => true
+ | _ => false
+ }
+
+ let isUnaryExpression = expr =>
+ switch expr.pexp_desc {
+ | Pexp_apply({pexp_desc: Pexp_ident({txt: Longident.Lident(operator)})}, list{(Nolabel, _arg)})
+ when isUnaryOperator(operator) => true
+ | _ => false
+ }
+
+ let isBinaryOperator = operator =>
+ switch operator {
+ | ":="
+ | "||"
+ | "&&"
+ | "="
+ | "=="
+ | "<"
+ | ">"
+ | "!="
+ | "!=="
+ | "<="
+ | ">="
+ | "|>"
+ | "+"
+ | "+."
+ | "-"
+ | "-."
+ | "^"
+ | "*"
+ | "*."
+ | "/"
+ | "/."
+ | "**"
+ | "|."
+ | "<>" => true
+ | _ => false
+ }
+
+ let isBinaryExpression = expr =>
+ switch expr.pexp_desc {
+ | Pexp_apply(
+ {pexp_desc: Pexp_ident({txt: Longident.Lident(operator)})},
+ list{(Nolabel, _operand1), (Nolabel, _operand2)},
+ ) when isBinaryOperator(operator) => true
+ | _ => false
+ }
+
+ let isEqualityOperator = operator =>
+ switch operator {
+ | "=" | "==" | "<>" | "!=" => true
+ | _ => false
+ }
+
+ let flattenableOperators = (parentOperator, childOperator) => {
+ let precParent = operatorPrecedence(parentOperator)
+ let precChild = operatorPrecedence(childOperator)
+ if precParent === precChild {
+ !(isEqualityOperator(parentOperator) && isEqualityOperator(childOperator))
+ } else {
+ false
+ }
+ }
+
+ /* True if attrs contains any attribute other than the internal parsing
+ markers "bs", "ns.ternary" and "ns.braces". */
+ let hasAttributes = attrs => List.exists(attr =>
+ switch attr {
+ | ({Location.txt: "bs" | "ns.ternary" | "ns.braces"}, _) => false
+ | _ => true
+ }
+ , attrs)
+
+ /* Matches the desugared form of arr[i]: Array.get(arr, i). */
+ let isArrayAccess = expr =>
+ switch expr.pexp_desc {
+ | Pexp_apply(
+ {pexp_desc: Pexp_ident({txt: Longident.Ldot(Lident("Array"), "get")})},
+ list{(Nolabel, _parentExpr), (Nolabel, _memberExpr)},
+ ) => true
+ | _ => false
+ }
+
+ /* True if the attribute list contains the internal "ns.ternary" marker,
+ which tags if/else expressions parsed from ternary (a ? b : c) syntax. */
+ let rec hasTernaryAttribute = attrs =>
+ switch attrs {
+ | list{} => false
+ | list{({Location.txt: "ns.ternary"}, _), ..._} => true
+ | list{_, ...attrs} => hasTernaryAttribute(attrs)
+ }
+
+ /* An if/then/else that originated from ternary syntax (tagged "ns.ternary"). */
+ let isTernaryExpr = expr =>
+ switch expr {
+ | {pexp_attributes: attrs, pexp_desc: Pexp_ifthenelse(_)}
+ when hasTernaryAttribute(attrs) => true
+ | _ => false
+ }
+
+ /* Flattens a chain of ternaries a ? b : c ? d : e into
+ ([(a, b), (c, d)], e): the list of (condition, consequent) pairs in
+ source order plus the final alternate. */
+ let collectTernaryParts = expr => {
+ let rec collect = (acc, expr) =>
+ switch expr {
+ | {pexp_attributes: attrs, pexp_desc: Pexp_ifthenelse(condition, consequent, Some(alternate))}
+ when hasTernaryAttribute(attrs) =>
+ collect(list{(condition, consequent), ...acc}, alternate)
+ | alternate => (List.rev(acc), alternate)
+ }
+
+ collect(list{}, expr)
+ }
+
+ /* A single plain (no attrs, no label, no default) huggable pattern parameter
+ can be printed hugging the arrow's parens. */
+ let parametersShouldHug = parameters =>
+ switch parameters {
+ | list{Parameter({attrs: list{}, lbl: Asttypes.Nolabel, defaultExpr: None, pat})}
+ when isHuggablePattern(pat) => true
+ | _ => false
+ }
+
+ /* Drops the internal "ns.ternary" marker; keeps all other attributes. */
+ let filterTernaryAttributes = attrs => List.filter(attr =>
+ switch attr {
+ | ({Location.txt: "ns.ternary"}, _) => false
+ | _ => true
+ }
+ , attrs)
+
+ /* An application carrying the "JSX" attribute, i.e. an element produced by
+ JSX syntax. Only Pexp_apply nodes are considered. */
+ let isJsxExpression = expr => {
+ let rec loop = attrs =>
+ switch attrs {
+ | list{} => false
+ | list{({Location.txt: "JSX"}, _), ..._} => true
+ | list{_, ...attrs} => loop(attrs)
+ }
+
+ switch expr.pexp_desc {
+ | Pexp_apply(_) => loop(expr.Parsetree.pexp_attributes)
+ | _ => false
+ }
+ }
+
+ /* True only when "JSX" is the *first* attribute in the list (unlike
+ isJsxExpression, which scans the whole list). */
+ let hasJsxAttribute = attributes =>
+ switch attributes {
+ | list{({Location.txt: "JSX"}, _), ..._} => true
+ | _ => false
+ }
+
+ /* Whether a binary expression's rhs should be indented when it breaks:
+ equality operators and := always indent; otherwise indent when the lhs is
+ not a flattenable sub-expression of the same precedence. */
+ let shouldIndentBinaryExpr = expr => {
+ let samePrecedenceSubExpression = (operator, subExpression) =>
+ switch subExpression {
+ | {
+ pexp_desc:
+ Pexp_apply(
+ {pexp_desc: Pexp_ident({txt: Longident.Lident(subOperator)})},
+ list{(Nolabel, _lhs), (Nolabel, _rhs)},
+ ),
+ } when isBinaryOperator(subOperator) =>
+ flattenableOperators(operator, subOperator)
+ | _ => true
+ }
+
+ switch expr {
+ | {
+ pexp_desc:
+ Pexp_apply(
+ {pexp_desc: Pexp_ident({txt: Longident.Lident(operator)})},
+ list{(Nolabel, lhs), (Nolabel, _rhs)},
+ ),
+ } when isBinaryOperator(operator) =>
+ isEqualityOperator(operator) ||
+ (!samePrecedenceSubExpression(operator, lhs) ||
+ operator == ":=")
+ | _ => false
+ }
+ }
+
+ /* Rhs forms (constants, let/try/if/loops, arrays, records, …) that are
+ printed inline after the operator rather than broken to a new line. */
+ let shouldInlineRhsBinaryExpr = rhs =>
+ switch rhs.pexp_desc {
+ | Parsetree.Pexp_constant(_)
+ | Pexp_let(_)
+ | Pexp_letmodule(_)
+ | Pexp_letexception(_)
+ | Pexp_sequence(_)
+ | Pexp_open(_)
+ | Pexp_ifthenelse(_)
+ | Pexp_for(_)
+ | Pexp_while(_)
+ | Pexp_try(_)
+ | Pexp_array(_)
+ | Pexp_record(_) => true
+ | _ => false
+ }
+
+ /* Keeps only attributes that should appear in printed output (drops the
+ internal "bs" and "ns.ternary" markers). */
+ let filterPrinteableAttributes = attrs => List.filter(attr =>
+ switch attr {
+ | ({Location.txt: "bs" | "ns.ternary"}, _) => false
+ | _ => true
+ }
+ , attrs)
+
+ /* Splits attrs into (printable, internal) using the same predicate as
+ filterPrinteableAttributes. */
+ let partitionPrinteableAttributes = attrs => List.partition(attr =>
+ switch attr {
+ | ({Location.txt: "bs" | "ns.ternary"}, _) => false
+ | _ => true
+ }
+ , attrs)
+
+ /* True when the *last* argument (and only the last) is a function literal,
+ so the call can be printed with a trailing-callback layout. */
+ let requiresSpecialCallbackPrintingLastArg = args => {
+ let rec loop = args =>
+ switch args {
+ | list{} => false
+ | list{(_, {pexp_desc: Pexp_fun(_) | Pexp_newtype(_)})} => true
+ | list{(_, {pexp_desc: Pexp_fun(_) | Pexp_newtype(_)}), ..._} => false
+ | list{_, ...rest} => loop(rest)
+ }
+
+ loop(args)
+ }
+
+ /* True when the *first* argument is a function literal and no later argument
+ is one (and there is at least one later argument), enabling the
+ leading-callback layout. */
+ let requiresSpecialCallbackPrintingFirstArg = args => {
+ let rec loop = args =>
+ switch args {
+ | list{} => true
+ | list{(_, {pexp_desc: Pexp_fun(_) | Pexp_newtype(_)}), ..._} => false
+ | list{_, ...rest} => loop(rest)
+ }
+
+ switch args {
+ | list{(_, {pexp_desc: Pexp_fun(_) | Pexp_newtype(_)})} => false
+ | list{(_, {pexp_desc: Pexp_fun(_) | Pexp_newtype(_)}), ...rest} => loop(rest)
+ | _ => false
+ }
+ }
+
+ /* Unrolls nested module applications F(A)(B) into (args, functorExpr).
+ Args come out in source order because each is consed while descending. */
+ let modExprApply = modExpr => {
+ let rec loop = (acc, modExpr) =>
+ switch modExpr {
+ | {pmod_desc: Pmod_apply(next, arg)} => loop(list{arg, ...acc}, next)
+ | _ => (acc, modExpr)
+ }
+
+ loop(list{}, modExpr)
+ }
+
+ /* Collects curried functor parameters: returns the parameter list
+ (attrs, label, module type) in source order plus the final body. */
+ let modExprFunctor = modExpr => {
+ let rec loop = (acc, modExpr) =>
+ switch modExpr {
+ | {pmod_desc: Pmod_functor(lbl, modType, returnModExpr), pmod_attributes: attrs} =>
+ let param = (attrs, lbl, modType)
+ loop(list{param, ...acc}, returnModExpr)
+ | returnModExpr => (List.rev(acc), returnModExpr)
+ }
+
+ loop(list{}, modExpr)
+ }
+
+ /* Detaches a leading "genType" attribute: (hasGenType, remainingAttrs).
+ Only checks the head of the list. */
+ let splitGenTypeAttr = attrs =>
+ switch attrs {
+ | list{({Location.txt: "genType"}, _), ...attrs} => (true, attrs)
+ | attrs => (false, attrs)
+ }
+
+ /* Unrolls a cons-cell pattern chain a :: b :: tail into
+ (elements in source order, final tail pattern). */
+ let rec collectPatternsFromListConstruct = (acc, pattern) => {
+ open Parsetree
+ switch pattern.ppat_desc {
+ | Ppat_construct(
+ {txt: Longident.Lident("::")},
+ Some({ppat_desc: Ppat_tuple(list{pat, rest})}),
+ ) =>
+ collectPatternsFromListConstruct(list{pat, ...acc}, rest)
+ | _ => (List.rev(acc), pattern)
+ }
+ }
+
+ /* Heuristic: does this expression come from a template literal? True for a
+ delimited string constant (Pconst_string with Some delimiter) or a "^"
+ concatenation chain containing one, excluding the case where both operands
+ are plain delimited strings. */
+ let rec isTemplateLiteral = expr => {
+ let isPexpConstantString = expr =>
+ switch expr.pexp_desc {
+ | Pexp_constant(Pconst_string(_, Some(_))) => true
+ | _ => false
+ }
+
+ switch expr.pexp_desc {
+ | Pexp_apply(
+ {pexp_desc: Pexp_ident({txt: Longident.Lident("^")})},
+ list{(Nolabel, arg1), (Nolabel, arg2)},
+ ) when !(isPexpConstantString(arg1) && isPexpConstantString(arg2)) =>
+ isTemplateLiteral(arg1) || isTemplateLiteral(arg2)
+ | Pexp_constant(Pconst_string(_, Some(_))) => true
+ | _ => false
+ }
+ }
+
+ /* Blue | Red | Green -> [Blue; Red; Green] */
+ /* Flattens a left-nested or-pattern into its alternatives in source order. */
+ let collectOrPatternChain = pat => {
+ let rec loop = (pattern, chain) =>
+ switch pattern.ppat_desc {
+ | Ppat_or(left, right) => loop(left, list{right, ...chain})
+ | _ => list{pattern, ...chain}
+ }
+
+ loop(pat, list{})
+ }
+
+ /* An application of a pipe operator ("|." or "|>") to two unlabelled args. */
+ let isPipeExpr = expr =>
+ switch expr.pexp_desc {
+ | Pexp_apply(
+ {pexp_desc: Pexp_ident({txt: Longident.Lident("|." | "|>")})},
+ list{(Nolabel, _operand1), (Nolabel, _operand2)},
+ ) => true
+ | _ => false
+ }
+
+ /* Collects every external (Pstr_primitive) value description from a module
+ structure, in source order. Non-structure module exprs yield the empty list. */
+ let extractValueDescriptionFromModExpr = modExpr => {
+ let rec loop = (structure, acc) =>
+ switch structure {
+ | list{} => List.rev(acc)
+ | list{structureItem, ...structure} =>
+ switch structureItem.Parsetree.pstr_desc {
+ | Pstr_primitive(vd) => loop(structure, list{vd, ...acc})
+ | _ => loop(structure, acc)
+ }
+ }
+
+ switch modExpr.pmod_desc {
+ | Pmod_structure(structure) => loop(structure, list{})
+ | _ => list{}
+ }
+ }
+
+ /* Where an external JS binding is imported from, derived from its attributes.
+ The scoped case carries the access path segments, e.g. window.location. */
+ type jsImportScope =
+   | JsGlobalImport /* nothing */
+   | JsModuleImport(string) /* from "path" */
+   | JsScopedImport(list<string>) /* window.location */
+
+ /* Classifies an external's import scope from its attributes: first match of
+ "bs.scope" (string payload -> single-segment scope; tuple payload -> path of
+ string segments) or "genType.import" (module path) wins; otherwise global. */
+ let classifyJsImport = valueDescription => {
+ let rec loop = attrs => {
+ open Parsetree
+ switch attrs {
+ | list{} => JsGlobalImport
+ | list{
+ (
+ {Location.txt: "bs.scope"},
+ PStr(list{{pstr_desc: Pstr_eval({pexp_desc: Pexp_constant(Pconst_string(s, _))}, _)}}),
+ ),
+ ..._,
+ } =>
+ JsScopedImport(list{s})
+ | list{
+ (
+ {Location.txt: "genType.import"},
+ PStr(list{{pstr_desc: Pstr_eval({pexp_desc: Pexp_constant(Pconst_string(s, _))}, _)}}),
+ ),
+ ..._,
+ } =>
+ JsModuleImport(s)
+ | list{
+ (
+ {Location.txt: "bs.scope"},
+ PStr(list{{pstr_desc: Pstr_eval({pexp_desc: Pexp_tuple(exprs)}, _)}}),
+ ),
+ ..._,
+ } =>
+ /* non-string tuple members are silently skipped */
+ let scopes = List.fold_left((acc, curr) =>
+ switch curr.Parsetree.pexp_desc {
+ | Pexp_constant(Pconst_string(s, _)) => list{s, ...acc}
+ | _ => acc
+ }
+ , list{}, exprs)
+
+ JsScopedImport(List.rev(scopes))
+ | list{_, ...attrs} => loop(attrs)
+ }
+ }
+
+ loop(valueDescription.pval_attributes)
+ }
+
+ /* Matches the desugared form of underscore application sugar
+ (foo(_) becomes __x => foo(__x)). */
+ let isUnderscoreApplySugar = expr =>
+ switch expr.pexp_desc {
+ | Pexp_fun(
+ Nolabel,
+ None,
+ {ppat_desc: Ppat_var({txt: "__x"})},
+ {pexp_desc: Pexp_apply(_)},
+ ) => true
+ | _ => false
+ }
+}
+
+/* Decides, per syntactic position, whether an expression must be printed
+ parenthesized, wrapped in braces (position recorded), or bare. */
+module Parens: {
+ type kind = Parenthesized | Braced(Location.t) | Nothing
+
+ let expr: Parsetree.expression => kind
+ let structureExpr: Parsetree.expression => kind
+
+ let unaryExprOperand: Parsetree.expression => kind
+
+ let binaryExprOperand: (~isLhs: bool, Parsetree.expression) => kind
+ let subBinaryExprOperand: (string, string) => bool
+ let rhsBinaryExprOperand: (string, Parsetree.expression) => bool
+ let flattenOperandRhs: (string, Parsetree.expression) => bool
+
+ let lazyOrAssertExprRhs: Parsetree.expression => kind
+
+ let fieldExpr: Parsetree.expression => kind
+
+ let setFieldExprRhs: Parsetree.expression => kind
+
+ let ternaryOperand: Parsetree.expression => kind
+
+ let jsxPropExpr: Parsetree.expression => kind
+ let jsxChildExpr: Parsetree.expression => kind
+
+ let binaryExpr: Parsetree.expression => kind
+ let modTypeFunctorReturn: Parsetree.module_type => bool
+ let modTypeWithOperand: Parsetree.module_type => bool
+ let modExprFunctorConstraint: Parsetree.module_type => bool
+
+ let bracedExpr: Parsetree.expression => bool
+ let callExpr: Parsetree.expression => kind
+
+ let includeModExpr: Parsetree.module_expr => bool
+} = {
+ type kind = Parenthesized | Braced(Location.t) | Nothing
+
+ /* Generic expression position: braces win if the "ns.braces" attr is
+ present; otherwise only constraints need parens (except the first-class
+ module constraint (module M : S), printed without them). */
+ let expr = expr => {
+ let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr)
+ switch optBraces {
+ | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc)
+ | _ =>
+ switch expr {
+ | {
+ Parsetree.pexp_desc:
+ Pexp_constraint({pexp_desc: Pexp_pack(_)}, {ptyp_desc: Ptyp_package(_)}),
+ } =>
+ Nothing
+ | {pexp_desc: Pexp_constraint(_)} => Parenthesized
+ | _ => Nothing
+ }
+ }
+ }
+
+ /* Callee position of an application: parenthesize attributed, unary/binary,
+ and statement-like expressions; underscore-apply sugar and first-class
+ module constraints stay bare. Pattern order is significant. */
+ let callExpr = expr => {
+ let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr)
+ switch optBraces {
+ | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc)
+ | _ =>
+ switch expr {
+ | {Parsetree.pexp_attributes: attrs}
+ when switch ParsetreeViewer.filterParsingAttrs(attrs) {
+ | list{_, ..._} => true
+ | list{} => false
+ } =>
+ Parenthesized
+ | _
+ when ParsetreeViewer.isUnaryExpression(expr) || ParsetreeViewer.isBinaryExpression(expr) =>
+ Parenthesized
+ | {
+ Parsetree.pexp_desc:
+ Pexp_constraint({pexp_desc: Pexp_pack(_)}, {ptyp_desc: Ptyp_package(_)}),
+ } =>
+ Nothing
+ | {pexp_desc: Pexp_fun(_)} when ParsetreeViewer.isUnderscoreApplySugar(expr) => Nothing
+ | {
+ pexp_desc:
+ Pexp_lazy(_)
+ | Pexp_assert(_)
+ | Pexp_fun(_)
+ | Pexp_newtype(_)
+ | Pexp_function(_)
+ | Pexp_constraint(_)
+ | Pexp_setfield(_)
+ | Pexp_match(_)
+ | Pexp_try(_)
+ | Pexp_while(_)
+ | Pexp_for(_)
+ | Pexp_ifthenelse(_),
+ } =>
+ Parenthesized
+ | _ => Nothing
+ }
+ }
+ }
+
+ /* Top-level (structure item) expression position: attributed non-JSX
+ expressions and constraints get parens; first-class module constraints
+ stay bare. */
+ let structureExpr = expr => {
+ let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr)
+ switch optBraces {
+ | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc)
+ | None =>
+ switch expr {
+ | _
+ when ParsetreeViewer.hasAttributes(expr.pexp_attributes) &&
+ !ParsetreeViewer.isJsxExpression(expr) =>
+ Parenthesized
+ | {
+ Parsetree.pexp_desc:
+ Pexp_constraint({pexp_desc: Pexp_pack(_)}, {ptyp_desc: Ptyp_package(_)}),
+ } =>
+ Nothing
+ | {pexp_desc: Pexp_constraint(_)} => Parenthesized
+ | _ => Nothing
+ }
+ }
+ }
+
+ /* Operand of a unary operator: like callExpr but additionally
+ parenthesizes extensions; underscore-apply sugar and first-class module
+ constraints stay bare. */
+ let unaryExprOperand = expr => {
+ let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr)
+ switch optBraces {
+ | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc)
+ | None =>
+ switch expr {
+ | {Parsetree.pexp_attributes: attrs}
+ when switch ParsetreeViewer.filterParsingAttrs(attrs) {
+ | list{_, ..._} => true
+ | list{} => false
+ } =>
+ Parenthesized
+ | expr
+ when ParsetreeViewer.isUnaryExpression(expr) || ParsetreeViewer.isBinaryExpression(expr) =>
+ Parenthesized
+ | {pexp_desc: Pexp_constraint({pexp_desc: Pexp_pack(_)}, {ptyp_desc: Ptyp_package(_)})} =>
+ Nothing
+ | {pexp_desc: Pexp_fun(_)} when ParsetreeViewer.isUnderscoreApplySugar(expr) => Nothing
+ | {
+ pexp_desc:
+ Pexp_lazy(_)
+ | Pexp_assert(_)
+ | Pexp_fun(_)
+ | Pexp_newtype(_)
+ | Pexp_function(_)
+ | Pexp_constraint(_)
+ | Pexp_setfield(_)
+ | Pexp_extension(_)
+ | Pexp_match(_)
+ | Pexp_try(_)
+ | Pexp_while(_)
+ | Pexp_for(_)
+ | Pexp_ifthenelse(_),
+ } =>
+ Parenthesized
+ | _ => Nothing
+ }
+ }
+ }
+
+ /* Operand of a binary operator. ~isLhs: lazy/assert only need parens on the
+ left-hand side. Constraints, arrows and nested binary/ternary expressions
+ always get parens. */
+ let binaryExprOperand = (~isLhs, expr) => {
+ let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr)
+ switch optBraces {
+ | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc)
+ | None =>
+ switch expr {
+ | {
+ Parsetree.pexp_desc:
+ Pexp_constraint({pexp_desc: Pexp_pack(_)}, {ptyp_desc: Ptyp_package(_)}),
+ } =>
+ Nothing
+ | {pexp_desc: Pexp_fun(_)} when ParsetreeViewer.isUnderscoreApplySugar(expr) => Nothing
+ | {pexp_desc: Pexp_constraint(_) | Pexp_fun(_) | Pexp_function(_) | Pexp_newtype(_)} =>
+ Parenthesized
+ | expr when ParsetreeViewer.isBinaryExpression(expr) => Parenthesized
+ | expr when ParsetreeViewer.isTernaryExpr(expr) => Parenthesized
+ | {pexp_desc: Pexp_lazy(_) | Pexp_assert(_)} when isLhs => Parenthesized
+ | _ => Nothing
+ }
+ }
+ }
+
+ /* Parenthesize a nested binary operand when the parent binds tighter, when
+ equal precedence is not flattenable, or for readability in a && inside ||. */
+ let subBinaryExprOperand = (parentOperator, childOperator) => {
+ let precParent = ParsetreeViewer.operatorPrecedence(parentOperator)
+ let precChild = ParsetreeViewer.operatorPrecedence(childOperator)
+ precParent > precChild ||
+ ((precParent === precChild &&
+ !ParsetreeViewer.flattenableOperators(parentOperator, childOperator)) ||
+ /* a && b || c, add parens to (a && b) for readability, who knows the difference by heart… */
+ parentOperator == "||" && childOperator == "&&")
+ }
+
+ /* Parenthesize an un-attributed binary rhs when its operator has the same
+ precedence as the parent (right side would otherwise re-associate). */
+ let rhsBinaryExprOperand = (parentOperator, rhs) =>
+ switch rhs.Parsetree.pexp_desc {
+ | Parsetree.Pexp_apply(
+ {pexp_attributes: list{}, pexp_desc: Pexp_ident({txt: Longident.Lident(operator)})},
+ list{(_, _left), (_, _right)},
+ ) when ParsetreeViewer.isBinaryOperator(operator) =>
+ let precParent = ParsetreeViewer.operatorPrecedence(parentOperator)
+ let precChild = ParsetreeViewer.operatorPrecedence(operator)
+ precParent === precChild
+ | _ => false
+ }
+
+ /* Whether the rhs of a binary chain keeps the chain from flattening:
+ binary rhs with >= parent precedence or any attributes, arrows, newtypes,
+ setfield, constraints (except first-class module packs and underscore
+ sugar) and ternaries all block flattening. */
+ let flattenOperandRhs = (parentOperator, rhs) =>
+ switch rhs.Parsetree.pexp_desc {
+ | Parsetree.Pexp_apply(
+ {pexp_desc: Pexp_ident({txt: Longident.Lident(operator)})},
+ list{(_, _left), (_, _right)},
+ ) when ParsetreeViewer.isBinaryOperator(operator) =>
+ let precParent = ParsetreeViewer.operatorPrecedence(parentOperator)
+ let precChild = ParsetreeViewer.operatorPrecedence(operator)
+ precParent >= precChild || rhs.pexp_attributes != list{}
+ | Pexp_constraint({pexp_desc: Pexp_pack(_)}, {ptyp_desc: Ptyp_package(_)}) => false
+ | Pexp_fun(_) when ParsetreeViewer.isUnderscoreApplySugar(rhs) => false
+ | Pexp_fun(_) | Pexp_newtype(_) | Pexp_setfield(_) | Pexp_constraint(_) => true
+ | _ when ParsetreeViewer.isTernaryExpr(rhs) => true
+ | _ => false
+ }
+
+ /* Argument position of lazy/assert: same shape as unaryExprOperand but
+ binary-only (unary operands stay bare) and no extension case. */
+ let lazyOrAssertExprRhs = expr => {
+ let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr)
+ switch optBraces {
+ | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc)
+ | None =>
+ switch expr {
+ | {Parsetree.pexp_attributes: attrs}
+ when switch ParsetreeViewer.filterParsingAttrs(attrs) {
+ | list{_, ..._} => true
+ | list{} => false
+ } =>
+ Parenthesized
+ | expr when ParsetreeViewer.isBinaryExpression(expr) => Parenthesized
+ | {pexp_desc: Pexp_constraint({pexp_desc: Pexp_pack(_)}, {ptyp_desc: Ptyp_package(_)})} =>
+ Nothing
+ | {pexp_desc: Pexp_fun(_)} when ParsetreeViewer.isUnderscoreApplySugar(expr) => Nothing
+ | {
+ pexp_desc:
+ Pexp_lazy(_)
+ | Pexp_assert(_)
+ | Pexp_fun(_)
+ | Pexp_newtype(_)
+ | Pexp_function(_)
+ | Pexp_constraint(_)
+ | Pexp_setfield(_)
+ | Pexp_match(_)
+ | Pexp_try(_)
+ | Pexp_while(_)
+ | Pexp_for(_)
+ | Pexp_ifthenelse(_),
+ } =>
+ Parenthesized
+ | _ => Nothing
+ }
+ }
+ }
+
+ /* True for int/float constants whose textual form starts with '-'. */
+ let isNegativeConstant = constant => {
+ let isNeg = txt => {
+ let len = String.length(txt)
+ len > 0 && (@doesNotRaise String.get)(txt, 0) == '-'
+ }
+
+ switch constant {
+ | Parsetree.Pconst_integer(i, _) | Pconst_float(i, _) when isNeg(i) => true
+ | _ => false
+ }
+ }
+
+ /* Parent position of a field access (expr.field): also parenthesizes
+ negative numeric constants, so -1.x prints as (-1).x. */
+ let fieldExpr = expr => {
+ let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr)
+ switch optBraces {
+ | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc)
+ | None =>
+ switch expr {
+ | {Parsetree.pexp_attributes: attrs}
+ when switch ParsetreeViewer.filterParsingAttrs(attrs) {
+ | list{_, ..._} => true
+ | list{} => false
+ } =>
+ Parenthesized
+ | expr
+ when ParsetreeViewer.isBinaryExpression(expr) || ParsetreeViewer.isUnaryExpression(expr) =>
+ Parenthesized
+ | {pexp_desc: Pexp_constraint({pexp_desc: Pexp_pack(_)}, {ptyp_desc: Ptyp_package(_)})} =>
+ Nothing
+ | {pexp_desc: Pexp_constant(c)} when isNegativeConstant(c) => Parenthesized
+ | {pexp_desc: Pexp_fun(_)} when ParsetreeViewer.isUnderscoreApplySugar(expr) => Nothing
+ | {
+ pexp_desc:
+ Pexp_lazy(_)
+ | Pexp_assert(_)
+ | Pexp_extension(_)
+ | Pexp_fun(_)
+ | Pexp_newtype(_)
+ | Pexp_function(_)
+ | Pexp_constraint(_)
+ | Pexp_setfield(_)
+ | Pexp_match(_)
+ | Pexp_try(_)
+ | Pexp_while(_)
+ | Pexp_for(_)
+ | Pexp_ifthenelse(_),
+ } =>
+ Parenthesized
+ | _ => Nothing
+ }
+ }
+ }
+
+ /* Rhs of a field assignment (expr.field = rhs): only constraints need
+ parens, first-class module constraints excepted. */
+ let setFieldExprRhs = expr => {
+ let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr)
+ switch optBraces {
+ | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc)
+ | None =>
+ switch expr {
+ | {
+ Parsetree.pexp_desc:
+ Pexp_constraint({pexp_desc: Pexp_pack(_)}, {ptyp_desc: Ptyp_package(_)}),
+ } =>
+ Nothing
+ | {pexp_desc: Pexp_constraint(_)} => Parenthesized
+ | _ => Nothing
+ }
+ }
+ }
+
+ /* Operand of a ternary: constraints need parens; an arrow needs them only
+ when its return expression is itself a constraint (the colon would be
+ ambiguous with the ternary's ':'). */
+ let ternaryOperand = expr => {
+ let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr)
+ switch optBraces {
+ | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc)
+ | None =>
+ switch expr {
+ | {
+ Parsetree.pexp_desc:
+ Pexp_constraint({pexp_desc: Pexp_pack(_)}, {ptyp_desc: Ptyp_package(_)}),
+ } =>
+ Nothing
+ | {pexp_desc: Pexp_constraint(_)} => Parenthesized
+ | {pexp_desc: Pexp_fun(_) | Pexp_newtype(_)} =>
+ let (_attrsOnArrow, _parameters, returnExpr) = ParsetreeViewer.funExpr(expr)
+ switch returnExpr.pexp_desc {
+ | Pexp_constraint(_) => Parenthesized
+ | _ => Nothing
+ }
+ | _ => Nothing
+ }
+ }
+ }
+
+ /* True when the (non-empty) string begins with '-'. */
+ let startsWithMinus = txt => {
+ let len = String.length(txt)
+ if len === 0 {
+ false
+ } else {
+ let s = (@doesNotRaise String.get)(txt, 0)
+ s == '-'
+ }
+ }
+
+ /* Value of a JSX prop: block-like forms stay bare (they get braces from the
+ JSX printer), negative numeric literals get parens, simple un-attributed
+ leaf forms stay bare, everything else is parenthesized. */
+ let jsxPropExpr = expr =>
+ switch expr.Parsetree.pexp_desc {
+ | Parsetree.Pexp_let(_)
+ | Pexp_sequence(_)
+ | Pexp_letexception(_)
+ | Pexp_letmodule(_)
+ | Pexp_open(_) =>
+ Nothing
+ | _ =>
+ let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr)
+ switch optBraces {
+ | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc)
+ | None =>
+ switch expr {
+ | {
+ Parsetree.pexp_desc: Pexp_constant(Pconst_integer(x, _) | Pconst_float(x, _)),
+ pexp_attributes: list{},
+ } when startsWithMinus(x) =>
+ Parenthesized
+ | {
+ Parsetree.pexp_desc:
+ Pexp_ident(_)
+ | Pexp_constant(_)
+ | Pexp_field(_)
+ | Pexp_construct(_)
+ | Pexp_variant(_)
+ | Pexp_array(_)
+ | Pexp_pack(_)
+ | Pexp_record(_)
+ | Pexp_extension(_)
+ | Pexp_letmodule(_)
+ | Pexp_letexception(_)
+ | Pexp_open(_)
+ | Pexp_sequence(_)
+ | Pexp_let(_)
+ | Pexp_tuple(_),
+ pexp_attributes: list{},
+ } =>
+ Nothing
+ | {
+ Parsetree.pexp_desc:
+ Pexp_constraint({pexp_desc: Pexp_pack(_)}, {ptyp_desc: Ptyp_package(_)}),
+ pexp_attributes: list{},
+ } =>
+ Nothing
+ | _ => Parenthesized
+ }
+ }
+ }
+
+ /* Child position inside a JSX element: like jsxPropExpr, but tuples are NOT
+ in the bare-leaf list and nested JSX elements always stay bare. */
+ let jsxChildExpr = expr =>
+ switch expr.Parsetree.pexp_desc {
+ | Parsetree.Pexp_let(_)
+ | Pexp_sequence(_)
+ | Pexp_letexception(_)
+ | Pexp_letmodule(_)
+ | Pexp_open(_) =>
+ Nothing
+ | _ =>
+ let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr)
+ switch optBraces {
+ | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc)
+ | _ =>
+ switch expr {
+ | {
+ Parsetree.pexp_desc: Pexp_constant(Pconst_integer(x, _) | Pconst_float(x, _)),
+ pexp_attributes: list{},
+ } when startsWithMinus(x) =>
+ Parenthesized
+ | {
+ Parsetree.pexp_desc:
+ Pexp_ident(_)
+ | Pexp_constant(_)
+ | Pexp_field(_)
+ | Pexp_construct(_)
+ | Pexp_variant(_)
+ | Pexp_array(_)
+ | Pexp_pack(_)
+ | Pexp_record(_)
+ | Pexp_extension(_)
+ | Pexp_letmodule(_)
+ | Pexp_letexception(_)
+ | Pexp_open(_)
+ | Pexp_sequence(_)
+ | Pexp_let(_),
+ pexp_attributes: list{},
+ } =>
+ Nothing
+ | {
+ Parsetree.pexp_desc:
+ Pexp_constraint({pexp_desc: Pexp_pack(_)}, {ptyp_desc: Ptyp_package(_)}),
+ pexp_attributes: list{},
+ } =>
+ Nothing
+ | expr when ParsetreeViewer.isJsxExpression(expr) => Nothing
+ | _ => Parenthesized
+ }
+ }
+ }
+
+ /* A whole binary expression: parenthesize only when it carries attributes
+ (so the attributes scope over the full expression). */
+ let binaryExpr = expr => {
+ let (optBraces, _) = ParsetreeViewer.processBracesAttr(expr)
+ switch optBraces {
+ | Some({Location.loc: bracesLoc}, _) => Braced(bracesLoc)
+ | None =>
+ switch expr {
+ | {Parsetree.pexp_attributes: list{_, ..._}} as expr
+ when ParsetreeViewer.isBinaryExpression(expr) =>
+ Parenthesized
+ | _ => Nothing
+ }
+ }
+ }
+
+ /* Parenthesize a functor's return module type when it is a `with` type. */
+ let modTypeFunctorReturn = modType =>
+ switch modType {
+ | {Parsetree.pmty_desc: Pmty_with(_)} => true
+ | _ => false
+ }
+
+ /* Add parens for readability:
+ module type Functor = SetLike => Set with type t = A.t
+ This is actually:
+ module type Functor = (SetLike => Set) with type t = A.t
+ */
+ let modTypeWithOperand = modType =>
+ switch modType {
+ | {Parsetree.pmty_desc: Pmty_functor(_)} => true
+ | _ => false
+ }
+
+ /* Parenthesize a functor-parameter constraint when the module type is
+ itself a functor or a `with` type. */
+ let modExprFunctorConstraint = modType =>
+ switch modType {
+ | {Parsetree.pmty_desc: Pmty_functor(_) | Pmty_with(_)} => true
+ | _ => false
+ }
+
+ /* Inside braces, only a (non-module-pack) constraint needs extra parens. */
+ let bracedExpr = expr =>
+ switch expr.Parsetree.pexp_desc {
+ | Pexp_constraint({pexp_desc: Pexp_pack(_)}, {ptyp_desc: Ptyp_package(_)}) => false
+ | Pexp_constraint(_) => true
+ | _ => false
+ }
+
+ /* include (M : S) needs parens around the constrained module expression. */
+ let includeModExpr = modExpr =>
+ switch modExpr.Parsetree.pmod_desc {
+ | Parsetree.Pmod_constraint(_) => true
+ | _ => false
+ }
+}
+
+module CommentTable = {
+ /* Comments bucketed by their position relative to an AST node's location.
+ NOTE: extraction had eaten the generic type arguments here; restored. */
+ type t = {
+   leading: Hashtbl.t<Location.t, list<Comment.t>>, /* before the node */
+   inside: Hashtbl.t<Location.t, list<Comment.t>>, /* within the node's span */
+   trailing: Hashtbl.t<Location.t, list<Comment.t>>, /* after the node */
+ }
+
+ /* Fresh comment table; 100 is just an initial Hashtbl capacity hint. */
+ let make = () => {
+ leading: Hashtbl.create(100),
+ inside: Hashtbl.create(100),
+ trailing: Hashtbl.create(100),
+ }
+
+ /* Shared empty table. NOTE(review): Hashtbls are mutable, so any attach
+ into `empty` is globally visible — presumably it is never written to. */
+ let empty = make()
+
+ @live
+ /* Debug helper: prints the leading and trailing buckets of a comment table
+ to stdout, one "[line:col-line:col] comments…" entry per location.
+ NOTE: extraction had eaten the `<Comment.t>` type arguments on the two
+ fold callbacks; restored. */
+ let log = t => {
+ open Location
+ let leadingStuff = Hashtbl.fold((k: Location.t, v: list<Comment.t>, acc) => {
+ let loc = Doc.concat(list{
+ Doc.lbracket,
+ Doc.text(string_of_int(k.loc_start.pos_lnum)),
+ Doc.text(":"),
+ Doc.text(string_of_int(k.loc_start.pos_cnum - k.loc_start.pos_bol)),
+ Doc.text("-"),
+ Doc.text(string_of_int(k.loc_end.pos_lnum)),
+ Doc.text(":"),
+ Doc.text(string_of_int(k.loc_end.pos_cnum - k.loc_end.pos_bol)),
+ Doc.rbracket,
+ })
+ let doc = Doc.breakableGroup(
+ ~forceBreak=true,
+ Doc.concat(list{
+ loc,
+ Doc.indent(
+ Doc.concat(list{
+ Doc.line,
+ Doc.join(~sep=Doc.comma, List.map(c => Doc.text(Comment.txt(c)), v)),
+ }),
+ ),
+ Doc.line,
+ }),
+ )
+ list{doc, ...acc}
+ }, t.leading, list{})
+
+ let trailingStuff = Hashtbl.fold((k: Location.t, v: list<Comment.t>, acc) => {
+ let loc = Doc.concat(list{
+ Doc.lbracket,
+ Doc.text(string_of_int(k.loc_start.pos_lnum)),
+ Doc.text(":"),
+ Doc.text(string_of_int(k.loc_start.pos_cnum - k.loc_start.pos_bol)),
+ Doc.text("-"),
+ Doc.text(string_of_int(k.loc_end.pos_lnum)),
+ Doc.text(":"),
+ Doc.text(string_of_int(k.loc_end.pos_cnum - k.loc_end.pos_bol)),
+ Doc.rbracket,
+ })
+ let doc = Doc.breakableGroup(
+ ~forceBreak=true,
+ Doc.concat(list{
+ loc,
+ Doc.indent(
+ Doc.concat(list{
+ Doc.line,
+ Doc.join(
+ ~sep=Doc.concat(list{Doc.comma, Doc.line}),
+ List.map(c => Doc.text(Comment.txt(c)), v),
+ ),
+ }),
+ ),
+ Doc.line,
+ }),
+ )
+ list{doc, ...acc}
+ }, t.trailing, list{})
+
+ Doc.breakableGroup(
+ ~forceBreak=true,
+ Doc.concat(list{
+ Doc.text("leading comments:"),
+ Doc.line,
+ Doc.indent(Doc.concat(leadingStuff)),
+ Doc.line,
+ Doc.line,
+ Doc.text("trailing comments:"),
+ Doc.indent(Doc.concat(trailingStuff)),
+ Doc.line,
+ Doc.line,
+ }),
+ )
+ |> Doc.toString(~width=80)
+ |> print_endline
+ }
+ /* Records comments under loc; no-op for an empty list. Hashtbl.replace
+ overwrites any comments previously attached to the same location. */
+ let attach = (tbl, loc, comments) =>
+ switch comments {
+ | list{} => ()
+ | comments => Hashtbl.replace(tbl, loc, comments)
+ }
+
+ /* Splits comments into (leading, inside, trailing) relative to loc by
+ character offsets; each bucket preserves the original order. */
+ let partitionByLoc = (comments, loc) => {
+ let rec loop = ((leading, inside, trailing), comments) => {
+ open Location
+ switch comments {
+ | list{comment, ...rest} =>
+ let cmtLoc = Comment.loc(comment)
+ if cmtLoc.loc_end.pos_cnum <= loc.loc_start.pos_cnum {
+ loop((list{comment, ...leading}, inside, trailing), rest)
+ } else if cmtLoc.loc_start.pos_cnum >= loc.loc_end.pos_cnum {
+ loop((leading, inside, list{comment, ...trailing}), rest)
+ } else {
+ loop((leading, list{comment, ...inside}, trailing), rest)
+ }
+ | list{} => (List.rev(leading), List.rev(inside), List.rev(trailing))
+ }
+ }
+
+ loop((list{}, list{}, list{}), comments)
+ }
+
+ /* Two-way split: a comment ending before loc starts is leading; everything
+ else (including overlapping) is trailing. Order preserved per bucket. */
+ let partitionLeadingTrailing = (comments, loc) => {
+ let rec loop = ((leading, trailing), comments) => {
+ open Location
+ switch comments {
+ | list{comment, ...rest} =>
+ let cmtLoc = Comment.loc(comment)
+ if cmtLoc.loc_end.pos_cnum <= loc.loc_start.pos_cnum {
+ loop((list{comment, ...leading}, trailing), rest)
+ } else {
+ loop((leading, list{comment, ...trailing}), rest)
+ }
+ | list{} => (List.rev(leading), List.rev(trailing))
+ }
+ }
+
+ loop((list{}, list{}), comments)
+ }
+
+ /* Splits comments into those starting on the same line loc ends on versus
+ all others. Order preserved per bucket. */
+ let partitionByOnSameLine = (loc, comments) => {
+ let rec loop = ((onSameLine, onOtherLine), comments) => {
+ open Location
+ switch comments {
+ | list{} => (List.rev(onSameLine), List.rev(onOtherLine))
+ | list{comment, ...rest} =>
+ let cmtLoc = Comment.loc(comment)
+ if cmtLoc.loc_start.pos_lnum === loc.loc_end.pos_lnum {
+ loop((list{comment, ...onSameLine}, onOtherLine), rest)
+ } else {
+ loop((onSameLine, list{comment, ...onOtherLine}), rest)
+ }
+ }
+ }
+
+ loop((list{}, list{}), comments)
+ }
+
+ /* Takes the run of comments immediately adjacent after loc1: a comment
+ belongs to loc1 while its recorded previous-token end equals the current
+ end position (no other token in between). Returns (adjacent, rest). */
+ let partitionAdjacentTrailing = (loc1, comments) => {
+ open Location
+ open Lexing
+ let rec loop = (~prevEndPos, afterLoc1, comments) =>
+ switch comments {
+ | list{} => (List.rev(afterLoc1), list{})
+ | list{comment, ...rest} as comments =>
+ let cmtPrevEndPos = Comment.prevTokEndPos(comment)
+ if prevEndPos.Lexing.pos_cnum === cmtPrevEndPos.pos_cnum {
+ let commentEnd = Comment.loc(comment).loc_end
+ loop(~prevEndPos=commentEnd, list{comment, ...afterLoc1}, rest)
+ } else {
+ (List.rev(afterLoc1), comments)
+ }
+ }
+
+ loop(~prevEndPos=loc1.loc_end, list{}, comments)
+ }
+
+ /* Unrolls a cons-pattern chain into element patterns in source order; a
+ "[]" tail is dropped, any other tail is kept as the final element. */
+ let rec collectListPatterns = (acc, pattern) => {
+ open Parsetree
+ switch pattern.ppat_desc {
+ | Ppat_construct(
+ {txt: Longident.Lident("::")},
+ Some({ppat_desc: Ppat_tuple(list{pat, rest})}),
+ ) =>
+ collectListPatterns(list{pat, ...acc}, rest)
+ | Ppat_construct({txt: Longident.Lident("[]")}, None) => List.rev(acc)
+ | _ => List.rev(list{pattern, ...acc})
+ }
+ }
+
+ /* Expression analogue of collectListPatterns: unrolls a :: chain, dropping
+ a "[]" tail, keeping any other tail as the last element. */
+ let rec collectListExprs = (acc, expr) => {
+ open Parsetree
+ switch expr.pexp_desc {
+ | Pexp_construct(
+ {txt: Longident.Lident("::")},
+ Some({pexp_desc: Pexp_tuple(list{expr, rest})}),
+ ) =>
+ collectListExprs(list{expr, ...acc}, rest)
+ | Pexp_construct({txt: Longident.Lident("[]")}, _) => List.rev(acc)
+ | _ => List.rev(list{expr, ...acc})
+ }
+ }
+
+ /* TODO: use ParsetreeViewer */
+ /* Uncurries an arrow type into (attrsBeforeFirstArrow, arguments, returnType).
+ Collection stops at an unlabelled arrow carrying attributes other than a
+ lone "bs"; labelled/optional arrows always continue. */
+ let arrowType = ct => {
+ open Parsetree
+ let rec process = (attrsBefore, acc, typ) =>
+ switch typ {
+ | {ptyp_desc: Ptyp_arrow(Nolabel as lbl, typ1, typ2), ptyp_attributes: list{}} =>
+ let arg = (list{}, lbl, typ1)
+ process(attrsBefore, list{arg, ...acc}, typ2)
+ | {
+ ptyp_desc: Ptyp_arrow(Nolabel as lbl, typ1, typ2),
+ ptyp_attributes: list{({txt: "bs"}, _)} as attrs,
+ } =>
+ let arg = (attrs, lbl, typ1)
+ process(attrsBefore, list{arg, ...acc}, typ2)
+ | {ptyp_desc: Ptyp_arrow(Nolabel, _typ1, _typ2), ptyp_attributes: _attrs} as returnType =>
+ let args = List.rev(acc)
+ (attrsBefore, args, returnType)
+ | {
+ ptyp_desc: Ptyp_arrow((Labelled(_) | Optional(_)) as lbl, typ1, typ2),
+ ptyp_attributes: attrs,
+ } =>
+ let arg = (attrs, lbl, typ1)
+ process(attrsBefore, list{arg, ...acc}, typ2)
+ | typ => (attrsBefore, List.rev(acc), typ)
+ }
+
+ switch ct {
+ | {ptyp_desc: Ptyp_arrow(Nolabel, _typ1, _typ2), ptyp_attributes: attrs} as typ =>
+ process(attrs, list{}, {...typ, ptyp_attributes: list{}})
+ | typ => process(list{}, list{}, typ)
+ }
+ }
+
+ /* TODO: avoiding the dependency on ParsetreeViewer here, is this a good idea? */
+ /* Like ParsetreeViewer.modExprApply but returns a single flat list with the
+ functor expression consed on the front: [functor, arg1, arg2, …]. */
+ let modExprApply = modExpr => {
+ let rec loop = (acc, modExpr) =>
+ switch modExpr {
+ | {Parsetree.pmod_desc: Pmod_apply(next, arg)} => loop(list{arg, ...acc}, next)
+ | _ => list{modExpr, ...acc}
+ }
+
+ loop(list{}, modExpr)
+ }
+
+ /* TODO: avoiding the dependency on ParsetreeViewer here, is this a good idea? */
+ /* Collects curried functor parameters (attrs, label, modType) in source
+ order plus the functor body; mirrors ParsetreeViewer.modExprFunctor. */
+ let modExprFunctor = modExpr => {
+ let rec loop = (acc, modExpr) =>
+ switch modExpr {
+ | {Parsetree.pmod_desc: Pmod_functor(lbl, modType, returnModExpr), pmod_attributes: attrs} =>
+ let param = (attrs, lbl, modType)
+ loop(list{param, ...acc}, returnModExpr)
+ | returnModExpr => (List.rev(acc), returnModExpr)
+ }
+
+ loop(list{}, modExpr)
+ }
+
+ /* Uncurries a functor module *type* into its parameter list
+ (attrs, label, argType) in source order plus the final return type. */
+ let functorType = modtype => {
+ let rec process = (acc, modtype) =>
+ switch modtype {
+ | {Parsetree.pmty_desc: Pmty_functor(lbl, argType, returnType), pmty_attributes: attrs} =>
+ let arg = (attrs, lbl, argType)
+ process(list{arg, ...acc}, returnType)
+ | modType => (List.rev(acc), modType)
+ }
+
+ process(list{}, modtype)
+ }
+
+ /* Uncurries an arrow expression into (attrsBeforeArrow, parameters,
+ returnExpr). Pexp_newtype runs are folded into a single synthetic
+ Nolabel parameter whose var pattern reads "type t u z". Collection stops
+ at an unlabelled Pexp_fun with attributes other than a lone "bs". */
+ let funExpr = expr => {
+ open Parsetree
+ /* Turns (type t, type u, type z) into "type t u z" */
+ let rec collectNewTypes = (acc, returnExpr) =>
+ switch returnExpr {
+ | {pexp_desc: Pexp_newtype(stringLoc, returnExpr), pexp_attributes: list{}} =>
+ collectNewTypes(list{stringLoc, ...acc}, returnExpr)
+ | returnExpr =>
+ let loc = switch (acc, List.rev(acc)) {
+ | (list{_startLoc, ..._}, list{endLoc, ..._}) => {
+ ...endLoc.loc,
+ loc_end: endLoc.loc.loc_end,
+ }
+ | _ => Location.none
+ }
+
+ let txt = List.fold_right((curr, acc) => acc ++ (" " ++ curr.Location.txt), acc, "type")
+ (Location.mkloc(txt, loc), returnExpr)
+ }
+
+ /* For simplicity reason Pexp_newtype gets converted to a Nolabel parameter,
+ * otherwise this function would need to return a variant:
+ * | NormalParamater(...)
+ * | NewType(...)
+ * This complicates printing with an extra variant/boxing/allocation for a code-path
+ * that is not often used. Lets just keep it simple for now */
+ let rec collect = (attrsBefore, acc, expr) =>
+ switch expr {
+ | {pexp_desc: Pexp_fun(lbl, defaultExpr, pattern, returnExpr), pexp_attributes: list{}} =>
+ let parameter = (list{}, lbl, defaultExpr, pattern)
+ collect(attrsBefore, list{parameter, ...acc}, returnExpr)
+ | {pexp_desc: Pexp_newtype(stringLoc, rest), pexp_attributes: attrs} =>
+ let (var, returnExpr) = collectNewTypes(list{stringLoc}, rest)
+ let parameter = (attrs, Asttypes.Nolabel, None, Ast_helper.Pat.var(~loc=stringLoc.loc, var))
+ collect(attrsBefore, list{parameter, ...acc}, returnExpr)
+ | {
+ pexp_desc: Pexp_fun(lbl, defaultExpr, pattern, returnExpr),
+ pexp_attributes: list{({txt: "bs"}, _)} as attrs,
+ } =>
+ let parameter = (attrs, lbl, defaultExpr, pattern)
+ collect(attrsBefore, list{parameter, ...acc}, returnExpr)
+ | {
+ pexp_desc: Pexp_fun((Labelled(_) | Optional(_)) as lbl, defaultExpr, pattern, returnExpr),
+ pexp_attributes: attrs,
+ } =>
+ let parameter = (attrs, lbl, defaultExpr, pattern)
+ collect(attrsBefore, list{parameter, ...acc}, returnExpr)
+ | expr => (attrsBefore, List.rev(acc), expr)
+ }
+
+ switch expr {
+ | {
+ pexp_desc: Pexp_fun(Nolabel, _defaultExpr, _pattern, _returnExpr),
+ pexp_attributes: attrs,
+ } as expr =>
+ collect(attrs, list{}, {...expr, pexp_attributes: list{}})
+ | expr => collect(list{}, list{}, expr)
+ }
+ }
+
+ /* An expression printed as a block (let/open/sequence/…), looked up
+ recursively through the head position of apply/constraint/field/setfield. */
+ let rec isBlockExpr = expr => {
+ open Parsetree
+ switch expr.pexp_desc {
+ | Pexp_letmodule(_)
+ | Pexp_letexception(_)
+ | Pexp_let(_)
+ | Pexp_open(_)
+ | Pexp_sequence(_) => true
+ | Pexp_apply(callExpr, _) when isBlockExpr(callExpr) => true
+ | Pexp_constraint(expr, _) when isBlockExpr(expr) => true
+ | Pexp_field(expr, _) when isBlockExpr(expr) => true
+ | Pexp_setfield(expr, _, _) when isBlockExpr(expr) => true
+ | _ => false
+ }
+ }
+
+ let rec walkStructure = (s, t, comments) =>
+ switch s {
+ | _ when comments == list{} => ()
+ | list{} => attach(t.inside, Location.none, comments)
+ | s => walkList(~getLoc=n => n.Parsetree.pstr_loc, ~walkNode=walkStructureItem, s, t, comments)
+ }
+
+ /* Dispatch comment attachment on the kind of structure item.
+  * Class/class-type items are not supported by this printer, so their
+  * comments are dropped. */
+ and walkStructureItem = (si, t, comments) =>
+ switch si.Parsetree.pstr_desc {
+ | _ when comments == list{} => ()
+ | Pstr_primitive(valueDescription) => walkValueDescription(valueDescription, t, comments)
+ | Pstr_open(openDescription) => walkOpenDescription(openDescription, t, comments)
+ | Pstr_value(_, valueBindings) => walkValueBindings(valueBindings, t, comments)
+ | Pstr_type(_, typeDeclarations) => walkTypeDeclarations(typeDeclarations, t, comments)
+ | Pstr_eval(expr, _) => walkExpr(expr, t, comments)
+ | Pstr_module(moduleBinding) => walkModuleBinding(moduleBinding, t, comments)
+ | Pstr_recmodule(moduleBindings) =>
+ walkList(
+ ~getLoc=mb => mb.Parsetree.pmb_loc,
+ ~walkNode=walkModuleBinding,
+ moduleBindings,
+ t,
+ comments,
+ )
+ | Pstr_modtype(modTypDecl) => walkModuleTypeDeclaration(modTypDecl, t, comments)
+ | Pstr_attribute(attribute) => walkAttribute(attribute, t, comments)
+ | Pstr_extension(extension, _) => walkExtension(extension, t, comments)
+ | Pstr_include(includeDeclaration) => walkIncludeDeclaration(includeDeclaration, t, comments)
+ | Pstr_exception(extensionConstructor) => walkExtConstr(extensionConstructor, t, comments)
+ | Pstr_typext(typeExtension) => walkTypeExtension(typeExtension, t, comments)
+ | Pstr_class_type(_) | Pstr_class(_) => ()
+ }
+
+ /* external name: type — attach comments around the name first,
+  * then around/inside the type expression. */
+ and walkValueDescription = (vd, t, comments) => {
+ let (leading, trailing) = partitionLeadingTrailing(comments, vd.pval_name.loc)
+ attach(t.leading, vd.pval_name.loc, leading)
+ let (afterName, rest) = partitionAdjacentTrailing(vd.pval_name.loc, trailing)
+ attach(t.trailing, vd.pval_name.loc, afterName)
+ let (before, inside, after) = partitionByLoc(rest, vd.pval_type.ptyp_loc)
+ 
+ attach(t.leading, vd.pval_type.ptyp_loc, before)
+ walkTypExpr(vd.pval_type, t, inside)
+ attach(t.trailing, vd.pval_type.ptyp_loc, after)
+ }
+
+ /* type path<params> += constructors — comments flow through the path,
+  * then the type params, and whatever remains goes to the constructors. */
+ and walkTypeExtension = (te, t, comments) => {
+ let (leading, trailing) = partitionLeadingTrailing(comments, te.ptyext_path.loc)
+ attach(t.leading, te.ptyext_path.loc, leading)
+ let (afterPath, rest) = partitionAdjacentTrailing(te.ptyext_path.loc, trailing)
+ attach(t.trailing, te.ptyext_path.loc, afterPath)
+ 
+ /* type params */
+ let rest = switch te.ptyext_params {
+ | list{} => rest
+ | typeParams =>
+ visitListButContinueWithRemainingComments(
+ ~getLoc=((typexpr, _variance)) => typexpr.Parsetree.ptyp_loc,
+ ~walkNode=walkTypeParam,
+ ~newlineDelimited=false,
+ typeParams,
+ t,
+ rest,
+ )
+ }
+ 
+ walkList(
+ ~getLoc=n => n.Parsetree.pext_loc,
+ ~walkNode=walkExtConstr,
+ te.ptyext_constructors,
+ t,
+ rest,
+ )
+ }
+
+ /* include <module-expr> — all comments belong to the included module expr. */
+ and walkIncludeDeclaration = (inclDecl, t, comments) => {
+ let (before, inside, after) = partitionByLoc(comments, inclDecl.pincl_mod.pmod_loc)
+ attach(t.leading, inclDecl.pincl_mod.pmod_loc, before)
+ walkModExpr(inclDecl.pincl_mod, t, inside)
+ attach(t.trailing, inclDecl.pincl_mod.pmod_loc, after)
+ }
+
+ /* module type Name [= modtype] — with no body, everything trails the name;
+  * otherwise comments are split between the name and the module type. */
+ and walkModuleTypeDeclaration = (mtd, t, comments) => {
+ let (leading, trailing) = partitionLeadingTrailing(comments, mtd.pmtd_name.loc)
+ attach(t.leading, mtd.pmtd_name.loc, leading)
+ switch mtd.pmtd_type {
+ | None => attach(t.trailing, mtd.pmtd_name.loc, trailing)
+ | Some(modType) =>
+ let (afterName, rest) = partitionAdjacentTrailing(mtd.pmtd_name.loc, trailing)
+ attach(t.trailing, mtd.pmtd_name.loc, afterName)
+ let (before, inside, after) = partitionByLoc(rest, modType.pmty_loc)
+ attach(t.leading, modType.pmty_loc, before)
+ walkModType(modType, t, inside)
+ attach(t.trailing, modType.pmty_loc, after)
+ }
+ }
+
+ /* module Name = expr — for a constrained body (module N: T = E) the leading
+  * comments are folded into the walk of the module expression so they end up
+  * next to the constraint rather than detached before it. */
+ and walkModuleBinding = (mb, t, comments) => {
+ let (leading, trailing) = partitionLeadingTrailing(comments, mb.pmb_name.loc)
+ attach(t.leading, mb.pmb_name.loc, leading)
+ let (afterName, rest) = partitionAdjacentTrailing(mb.pmb_name.loc, trailing)
+ attach(t.trailing, mb.pmb_name.loc, afterName)
+ let (leading, inside, trailing) = partitionByLoc(rest, mb.pmb_expr.pmod_loc)
+ switch mb.pmb_expr.pmod_desc {
+ | Pmod_constraint(_) => walkModExpr(mb.pmb_expr, t, List.concat(list{leading, inside}))
+ | _ =>
+ attach(t.leading, mb.pmb_expr.pmod_loc, leading)
+ walkModExpr(mb.pmb_expr, t, inside)
+ }
+ attach(t.trailing, mb.pmb_expr.pmod_loc, trailing)
+ }
+
+ /* Signature counterpart of walkStructure: empty signature anchors the
+  * comments to the ghost Location.none, otherwise walk the items. */
+ and walkSignature = (signature, t, comments) =>
+ switch signature {
+ | _ when comments == list{} => ()
+ | list{} => attach(t.inside, Location.none, comments)
+ | _s =>
+ walkList(
+ ~getLoc=n => n.Parsetree.psig_loc,
+ ~walkNode=walkSignatureItem,
+ signature,
+ t,
+ comments,
+ )
+ }
+
+ /* Dispatch comment attachment on the kind of signature item.
+  * Class/class-type items are not supported; their comments are dropped. */
+ and walkSignatureItem = (si, t, comments) =>
+ switch si.psig_desc {
+ | _ when comments == list{} => ()
+ | Psig_value(valueDescription) => walkValueDescription(valueDescription, t, comments)
+ | Psig_type(_, typeDeclarations) => walkTypeDeclarations(typeDeclarations, t, comments)
+ | Psig_typext(typeExtension) => walkTypeExtension(typeExtension, t, comments)
+ | Psig_exception(extensionConstructor) => walkExtConstr(extensionConstructor, t, comments)
+ | Psig_module(moduleDeclaration) => walkModuleDeclaration(moduleDeclaration, t, comments)
+ | Psig_recmodule(moduleDeclarations) =>
+ walkList(
+ ~getLoc=n => n.Parsetree.pmd_loc,
+ ~walkNode=walkModuleDeclaration,
+ moduleDeclarations,
+ t,
+ comments,
+ )
+ | Psig_modtype(moduleTypeDeclaration) =>
+ walkModuleTypeDeclaration(moduleTypeDeclaration, t, comments)
+ | Psig_open(openDescription) => walkOpenDescription(openDescription, t, comments)
+ | Psig_include(includeDescription) => walkIncludeDescription(includeDescription, t, comments)
+ | Psig_attribute(attribute) => walkAttribute(attribute, t, comments)
+ | Psig_extension(extension, _) => walkExtension(extension, t, comments)
+ | Psig_class(_) | Psig_class_type(_) => ()
+ }
+
+ /* include <module-type> (in signatures) — comments go to the module type. */
+ and walkIncludeDescription = (id, t, comments) => {
+ let (before, inside, after) = partitionByLoc(comments, id.pincl_mod.pmty_loc)
+ attach(t.leading, id.pincl_mod.pmty_loc, before)
+ walkModType(id.pincl_mod, t, inside)
+ attach(t.trailing, id.pincl_mod.pmty_loc, after)
+ }
+
+ /* module Name: type (in signatures) — name first, then the module type. */
+ and walkModuleDeclaration = (md, t, comments) => {
+ let (leading, trailing) = partitionLeadingTrailing(comments, md.pmd_name.loc)
+ attach(t.leading, md.pmd_name.loc, leading)
+ let (afterName, rest) = partitionAdjacentTrailing(md.pmd_name.loc, trailing)
+ attach(t.trailing, md.pmd_name.loc, afterName)
+ let (leading, inside, trailing) = partitionByLoc(rest, md.pmd_type.pmty_loc)
+ attach(t.leading, md.pmd_type.pmty_loc, leading)
+ walkModType(md.pmd_type, t, inside)
+ attach(t.trailing, md.pmd_type.pmty_loc, trailing)
+ }
+
+ /* Walk a homogeneous list of nodes, threading the comments through.
+  * ~prevLoc is the location of the previously visited node; comments between
+  * two nodes are split into "trailing the previous" vs "leading the current"
+  * based on whether they sit on the previous node's line (same line →
+  * adjacent-trailing split, otherwise same-line/remaining split).
+  * Comments left over after the last node trail that node. */
+ and walkList: 'node. (
+ ~prevLoc: Location.t=?,
+ ~getLoc: 'node => Location.t,
+ ~walkNode: ('node, t, list) => unit,
+ list<'node>,
+ t,
+ list,
+ ) => unit = (~prevLoc=?, ~getLoc, ~walkNode, l, t, comments) => {
+ open Location
+ switch l {
+ | _ when comments == list{} => ()
+ | list{} =>
+ switch prevLoc {
+ | Some(loc) => attach(t.trailing, loc, comments)
+ | None => ()
+ }
+ | list{node, ...rest} =>
+ let currLoc = getLoc(node)
+ let (leading, inside, trailing) = partitionByLoc(comments, currLoc)
+ switch prevLoc {
+ | None =>
+ /* first node, all leading comments attach here */
+ attach(t.leading, currLoc, leading)
+ | Some(prevLoc) =>
+ /* Same line */
+ if prevLoc.loc_end.pos_lnum === currLoc.loc_start.pos_lnum {
+ let (afterPrev, beforeCurr) = partitionAdjacentTrailing(prevLoc, leading)
+ let () = attach(t.trailing, prevLoc, afterPrev)
+ attach(t.leading, currLoc, beforeCurr)
+ } else {
+ let (onSameLineAsPrev, afterPrev) = partitionByOnSameLine(prevLoc, leading)
+ let () = attach(t.trailing, prevLoc, onSameLineAsPrev)
+ let (leading, _inside, _trailing) = partitionByLoc(afterPrev, currLoc)
+ attach(t.leading, currLoc, leading)
+ }
+ }
+ walkNode(node, t, inside)
+ walkList(~prevLoc=currLoc, ~getLoc, ~walkNode, rest, t, trailing)
+ }
+ }
+
+ /* The parsetree doesn't always contain location info about the opening or
+  * closing token of a "list-of-things". This routine visits the whole list,
+  * but returns any remaining comments that likely fall after the whole list.
+  * Same threading logic as walkList, except that after the last node only the
+  * comments adjacent to it (or on its line, when ~newlineDelimited) are
+  * attached as trailing — the rest are handed back to the caller. */
+ and visitListButContinueWithRemainingComments: 'node. (
+ ~prevLoc: Location.t=?,
+ ~newlineDelimited: bool,
+ ~getLoc: 'node => Location.t,
+ ~walkNode: ('node, t, list) => unit,
+ list<'node>,
+ t,
+ list,
+ ) => list = (~prevLoc=?, ~newlineDelimited, ~getLoc, ~walkNode, l, t, comments) => {
+ open Location
+ switch l {
+ | _ when comments == list{} => list{}
+ | list{} =>
+ switch prevLoc {
+ | Some(loc) =>
+ let (afterPrev, rest) = if newlineDelimited {
+ partitionByOnSameLine(loc, comments)
+ } else {
+ partitionAdjacentTrailing(loc, comments)
+ }
+ 
+ attach(t.trailing, loc, afterPrev)
+ rest
+ | None => comments
+ }
+ | list{node, ...rest} =>
+ let currLoc = getLoc(node)
+ let (leading, inside, trailing) = partitionByLoc(comments, currLoc)
+ let () = switch prevLoc {
+ | None =>
+ /* first node, all leading comments attach here */
+ attach(t.leading, currLoc, leading)
+ ()
+ | Some(prevLoc) =>
+ /* Same line */
+ if prevLoc.loc_end.pos_lnum === currLoc.loc_start.pos_lnum {
+ let (afterPrev, beforeCurr) = partitionAdjacentTrailing(prevLoc, leading)
+ let () = attach(t.trailing, prevLoc, afterPrev)
+ let () = attach(t.leading, currLoc, beforeCurr)
+ } else {
+ let (onSameLineAsPrev, afterPrev) = partitionByOnSameLine(prevLoc, leading)
+ let () = attach(t.trailing, prevLoc, onSameLineAsPrev)
+ let (leading, _inside, _trailing) = partitionByLoc(afterPrev, currLoc)
+ let () = attach(t.leading, currLoc, leading)
+ }
+ }
+ 
+ walkNode(node, t, inside)
+ visitListButContinueWithRemainingComments(
+ ~prevLoc=currLoc,
+ ~getLoc,
+ ~walkNode,
+ ~newlineDelimited,
+ rest,
+ t,
+ trailing,
+ )
+ }
+ }
+
+ /* let-bindings of one structure item, anchored at each binding's pvb_loc. */
+ and walkValueBindings = (vbs, t, comments) =>
+ walkList(~getLoc=n => n.Parsetree.pvb_loc, ~walkNode=walkValueBinding, vbs, t, comments)
+
+ /* open Longident — all comments attach around the identifier. */
+ and walkOpenDescription = (openDescription, t, comments) => {
+ let loc = openDescription.popen_lid.loc
+ let (leading, trailing) = partitionLeadingTrailing(comments, loc)
+ attach(t.leading, loc, leading)
+ attach(t.trailing, loc, trailing)
+ }
+
+ /* type declarations of one structure item, anchored at each ptype_loc. */
+ and walkTypeDeclarations = (typeDeclarations, t, comments) =>
+ walkList(
+ ~getLoc=n => n.Parsetree.ptype_loc,
+ ~walkNode=walkTypeDeclaration,
+ typeDeclarations,
+ t,
+ comments,
+ )
+
+ and walkTypeParam = ((typexpr, _variance), t, comments) => walkTypExpr(typexpr, t, comments)
+
+ /* type name<params> [= manifest] [= kind] — comments are threaded through
+  * the name, the params, the manifest, then the record/variant body; whatever
+  * remains trails the whole declaration. */
+ and walkTypeDeclaration = (td, t, comments) => {
+ let (beforeName, rest) = partitionLeadingTrailing(comments, td.ptype_name.loc)
+ attach(t.leading, td.ptype_name.loc, beforeName)
+ 
+ let (afterName, rest) = partitionAdjacentTrailing(td.ptype_name.loc, rest)
+ attach(t.trailing, td.ptype_name.loc, afterName)
+ 
+ /* type params */
+ let rest = switch td.ptype_params {
+ | list{} => rest
+ | typeParams =>
+ visitListButContinueWithRemainingComments(
+ ~getLoc=((typexpr, _variance)) => typexpr.Parsetree.ptyp_loc,
+ ~walkNode=walkTypeParam,
+ ~newlineDelimited=false,
+ typeParams,
+ t,
+ rest,
+ )
+ }
+ 
+ /* manifest: = typexpr */
+ let rest = switch td.ptype_manifest {
+ | Some(typexpr) =>
+ let (beforeTyp, insideTyp, afterTyp) = partitionByLoc(rest, typexpr.ptyp_loc)
+ attach(t.leading, typexpr.ptyp_loc, beforeTyp)
+ walkTypExpr(typexpr, t, insideTyp)
+ let (afterTyp, rest) = partitionAdjacentTrailing(typexpr.Parsetree.ptyp_loc, afterTyp)
+ attach(t.trailing, typexpr.ptyp_loc, afterTyp)
+ rest
+ | None => rest
+ }
+ 
+ /* kind: record fields or variant constructors */
+ let rest = switch td.ptype_kind {
+ | Ptype_abstract | Ptype_open => rest
+ | Ptype_record(labelDeclarations) =>
+ let () = walkList(
+ ~getLoc=ld => ld.Parsetree.pld_loc,
+ ~walkNode=walkLabelDeclaration,
+ labelDeclarations,
+ t,
+ rest,
+ )
+ 
+ list{}
+ | Ptype_variant(constructorDeclarations) =>
+ walkConstructorDeclarations(constructorDeclarations, t, rest)
+ }
+ 
+ attach(t.trailing, td.ptype_loc, rest)
+ }
+
+ /* Record fields; returns remaining comments (used for inline records where
+  * text may follow the closing brace). */
+ and walkLabelDeclarations = (lds, t, comments) =>
+ visitListButContinueWithRemainingComments(
+ ~getLoc=ld => ld.Parsetree.pld_loc,
+ ~walkNode=walkLabelDeclaration,
+ ~newlineDelimited=false,
+ lds,
+ t,
+ comments,
+ )
+
+ /* name: type — field name first, then the field's type expression. */
+ and walkLabelDeclaration = (ld, t, comments) => {
+ let (beforeName, rest) = partitionLeadingTrailing(comments, ld.pld_name.loc)
+ attach(t.leading, ld.pld_name.loc, beforeName)
+ let (afterName, rest) = partitionAdjacentTrailing(ld.pld_name.loc, rest)
+ attach(t.trailing, ld.pld_name.loc, afterName)
+ let (beforeTyp, insideTyp, afterTyp) = partitionByLoc(rest, ld.pld_type.ptyp_loc)
+ attach(t.leading, ld.pld_type.ptyp_loc, beforeTyp)
+ walkTypExpr(ld.pld_type, t, insideTyp)
+ attach(t.trailing, ld.pld_type.ptyp_loc, afterTyp)
+ }
+
+ /* Variant constructors; returns remaining comments to the caller. */
+ and walkConstructorDeclarations = (cds, t, comments) =>
+ visitListButContinueWithRemainingComments(
+ ~getLoc=cd => cd.Parsetree.pcd_loc,
+ ~walkNode=walkConstructorDeclaration,
+ ~newlineDelimited=false,
+ cds,
+ t,
+ comments,
+ )
+
+ /* Name(args)[: res] — name, then constructor arguments, then the optional
+  * GADT result type; leftovers trail the whole constructor. */
+ and walkConstructorDeclaration = (cd, t, comments) => {
+ let (beforeName, rest) = partitionLeadingTrailing(comments, cd.pcd_name.loc)
+ attach(t.leading, cd.pcd_name.loc, beforeName)
+ let (afterName, rest) = partitionAdjacentTrailing(cd.pcd_name.loc, rest)
+ attach(t.trailing, cd.pcd_name.loc, afterName)
+ let rest = walkConstructorArguments(cd.pcd_args, t, rest)
+ 
+ let rest = switch cd.pcd_res {
+ | Some(typexpr) =>
+ let (beforeTyp, insideTyp, afterTyp) = partitionByLoc(rest, typexpr.ptyp_loc)
+ attach(t.leading, typexpr.ptyp_loc, beforeTyp)
+ walkTypExpr(typexpr, t, insideTyp)
+ let (afterTyp, rest) = partitionAdjacentTrailing(typexpr.Parsetree.ptyp_loc, afterTyp)
+ attach(t.trailing, typexpr.ptyp_loc, afterTyp)
+ rest
+ | None => rest
+ }
+ 
+ attach(t.trailing, cd.pcd_loc, rest)
+ }
+
+ /* Tuple args walk each typexpr; inline-record args walk the fields.
+  * Both return the comments that fall after the argument list. */
+ and walkConstructorArguments = (args, t, comments) =>
+ switch args {
+ | Pcstr_tuple(typexprs) =>
+ visitListButContinueWithRemainingComments(
+ ~getLoc=n => n.Parsetree.ptyp_loc,
+ ~walkNode=walkTypExpr,
+ ~newlineDelimited=false,
+ typexprs,
+ t,
+ comments,
+ )
+ | Pcstr_record(labelDeclarations) => walkLabelDeclarations(labelDeclarations, t, comments)
+ }
+
+ /* let pattern = expr — first normalize the two poly-constraint encodings the
+  * parser produces (let x: t = e and let f: type a. ... = ...) so the pattern
+  * location covers "pattern: type", then split the comments between pattern
+  * and expression. */
+ and walkValueBinding = (vb, t, comments) => {
+ open Location
+ 
+ let vb = {
+ open Parsetree
+ switch (vb.pvb_pat, vb.pvb_expr) {
+ | (
+ {ppat_desc: Ppat_constraint(pat, {ptyp_desc: Ptyp_poly(list{}, t)})},
+ {pexp_desc: Pexp_constraint(expr, _typ)},
+ ) => {
+ ...vb,
+ pvb_pat: Ast_helper.Pat.constraint_(
+ ~loc={...pat.ppat_loc, loc_end: t.Parsetree.ptyp_loc.loc_end},
+ pat,
+ t,
+ ),
+ pvb_expr: expr,
+ }
+ | (
+ {ppat_desc: Ppat_constraint(pat, {ptyp_desc: Ptyp_poly(list{_, ..._}, t)})},
+ {pexp_desc: Pexp_fun(_)},
+ ) => {
+ ...vb,
+ pvb_pat: {
+ ...vb.pvb_pat,
+ ppat_loc: {...pat.ppat_loc, loc_end: t.ptyp_loc.loc_end},
+ },
+ }
+ | _ => vb
+ }
+ }
+ 
+ let patternLoc = vb.Parsetree.pvb_pat.ppat_loc
+ let exprLoc = vb.Parsetree.pvb_expr.pexp_loc
+ 
+ let (leading, inside, trailing) = partitionByLoc(comments, patternLoc)
+ 
+ /* everything before start of pattern can only be leading on the pattern:
+ * let |* before *| a = 1 */
+ attach(t.leading, patternLoc, leading)
+ walkPattern(vb.Parsetree.pvb_pat, t, inside)
+ /* let pattern = expr -> pattern and expr on the same line */
+ /* if patternLoc.loc_end.pos_lnum == exprLoc.loc_start.pos_lnum then ( */
+ let (afterPat, surroundingExpr) = partitionAdjacentTrailing(patternLoc, trailing)
+ 
+ attach(t.trailing, patternLoc, afterPat)
+ let (beforeExpr, insideExpr, afterExpr) = partitionByLoc(surroundingExpr, exprLoc)
+ if isBlockExpr(vb.pvb_expr) {
+ /* block bodies absorb all surrounding comments in one recursive walk */
+ walkExpr(vb.pvb_expr, t, List.concat(list{beforeExpr, insideExpr, afterExpr}))
+ } else {
+ attach(t.leading, exprLoc, beforeExpr)
+ walkExpr(vb.Parsetree.pvb_expr, t, insideExpr)
+ attach(t.trailing, exprLoc, afterExpr)
+ }
+ }
+
+ /* Attach [comments] to an expression, dispatching on its shape. The general
+  * pattern in every case: partition the comments around each sub-part
+  * (leading / inside / trailing), recurse into the part with "inside", and
+  * attach the rest — except block expressions (isBlockExpr), which get the
+  * whole surrounding comment set folded into a single recursive walk. */
+ and walkExpr = (expr, t, comments) => {
+ open Location
+ switch expr.Parsetree.pexp_desc {
+ | _ when comments == list{} => ()
+ | Pexp_constant(_) =>
+ let (leading, trailing) = partitionLeadingTrailing(comments, expr.pexp_loc)
+ attach(t.leading, expr.pexp_loc, leading)
+ attach(t.trailing, expr.pexp_loc, trailing)
+ | Pexp_ident(longident) =>
+ let (leading, trailing) = partitionLeadingTrailing(comments, longident.loc)
+ attach(t.leading, longident.loc, leading)
+ attach(t.trailing, longident.loc, trailing)
+ | Pexp_let(_recFlag, valueBindings, expr2) =>
+ /* ghost pattern locs (e.g. desugared bindings) anchor on the bound expr */
+ let comments = visitListButContinueWithRemainingComments(~getLoc=n =>
+ if n.Parsetree.pvb_pat.ppat_loc.loc_ghost {
+ n.pvb_expr.pexp_loc
+ } else {
+ n.Parsetree.pvb_loc
+ }
+ , ~walkNode=walkValueBinding, ~newlineDelimited=true, valueBindings, t, comments)
+ 
+ if isBlockExpr(expr2) {
+ walkExpr(expr2, t, comments)
+ } else {
+ let (leading, inside, trailing) = partitionByLoc(comments, expr2.pexp_loc)
+ attach(t.leading, expr2.pexp_loc, leading)
+ walkExpr(expr2, t, inside)
+ attach(t.trailing, expr2.pexp_loc, trailing)
+ }
+ | Pexp_sequence(expr1, expr2) =>
+ let (leading, inside, trailing) = partitionByLoc(comments, expr1.pexp_loc)
+ let comments = if isBlockExpr(expr1) {
+ let (afterExpr, comments) = partitionByOnSameLine(expr1.pexp_loc, trailing)
+ walkExpr(expr1, t, List.concat(list{leading, inside, afterExpr}))
+ comments
+ } else {
+ attach(t.leading, expr1.pexp_loc, leading)
+ walkExpr(expr1, t, inside)
+ let (afterExpr, comments) = partitionByOnSameLine(expr1.pexp_loc, trailing)
+ attach(t.trailing, expr1.pexp_loc, afterExpr)
+ comments
+ }
+ if isBlockExpr(expr2) {
+ walkExpr(expr2, t, comments)
+ } else {
+ let (leading, inside, trailing) = partitionByLoc(comments, expr2.pexp_loc)
+ attach(t.leading, expr2.pexp_loc, leading)
+ walkExpr(expr2, t, inside)
+ attach(t.trailing, expr2.pexp_loc, trailing)
+ }
+ | Pexp_open(_override, longident, expr2) =>
+ let (leading, comments) = partitionLeadingTrailing(comments, expr.pexp_loc)
+ attach(t.leading, {...expr.pexp_loc, loc_end: longident.loc.loc_end}, leading)
+ let (leading, trailing) = partitionLeadingTrailing(comments, longident.loc)
+ attach(t.leading, longident.loc, leading)
+ let (afterLongident, rest) = partitionByOnSameLine(longident.loc, trailing)
+ attach(t.trailing, longident.loc, afterLongident)
+ if isBlockExpr(expr2) {
+ walkExpr(expr2, t, rest)
+ } else {
+ let (leading, inside, trailing) = partitionByLoc(rest, expr2.pexp_loc)
+ attach(t.leading, expr2.pexp_loc, leading)
+ walkExpr(expr2, t, inside)
+ attach(t.trailing, expr2.pexp_loc, trailing)
+ }
+ | Pexp_extension(
+ {txt: "bs.obj"},
+ PStr(list{{pstr_desc: Pstr_eval({pexp_desc: Pexp_record(rows, _)}, list{})}}),
+ ) =>
+ /* %bs.obj record literal: each row spans "label: value" */
+ walkList(~getLoc=((longident, expr): (Asttypes.loc, Parsetree.expression)) => {
+ ...longident.loc,
+ loc_end: expr.pexp_loc.loc_end,
+ }, ~walkNode=walkExprRecordRow, rows, t, comments)
+ | Pexp_extension(extension) => walkExtension(extension, t, comments)
+ | Pexp_letexception(extensionConstructor, expr2) =>
+ let (leading, comments) = partitionLeadingTrailing(comments, expr.pexp_loc)
+ attach(t.leading, {...expr.pexp_loc, loc_end: extensionConstructor.pext_loc.loc_end}, leading)
+ let (leading, inside, trailing) = partitionByLoc(comments, extensionConstructor.pext_loc)
+ attach(t.leading, extensionConstructor.pext_loc, leading)
+ walkExtConstr(extensionConstructor, t, inside)
+ let (afterExtConstr, rest) = partitionByOnSameLine(extensionConstructor.pext_loc, trailing)
+ attach(t.trailing, extensionConstructor.pext_loc, afterExtConstr)
+ if isBlockExpr(expr2) {
+ walkExpr(expr2, t, rest)
+ } else {
+ let (leading, inside, trailing) = partitionByLoc(rest, expr2.pexp_loc)
+ attach(t.leading, expr2.pexp_loc, leading)
+ walkExpr(expr2, t, inside)
+ attach(t.trailing, expr2.pexp_loc, trailing)
+ }
+ | Pexp_letmodule(stringLoc, modExpr, expr2) =>
+ let (leading, comments) = partitionLeadingTrailing(comments, expr.pexp_loc)
+ attach(t.leading, {...expr.pexp_loc, loc_end: modExpr.pmod_loc.loc_end}, leading)
+ let (leading, trailing) = partitionLeadingTrailing(comments, stringLoc.loc)
+ attach(t.leading, stringLoc.loc, leading)
+ let (afterString, rest) = partitionAdjacentTrailing(stringLoc.loc, trailing)
+ attach(t.trailing, stringLoc.loc, afterString)
+ let (beforeModExpr, insideModExpr, afterModExpr) = partitionByLoc(rest, modExpr.pmod_loc)
+ attach(t.leading, modExpr.pmod_loc, beforeModExpr)
+ walkModExpr(modExpr, t, insideModExpr)
+ let (afterModExpr, rest) = partitionByOnSameLine(modExpr.pmod_loc, afterModExpr)
+ attach(t.trailing, modExpr.pmod_loc, afterModExpr)
+ if isBlockExpr(expr2) {
+ walkExpr(expr2, t, rest)
+ } else {
+ let (leading, inside, trailing) = partitionByLoc(rest, expr2.pexp_loc)
+ attach(t.leading, expr2.pexp_loc, leading)
+ walkExpr(expr2, t, inside)
+ attach(t.trailing, expr2.pexp_loc, trailing)
+ }
+ | Pexp_assert(expr) | Pexp_lazy(expr) =>
+ if isBlockExpr(expr) {
+ walkExpr(expr, t, comments)
+ } else {
+ let (leading, inside, trailing) = partitionByLoc(comments, expr.pexp_loc)
+ attach(t.leading, expr.pexp_loc, leading)
+ walkExpr(expr, t, inside)
+ attach(t.trailing, expr.pexp_loc, trailing)
+ }
+ | Pexp_coerce(expr, optTypexpr, typexpr) =>
+ let (leading, inside, trailing) = partitionByLoc(comments, expr.pexp_loc)
+ attach(t.leading, expr.pexp_loc, leading)
+ walkExpr(expr, t, inside)
+ let (afterExpr, rest) = partitionAdjacentTrailing(expr.pexp_loc, trailing)
+ attach(t.trailing, expr.pexp_loc, afterExpr)
+ let rest = switch optTypexpr {
+ | Some(typexpr) =>
+ let (leading, inside, trailing) = partitionByLoc(comments, typexpr.ptyp_loc)
+ attach(t.leading, typexpr.ptyp_loc, leading)
+ walkTypExpr(typexpr, t, inside)
+ let (afterTyp, rest) = partitionAdjacentTrailing(typexpr.ptyp_loc, trailing)
+ attach(t.trailing, typexpr.ptyp_loc, afterTyp)
+ rest
+ | None => rest
+ }
+ 
+ let (leading, inside, trailing) = partitionByLoc(rest, typexpr.ptyp_loc)
+ attach(t.leading, typexpr.ptyp_loc, leading)
+ walkTypExpr(typexpr, t, inside)
+ attach(t.trailing, typexpr.ptyp_loc, trailing)
+ | Pexp_constraint(expr, typexpr) =>
+ let (leading, inside, trailing) = partitionByLoc(comments, expr.pexp_loc)
+ attach(t.leading, expr.pexp_loc, leading)
+ walkExpr(expr, t, inside)
+ let (afterExpr, rest) = partitionAdjacentTrailing(expr.pexp_loc, trailing)
+ attach(t.trailing, expr.pexp_loc, afterExpr)
+ let (leading, inside, trailing) = partitionByLoc(rest, typexpr.ptyp_loc)
+ attach(t.leading, typexpr.ptyp_loc, leading)
+ walkTypExpr(typexpr, t, inside)
+ attach(t.trailing, typexpr.ptyp_loc, trailing)
+ | Pexp_tuple(list{}) | Pexp_array(list{}) | Pexp_construct({txt: Longident.Lident("[]")}, _) =>
+ attach(t.inside, expr.pexp_loc, comments)
+ | Pexp_construct({txt: Longident.Lident("::")}, _) =>
+ /* cons cells are flattened into a list of element exprs first */
+ walkList(
+ ~getLoc=n => n.Parsetree.pexp_loc,
+ ~walkNode=walkExpr,
+ collectListExprs(list{}, expr),
+ t,
+ comments,
+ )
+ | Pexp_construct(longident, args) =>
+ let (leading, trailing) = partitionLeadingTrailing(comments, longident.loc)
+ attach(t.leading, longident.loc, leading)
+ switch args {
+ | Some(expr) =>
+ let (afterLongident, rest) = partitionAdjacentTrailing(longident.loc, trailing)
+ attach(t.trailing, longident.loc, afterLongident)
+ walkExpr(expr, t, rest)
+ | None => attach(t.trailing, longident.loc, trailing)
+ }
+ | Pexp_variant(_label, None) => ()
+ | Pexp_variant(_label, Some(expr)) => walkExpr(expr, t, comments)
+ | Pexp_array(exprs) | Pexp_tuple(exprs) =>
+ walkList(~getLoc=n => n.Parsetree.pexp_loc, ~walkNode=walkExpr, exprs, t, comments)
+ | Pexp_record(rows, spreadExpr) =>
+ let comments = switch spreadExpr {
+ | None => comments
+ | Some(expr) =>
+ let (leading, inside, trailing) = partitionByLoc(comments, expr.pexp_loc)
+ attach(t.leading, expr.pexp_loc, leading)
+ walkExpr(expr, t, inside)
+ let (afterExpr, rest) = partitionAdjacentTrailing(expr.pexp_loc, trailing)
+ attach(t.trailing, expr.pexp_loc, afterExpr)
+ rest
+ }
+ 
+ /* each row spans "label: value" */
+ walkList(~getLoc=((longident, expr): (Asttypes.loc, Parsetree.expression)) => {
+ ...longident.loc,
+ loc_end: expr.pexp_loc.loc_end,
+ }, ~walkNode=walkExprRecordRow, rows, t, comments)
+ | Pexp_field(expr, longident) =>
+ let (leading, inside, trailing) = partitionByLoc(comments, expr.pexp_loc)
+ let trailing = if isBlockExpr(expr) {
+ let (afterExpr, rest) = partitionAdjacentTrailing(expr.pexp_loc, trailing)
+ walkExpr(expr, t, List.concat(list{leading, inside, afterExpr}))
+ rest
+ } else {
+ attach(t.leading, expr.pexp_loc, leading)
+ walkExpr(expr, t, inside)
+ trailing
+ }
+ let (afterExpr, rest) = partitionAdjacentTrailing(expr.pexp_loc, trailing)
+ attach(t.trailing, expr.pexp_loc, afterExpr)
+ let (leading, trailing) = partitionLeadingTrailing(rest, longident.loc)
+ attach(t.leading, longident.loc, leading)
+ attach(t.trailing, longident.loc, trailing)
+ | Pexp_setfield(expr1, longident, expr2) =>
+ let (leading, inside, trailing) = partitionByLoc(comments, expr1.pexp_loc)
+ let rest = if isBlockExpr(expr1) {
+ let (afterExpr, rest) = partitionAdjacentTrailing(expr1.pexp_loc, trailing)
+ walkExpr(expr1, t, List.concat(list{leading, inside, afterExpr}))
+ rest
+ } else {
+ let (afterExpr, rest) = partitionAdjacentTrailing(expr1.pexp_loc, trailing)
+ attach(t.leading, expr1.pexp_loc, leading)
+ walkExpr(expr1, t, inside)
+ attach(t.trailing, expr1.pexp_loc, afterExpr)
+ rest
+ }
+ let (beforeLongident, afterLongident) = partitionLeadingTrailing(rest, longident.loc)
+ attach(t.leading, longident.loc, beforeLongident)
+ let (afterLongident, rest) = partitionAdjacentTrailing(longident.loc, afterLongident)
+ attach(t.trailing, longident.loc, afterLongident)
+ if isBlockExpr(expr2) {
+ walkExpr(expr2, t, rest)
+ } else {
+ let (leading, inside, trailing) = partitionByLoc(rest, expr2.pexp_loc)
+ attach(t.leading, expr2.pexp_loc, leading)
+ walkExpr(expr2, t, inside)
+ attach(t.trailing, expr2.pexp_loc, trailing)
+ }
+ | Pexp_ifthenelse(ifExpr, thenExpr, elseExpr) =>
+ let (leading, inside, trailing) = partitionByLoc(comments, ifExpr.pexp_loc)
+ let comments = if isBlockExpr(ifExpr) {
+ let (afterExpr, comments) = partitionAdjacentTrailing(ifExpr.pexp_loc, trailing)
+ walkExpr(ifExpr, t, List.concat(list{leading, inside, afterExpr}))
+ comments
+ } else {
+ attach(t.leading, ifExpr.pexp_loc, leading)
+ walkExpr(ifExpr, t, inside)
+ let (afterExpr, comments) = partitionAdjacentTrailing(ifExpr.pexp_loc, trailing)
+ attach(t.trailing, ifExpr.pexp_loc, afterExpr)
+ comments
+ }
+ let (leading, inside, trailing) = partitionByLoc(comments, thenExpr.pexp_loc)
+ let comments = if isBlockExpr(thenExpr) {
+ let (afterExpr, trailing) = partitionAdjacentTrailing(thenExpr.pexp_loc, trailing)
+ walkExpr(thenExpr, t, List.concat(list{leading, inside, afterExpr}))
+ trailing
+ } else {
+ attach(t.leading, thenExpr.pexp_loc, leading)
+ walkExpr(thenExpr, t, inside)
+ let (afterExpr, comments) = partitionAdjacentTrailing(thenExpr.pexp_loc, trailing)
+ attach(t.trailing, thenExpr.pexp_loc, afterExpr)
+ comments
+ }
+ switch elseExpr {
+ | None => ()
+ | Some(expr) =>
+ if isBlockExpr(expr) {
+ walkExpr(expr, t, comments)
+ } else {
+ let (leading, inside, trailing) = partitionByLoc(comments, expr.pexp_loc)
+ attach(t.leading, expr.pexp_loc, leading)
+ walkExpr(expr, t, inside)
+ attach(t.trailing, expr.pexp_loc, trailing)
+ }
+ }
+ | Pexp_while(expr1, expr2) =>
+ let (leading, inside, trailing) = partitionByLoc(comments, expr1.pexp_loc)
+ let rest = if isBlockExpr(expr1) {
+ let (afterExpr, rest) = partitionAdjacentTrailing(expr1.pexp_loc, trailing)
+ walkExpr(expr1, t, List.concat(list{leading, inside, afterExpr}))
+ rest
+ } else {
+ attach(t.leading, expr1.pexp_loc, leading)
+ walkExpr(expr1, t, inside)
+ let (afterExpr, rest) = partitionAdjacentTrailing(expr1.pexp_loc, trailing)
+ attach(t.trailing, expr1.pexp_loc, afterExpr)
+ rest
+ }
+ if isBlockExpr(expr2) {
+ walkExpr(expr2, t, rest)
+ } else {
+ let (leading, inside, trailing) = partitionByLoc(rest, expr2.pexp_loc)
+ attach(t.leading, expr2.pexp_loc, leading)
+ walkExpr(expr2, t, inside)
+ attach(t.trailing, expr2.pexp_loc, trailing)
+ }
+ | Pexp_for(pat, expr1, expr2, _, expr3) =>
+ let (leading, inside, trailing) = partitionByLoc(comments, pat.ppat_loc)
+ attach(t.leading, pat.ppat_loc, leading)
+ walkPattern(pat, t, inside)
+ let (afterPat, rest) = partitionAdjacentTrailing(pat.ppat_loc, trailing)
+ attach(t.trailing, pat.ppat_loc, afterPat)
+ let (leading, inside, trailing) = partitionByLoc(rest, expr1.pexp_loc)
+ attach(t.leading, expr1.pexp_loc, leading)
+ walkExpr(expr1, t, inside)
+ let (afterExpr, rest) = partitionAdjacentTrailing(expr1.pexp_loc, trailing)
+ attach(t.trailing, expr1.pexp_loc, afterExpr)
+ let (leading, inside, trailing) = partitionByLoc(rest, expr2.pexp_loc)
+ attach(t.leading, expr2.pexp_loc, leading)
+ walkExpr(expr2, t, inside)
+ let (afterExpr, rest) = partitionAdjacentTrailing(expr2.pexp_loc, trailing)
+ attach(t.trailing, expr2.pexp_loc, afterExpr)
+ if isBlockExpr(expr3) {
+ walkExpr(expr3, t, rest)
+ } else {
+ let (leading, inside, trailing) = partitionByLoc(rest, expr3.pexp_loc)
+ attach(t.leading, expr3.pexp_loc, leading)
+ walkExpr(expr3, t, inside)
+ attach(t.trailing, expr3.pexp_loc, trailing)
+ }
+ | Pexp_pack(modExpr) =>
+ let (before, inside, after) = partitionByLoc(comments, modExpr.pmod_loc)
+ attach(t.leading, modExpr.pmod_loc, before)
+ walkModExpr(modExpr, t, inside)
+ attach(t.trailing, modExpr.pmod_loc, after)
+ | Pexp_match(expr, cases) | Pexp_try(expr, cases) =>
+ let (before, inside, after) = partitionByLoc(comments, expr.pexp_loc)
+ let after = if isBlockExpr(expr) {
+ let (afterExpr, rest) = partitionAdjacentTrailing(expr.pexp_loc, after)
+ walkExpr(expr, t, List.concat(list{before, inside, afterExpr}))
+ rest
+ } else {
+ attach(t.leading, expr.pexp_loc, before)
+ walkExpr(expr, t, inside)
+ after
+ }
+ let (afterExpr, rest) = partitionAdjacentTrailing(expr.pexp_loc, after)
+ attach(t.trailing, expr.pexp_loc, afterExpr)
+ /* each case spans "pattern => rhs" */
+ walkList(~getLoc=n => {
+ ...n.Parsetree.pc_lhs.ppat_loc,
+ loc_end: n.pc_rhs.pexp_loc.loc_end,
+ }, ~walkNode=walkCase, cases, t, rest)
+ /* unary expression: todo use parsetreeviewer */
+ | Pexp_apply(
+ {pexp_desc: Pexp_ident({txt: Longident.Lident("~+" | "~+." | "~-" | "~-." | "not" | "!")})},
+ list{(Nolabel, argExpr)},
+ ) =>
+ let (before, inside, after) = partitionByLoc(comments, argExpr.pexp_loc)
+ attach(t.leading, argExpr.pexp_loc, before)
+ walkExpr(argExpr, t, inside)
+ attach(t.trailing, argExpr.pexp_loc, after)
+ /* binary expression */
+ | Pexp_apply(
+ {
+ pexp_desc:
+ Pexp_ident({
+ txt:
+ Longident.Lident(
+ ":="
+ | "||"
+ | "&&"
+ | "="
+ | "=="
+ | "<"
+ | ">"
+ | "!="
+ | "!=="
+ | "<="
+ | ">="
+ | "|>"
+ | "+"
+ | "+."
+ | "-"
+ | "-."
+ | "++"
+ | "^"
+ | "*"
+ | "*."
+ | "/"
+ | "/."
+ | "**"
+ | "|."
+ | "<>",
+ ),
+ }),
+ },
+ list{(Nolabel, operand1), (Nolabel, operand2)},
+ ) =>
+ let (before, inside, after) = partitionByLoc(comments, operand1.pexp_loc)
+ attach(t.leading, operand1.pexp_loc, before)
+ walkExpr(operand1, t, inside)
+ let (afterOperand1, rest) = partitionAdjacentTrailing(operand1.pexp_loc, after)
+ attach(t.trailing, operand1.pexp_loc, afterOperand1)
+ let (before, inside, after) = partitionByLoc(rest, operand2.pexp_loc)
+ attach(t.leading, operand2.pexp_loc, before)
+ walkExpr(operand2, t, inside) /* (List.concat [inside; after]); */
+ attach(t.trailing, operand2.pexp_loc, after)
+ | Pexp_apply(callExpr, arguments) =>
+ let (before, inside, after) = partitionByLoc(comments, callExpr.pexp_loc)
+ let after = if isBlockExpr(callExpr) {
+ let (afterExpr, rest) = partitionAdjacentTrailing(callExpr.pexp_loc, after)
+ walkExpr(callExpr, t, List.concat(list{before, inside, afterExpr}))
+ rest
+ } else {
+ attach(t.leading, callExpr.pexp_loc, before)
+ walkExpr(callExpr, t, inside)
+ after
+ }
+ let (afterExpr, rest) = partitionAdjacentTrailing(callExpr.pexp_loc, after)
+ attach(t.trailing, callExpr.pexp_loc, afterExpr)
+ /* labelled args include the label location (ns.namedArgLoc attribute) */
+ walkList(~getLoc=((_argLabel, expr)) => {
+ let loc = switch expr.Parsetree.pexp_attributes {
+ | list{({Location.txt: "ns.namedArgLoc", loc}, _), ..._attrs} => {
+ ...loc,
+ loc_end: expr.pexp_loc.loc_end,
+ }
+ | _ => expr.pexp_loc
+ }
+ 
+ loc
+ }, ~walkNode=walkExprArgument, arguments, t, rest)
+ | Pexp_fun(_, _, _, _) | Pexp_newtype(_) =>
+ /* arrow function: walk the collected parameters, then the body */
+ let (_, parameters, returnExpr) = funExpr(expr)
+ let comments = visitListButContinueWithRemainingComments(
+ ~newlineDelimited=false,
+ ~walkNode=walkExprPararameter,
+ ~getLoc=((_attrs, _argLbl, exprOpt, pattern)) => {
+ open Parsetree
+ let startPos = switch pattern.ppat_attributes {
+ | list{({Location.txt: "ns.namedArgLoc", loc}, _), ..._attrs} => loc.loc_start
+ | _ => pattern.ppat_loc.loc_start
+ }
+ 
+ switch exprOpt {
+ | None => {...pattern.ppat_loc, loc_start: startPos}
+ | Some(expr) => {
+ ...pattern.ppat_loc,
+ loc_start: startPos,
+ loc_end: expr.pexp_loc.loc_end,
+ }
+ }
+ },
+ parameters,
+ t,
+ comments,
+ )
+ 
+ switch returnExpr.pexp_desc {
+ | Pexp_constraint(expr, typ)
+ when expr.pexp_loc.loc_start.pos_cnum >= typ.ptyp_loc.loc_end.pos_cnum =>
+ /* return-type annotation precedes the body in source order */
+ let (leading, inside, trailing) = partitionByLoc(comments, typ.ptyp_loc)
+ attach(t.leading, typ.ptyp_loc, leading)
+ walkTypExpr(typ, t, inside)
+ let (afterTyp, comments) = partitionAdjacentTrailing(typ.ptyp_loc, trailing)
+ attach(t.trailing, typ.ptyp_loc, afterTyp)
+ if isBlockExpr(expr) {
+ walkExpr(expr, t, comments)
+ } else {
+ let (leading, inside, trailing) = partitionByLoc(comments, expr.pexp_loc)
+ attach(t.leading, expr.pexp_loc, leading)
+ walkExpr(expr, t, inside)
+ attach(t.trailing, expr.pexp_loc, trailing)
+ }
+ | _ =>
+ if isBlockExpr(returnExpr) {
+ walkExpr(returnExpr, t, comments)
+ } else {
+ let (leading, inside, trailing) = partitionByLoc(comments, returnExpr.pexp_loc)
+ attach(t.leading, returnExpr.pexp_loc, leading)
+ walkExpr(returnExpr, t, inside)
+ attach(t.trailing, returnExpr.pexp_loc, trailing)
+ }
+ }
+ | _ => ()
+ }
+ }
+
+ /* Attach comments around one function parameter: leading/inside/trailing on
+  * the pattern, then (for parameters with a default expression) around the
+  * default expression itself. */
+ and walkExprPararameter = ((_attrs, _argLbl, exprOpt, pattern), t, comments) => {
+ let (leading, inside, trailing) = partitionByLoc(comments, pattern.ppat_loc)
+ attach(t.leading, pattern.ppat_loc, leading)
+ walkPattern(pattern, t, inside)
+ switch exprOpt {
+ | Some(expr) =>
+ let (_afterPat, rest) = partitionAdjacentTrailing(pattern.ppat_loc, trailing)
+ /* NOTE(review): attaches the full `trailing` list rather than `_afterPat`,
+  * while `rest` (a subset of `trailing`) is also walked below — confirm this
+  * double use is intentional upstream. */
+ attach(t.trailing, pattern.ppat_loc, trailing)
+ if isBlockExpr(expr) {
+ walkExpr(expr, t, rest)
+ } else {
+ let (leading, inside, trailing) = partitionByLoc(rest, expr.pexp_loc)
+ attach(t.leading, expr.pexp_loc, leading)
+ walkExpr(expr, t, inside)
+ attach(t.trailing, expr.pexp_loc, trailing)
+ }
+ | None => attach(t.trailing, pattern.ppat_loc, trailing)
+ }
+ }
+
+ /* Attach comments around one call argument. A labelled argument carries an
+  * "ns.namedArgLoc" attribute whose loc covers the `~label`; comments before
+  * and right after the label are attached to it, the remainder to the expr. */
+ and walkExprArgument = ((_argLabel, expr), t, comments) =>
+ switch expr.Parsetree.pexp_attributes {
+ | list{({Location.txt: "ns.namedArgLoc", loc}, _), ..._attrs} =>
+ let (leading, trailing) = partitionLeadingTrailing(comments, loc)
+ attach(t.leading, loc, leading)
+ let (afterLabel, rest) = partitionAdjacentTrailing(loc, trailing)
+ attach(t.trailing, loc, afterLabel)
+ let (before, inside, after) = partitionByLoc(rest, expr.pexp_loc)
+ attach(t.leading, expr.pexp_loc, before)
+ walkExpr(expr, t, inside)
+ attach(t.trailing, expr.pexp_loc, after)
+ | _ =>
+ /* Unlabelled argument: simple leading/inside/trailing split on the expr. */
+ let (before, inside, after) = partitionByLoc(comments, expr.pexp_loc)
+ attach(t.leading, expr.pexp_loc, before)
+ walkExpr(expr, t, inside)
+ attach(t.trailing, expr.pexp_loc, after)
+ }
+
+ /* Attach comments to one match case: pattern, optional `when` guard, and the
+  * right-hand-side expression, consuming the comment list left to right. */
+ and walkCase = (case, t, comments) => {
+ let (before, inside, after) = partitionByLoc(comments, case.pc_lhs.ppat_loc)
+ /* cases don't have a location on their own, leading comments should go
+ * after the bar on the pattern */
+ walkPattern(case.pc_lhs, t, List.concat(list{before, inside}))
+ let (afterPat, rest) = partitionAdjacentTrailing(case.pc_lhs.ppat_loc, after)
+ attach(t.trailing, case.pc_lhs.ppat_loc, afterPat)
+ /* Consume comments belonging to the guard expression, if any. */
+ let comments = switch case.pc_guard {
+ | Some(expr) =>
+ let (before, inside, after) = partitionByLoc(rest, expr.pexp_loc)
+ let (afterExpr, rest) = partitionAdjacentTrailing(expr.pexp_loc, after)
+ if isBlockExpr(expr) {
+ /* Block exprs swallow surrounding comments themselves. */
+ walkExpr(expr, t, List.concat(list{before, inside, afterExpr}))
+ } else {
+ attach(t.leading, expr.pexp_loc, before)
+ walkExpr(expr, t, inside)
+ attach(t.trailing, expr.pexp_loc, afterExpr)
+ }
+ rest
+ | None => rest
+ }
+
+ if isBlockExpr(case.pc_rhs) {
+ walkExpr(case.pc_rhs, t, comments)
+ } else {
+ let (before, inside, after) = partitionByLoc(comments, case.pc_rhs.pexp_loc)
+ attach(t.leading, case.pc_rhs.pexp_loc, before)
+ walkExpr(case.pc_rhs, t, inside)
+ attach(t.trailing, case.pc_rhs.pexp_loc, after)
+ }
+ }
+
+ /* Attach comments to one record-expression row `field: expr`: first around
+  * the field longident, then around the value expression. */
+ and walkExprRecordRow = ((longident, expr), t, comments) => {
+ let (beforeLongident, afterLongident) = partitionLeadingTrailing(comments, longident.loc)
+
+ attach(t.leading, longident.loc, beforeLongident)
+ let (afterLongident, rest) = partitionAdjacentTrailing(longident.loc, afterLongident)
+ attach(t.trailing, longident.loc, afterLongident)
+ let (leading, inside, trailing) = partitionByLoc(rest, expr.pexp_loc)
+ attach(t.leading, expr.pexp_loc, leading)
+ walkExpr(expr, t, inside)
+ attach(t.trailing, expr.pexp_loc, trailing)
+ }
+
+ /* Attach comments to an extension constructor: around its name, then hand
+  * the remainder to the constructor-kind walker. */
+ and walkExtConstr = (extConstr, t, comments) => {
+ let (leading, trailing) = partitionLeadingTrailing(comments, extConstr.pext_name.loc)
+ attach(t.leading, extConstr.pext_name.loc, leading)
+ let (afterName, rest) = partitionAdjacentTrailing(extConstr.pext_name.loc, trailing)
+ attach(t.trailing, extConstr.pext_name.loc, afterName)
+ walkExtensionConstructorKind(extConstr.pext_kind, t, rest)
+ }
+
+ /* Attach comments inside an extension-constructor kind: either a rebind
+  * (`= OtherConstr`) or a declaration with arguments and an optional
+  * return/constraint type. */
+ and walkExtensionConstructorKind = (kind, t, comments) =>
+ switch kind {
+ | Pext_rebind(longident) =>
+ let (leading, trailing) = partitionLeadingTrailing(comments, longident.loc)
+ attach(t.leading, longident.loc, leading)
+ attach(t.trailing, longident.loc, trailing)
+ | Pext_decl(constructorArguments, maybeTypExpr) =>
+ /* walkConstructorArguments returns the comments it did not consume. */
+ let rest = walkConstructorArguments(constructorArguments, t, comments)
+ switch maybeTypExpr {
+ | None => ()
+ | Some(typexpr) =>
+ let (before, inside, after) = partitionByLoc(rest, typexpr.ptyp_loc)
+ attach(t.leading, typexpr.ptyp_loc, before)
+ walkTypExpr(typexpr, t, inside)
+ attach(t.trailing, typexpr.ptyp_loc, after)
+ }
+ }
+
+ /* Attach comments inside a module expression. For constraints and functors
+  * the two sub-nodes can appear in either source order, so the node that
+  * starts first in the source is walked first. */
+ and walkModExpr = (modExpr, t, comments) =>
+ switch modExpr.pmod_desc {
+ | Pmod_ident(longident) =>
+ let (before, after) = partitionLeadingTrailing(comments, longident.loc)
+ attach(t.leading, longident.loc, before)
+ attach(t.trailing, longident.loc, after)
+ | Pmod_structure(structure) => walkStructure(structure, t, comments)
+ | Pmod_extension(extension) => walkExtension(extension, t, comments)
+ | Pmod_unpack(expr) =>
+ let (before, inside, after) = partitionByLoc(comments, expr.pexp_loc)
+ attach(t.leading, expr.pexp_loc, before)
+ walkExpr(expr, t, inside)
+ attach(t.trailing, expr.pexp_loc, after)
+ | Pmod_constraint(modexpr, modtype) =>
+ /* `module X = (E : T)` vs `module X: T = E` — walk whichever sub-node
+ * comes first in the source, then the other with the leftover comments. */
+ if modtype.pmty_loc.loc_start >= modexpr.pmod_loc.loc_end {
+ let (before, inside, after) = partitionByLoc(comments, modexpr.pmod_loc)
+ attach(t.leading, modexpr.pmod_loc, before)
+ walkModExpr(modexpr, t, inside)
+ let (after, rest) = partitionAdjacentTrailing(modexpr.pmod_loc, after)
+ attach(t.trailing, modexpr.pmod_loc, after)
+ let (before, inside, after) = partitionByLoc(rest, modtype.pmty_loc)
+ attach(t.leading, modtype.pmty_loc, before)
+ walkModType(modtype, t, inside)
+ attach(t.trailing, modtype.pmty_loc, after)
+ } else {
+ let (before, inside, after) = partitionByLoc(comments, modtype.pmty_loc)
+ attach(t.leading, modtype.pmty_loc, before)
+ walkModType(modtype, t, inside)
+ let (after, rest) = partitionAdjacentTrailing(modtype.pmty_loc, after)
+ attach(t.trailing, modtype.pmty_loc, after)
+ let (before, inside, after) = partitionByLoc(rest, modexpr.pmod_loc)
+ attach(t.leading, modexpr.pmod_loc, before)
+ walkModExpr(modexpr, t, inside)
+ attach(t.trailing, modexpr.pmod_loc, after)
+ }
+ | Pmod_apply(_callModExpr, _argModExpr) =>
+ /* Flatten `F(A)(B)` into a list and walk each module expr in order. */
+ let modExprs = modExprApply(modExpr)
+ walkList(~getLoc=n => n.Parsetree.pmod_loc, ~walkNode=walkModExpr, modExprs, t, comments)
+ | Pmod_functor(_) =>
+ let (parameters, returnModExpr) = modExprFunctor(modExpr)
+ /* A parameter's comment range spans the label and, if present, its type. */
+ let comments = visitListButContinueWithRemainingComments(~getLoc=((_, lbl, modTypeOption)) =>
+ switch modTypeOption {
+ | None => lbl.Asttypes.loc
+ | Some(modType) => {...lbl.loc, loc_end: modType.Parsetree.pmty_loc.loc_end}
+ }
+ , ~walkNode=walkModExprParameter, ~newlineDelimited=false, parameters, t, comments)
+
+ switch returnModExpr.pmod_desc {
+ | Pmod_constraint(modExpr, modType)
+ when modType.pmty_loc.loc_end.pos_cnum <= modExpr.pmod_loc.loc_start.pos_cnum =>
+ /* Return constraint written before the body: walk type, then body. */
+ let (before, inside, after) = partitionByLoc(comments, modType.pmty_loc)
+ attach(t.leading, modType.pmty_loc, before)
+ walkModType(modType, t, inside)
+ let (after, rest) = partitionAdjacentTrailing(modType.pmty_loc, after)
+ attach(t.trailing, modType.pmty_loc, after)
+ let (before, inside, after) = partitionByLoc(rest, modExpr.pmod_loc)
+ attach(t.leading, modExpr.pmod_loc, before)
+ walkModExpr(modExpr, t, inside)
+ attach(t.trailing, modExpr.pmod_loc, after)
+ | _ =>
+ let (before, inside, after) = partitionByLoc(comments, returnModExpr.pmod_loc)
+ attach(t.leading, returnModExpr.pmod_loc, before)
+ walkModExpr(returnModExpr, t, inside)
+ attach(t.trailing, returnModExpr.pmod_loc, after)
+ }
+ }
+
+ /* Attach comments to a functor parameter: around the label, then (when the
+  * parameter is annotated) around its module type. */
+ and walkModExprParameter = ((_attrs, lbl, modTypeOption), t, comments) => {
+ let (beforeLbl, afterLblComments) = partitionLeadingTrailing(comments, lbl.loc)
+ attach(t.leading, lbl.loc, beforeLbl)
+ switch modTypeOption {
+ | None => attach(t.trailing, lbl.loc, afterLblComments)
+ | Some(modType) =>
+ let (adjacent, remaining) = partitionAdjacentTrailing(lbl.loc, afterLblComments)
+ attach(t.trailing, lbl.loc, adjacent)
+ let (leading, inside, trailing) = partitionByLoc(remaining, modType.pmty_loc)
+ attach(t.leading, modType.pmty_loc, leading)
+ walkModType(modType, t, inside)
+ attach(t.trailing, modType.pmty_loc, trailing)
+ }
+ }
+
+ /* Attach comments inside a module type. */
+ and walkModType = (modType, t, comments) =>
+ switch modType.pmty_desc {
+ | Pmty_ident(longident) | Pmty_alias(longident) =>
+ let (leading, trailing) = partitionLeadingTrailing(comments, longident.loc)
+ attach(t.leading, longident.loc, leading)
+ attach(t.trailing, longident.loc, trailing)
+ | Pmty_signature(signature) => walkSignature(signature, t, comments)
+ | Pmty_extension(extension) => walkExtension(extension, t, comments)
+ | Pmty_typeof(modExpr) =>
+ let (before, inside, after) = partitionByLoc(comments, modExpr.pmod_loc)
+ attach(t.leading, modExpr.pmod_loc, before)
+ walkModExpr(modExpr, t, inside)
+ attach(t.trailing, modExpr.pmod_loc, after)
+ | Pmty_with(modType, _withConstraints) =>
+ let (before, inside, after) = partitionByLoc(comments, modType.pmty_loc)
+ attach(t.leading, modType.pmty_loc, before)
+ walkModType(modType, t, inside)
+ attach(t.trailing, modType.pmty_loc, after)
+ /* TODO: withConstraints */
+ | Pmty_functor(_) =>
+ let (parameters, returnModType) = functorType(modType)
+ /* Each parameter's range covers label + type; an anonymous "_" label
+ * collapses to just the module-type's own location. */
+ let comments = visitListButContinueWithRemainingComments(~getLoc=((_, lbl, modTypeOption)) =>
+ switch modTypeOption {
+ | None => lbl.Asttypes.loc
+ | Some(modType) =>
+ if lbl.txt == "_" {
+ modType.Parsetree.pmty_loc
+ } else {
+ {...lbl.loc, loc_end: modType.Parsetree.pmty_loc.loc_end}
+ }
+ }
+ , ~walkNode=walkModTypeParameter, ~newlineDelimited=false, parameters, t, comments)
+
+ let (before, inside, after) = partitionByLoc(comments, returnModType.pmty_loc)
+ attach(t.leading, returnModType.pmty_loc, before)
+ walkModType(returnModType, t, inside)
+ attach(t.trailing, returnModType.pmty_loc, after)
+ }
+
+ /* Attach comments to a functor-type parameter: around the label, then (when
+  * annotated) around its module type. */
+ and walkModTypeParameter = ((_, lbl, modTypeOption), t, comments) => {
+ let (leading, trailing) = partitionLeadingTrailing(comments, lbl.loc)
+ attach(t.leading, lbl.loc, leading)
+ switch modTypeOption {
+ | None => attach(t.trailing, lbl.loc, trailing)
+ | Some(modType) =>
+ let (afterLbl, rest) = partitionAdjacentTrailing(lbl.loc, trailing)
+ attach(t.trailing, lbl.loc, afterLbl)
+ let (before, inside, after) = partitionByLoc(rest, modType.pmty_loc)
+ attach(t.leading, modType.pmty_loc, before)
+ walkModType(modType, t, inside)
+ attach(t.trailing, modType.pmty_loc, after)
+ }
+ }
+
+ /* Attach comments inside a pattern. Each case partitions the comment list
+  * against the sub-node locations and attaches leading/inside/trailing runs;
+  * cases without comment-bearing structure fall through to `()`. */
+ and walkPattern = (pat, t, comments) => {
+ open Location
+ switch pat.Parsetree.ppat_desc {
+ | _ when comments == list{} => ()
+ | Ppat_alias(pat, alias) =>
+ let (leading, inside, trailing) = partitionByLoc(comments, pat.ppat_loc)
+ attach(t.leading, pat.ppat_loc, leading)
+ walkPattern(pat, t, inside)
+ let (afterPat, rest) = partitionAdjacentTrailing(pat.ppat_loc, trailing)
+ /* Fix: a second, identical attach of `leading` was removed here — `attach`
+  * replaces the table entry, so the duplicate was dead code. */
+ attach(t.trailing, pat.ppat_loc, afterPat)
+ let (beforeAlias, afterAlias) = partitionLeadingTrailing(rest, alias.loc)
+ attach(t.leading, alias.loc, beforeAlias)
+ attach(t.trailing, alias.loc, afterAlias)
+ | Ppat_tuple(list{})
+ | Ppat_array(list{})
+ | Ppat_construct({txt: Longident.Lident("()")}, _)
+ | Ppat_construct({txt: Longident.Lident("[]")}, _) =>
+ /* Empty delimited patterns: all comments live "inside" the delimiters. */
+ attach(t.inside, pat.ppat_loc, comments)
+ | Ppat_array(patterns) =>
+ walkList(~getLoc=n => n.Parsetree.ppat_loc, ~walkNode=walkPattern, patterns, t, comments)
+ | Ppat_tuple(patterns) =>
+ walkList(~getLoc=n => n.Parsetree.ppat_loc, ~walkNode=walkPattern, patterns, t, comments)
+ | Ppat_construct({txt: Longident.Lident("::")}, _) =>
+ /* Spread a cons chain into its element patterns before walking. */
+ walkList(
+ ~getLoc=n => n.Parsetree.ppat_loc,
+ ~walkNode=walkPattern,
+ collectListPatterns(list{}, pat),
+ t,
+ comments,
+ )
+ | Ppat_construct(constr, None) =>
+ let (beforeConstr, afterConstr) = partitionLeadingTrailing(comments, constr.loc)
+
+ attach(t.leading, constr.loc, beforeConstr)
+ attach(t.trailing, constr.loc, afterConstr)
+ | Ppat_construct(constr, Some(pat)) =>
+ let (leading, trailing) = partitionLeadingTrailing(comments, constr.loc)
+ attach(t.leading, constr.loc, leading)
+ let (leading, inside, trailing) = partitionByLoc(trailing, pat.ppat_loc)
+ attach(t.leading, pat.ppat_loc, leading)
+ walkPattern(pat, t, inside)
+ attach(t.trailing, pat.ppat_loc, trailing)
+ | Ppat_variant(_label, None) => ()
+ | Ppat_variant(_label, Some(pat)) => walkPattern(pat, t, comments)
+ | Ppat_type(_) => ()
+ | Ppat_record(recordRows, _) => walkList(~getLoc=((longidentLoc, pattern): (
+ Asttypes.loc,
+ Parsetree.pattern,
+ )) => {
+ ...longidentLoc.loc,
+ loc_end: pattern.Parsetree.ppat_loc.loc_end,
+ }, ~walkNode=walkPatternRecordRow, recordRows, t, comments)
+ | Ppat_or(pattern1, pattern2) =>
+ let (beforePattern1, insidePattern1, afterPattern1) = partitionByLoc(
+ comments,
+ pattern1.ppat_loc,
+ )
+
+ attach(t.leading, pattern1.ppat_loc, beforePattern1)
+ walkPattern(pattern1, t, insidePattern1)
+ let (afterPattern1, rest) = partitionAdjacentTrailing(pattern1.ppat_loc, afterPattern1)
+
+ attach(t.trailing, pattern1.ppat_loc, afterPattern1)
+ let (beforePattern2, insidePattern2, afterPattern2) = partitionByLoc(rest, pattern2.ppat_loc)
+
+ attach(t.leading, pattern2.ppat_loc, beforePattern2)
+ walkPattern(pattern2, t, insidePattern2)
+ attach(t.trailing, pattern2.ppat_loc, afterPattern2)
+ | Ppat_constraint(pattern, typ) =>
+ let (beforePattern, insidePattern, afterPattern) = partitionByLoc(comments, pattern.ppat_loc)
+
+ attach(t.leading, pattern.ppat_loc, beforePattern)
+ walkPattern(pattern, t, insidePattern)
+ let (afterPattern, rest) = partitionAdjacentTrailing(pattern.ppat_loc, afterPattern)
+
+ attach(t.trailing, pattern.ppat_loc, afterPattern)
+ let (beforeTyp, insideTyp, afterTyp) = partitionByLoc(rest, typ.ptyp_loc)
+
+ attach(t.leading, typ.ptyp_loc, beforeTyp)
+ walkTypExpr(typ, t, insideTyp)
+ attach(t.trailing, typ.ptyp_loc, afterTyp)
+ | Ppat_lazy(pattern) | Ppat_exception(pattern) =>
+ let (leading, inside, trailing) = partitionByLoc(comments, pattern.ppat_loc)
+ attach(t.leading, pattern.ppat_loc, leading)
+ walkPattern(pattern, t, inside)
+ attach(t.trailing, pattern.ppat_loc, trailing)
+ | Ppat_unpack(stringLoc) =>
+ let (leading, trailing) = partitionLeadingTrailing(comments, stringLoc.loc)
+ attach(t.leading, stringLoc.loc, leading)
+ attach(t.trailing, stringLoc.loc, trailing)
+ | Ppat_extension(extension) => walkExtension(extension, t, comments)
+ | _ => ()
+ }
+ }
+
+ /* name: firstName */
+ /* Attach comments to one record-pattern row. A punned row `{x}` only has a
+  * label to attach to; a full row attaches around label then sub-pattern. */
+ and walkPatternRecordRow = (row, t, comments) =>
+ switch row {
+ /* punned {x} */
+ | (
+ {Location.txt: Longident.Lident(ident), loc: longidentLoc},
+ {Parsetree.ppat_desc: Ppat_var({txt, _})},
+ ) when ident == txt =>
+ let (beforeLbl, afterLbl) = partitionLeadingTrailing(comments, longidentLoc)
+
+ attach(t.leading, longidentLoc, beforeLbl)
+ attach(t.trailing, longidentLoc, afterLbl)
+ | (longident, pattern) =>
+ let (beforeLbl, afterLbl) = partitionLeadingTrailing(comments, longident.loc)
+
+ attach(t.leading, longident.loc, beforeLbl)
+ let (afterLbl, rest) = partitionAdjacentTrailing(longident.loc, afterLbl)
+ attach(t.trailing, longident.loc, afterLbl)
+ let (leading, inside, trailing) = partitionByLoc(rest, pattern.ppat_loc)
+ attach(t.leading, pattern.ppat_loc, leading)
+ walkPattern(pattern, t, inside)
+ attach(t.trailing, pattern.ppat_loc, trailing)
+ }
+
+ /* Attach comments inside a core type expression. */
+ and walkTypExpr = (typ, t, comments) =>
+ switch typ.Parsetree.ptyp_desc {
+ | _ when comments == list{} => ()
+ | Ptyp_tuple(typexprs) =>
+ walkList(~getLoc=n => n.Parsetree.ptyp_loc, ~walkNode=walkTypExpr, typexprs, t, comments)
+ | Ptyp_extension(extension) => walkExtension(extension, t, comments)
+ | Ptyp_package(packageType) => walkPackageType(packageType, t, comments)
+ | Ptyp_alias(typexpr, _alias) =>
+ let (beforeTyp, insideTyp, afterTyp) = partitionByLoc(comments, typexpr.ptyp_loc)
+ attach(t.leading, typexpr.ptyp_loc, beforeTyp)
+ walkTypExpr(typexpr, t, insideTyp)
+ attach(t.trailing, typexpr.ptyp_loc, afterTyp)
+ | Ptyp_poly(strings, typexpr) =>
+ /* Walk the quantified type variables first, keeping leftover comments for
+ * the body type. */
+ let comments = visitListButContinueWithRemainingComments(
+ ~getLoc=n => n.Asttypes.loc,
+ ~walkNode=(longident, t, comments) => {
+ let (beforeLongident, afterLongident) = partitionLeadingTrailing(comments, longident.loc)
+ attach(t.leading, longident.loc, beforeLongident)
+ attach(t.trailing, longident.loc, afterLongident)
+ },
+ ~newlineDelimited=false,
+ strings,
+ t,
+ comments,
+ )
+
+ let (beforeTyp, insideTyp, afterTyp) = partitionByLoc(comments, typexpr.ptyp_loc)
+ attach(t.leading, typexpr.ptyp_loc, beforeTyp)
+ walkTypExpr(typexpr, t, insideTyp)
+ attach(t.trailing, typexpr.ptyp_loc, afterTyp)
+ | Ptyp_constr(longident, typexprs) =>
+ let (beforeLongident, _afterLongident) = partitionLeadingTrailing(comments, longident.loc)
+ /* NOTE(review): adjacent-trailing is computed from the full `comments`
+ * list, not `_afterLongident` — confirm that is intended upstream. */
+ let (afterLongident, rest) = partitionAdjacentTrailing(longident.loc, comments)
+ attach(t.leading, longident.loc, beforeLongident)
+ attach(t.trailing, longident.loc, afterLongident)
+ walkList(~getLoc=n => n.Parsetree.ptyp_loc, ~walkNode=walkTypExpr, typexprs, t, rest)
+ | Ptyp_arrow(_) =>
+ /* Flatten the arrow chain into parameters + return type, then walk. */
+ let (_, parameters, typexpr) = arrowType(typ)
+ let comments = walkTypeParameters(parameters, t, comments)
+ let (beforeTyp, insideTyp, afterTyp) = partitionByLoc(comments, typexpr.ptyp_loc)
+ attach(t.leading, typexpr.ptyp_loc, beforeTyp)
+ walkTypExpr(typexpr, t, insideTyp)
+ attach(t.trailing, typexpr.ptyp_loc, afterTyp)
+ | Ptyp_object(fields, _) => walkTypObjectFields(fields, t, comments)
+ | _ => ()
+ }
+
+ /* Walk the fields of an object type, giving each field a comment range that
+  * spans its label through the end of its type annotation. */
+ and walkTypObjectFields = (fields, t, comments) => {
+ let fieldLoc = field =>
+ switch field {
+ | Parsetree.Otag(lbl, _, typ) => {...lbl.loc, loc_end: typ.ptyp_loc.loc_end}
+ | _ => Location.none
+ }
+ walkList(~getLoc=fieldLoc, ~walkNode=walkTypObjectField, fields, t, comments)
+ }
+
+ /* Attach comments to one object-type field: around the label, then around
+  * its type. Inherited (`Oinherit`) fields are skipped. */
+ and walkTypObjectField = (field, t, comments) =>
+ switch field {
+ | Otag(lbl, _, typexpr) =>
+ let (beforeLbl, afterLbl) = partitionLeadingTrailing(comments, lbl.loc)
+ attach(t.leading, lbl.loc, beforeLbl)
+ let (afterLbl, rest) = partitionAdjacentTrailing(lbl.loc, afterLbl)
+ attach(t.trailing, lbl.loc, afterLbl)
+ let (beforeTyp, insideTyp, afterTyp) = partitionByLoc(rest, typexpr.ptyp_loc)
+ attach(t.leading, typexpr.ptyp_loc, beforeTyp)
+ walkTypExpr(typexpr, t, insideTyp)
+ attach(t.trailing, typexpr.ptyp_loc, afterTyp)
+ | _ => ()
+ }
+
+ /* Walk arrow-type parameters in order; returns the comments that were not
+  * consumed, so the caller can continue with the return type. */
+ and walkTypeParameters = (typeParameters, t, comments) =>
+ visitListButContinueWithRemainingComments(
+ ~getLoc=((_, _, typexpr)) => typexpr.Parsetree.ptyp_loc,
+ ~walkNode=walkTypeParameter,
+ ~newlineDelimited=false,
+ typeParameters,
+ t,
+ comments,
+ )
+
+ /* Attach comments around one arrow-type parameter's type expression. */
+ and walkTypeParameter = ((_attrs, _lbl, typexpr), t, comments) => {
+ let (leading, inside, trailing) = partitionByLoc(comments, typexpr.ptyp_loc)
+ attach(t.leading, typexpr.ptyp_loc, leading)
+ walkTypExpr(typexpr, t, inside)
+ attach(t.trailing, typexpr.ptyp_loc, trailing)
+ }
+
+ /* Attach comments to a first-class-module (package) type: around the module
+  * path, then through its `with type` constraints. */
+ and walkPackageType = (packageType, t, comments) => {
+ let (longident, packageConstraints) = packageType
+ let (beforeLongident, afterLongident) = partitionLeadingTrailing(comments, longident.loc)
+ attach(t.leading, longident.loc, beforeLongident)
+ let (afterLongident, rest) = partitionAdjacentTrailing(longident.loc, afterLongident)
+ attach(t.trailing, longident.loc, afterLongident)
+ walkPackageConstraints(packageConstraints, t, rest)
+ }
+
+ /* Walk `with type` constraints of a package type; each constraint's comment
+  * range spans the type name through the end of the constraining type. */
+ and walkPackageConstraints = (packageConstraints, t, comments) => {
+ let constraintLoc = ((longident, typexpr)) => {
+ ...longident.Asttypes.loc,
+ loc_end: typexpr.Parsetree.ptyp_loc.loc_end,
+ }
+ walkList(~getLoc=constraintLoc, ~walkNode=walkPackageConstraint, packageConstraints, t, comments)
+ }
+
+ /* Attach comments to one `with type X = t` constraint: around the type name,
+  * then around the constraining type expression. */
+ and walkPackageConstraint = (packageConstraint, t, comments) => {
+ let (longident, typexpr) = packageConstraint
+ let (beforeLongident, afterLongident) = partitionLeadingTrailing(comments, longident.loc)
+ attach(t.leading, longident.loc, beforeLongident)
+ let (afterLongident, rest) = partitionAdjacentTrailing(longident.loc, afterLongident)
+ attach(t.trailing, longident.loc, afterLongident)
+ let (beforeTyp, insideTyp, afterTyp) = partitionByLoc(rest, typexpr.ptyp_loc)
+ attach(t.leading, typexpr.ptyp_loc, beforeTyp)
+ walkTypExpr(typexpr, t, insideTyp)
+ attach(t.trailing, typexpr.ptyp_loc, afterTyp)
+ }
+
+ /* Attach comments to an extension node `%id payload`: around the id, then
+  * into the payload. */
+ and walkExtension = (extension, t, comments) => {
+ let (id, payload) = extension
+ let (beforeId, afterId) = partitionLeadingTrailing(comments, id.loc)
+ attach(t.leading, id.loc, beforeId)
+ let (afterId, rest) = partitionAdjacentTrailing(id.loc, afterId)
+ attach(t.trailing, id.loc, afterId)
+ walkPayload(payload, t, rest)
+ }
+
+ /* Attach comments to an attribute `@id payload`: around the id, then into
+  * the payload — mirrors walkExtension. */
+ and walkAttribute = (attribute, t, comments) => {
+ let (id, payload) = attribute
+ let (beforeId, afterId) = partitionLeadingTrailing(comments, id.loc)
+ attach(t.leading, id.loc, beforeId)
+ let (adjacent, remaining) = partitionAdjacentTrailing(id.loc, afterId)
+ attach(t.trailing, id.loc, adjacent)
+ walkPayload(payload, t, remaining)
+ }
+
+ /* Attach comments inside an attribute/extension payload. Only structure
+  * payloads are walked; other payload kinds are ignored here. */
+ and walkPayload = (payload, t, comments) =>
+ switch payload {
+ | PStr(s) => walkStructure(s, t, comments)
+ | _ => ()
+ }
+}
+
+module Printer = {
+ /* Wrap a doc in parentheses, breaking and indenting the contents when the
+  * group doesn't fit on one line. */
+ let addParens = doc => {
+ let body = Doc.indent(Doc.concat(list{Doc.softLine, doc}))
+ Doc.group(Doc.concat(list{Doc.lparen, body, Doc.softLine, Doc.rparen}))
+ }
+
+ /* Wrap a doc in braces as a single group (no extra line breaks added). */
+ let addBraces = doc => {
+ let braced = list{Doc.lbrace, doc, Doc.rbrace}
+ Doc.group(Doc.concat(braced))
+ }
+
+ /* First leading comment recorded for `loc`, or None when the table has no
+  * (or an empty) entry. */
+ let getFirstLeadingComment = (tbl, loc) =>
+ switch Hashtbl.find(tbl.CommentTable.leading, loc) {
+ | list{comment, ..._} => Some(comment)
+ | list{} => None
+ | exception Not_found => None
+ }
+
+ /* Render the body of a multi-line comment as a Doc. */
+ let printMultilineCommentContent = txt => {
+ /* Turns
+ * |* first line
+ * * second line
+ * * third line *|
+ * Into
+ * |* first line
+ * * second line
+ * * third line *|
+ *
+ * What makes a comment suitable for this kind of indentation?
+ * -> multiple lines + every line starts with a star
+ */
+ let rec indentStars = (lines, acc) =>
+ switch lines {
+ | list{} => Doc.nil
+ | list{lastLine} =>
+ /* Last star-line: align it and keep a trailing space before `*/` when
+ * the line is non-empty. */
+ let line = String.trim(lastLine)
+ let doc = Doc.text(" " ++ line)
+ let trailingSpace = if String.length(line) > 0 {
+ Doc.space
+ } else {
+ Doc.nil
+ }
+ List.rev(list{trailingSpace, doc, ...acc}) |> Doc.concat
+ | list{line, ...lines} =>
+ let line = String.trim(line)
+ let len = String.length(line)
+ if len > 0 && (@doesNotRaise String.get)(line, 0) === '*' {
+ let doc = Doc.text(" " ++ String.trim(line))
+ indentStars(lines, list{Doc.hardLine, doc, ...acc})
+ } else {
+ /* A line doesn't start with '*': the comment isn't star-aligned, so
+ * fall back to printing the whole original text verbatim. */
+ let trailingSpace = {
+ let len = String.length(txt)
+ if len > 0 && String.unsafe_get(txt, len - 1) == ' ' {
+ Doc.space
+ } else {
+ Doc.nil
+ }
+ }
+
+ let content = Comment.trimSpaces(txt)
+ Doc.concat(list{Doc.text(content), trailingSpace})
+ }
+ }
+
+ let lines = String.split_on_char('\n', txt)
+ switch lines {
+ | list{} => Doc.text("/* */")
+ | list{line} =>
+ Doc.concat(list{Doc.text("/* "), Doc.text(Comment.trimSpaces(line)), Doc.text(" */")})
+ | list{first, ...rest} =>
+ let firstLine = Comment.trimSpaces(first)
+ Doc.concat(list{
+ Doc.text("/*"),
+ if String.length(firstLine) > 0 && !String.equal(firstLine, "*") {
+ Doc.space
+ } else {
+ Doc.nil
+ },
+ indentStars(rest, list{Doc.hardLine, Doc.text(firstLine)}),
+ Doc.text("*/"),
+ })
+ }
+ }
+
+ /* Render a comment that trails `nodeLoc`. Same-line comments are emitted as
+  * a line suffix; comments on a later line than the node (or separated from
+  * the previous token by a newline) are placed on their own line. */
+ let printTrailingComment = (nodeLoc: Location.t, comment) => {
+ let singleLine = Comment.isSingleLineComment(comment)
+ let content = {
+ let txt = Comment.txt(comment)
+ if singleLine {
+ Doc.text("// " ++ String.trim(txt))
+ } else {
+ printMultilineCommentContent(txt)
+ }
+ }
+
+ /* Line distance between the end of the previous token and the comment. */
+ let diff = {
+ let cmtStart = Comment.loc(comment).loc_start
+ let prevTokEndPos = Comment.prevTokEndPos(comment)
+ cmtStart.pos_lnum - prevTokEndPos.pos_lnum
+ }
+
+ let isBelow = Comment.loc(comment).loc_start.pos_lnum > nodeLoc.loc_end.pos_lnum
+ if diff > 0 || isBelow {
+ /* Comment sits on its own line: break and preserve one blank line when
+ * the original had more than one line of separation. */
+ Doc.concat(list{
+ Doc.breakParent,
+ Doc.lineSuffix(
+ Doc.concat(list{
+ Doc.hardLine,
+ if diff > 1 {
+ Doc.hardLine
+ } else {
+ Doc.nil
+ },
+ content,
+ }),
+ ),
+ })
+ } else if !singleLine {
+ Doc.concat(list{Doc.space, content})
+ } else {
+ /* `//` comments must end the physical line, so defer via lineSuffix. */
+ Doc.lineSuffix(Doc.concat(list{Doc.space, content}))
+ }
+ }
+
+ /* Render a leading comment plus the separator towards what follows it.
+  * `nextComment`, when given, is the next comment in the same leading run and
+  * determines the blank-line handling between the two. */
+ let printLeadingComment = (~nextComment=?, comment) => {
+ let singleLine = Comment.isSingleLineComment(comment)
+ let content = {
+ let txt = Comment.txt(comment)
+ if singleLine {
+ Doc.text("// " ++ String.trim(txt))
+ } else {
+ printMultilineCommentContent(txt)
+ }
+ }
+
+ let separator = Doc.concat(list{
+ /* A `//` comment always terminates its line. */
+ if singleLine {
+ Doc.concat(list{Doc.hardLine, Doc.breakParent})
+ } else {
+ Doc.nil
+ },
+ switch nextComment {
+ | Some(next) =>
+ let nextLoc = Comment.loc(next)
+ let currLoc = Comment.loc(comment)
+ let diff = nextLoc.Location.loc_start.pos_lnum - currLoc.Location.loc_end.pos_lnum
+
+ /* After a single-line comment the newline was already emitted above, so
+ * only a possible blank line remains. (The original had two identical
+ * branches keyed on the next comment's style; they were merged — the
+ * next comment's style never mattered here.) */
+ if singleLine {
+ if diff > 1 {
+ Doc.hardLine
+ } else {
+ Doc.nil
+ }
+ } else if diff > 1 {
+ Doc.concat(list{Doc.hardLine, Doc.hardLine})
+ } else if diff === 1 {
+ Doc.hardLine
+ } else {
+ Doc.space
+ }
+ | None => Doc.nil
+ },
+ })
+
+ Doc.concat(list{content, separator})
+ }
+
+ /* Print the comments recorded as living *inside* `loc` (e.g. inside empty
+  * delimiters). Consumes the table entry so the comments print only once. */
+ let printCommentsInside = (cmtTbl, loc) => {
+ let rec loop = (acc, comments) =>
+ switch comments {
+ | list{} => Doc.nil
+ | list{comment} =>
+ let cmtDoc = printLeadingComment(comment)
+ let doc = Doc.group(Doc.concat(list{Doc.concat(List.rev(list{cmtDoc, ...acc}))}))
+
+ doc
+ | list{comment, ...list{nextComment, ..._comments} as rest} =>
+ /* Pass the next comment along so the separator between the two is right. */
+ let cmtDoc = printLeadingComment(~nextComment, comment)
+ loop(list{cmtDoc, ...acc}, rest)
+ }
+
+ switch Hashtbl.find(cmtTbl.CommentTable.inside, loc) {
+ | exception Not_found => Doc.nil
+ | comments =>
+ Hashtbl.remove(cmtTbl.inside, loc)
+ Doc.group(loop(list{}, comments))
+ }
+ }
+
+ /* Prefix `node` with its leading comments (looked up by `loc` in `tbl`) and
+  * an appropriate separator; returns `node` unchanged if there are none. */
+ let printLeadingComments = (node, tbl, loc) => {
+ let rec loop = (acc, comments) =>
+ switch comments {
+ | list{} => node
+ | list{comment} =>
+ let cmtDoc = printLeadingComment(comment)
+ /* Line distance between the last comment and the node decides the
+ * separator: same line → space, gap → blank line, else one newline. */
+ let diff = loc.Location.loc_start.pos_lnum - Comment.loc(comment).Location.loc_end.pos_lnum
+
+ let separator = if Comment.isSingleLineComment(comment) {
+ if diff > 1 {
+ Doc.hardLine
+ } else {
+ Doc.nil
+ }
+ } else if diff === 0 {
+ Doc.space
+ } else if diff > 1 {
+ Doc.concat(list{Doc.hardLine, Doc.hardLine})
+ } else {
+ Doc.hardLine
+ }
+
+ let doc = Doc.group(
+ Doc.concat(list{Doc.concat(List.rev(list{cmtDoc, ...acc})), separator, node}),
+ )
+
+ doc
+ | list{comment, ...list{nextComment, ..._comments} as rest} =>
+ let cmtDoc = printLeadingComment(~nextComment, comment)
+ loop(list{cmtDoc, ...acc}, rest)
+ }
+
+ switch Hashtbl.find(tbl, loc) {
+ | exception Not_found => node
+ | comments =>
+ /* Remove comments from tbl: Some ast nodes have the same location.
+ * We only want to print comments once */
+ Hashtbl.remove(tbl, loc)
+ loop(list{}, comments)
+ }
+ }
+
+ /* Suffix `node` with its trailing comments (looked up by `loc` in `tbl`);
+  * returns `node` unchanged if there are none. */
+ let printTrailingComments = (node, tbl, loc) => {
+ let rec loop = (acc, comments) =>
+ switch comments {
+ | list{} => Doc.concat(List.rev(acc))
+ | list{comment, ...comments} =>
+ let cmtDoc = printTrailingComment(loc, comment)
+ loop(list{cmtDoc, ...acc}, comments)
+ }
+
+ switch Hashtbl.find(tbl, loc) {
+ | exception Not_found => node
+ | list{} => node
+ | list{_first, ..._} as comments =>
+ /* Remove comments from tbl: Some ast nodes have the same location.
+ * We only want to print comments once */
+ Hashtbl.remove(tbl, loc)
+ let cmtsDoc = loop(list{}, comments)
+ Doc.concat(list{node, cmtsDoc})
+ }
+ }
+
+ /* Wrap `doc` with both the leading and trailing comments stored for `loc`. */
+ let printComments = (doc, tbl: CommentTable.t, loc) =>
+ printTrailingComments(printLeadingComments(doc, tbl.leading, loc), tbl.trailing, loc)
+
+ /* Print a newline-separated list of nodes, preserving at most one original
+  * blank line between consecutive items. Breaks the group when the list
+  * spans multiple source lines (or ~forceBreak is set). */
+ let printList = (~getLoc, ~nodes, ~print, ~forceBreak=false, t) => {
+ let rec loop = (prevLoc: Location.t, acc, nodes) =>
+ switch nodes {
+ | list{} => (prevLoc, Doc.concat(List.rev(acc)))
+ | list{node, ...nodes} =>
+ let loc = getLoc(node)
+ /* Blank-line detection must account for a leading comment that sits
+ * above the node: measure from the comment's start, not the node's. */
+ let startPos = switch getFirstLeadingComment(t, loc) {
+ | None => loc.loc_start
+ | Some(comment) => Comment.loc(comment).loc_start
+ }
+
+ let sep = if startPos.pos_lnum - prevLoc.loc_end.pos_lnum > 1 {
+ Doc.concat(list{Doc.hardLine, Doc.hardLine})
+ } else {
+ Doc.hardLine
+ }
+
+ let doc = printComments(print(node, t), t, loc)
+ loop(loc, list{doc, sep, ...acc}, nodes)
+ }
+
+ switch nodes {
+ | list{} => Doc.nil
+ | list{node, ...nodes} =>
+ let firstLoc = getLoc(node)
+ let doc = printComments(print(node, t), t, firstLoc)
+ let (lastLoc, docs) = loop(firstLoc, list{doc}, nodes)
+ let forceBreak = forceBreak || firstLoc.loc_start.pos_lnum !== lastLoc.loc_end.pos_lnum
+
+ Doc.breakableGroup(~forceBreak, docs)
+ }
+ }
+
+ /* Like printList, but `print` also receives the node's index. */
+ let printListi = (~getLoc, ~nodes, ~print, ~forceBreak=false, t) => {
+ let rec loop = (i, prevLoc: Location.t, acc, nodes) =>
+ switch nodes {
+ | list{} => (prevLoc, Doc.concat(List.rev(acc)))
+ | list{node, ...nodes} =>
+ let loc = getLoc(node)
+ let startPos = switch getFirstLeadingComment(t, loc) {
+ | None => loc.loc_start
+ | Some(comment) => Comment.loc(comment).loc_start
+ }
+
+ /* NOTE(review): falls back to Doc.line here where printList uses
+ * Doc.hardLine — presumably intentional (allows items to stay on one
+ * line); confirm against callers. */
+ let sep = if startPos.pos_lnum - prevLoc.loc_end.pos_lnum > 1 {
+ Doc.concat(list{Doc.hardLine, Doc.hardLine})
+ } else {
+ Doc.line
+ }
+
+ let doc = printComments(print(node, t, i), t, loc)
+ loop(i + 1, loc, list{doc, sep, ...acc}, nodes)
+ }
+
+ switch nodes {
+ | list{} => Doc.nil
+ | list{node, ...nodes} =>
+ let firstLoc = getLoc(node)
+ let doc = printComments(print(node, t, 0), t, firstLoc)
+ let (lastLoc, docs) = loop(1, firstLoc, list{doc}, nodes)
+ let forceBreak = forceBreak || firstLoc.loc_start.pos_lnum !== lastLoc.loc_end.pos_lnum
+
+ Doc.breakableGroup(~forceBreak, docs)
+ }
+ }
+
+ /* Accumulate the segments of a longident as Doc.text nodes, left to right.
+  * `Lapply` renders as `Lid1(Lid2)` with each side dot-joined. */
+ let rec printLongidentAux = (accu, x) =>
+ switch x {
+ | Longident.Lident(s) => list{Doc.text(s), ...accu}
+ | Ldot(lid, s) => printLongidentAux(list{Doc.text(s), ...accu}, lid)
+ | Lapply(lid1, lid2) =>
+ let d1 = Doc.join(~sep=Doc.dot, printLongidentAux(list{}, lid1))
+ let d2 = Doc.join(~sep=Doc.dot, printLongidentAux(list{}, lid2))
+ list{Doc.concat(list{d1, Doc.lparen, d2, Doc.rparen}), ...accu}
+ }
+
+ /* Render a longident, dot-joining multi-segment paths. */
+ let printLongident = lid =>
+ switch lid {
+ | Longident.Lident(name) => Doc.text(name)
+ | longident => Doc.join(~sep=Doc.dot, printLongidentAux(list{}, longident))
+ }
+
+ /* Whether an identifier can be printed as-is or needs exotic `\"..."` quoting. */
+ type identifierStyle =
+ | ExoticIdent
+ | NormalIdent
+
+ /* Classify `txt`: NormalIdent when every char is a valid identifier char
+  * (first char additionally restricted; uppercase allowed only with
+  * ~allowUident). Keywords are exotic, except "list". */
+ let classifyIdentContent = (~allowUident=false, txt) => {
+ let len = String.length(txt)
+ let rec go = i =>
+ if i === len {
+ NormalIdent
+ } else {
+ let c = String.unsafe_get(txt, i)
+ if (
+ i === 0 &&
+ !(
+ (allowUident && (c >= 'A' && c <= 'Z')) ||
+ ((c >= 'a' && c <= 'z') ||
+ (c == '_' || (c >= '0' && c <= '9')))
+ )
+ ) {
+ ExoticIdent
+ } else if (
+ !(
+ (c >= 'a' && c <= 'z') ||
+ ((c >= 'A' && c <= 'Z') ||
+ (c == '\'' || (c == '_' || c >= '0' && c <= '9')))
+ )
+ ) {
+ ExoticIdent
+ } else {
+ go(i + 1)
+ }
+ }
+
+ /* "list" is a keyword in ReScript syntax but still a printable ident. */
+ if Token.isKeywordTxt(txt) && txt != "list" {
+ ExoticIdent
+ } else {
+ go(0)
+ }
+ }
+
+ let printIdentLike = (~allowUident=?, txt) =>
+ switch classifyIdentContent(~allowUident?, txt) {
+ | ExoticIdent => Doc.concat(list{Doc.text("\\\""), Doc.text(txt), Doc.text("\"")})
+ | NormalIdent => Doc.text(txt)
+ }
+
+ /* Prints a Longident used as a value path: the module prefix is printed
+  * verbatim, only the final segment goes through exotic-ident escaping.
+  * Lapply is not expected here (error text is emitted instead). */
+ let printLident = l =>
+ switch l {
+ | Longident.Lident(txt) => printIdentLike(txt)
+ | Longident.Ldot(path, txt) =>
+ let txts = Longident.flatten(path)
+ Doc.concat(list{
+ Doc.join(~sep=Doc.dot, List.map(Doc.text, txts)),
+ Doc.dot,
+ printIdentLike(txt),
+ })
+ | _ => Doc.text("printLident: Longident.Lapply is not supported")
+ }
+
+ /* Longident with comments attached from the comment table. */
+ let printLongidentLocation = (l, cmtTbl) => {
+ let doc = printLongident(l.Location.txt)
+ printComments(doc, cmtTbl, l.loc)
+ }
+
+ /* Module.SubModule.x */
+ let printLidentPath = (path, cmtTbl) => {
+ let doc = printLident(path.Location.txt)
+ printComments(doc, cmtTbl, path.loc)
+ }
+
+ /* Module.SubModule.x or Module.SubModule.X */
+ let printIdentPath = (path, cmtTbl) => {
+ let doc = printLident(path.Location.txt)
+ printComments(doc, cmtTbl, path.loc)
+ }
+
+ /* A string loc printed like an identifier, with comments attached. */
+ let printStringLoc = (sloc, cmtTbl) => {
+ let doc = printIdentLike(sloc.Location.txt)
+ printComments(doc, cmtTbl, sloc.loc)
+ }
+
+ /* Prints a Parsetree constant. Integer/float keep their source text `s`;
+  * an integer suffix char (e.g. width marker) is appended verbatim.
+  * A string with a delimiter prefix prints as prefix`txt` (template form),
+  * otherwise as a plain double-quoted string. */
+ let printConstant = c =>
+ switch c {
+ | Parsetree.Pconst_integer(s, suffix) =>
+ switch suffix {
+ | Some(c) => Doc.text(s ++ Char.escaped(c))
+ | None => Doc.text(s)
+ }
+ | Pconst_string(txt, None) => Doc.text("\"" ++ (txt ++ "\""))
+ | Pconst_string(txt, Some(prefix)) =>
+ Doc.concat(list{
+ if prefix == "" {
+ Doc.nil
+ } else {
+ Doc.text(prefix)
+ },
+ Doc.text("`" ++ (txt ++ "`")),
+ })
+ | Pconst_float(s, _) => Doc.text(s)
+ | Pconst_char(c) => Doc.text("'" ++ (Char.escaped(c) ++ "'"))
+ }
+
+ /* Entry point for printing a whole structure (implementation file).
+  * An empty structure still flushes comments attached to Location.none. */
+ let rec printStructure = (s: Parsetree.structure, t) =>
+ switch s {
+ | list{} => printCommentsInside(t, Location.none)
+ | structure =>
+ printList(~getLoc=s => s.Parsetree.pstr_loc, ~nodes=structure, ~print=printStructureItem, t)
+ }
+
+ /* Dispatches one structure item to its dedicated printer.
+  * Class/class-type items are not supported and print nothing. */
+ and printStructureItem = (si: Parsetree.structure_item, cmtTbl) =>
+ switch si.pstr_desc {
+ | Pstr_value(rec_flag, valueBindings) =>
+ let recFlag = switch rec_flag {
+ | Asttypes.Nonrecursive => Doc.nil
+ | Asttypes.Recursive => Doc.text("rec ")
+ }
+
+ printValueBindings(~recFlag, valueBindings, cmtTbl)
+ | Pstr_type(recFlag, typeDeclarations) =>
+ let recFlag = switch recFlag {
+ | Asttypes.Nonrecursive => Doc.nil
+ | Asttypes.Recursive => Doc.text("rec ")
+ }
+
+ printTypeDeclarations(~recFlag, typeDeclarations, cmtTbl)
+ | Pstr_primitive(valueDescription) => printValueDescription(valueDescription, cmtTbl)
+ | Pstr_eval(expr, attrs) =>
+ let exprDoc = {
+ let doc = printExpressionWithComments(expr, cmtTbl)
+ /* top-level expressions may need parens/braces depending on shape */
+ switch Parens.structureExpr(expr) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, expr, braces)
+ | Nothing => doc
+ }
+ }
+
+ Doc.concat(list{printAttributes(attrs), exprDoc})
+ | Pstr_attribute(attr) =>
+ Doc.concat(list{Doc.text("@"), printAttributeWithComments(attr, cmtTbl)})
+ | Pstr_extension(extension, attrs) =>
+ Doc.concat(list{
+ printAttributes(attrs),
+ Doc.concat(list{printExtensionWithComments(~atModuleLvl=true, extension, cmtTbl)}),
+ })
+ | Pstr_include(includeDeclaration) => printIncludeDeclaration(includeDeclaration, cmtTbl)
+ | Pstr_open(openDescription) => printOpenDescription(openDescription, cmtTbl)
+ | Pstr_modtype(modTypeDecl) => printModuleTypeDeclaration(modTypeDecl, cmtTbl)
+ | Pstr_module(moduleBinding) => printModuleBinding(~isRec=false, moduleBinding, cmtTbl, 0)
+ | Pstr_recmodule(moduleBindings) =>
+ printListi(
+ ~getLoc=mb => mb.Parsetree.pmb_loc,
+ ~nodes=moduleBindings,
+ ~print=printModuleBinding(~isRec=true),
+ cmtTbl,
+ )
+ | Pstr_exception(extensionConstructor) => printExceptionDef(extensionConstructor, cmtTbl)
+ | Pstr_typext(typeExtension) => printTypeExtension(typeExtension, cmtTbl)
+ | Pstr_class(_) | Pstr_class_type(_) => Doc.nil
+ }
+
+ /* Prints `type path<params> += constructors`. The constructor group is
+  * force-broken when the first constructor starts on a later line than the
+  * type path, or when the constructors themselves span multiple lines. */
+ and printTypeExtension = (te: Parsetree.type_extension, cmtTbl) => {
+ let prefix = Doc.text("type ")
+ let name = printLidentPath(te.ptyext_path, cmtTbl)
+ let typeParams = printTypeParams(te.ptyext_params, cmtTbl)
+ let extensionConstructors = {
+ let ecs = te.ptyext_constructors
+ /* multi-line in the source => keep multi-line in the output */
+ let forceBreak = switch (ecs, List.rev(ecs)) {
+ | (list{first, ..._}, list{last, ..._}) =>
+ first.pext_loc.loc_start.pos_lnum > te.ptyext_path.loc.loc_end.pos_lnum ||
+ first.pext_loc.loc_start.pos_lnum < last.pext_loc.loc_end.pos_lnum
+ | _ => false
+ }
+
+ let privateFlag = switch te.ptyext_private {
+ | Asttypes.Private => Doc.concat(list{Doc.text("private"), Doc.line})
+ | Public => Doc.nil
+ }
+
+ let rows = printListi(
+ ~getLoc=n => n.Parsetree.pext_loc,
+ ~print=printExtensionConstructor,
+ ~nodes=ecs,
+ ~forceBreak,
+ cmtTbl,
+ )
+
+ Doc.breakableGroup(
+ ~forceBreak,
+ Doc.indent(
+ Doc.concat(list{
+ Doc.line,
+ privateFlag,
+ rows,
+ /* Doc.join ~sep:Doc.line ( */
+ /* List.mapi printExtensionConstructor ecs */
+ /* ) */
+ }),
+ ),
+ )
+ }
+
+ Doc.group(
+ Doc.concat(list{
+ printAttributes(~loc=te.ptyext_path.loc, te.ptyext_attributes),
+ prefix,
+ name,
+ typeParams,
+ Doc.text(" +="),
+ extensionConstructors,
+ }),
+ )
+ }
+
+ /* Prints one module binding. `i` is the position in a rec-module group:
+  * item 0 gets "module " (plus "rec " when ~isRec), later items get "and ".
+  * A Pmod_constraint expression is split into `name: type = expr` form. */
+ and printModuleBinding = (~isRec, moduleBinding, cmtTbl, i) => {
+ let prefix = if i == 0 {
+ Doc.concat(list{
+ Doc.text("module "),
+ if isRec {
+ Doc.text("rec ")
+ } else {
+ Doc.nil
+ },
+ })
+ } else {
+ Doc.text("and ")
+ }
+
+ let (modExprDoc, modConstraintDoc) = switch moduleBinding.pmb_expr {
+ | {pmod_desc: Pmod_constraint(modExpr, modType)} => (
+ printModExpr(modExpr, cmtTbl),
+ Doc.concat(list{Doc.text(": "), printModType(modType, cmtTbl)}),
+ )
+ | modExpr => (printModExpr(modExpr, cmtTbl), Doc.nil)
+ }
+
+ let modName = {
+ let doc = Doc.text(moduleBinding.pmb_name.Location.txt)
+ printComments(doc, cmtTbl, moduleBinding.pmb_name.loc)
+ }
+
+ let doc = Doc.concat(list{
+ printAttributes(~loc=moduleBinding.pmb_name.loc, moduleBinding.pmb_attributes),
+ prefix,
+ modName,
+ modConstraintDoc,
+ Doc.text(" = "),
+ modExprDoc,
+ })
+ printComments(doc, cmtTbl, moduleBinding.pmb_loc)
+ }
+
+ /* `module type Name` with an optional ` = <module type>` body. */
+ and printModuleTypeDeclaration = (modTypeDecl: Parsetree.module_type_declaration, cmtTbl) => {
+ let modName = {
+ let doc = Doc.text(modTypeDecl.pmtd_name.txt)
+ printComments(doc, cmtTbl, modTypeDecl.pmtd_name.loc)
+ }
+
+ Doc.concat(list{
+ printAttributes(modTypeDecl.pmtd_attributes),
+ Doc.text("module type "),
+ modName,
+ switch modTypeDecl.pmtd_type {
+ | None => Doc.nil
+ | Some(modType) => Doc.concat(list{Doc.text(" = "), printModType(modType, cmtTbl)})
+ },
+ })
+ }
+
+ /* Prints a module type expression. Attributes are emitted inside the
+  * ident/signature/functor branches themselves, so the generic attribute
+  * pass at the bottom skips those cases to avoid printing them twice. */
+ and printModType = (modType, cmtTbl) => {
+ let modTypeDoc = switch modType.pmty_desc {
+ | Parsetree.Pmty_ident(longident) =>
+ Doc.concat(list{
+ printAttributes(~loc=longident.loc, modType.pmty_attributes),
+ printLongidentLocation(longident, cmtTbl),
+ })
+ | Pmty_signature(signature) =>
+ /* signatures always break over multiple lines */
+ let signatureDoc = Doc.breakableGroup(
+ ~forceBreak=true,
+ Doc.concat(list{
+ Doc.lbrace,
+ Doc.indent(Doc.concat(list{Doc.line, printSignature(signature, cmtTbl)})),
+ Doc.line,
+ Doc.rbrace,
+ }),
+ )
+ Doc.concat(list{printAttributes(modType.pmty_attributes), signatureDoc})
+ | Pmty_functor(_) =>
+ let (parameters, returnType) = ParsetreeViewer.functorType(modType)
+ let parametersDoc = switch parameters {
+ | list{} => Doc.nil
+ /* single unnamed parameter: print its type without parens */
+ | list{(attrs, {Location.txt: "_", loc}, Some(modType))} =>
+ let cmtLoc = {...loc, loc_end: modType.Parsetree.pmty_loc.loc_end}
+
+ let attrs = switch attrs {
+ | list{} => Doc.nil
+ | attrs =>
+ Doc.concat(list{Doc.join(~sep=Doc.line, List.map(printAttribute, attrs)), Doc.line})
+ }
+ let doc = Doc.concat(list{attrs, printModType(modType, cmtTbl)})
+ printComments(doc, cmtTbl, cmtLoc)
+ | params =>
+ /* general case: a parenthesized, comma-separated parameter list */
+ Doc.group(
+ Doc.concat(list{
+ Doc.lparen,
+ Doc.indent(
+ Doc.concat(list{
+ Doc.softLine,
+ Doc.join(~sep=Doc.concat(list{Doc.comma, Doc.line}), List.map(((
+ attrs,
+ lbl,
+ modType,
+ )) => {
+ let cmtLoc = switch modType {
+ | None => lbl.Asttypes.loc
+ | Some(modType) => {
+ ...lbl.Asttypes.loc,
+ loc_end: modType.Parsetree.pmty_loc.loc_end,
+ }
+ }
+
+ let attrs = switch attrs {
+ | list{} => Doc.nil
+ | attrs =>
+ Doc.concat(list{
+ Doc.join(~sep=Doc.line, List.map(printAttribute, attrs)),
+ Doc.line,
+ })
+ }
+ /* "_" labels are anonymous: omit the name and the ": " */
+ let lblDoc = if lbl.Location.txt == "_" {
+ Doc.nil
+ } else {
+ let doc = Doc.text(lbl.txt)
+ printComments(doc, cmtTbl, lbl.loc)
+ }
+
+ let doc = Doc.concat(list{
+ attrs,
+ lblDoc,
+ switch modType {
+ | None => Doc.nil
+ | Some(modType) =>
+ Doc.concat(list{
+ if lbl.txt == "_" {
+ Doc.nil
+ } else {
+ Doc.text(": ")
+ },
+ printModType(modType, cmtTbl),
+ })
+ },
+ })
+ printComments(doc, cmtTbl, cmtLoc)
+ }, params)),
+ }),
+ ),
+ Doc.trailingComma,
+ Doc.softLine,
+ Doc.rparen,
+ }),
+ )
+ }
+
+ let returnDoc = {
+ let doc = printModType(returnType, cmtTbl)
+ if Parens.modTypeFunctorReturn(returnType) {
+ addParens(doc)
+ } else {
+ doc
+ }
+ }
+
+ Doc.group(
+ Doc.concat(list{
+ parametersDoc,
+ Doc.group(Doc.concat(list{Doc.text(" =>"), Doc.line, returnDoc})),
+ }),
+ )
+ | Pmty_typeof(modExpr) =>
+ Doc.concat(list{Doc.text("module type of "), printModExpr(modExpr, cmtTbl)})
+ | Pmty_extension(extension) => printExtensionWithComments(~atModuleLvl=false, extension, cmtTbl)
+ | Pmty_alias(longident) =>
+ Doc.concat(list{Doc.text("module "), printLongidentLocation(longident, cmtTbl)})
+ | Pmty_with(modType, withConstraints) =>
+ let operand = {
+ let doc = printModType(modType, cmtTbl)
+ if Parens.modTypeWithOperand(modType) {
+ addParens(doc)
+ } else {
+ doc
+ }
+ }
+
+ Doc.group(
+ Doc.concat(list{
+ operand,
+ Doc.indent(Doc.concat(list{Doc.line, printWithConstraints(withConstraints, cmtTbl)})),
+ }),
+ )
+ }
+
+ /* these branches already printed their own attributes above */
+ let attrsAlreadyPrinted = switch modType.pmty_desc {
+ | Pmty_functor(_) | Pmty_signature(_) | Pmty_ident(_) => true
+ | _ => false
+ }
+
+ let doc = Doc.concat(list{
+ if attrsAlreadyPrinted {
+ Doc.nil
+ } else {
+ printAttributes(modType.pmty_attributes)
+ },
+ modTypeDoc,
+ })
+ printComments(doc, cmtTbl, modType.pmty_loc)
+ }
+
+ /* Prints a `with ... and ...` constraint list: the first row is prefixed
+  * with "with ", all following rows with "and ". */
+ and printWithConstraints = (withConstraints, cmtTbl) => {
+ let rows = List.mapi((i, withConstraint) =>
+ Doc.group(
+ Doc.concat(list{
+ if i === 0 {
+ Doc.text("with ")
+ } else {
+ Doc.text("and ")
+ },
+ printWithConstraint(withConstraint, cmtTbl),
+ }),
+ )
+ , withConstraints)
+
+ Doc.join(~sep=Doc.line, rows)
+ }
+
+ /* One `with` constraint. Type (sub)constraints reuse printTypeDeclaration
+  * with "=" or ":=" as the equal sign; note CommentTable.empty is passed,
+  * so comments inside the constraint's declaration are not attached here. */
+ and printWithConstraint = (withConstraint: Parsetree.with_constraint, cmtTbl) =>
+ switch withConstraint {
+ /* with type X.t = ... */
+ | Pwith_type(longident, typeDeclaration) =>
+ Doc.group(
+ printTypeDeclaration(
+ ~name=printLidentPath(longident, cmtTbl),
+ ~equalSign="=",
+ ~recFlag=Doc.nil,
+ 0,
+ typeDeclaration,
+ CommentTable.empty,
+ ),
+ )
+ /* with module X.Y = Z */
+ | Pwith_module({txt: longident1}, {txt: longident2}) =>
+ Doc.concat(list{
+ Doc.text("module "),
+ printLongident(longident1),
+ Doc.text(" ="),
+ Doc.indent(Doc.concat(list{Doc.line, printLongident(longident2)})),
+ })
+ /* with type X.t := ..., same format as [Pwith_type] */
+ | Pwith_typesubst(longident, typeDeclaration) =>
+ Doc.group(
+ printTypeDeclaration(
+ ~name=printLidentPath(longident, cmtTbl),
+ ~equalSign=":=",
+ ~recFlag=Doc.nil,
+ 0,
+ typeDeclaration,
+ CommentTable.empty,
+ ),
+ )
+ | Pwith_modsubst({txt: longident1}, {txt: longident2}) =>
+ Doc.concat(list{
+ Doc.text("module "),
+ printLongident(longident1),
+ Doc.text(" :="),
+ Doc.indent(Doc.concat(list{Doc.line, printLongident(longident2)})),
+ })
+ }
+
+ /* Entry point for printing a signature (interface). Mirrors printStructure:
+  * an empty signature still flushes comments attached to Location.none. */
+ and printSignature = (signature, cmtTbl) =>
+ switch signature {
+ | list{} => printCommentsInside(cmtTbl, Location.none)
+ | signature =>
+ printList(
+ ~getLoc=s => s.Parsetree.psig_loc,
+ ~nodes=signature,
+ ~print=printSignatureItem,
+ cmtTbl,
+ )
+ }
+
+ /* Dispatches one signature item to its printer; class items print nothing. */
+ and printSignatureItem = (si: Parsetree.signature_item, cmtTbl) =>
+ switch si.psig_desc {
+ | Parsetree.Psig_value(valueDescription) => printValueDescription(valueDescription, cmtTbl)
+ | Psig_type(recFlag, typeDeclarations) =>
+ let recFlag = switch recFlag {
+ | Asttypes.Nonrecursive => Doc.nil
+ | Asttypes.Recursive => Doc.text("rec ")
+ }
+
+ printTypeDeclarations(~recFlag, typeDeclarations, cmtTbl)
+ | Psig_typext(typeExtension) => printTypeExtension(typeExtension, cmtTbl)
+ | Psig_exception(extensionConstructor) => printExceptionDef(extensionConstructor, cmtTbl)
+ | Psig_module(moduleDeclaration) => printModuleDeclaration(moduleDeclaration, cmtTbl)
+ | Psig_recmodule(moduleDeclarations) => printRecModuleDeclarations(moduleDeclarations, cmtTbl)
+ | Psig_modtype(modTypeDecl) => printModuleTypeDeclaration(modTypeDecl, cmtTbl)
+ | Psig_open(openDescription) => printOpenDescription(openDescription, cmtTbl)
+ | Psig_include(includeDescription) => printIncludeDescription(includeDescription, cmtTbl)
+ | Psig_attribute(attr) =>
+ Doc.concat(list{Doc.text("@"), printAttributeWithComments(attr, cmtTbl)})
+ | Psig_extension(extension, attrs) =>
+ Doc.concat(list{
+ printAttributes(attrs),
+ Doc.concat(list{printExtensionWithComments(~atModuleLvl=true, extension, cmtTbl)}),
+ })
+ | Psig_class(_) | Psig_class_type(_) => Doc.nil
+ }
+
+ /* A group of `module rec` declarations in a signature. */
+ and printRecModuleDeclarations = (moduleDeclarations, cmtTbl) =>
+ printListi(
+ ~getLoc=n => n.Parsetree.pmd_loc,
+ ~nodes=moduleDeclarations,
+ ~print=printRecModuleDeclaration,
+ cmtTbl,
+ )
+
+ /* One declaration inside `module rec`: item 0 is prefixed "module rec ",
+  * the rest "and ". Aliases print as ` = Path`, other module types need
+  * ": " (and parens around `with`-types to keep them unambiguous). */
+ and printRecModuleDeclaration = (md, cmtTbl, i) => {
+ let body = switch md.pmd_type.pmty_desc {
+ | Parsetree.Pmty_alias(longident) =>
+ Doc.concat(list{Doc.text(" = "), printLongidentLocation(longident, cmtTbl)})
+ | _ =>
+ let needsParens = switch md.pmd_type.pmty_desc {
+ | Pmty_with(_) => true
+ | _ => false
+ }
+
+ let modTypeDoc = {
+ let doc = printModType(md.pmd_type, cmtTbl)
+ if needsParens {
+ addParens(doc)
+ } else {
+ doc
+ }
+ }
+
+ Doc.concat(list{Doc.text(": "), modTypeDoc})
+ }
+
+ let prefix = if i < 1 {
+ "module rec "
+ } else {
+ "and "
+ }
+ Doc.concat(list{
+ printAttributes(~loc=md.pmd_name.loc, md.pmd_attributes),
+ Doc.text(prefix),
+ printComments(Doc.text(md.pmd_name.txt), cmtTbl, md.pmd_name.loc),
+ body,
+ })
+ }
+
+ /* A plain signature `module X: T` (or alias `module X = Path`). */
+ and printModuleDeclaration = (md: Parsetree.module_declaration, cmtTbl) => {
+ let body = switch md.pmd_type.pmty_desc {
+ | Parsetree.Pmty_alias(longident) =>
+ Doc.concat(list{Doc.text(" = "), printLongidentLocation(longident, cmtTbl)})
+ | _ => Doc.concat(list{Doc.text(": "), printModType(md.pmd_type, cmtTbl)})
+ }
+
+ Doc.concat(list{
+ printAttributes(~loc=md.pmd_name.loc, md.pmd_attributes),
+ Doc.text("module "),
+ printComments(Doc.text(md.pmd_name.txt), cmtTbl, md.pmd_name.loc),
+ body,
+ })
+ }
+
+ /* `open Path` / `open! Path` (Override prints the bang). */
+ and printOpenDescription = (openDescription: Parsetree.open_description, p) =>
+ Doc.concat(list{
+ printAttributes(openDescription.popen_attributes),
+ Doc.text("open"),
+ switch openDescription.popen_override {
+ | Asttypes.Fresh => Doc.space
+ | Asttypes.Override => Doc.text("! ")
+ },
+ printLongidentLocation(openDescription.popen_lid, p),
+ })
+
+ /* `include T` in a signature. */
+ and printIncludeDescription = (includeDescription: Parsetree.include_description, cmtTbl) =>
+ Doc.concat(list{
+ printAttributes(includeDescription.pincl_attributes),
+ Doc.text("include "),
+ printModType(includeDescription.pincl_mod, cmtTbl),
+ })
+
+ /* `include M` in a structure. Includes tagged with the internal "ns.jsFfi"
+  * attribute are really JS FFI import sugar and get special rendering. */
+ and printIncludeDeclaration = (includeDeclaration: Parsetree.include_declaration, cmtTbl) => {
+ let isJsFfiImport = List.exists(attr =>
+ switch attr {
+ | ({Location.txt: "ns.jsFfi"}, _) => true
+ | _ => false
+ }
+ , includeDeclaration.pincl_attributes)
+
+ if isJsFfiImport {
+ printJsFfiImportDeclaration(includeDeclaration, cmtTbl)
+ } else {
+ Doc.concat(list{
+ printAttributes(includeDeclaration.pincl_attributes),
+ Doc.text("include "),
+ {
+ let includeDoc = printModExpr(includeDeclaration.pincl_mod, cmtTbl)
+
+ if Parens.includeModExpr(includeDeclaration.pincl_mod) {
+ addParens(includeDoc)
+ } else {
+ includeDoc
+ }
+ },
+ })
+ }
+ }
+
+ /* One imported binding inside `import {...}`. Internal FFI attributes are
+  * filtered out; when the external primitive name differs from the bound
+  * name, ` as ` aliasing is printed. */
+ and printJsFfiImport = (valueDescription: Parsetree.value_description, cmtTbl) => {
+ let attrs = List.filter(attr =>
+ switch attr {
+ | ({Location.txt: "bs.val" | "genType.import" | "bs.scope"}, _) => false
+ | _ => true
+ }
+ , valueDescription.pval_attributes)
+ let (ident, alias) = switch valueDescription.pval_prim {
+ | list{primitive, ..._} =>
+ if primitive != valueDescription.pval_name.txt {
+ (
+ printIdentLike(primitive),
+ Doc.concat(list{Doc.text(" as "), printIdentLike(valueDescription.pval_name.txt)}),
+ )
+ } else {
+ (printIdentLike(primitive), Doc.nil)
+ }
+ | _ => (printIdentLike(valueDescription.pval_name.txt), Doc.nil)
+ }
+
+ Doc.concat(list{
+ printAttributes(~loc=valueDescription.pval_name.loc, attrs),
+ ident,
+ alias,
+ Doc.text(": "),
+ printTypExpr(valueDescription.pval_type, cmtTbl),
+ })
+ }
+
+ /* The ` from "module"` / ` from Scope.Path` suffix of a JS FFI import. */
+ and printJsFfiImportScope = (scope: ParsetreeViewer.jsImportScope) =>
+ switch scope {
+ | JsGlobalImport => Doc.nil
+ | JsModuleImport(modName) =>
+ Doc.concat(list{Doc.text(" from "), Doc.doubleQuote, Doc.text(modName), Doc.doubleQuote})
+ | JsScopedImport(idents) =>
+ Doc.concat(list{Doc.text(" from "), Doc.join(~sep=Doc.dot, List.map(Doc.text, idents))})
+ }
+
+ /* Renders an `import {a, b}: t from "..."` declaration. The scope is
+  * derived from the first value description (all share one scope). */
+ and printJsFfiImportDeclaration = (includeDeclaration: Parsetree.include_declaration, cmtTbl) => {
+ let attrs = List.filter(attr =>
+ switch attr {
+ | ({Location.txt: "ns.jsFfi"}, _) => false
+ | _ => true
+ }
+ , includeDeclaration.pincl_attributes)
+
+ let imports = ParsetreeViewer.extractValueDescriptionFromModExpr(includeDeclaration.pincl_mod)
+ let scope = switch imports {
+ | list{vd, ..._} => ParsetreeViewer.classifyJsImport(vd)
+ | list{} => ParsetreeViewer.JsGlobalImport
+ }
+
+ let scopeDoc = printJsFfiImportScope(scope)
+ Doc.group(
+ Doc.concat(list{
+ printAttributes(attrs),
+ Doc.text("import "),
+ Doc.group(
+ Doc.concat(list{
+ Doc.lbrace,
+ Doc.indent(
+ Doc.concat(list{
+ Doc.softLine,
+ Doc.join(
+ ~sep=Doc.concat(list{Doc.comma, Doc.line}),
+ List.map(vd => printJsFfiImport(vd, cmtTbl), imports),
+ ),
+ }),
+ ),
+ Doc.trailingComma,
+ Doc.softLine,
+ Doc.rbrace,
+ }),
+ ),
+ scopeDoc,
+ }),
+ )
+ }
+
+ /* A group of `let`/`let rec` bindings; each binding printer receives its
+  * index so only the first prints the let keyword. */
+ and printValueBindings = (~recFlag, vbs: list, cmtTbl) =>
+ printListi(
+ ~getLoc=vb => vb.Parsetree.pvb_loc,
+ ~nodes=vbs,
+ ~print=printValueBinding(~recFlag),
+ cmtTbl,
+ )
+
+ /* A value description: `external name: typ = "prim"` when primitives are
+  * present, otherwise a signature-style `let name: typ`. */
+ and printValueDescription = (valueDescription, cmtTbl) => {
+ let isExternal = switch valueDescription.pval_prim {
+ | list{} => false
+ | _ => true
+ }
+
+ Doc.group(
+ Doc.concat(list{
+ printAttributes(valueDescription.pval_attributes),
+ Doc.text(
+ if isExternal {
+ "external "
+ } else {
+ "let "
+ },
+ ),
+ printComments(
+ printIdentLike(valueDescription.pval_name.txt),
+ cmtTbl,
+ valueDescription.pval_name.loc,
+ ),
+ Doc.text(": "),
+ printTypExpr(valueDescription.pval_type, cmtTbl),
+ if isExternal {
+ Doc.group(
+ Doc.concat(list{
+ Doc.text(" ="),
+ Doc.indent(
+ Doc.concat(list{
+ Doc.line,
+ Doc.join(
+ ~sep=Doc.line,
+ List.map(
+ s => Doc.concat(list{Doc.text("\""), Doc.text(s), Doc.text("\"")}),
+ valueDescription.pval_prim,
+ ),
+ ),
+ }),
+ ),
+ }),
+ )
+ } else {
+ Doc.nil
+ },
+ }),
+ )
+ }
+
+ /* A `type ... and ...` group; the per-item index lets the first item print
+  * "type" and the rest print "and". */
+ and printTypeDeclarations = (~recFlag, typeDeclarations, cmtTbl) =>
+ printListi(
+ ~getLoc=n => n.Parsetree.ptype_loc,
+ ~nodes=typeDeclarations,
+ ~print=printTypeDeclaration2(~recFlag),
+ cmtTbl,
+ )
+
+ /*
+ * type_declaration = {
+ * ptype_name: string loc;
+ * ptype_params: (core_type * variance) list;
+ * (* ('a1,...'an) t; None represents _*)
+ * ptype_cstrs: (core_type * core_type * Location.t) list;
+ * (* ... constraint T1=T1' ... constraint Tn=Tn' *)
+ * ptype_kind: type_kind;
+ * ptype_private: private_flag; (* = private ... *)
+ * ptype_manifest: core_type option; (* = T *)
+ * ptype_attributes: attributes; (* ... [@@id1] [@@id2] *)
+ * ptype_loc: Location.t;
+ * }
+ *
+ *
+ * type t (abstract, no manifest)
+ * type t = T0 (abstract, manifest=T0)
+ * type t = C of T | ... (variant, no manifest)
+ * type t = T0 = C of T | ... (variant, manifest=T0)
+ * type t = {l: T; ...} (record, no manifest)
+ * type t = T0 = {l : T; ...} (record, manifest=T0)
+ * type t = .. (open, no manifest)
+ *
+ *
+ * and type_kind =
+ * | Ptype_abstract
+ * | Ptype_variant of constructor_declaration list
+ * (* Invariant: non-empty list *)
+ * | Ptype_record of label_declaration list
+ * (* Invariant: non-empty list *)
+ * | Ptype_open
+ */
+ /* Prints one type declaration with a caller-supplied ~name doc and
+  * ~equalSign ("=" or ":=", used by `with` constraints). `i > 0` switches
+  * the prefix from "type"/"export type" to "and".
+  * NOTE(review): near-duplicate of printTypeDeclaration2 below, which
+  * derives name and equal sign itself — keep the two in sync. */
+ and printTypeDeclaration = (
+ ~name,
+ ~equalSign,
+ ~recFlag,
+ i,
+ td: Parsetree.type_declaration,
+ cmtTbl,
+ ) => {
+ let (hasGenType, attrs) = ParsetreeViewer.splitGenTypeAttr(td.ptype_attributes)
+ let attrs = printAttributes(~loc=td.ptype_loc, attrs)
+ let prefix = if i > 0 {
+ Doc.concat(list{
+ Doc.text("and "),
+ if hasGenType {
+ Doc.text("export ")
+ } else {
+ Doc.nil
+ },
+ })
+ } else {
+ Doc.concat(list{
+ Doc.text(
+ if hasGenType {
+ "export type "
+ } else {
+ "type "
+ },
+ ),
+ recFlag,
+ })
+ }
+
+ let typeName = name
+ let typeParams = printTypeParams(td.ptype_params, cmtTbl)
+ /* abstract/open/record/variant each combine the optional manifest
+  * (`= T0`) with their own kind-specific rendering */
+ let manifestAndKind = switch td.ptype_kind {
+ | Ptype_abstract =>
+ switch td.ptype_manifest {
+ | None => Doc.nil
+ | Some(typ) =>
+ Doc.concat(list{
+ Doc.concat(list{Doc.space, Doc.text(equalSign), Doc.space}),
+ printPrivateFlag(td.ptype_private),
+ printTypExpr(typ, cmtTbl),
+ })
+ }
+ | Ptype_open =>
+ Doc.concat(list{
+ Doc.concat(list{Doc.space, Doc.text(equalSign), Doc.space}),
+ printPrivateFlag(td.ptype_private),
+ Doc.text(".."),
+ })
+ | Ptype_record(lds) =>
+ let manifest = switch td.ptype_manifest {
+ | None => Doc.nil
+ | Some(typ) =>
+ Doc.concat(list{
+ Doc.concat(list{Doc.space, Doc.text(equalSign), Doc.space}),
+ printTypExpr(typ, cmtTbl),
+ })
+ }
+
+ Doc.concat(list{
+ manifest,
+ Doc.concat(list{Doc.space, Doc.text(equalSign), Doc.space}),
+ printPrivateFlag(td.ptype_private),
+ printRecordDeclaration(lds, cmtTbl),
+ })
+ | Ptype_variant(cds) =>
+ let manifest = switch td.ptype_manifest {
+ | None => Doc.nil
+ | Some(typ) =>
+ Doc.concat(list{
+ Doc.concat(list{Doc.space, Doc.text(equalSign), Doc.space}),
+ printTypExpr(typ, cmtTbl),
+ })
+ }
+
+ Doc.concat(list{
+ manifest,
+ Doc.concat(list{Doc.space, Doc.text(equalSign)}),
+ printConstructorDeclarations(~privateFlag=td.ptype_private, cds, cmtTbl),
+ })
+ }
+
+ let constraints = printTypeDefinitionConstraints(td.ptype_cstrs)
+ Doc.group(Doc.concat(list{attrs, prefix, typeName, typeParams, manifestAndKind, constraints}))
+ }
+
+ /* Variant of printTypeDeclaration used by printTypeDeclarations: the name
+  * comes from the declaration itself and the equal sign is always "=".
+  * NOTE(review): body is a near-duplicate of printTypeDeclaration above —
+  * keep the two in sync. */
+ and printTypeDeclaration2 = (~recFlag, td: Parsetree.type_declaration, cmtTbl, i) => {
+ let name = {
+ let doc = printIdentLike(td.Parsetree.ptype_name.txt)
+ printComments(doc, cmtTbl, td.ptype_name.loc)
+ }
+
+ let equalSign = "="
+ let (hasGenType, attrs) = ParsetreeViewer.splitGenTypeAttr(td.ptype_attributes)
+ let attrs = printAttributes(~loc=td.ptype_loc, attrs)
+ let prefix = if i > 0 {
+ Doc.concat(list{
+ Doc.text("and "),
+ if hasGenType {
+ Doc.text("export ")
+ } else {
+ Doc.nil
+ },
+ })
+ } else {
+ Doc.concat(list{
+ Doc.text(
+ if hasGenType {
+ "export type "
+ } else {
+ "type "
+ },
+ ),
+ recFlag,
+ })
+ }
+
+ let typeName = name
+ let typeParams = printTypeParams(td.ptype_params, cmtTbl)
+ let manifestAndKind = switch td.ptype_kind {
+ | Ptype_abstract =>
+ switch td.ptype_manifest {
+ | None => Doc.nil
+ | Some(typ) =>
+ Doc.concat(list{
+ Doc.concat(list{Doc.space, Doc.text(equalSign), Doc.space}),
+ printPrivateFlag(td.ptype_private),
+ printTypExpr(typ, cmtTbl),
+ })
+ }
+ | Ptype_open =>
+ Doc.concat(list{
+ Doc.concat(list{Doc.space, Doc.text(equalSign), Doc.space}),
+ printPrivateFlag(td.ptype_private),
+ Doc.text(".."),
+ })
+ | Ptype_record(lds) =>
+ let manifest = switch td.ptype_manifest {
+ | None => Doc.nil
+ | Some(typ) =>
+ Doc.concat(list{
+ Doc.concat(list{Doc.space, Doc.text(equalSign), Doc.space}),
+ printTypExpr(typ, cmtTbl),
+ })
+ }
+
+ Doc.concat(list{
+ manifest,
+ Doc.concat(list{Doc.space, Doc.text(equalSign), Doc.space}),
+ printPrivateFlag(td.ptype_private),
+ printRecordDeclaration(lds, cmtTbl),
+ })
+ | Ptype_variant(cds) =>
+ let manifest = switch td.ptype_manifest {
+ | None => Doc.nil
+ | Some(typ) =>
+ Doc.concat(list{
+ Doc.concat(list{Doc.space, Doc.text(equalSign), Doc.space}),
+ printTypExpr(typ, cmtTbl),
+ })
+ }
+
+ Doc.concat(list{
+ manifest,
+ Doc.concat(list{Doc.space, Doc.text(equalSign)}),
+ printConstructorDeclarations(~privateFlag=td.ptype_private, cds, cmtTbl),
+ })
+ }
+
+ let constraints = printTypeDefinitionConstraints(td.ptype_cstrs)
+ Doc.group(Doc.concat(list{attrs, prefix, typeName, typeParams, manifestAndKind, constraints}))
+ }
+
+ /* `constraint T1 = T1' ...` rows, indented under the declaration. */
+ and printTypeDefinitionConstraints = cstrs =>
+ switch cstrs {
+ | list{} => Doc.nil
+ | cstrs =>
+ Doc.indent(
+ Doc.group(
+ Doc.concat(list{
+ Doc.line,
+ Doc.group(Doc.join(~sep=Doc.line, List.map(printTypeDefinitionConstraint, cstrs))),
+ }),
+ ),
+ )
+ }
+
+ /* One `constraint typ1 = typ2` row (comments intentionally not attached:
+  * CommentTable.empty). */
+ and printTypeDefinitionConstraint = (
+ (typ1, typ2, _loc): (Parsetree.core_type, Parsetree.core_type, Location.t),
+ ) =>
+ Doc.concat(list{
+ Doc.text("constraint "),
+ printTypExpr(typ1, CommentTable.empty),
+ Doc.text(" = "),
+ printTypExpr(typ2, CommentTable.empty),
+ })
+
+ /* "private " marker, or nothing for public. */
+ and printPrivateFlag = (flag: Asttypes.private_flag) =>
+ switch flag {
+ | Private => Doc.text("private ")
+ | Public => Doc.nil
+ }
+
+ /* `<'a, +'b, ...>` parameter list; empty lists print nothing at all. */
+ and printTypeParams = (typeParams, cmtTbl) =>
+ switch typeParams {
+ | list{} => Doc.nil
+ | typeParams =>
+ Doc.group(
+ Doc.concat(list{
+ Doc.lessThan,
+ Doc.indent(
+ Doc.concat(list{
+ Doc.softLine,
+ Doc.join(~sep=Doc.concat(list{Doc.comma, Doc.line}), List.map(typeParam => {
+ let doc = printTypeParam(typeParam, cmtTbl)
+ printComments(doc, cmtTbl, fst(typeParam).Parsetree.ptyp_loc)
+ }, typeParams)),
+ }),
+ ),
+ Doc.trailingComma,
+ Doc.softLine,
+ Doc.greaterThan,
+ }),
+ )
+ }
+
+ /* One type parameter with its variance sigil (+/-/nothing). */
+ and printTypeParam = (param: (Parsetree.core_type, Asttypes.variance), cmtTbl) => {
+ let (typ, variance) = param
+ let printedVariance = switch variance {
+ | Covariant => Doc.text("+")
+ | Contravariant => Doc.text("-")
+ | Invariant => Doc.nil
+ }
+
+ Doc.concat(list{printedVariance, printTypExpr(typ, cmtTbl)})
+ }
+
+ /* `{field: typ, ...}` record body; force-broken when the source record
+  * spanned multiple lines (first label starts before last label ends). */
+ and printRecordDeclaration = (lds: list, cmtTbl) => {
+ let forceBreak = switch (lds, List.rev(lds)) {
+ | (list{first, ..._}, list{last, ..._}) =>
+ first.pld_loc.loc_start.pos_lnum < last.pld_loc.loc_end.pos_lnum
+ | _ => false
+ }
+
+ Doc.breakableGroup(
+ ~forceBreak,
+ Doc.concat(list{
+ Doc.lbrace,
+ Doc.indent(
+ Doc.concat(list{
+ Doc.softLine,
+ Doc.join(~sep=Doc.concat(list{Doc.comma, Doc.line}), List.map(ld => {
+ let doc = printLabelDeclaration(ld, cmtTbl)
+ printComments(doc, cmtTbl, ld.Parsetree.pld_loc)
+ }, lds)),
+ }),
+ ),
+ Doc.trailingComma,
+ Doc.softLine,
+ Doc.rbrace,
+ }),
+ )
+ }
+
+ /* Variant body: optional `private`, then `| C ...` rows. Force-broken when
+  * the constructors were written on multiple source lines. */
+ and printConstructorDeclarations = (
+ ~privateFlag,
+ cds: list,
+ cmtTbl,
+ ) => {
+ let forceBreak = switch (cds, List.rev(cds)) {
+ | (list{first, ..._}, list{last, ..._}) =>
+ first.pcd_loc.loc_start.pos_lnum < last.pcd_loc.loc_end.pos_lnum
+ | _ => false
+ }
+
+ let privateFlag = switch privateFlag {
+ | Asttypes.Private => Doc.concat(list{Doc.text("private"), Doc.line})
+ | Public => Doc.nil
+ }
+
+ let rows = printListi(
+ ~getLoc=cd => cd.Parsetree.pcd_loc,
+ ~nodes=cds,
+ ~print=(cd, cmtTbl, i) => {
+ let doc = printConstructorDeclaration2(i, cd, cmtTbl)
+ printComments(doc, cmtTbl, cd.Parsetree.pcd_loc)
+ },
+ ~forceBreak,
+ cmtTbl,
+ )
+
+ Doc.breakableGroup(~forceBreak, Doc.indent(Doc.concat(list{Doc.line, privateFlag, rows})))
+ }
+
+ /* One constructor row. The first row only shows its leading "| " when the
+  * group breaks; later rows always show it. */
+ and printConstructorDeclaration2 = (i, cd: Parsetree.constructor_declaration, cmtTbl) => {
+ let attrs = printAttributes(cd.pcd_attributes)
+ let bar = if i > 0 {
+ Doc.text("| ")
+ } else {
+ Doc.ifBreaks(Doc.text("| "), Doc.nil)
+ }
+
+ let constrName = {
+ let doc = Doc.text(cd.pcd_name.txt)
+ printComments(doc, cmtTbl, cd.pcd_name.loc)
+ }
+
+ let constrArgs = printConstructorArguments(~indent=true, cd.pcd_args, cmtTbl)
+ /* GADT result type, e.g. `: t<int>` */
+ let gadt = switch cd.pcd_res {
+ | None => Doc.nil
+ | Some(typ) => Doc.indent(Doc.concat(list{Doc.text(": "), printTypExpr(typ, cmtTbl)}))
+ }
+
+ Doc.concat(list{
+ bar,
+ Doc.group(
+ Doc.concat(list{
+ attrs /* TODO: fix parsing of attributes, so when can print them above the bar? */,
+ constrName,
+ constrArgs,
+ gadt,
+ }),
+ ),
+ })
+ }
+
+ /* Constructor payload: `(t1, t2, ...)` for tuples, `({...})` for inline
+  * records. ~indent controls whether the whole argument doc is indented
+  * relative to the constructor name. */
+ and printConstructorArguments = (~indent, cdArgs: Parsetree.constructor_arguments, cmtTbl) =>
+ switch cdArgs {
+ | Pcstr_tuple(list{}) => Doc.nil
+ | Pcstr_tuple(types) =>
+ let args = Doc.concat(list{
+ Doc.lparen,
+ Doc.indent(
+ Doc.concat(list{
+ Doc.softLine,
+ Doc.join(
+ ~sep=Doc.concat(list{Doc.comma, Doc.line}),
+ List.map(typexpr => printTypExpr(typexpr, cmtTbl), types),
+ ),
+ }),
+ ),
+ Doc.trailingComma,
+ Doc.softLine,
+ Doc.rparen,
+ })
+ Doc.group(
+ if indent {
+ Doc.indent(args)
+ } else {
+ args
+ },
+ )
+ | Pcstr_record(lds) =>
+ let args = Doc.concat(list{
+ Doc.lparen,
+ /* manually inline the printRecordDeclaration, gives better layout */
+ Doc.lbrace,
+ Doc.indent(
+ Doc.concat(list{
+ Doc.softLine,
+ Doc.join(~sep=Doc.concat(list{Doc.comma, Doc.line}), List.map(ld => {
+ let doc = printLabelDeclaration(ld, cmtTbl)
+ printComments(doc, cmtTbl, ld.Parsetree.pld_loc)
+ }, lds)),
+ }),
+ ),
+ Doc.trailingComma,
+ Doc.softLine,
+ Doc.rbrace,
+ Doc.rparen,
+ })
+ if indent {
+ Doc.indent(args)
+ } else {
+ args
+ }
+ }
+
+ /* One record field: attrs, optional `mutable `, name (exotic-escaped if
+  * needed), then `: typ`. */
+ and printLabelDeclaration = (ld: Parsetree.label_declaration, cmtTbl) => {
+ let attrs = printAttributes(~loc=ld.pld_name.loc, ld.pld_attributes)
+ let mutableFlag = switch ld.pld_mutable {
+ | Mutable => Doc.text("mutable ")
+ | Immutable => Doc.nil
+ }
+
+ let name = {
+ let doc = printIdentLike(ld.pld_name.txt)
+ printComments(doc, cmtTbl, ld.pld_name.loc)
+ }
+
+ Doc.group(
+ Doc.concat(list{attrs, mutableFlag, name, Doc.text(": "), printTypExpr(ld.pld_type, cmtTbl)}),
+ )
+ }
+
+ and printTypExpr = (typExpr: Parsetree.core_type, cmtTbl) => {
+ let renderedType = switch typExpr.ptyp_desc {
+ | Ptyp_any => Doc.text("_")
+ | Ptyp_var(var) => Doc.concat(list{Doc.text("'"), printIdentLike(var)})
+ | Ptyp_extension(extension) => printExtensionWithComments(~atModuleLvl=false, extension, cmtTbl)
+ | Ptyp_alias(typ, alias) =>
+ let typ = {
+ /* Technically type t = (string, float) => unit as 'x, doesn't require
+ * parens around the arrow expression. This is very confusing though.
+ * Is the "as" part of "unit" or "(string, float) => unit". By printing
+ * parens we guide the user towards its meaning. */
+ let needsParens = switch typ.ptyp_desc {
+ | Ptyp_arrow(_) => true
+ | _ => false
+ }
+
+ let doc = printTypExpr(typ, cmtTbl)
+ if needsParens {
+ Doc.concat(list{Doc.lparen, doc, Doc.rparen})
+ } else {
+ doc
+ }
+ }
+
+ Doc.concat(list{
+ typ,
+ Doc.text(" as "),
+ Doc.concat(list{Doc.text("'"), printIdentLike(alias)}),
+ })
+ | Ptyp_constr(
+ {txt: Longident.Ldot(Longident.Lident("Js"), "t")},
+ list{{ptyp_desc: Ptyp_object(_fields, _openFlag)} as typ},
+ ) =>
+ let bsObject = printTypExpr(typ, cmtTbl)
+ switch typExpr.ptyp_attributes {
+ | list{} => bsObject
+ | attrs =>
+ Doc.concat(list{
+ Doc.group(Doc.join(~sep=Doc.line, List.map(printAttribute, attrs))),
+ Doc.space,
+ printTypExpr(typ, cmtTbl),
+ })
+ }
+ | Ptyp_constr(longidentLoc, list{{ptyp_desc: Parsetree.Ptyp_tuple(tuple)}}) =>
+ let constrName = printLidentPath(longidentLoc, cmtTbl)
+ Doc.group(
+ Doc.concat(list{
+ constrName,
+ Doc.lessThan,
+ printTupleType(~inline=true, tuple, cmtTbl),
+ Doc.greaterThan,
+ }),
+ )
+ | Ptyp_constr(longidentLoc, constrArgs) =>
+ let constrName = printLidentPath(longidentLoc, cmtTbl)
+ switch constrArgs {
+ | list{} => constrName
+ | list{{
+ Parsetree.ptyp_desc:
+ Ptyp_constr(
+ {txt: Longident.Ldot(Longident.Lident("Js"), "t")},
+ list{{ptyp_desc: Ptyp_object(fields, openFlag)}},
+ ),
+ }} =>
+ Doc.concat(list{
+ constrName,
+ Doc.lessThan,
+ printBsObjectSugar(~inline=true, fields, openFlag, cmtTbl),
+ Doc.greaterThan,
+ })
+ | _args =>
+ Doc.group(
+ Doc.concat(list{
+ constrName,
+ Doc.lessThan,
+ Doc.indent(
+ Doc.concat(list{
+ Doc.softLine,
+ Doc.join(
+ ~sep=Doc.concat(list{Doc.comma, Doc.line}),
+ List.map(typexpr => printTypExpr(typexpr, cmtTbl), constrArgs),
+ ),
+ }),
+ ),
+ Doc.trailingComma,
+ Doc.softLine,
+ Doc.greaterThan,
+ }),
+ )
+ }
+ | Ptyp_arrow(_) =>
+ let (attrsBefore, args, returnType) = ParsetreeViewer.arrowType(typExpr)
+ let returnTypeNeedsParens = switch returnType.ptyp_desc {
+ | Ptyp_alias(_) => true
+ | _ => false
+ }
+
+ let returnDoc = {
+ let doc = printTypExpr(returnType, cmtTbl)
+ if returnTypeNeedsParens {
+ Doc.concat(list{Doc.lparen, doc, Doc.rparen})
+ } else {
+ doc
+ }
+ }
+
+ let (isUncurried, attrs) = ParsetreeViewer.processUncurriedAttribute(attrsBefore)
+
+ switch args {
+ | list{} => Doc.nil
+ | list{(list{}, Nolabel, n)} when !isUncurried =>
+ let hasAttrsBefore = !(attrs == list{})
+ let attrs = if hasAttrsBefore {
+ Doc.concat(list{
+ Doc.join(~sep=Doc.line, List.map(printAttribute, attrsBefore)),
+ Doc.space,
+ })
+ } else {
+ Doc.nil
+ }
+
+ let typDoc = {
+ let doc = printTypExpr(n, cmtTbl)
+ switch n.ptyp_desc {
+ | Ptyp_arrow(_) | Ptyp_tuple(_) => addParens(doc)
+ | _ => doc
+ }
+ }
+
+ Doc.group(
+ Doc.concat(list{
+ Doc.group(attrs),
+ Doc.group(
+ if hasAttrsBefore {
+ Doc.concat(list{
+ Doc.lparen,
+ Doc.indent(Doc.concat(list{Doc.softLine, typDoc, Doc.text(" => "), returnDoc})),
+ Doc.softLine,
+ Doc.rparen,
+ })
+ } else {
+ Doc.concat(list{typDoc, Doc.text(" => "), returnDoc})
+ },
+ ),
+ }),
+ )
+ | args =>
+ let attrs = switch attrs {
+ | list{} => Doc.nil
+ | attrs =>
+ Doc.concat(list{Doc.join(~sep=Doc.line, List.map(printAttribute, attrs)), Doc.space})
+ }
+
+ let renderedArgs = Doc.concat(list{
+ attrs,
+ Doc.text("("),
+ Doc.indent(
+ Doc.concat(list{
+ Doc.softLine,
+ if isUncurried {
+ Doc.concat(list{Doc.dot, Doc.space})
+ } else {
+ Doc.nil
+ },
+ Doc.join(
+ ~sep=Doc.concat(list{Doc.comma, Doc.line}),
+ List.map(tp => printTypeParameter(tp, cmtTbl), args),
+ ),
+ }),
+ ),
+ Doc.trailingComma,
+ Doc.softLine,
+ Doc.text(")"),
+ })
+ Doc.group(Doc.concat(list{renderedArgs, Doc.text(" => "), returnDoc}))
+ }
+ | Ptyp_tuple(types) => printTupleType(~inline=false, types, cmtTbl)
+ | Ptyp_object(fields, openFlag) => printBsObjectSugar(~inline=false, fields, openFlag, cmtTbl)
+ | Ptyp_poly(list{}, typ) => printTypExpr(typ, cmtTbl)
+ | Ptyp_poly(stringLocs, typ) => Doc.concat(list{Doc.join(~sep=Doc.space, List.map(({
+ Location.txt: txt,
+ loc,
+ }) => {
+ let doc = Doc.concat(list{Doc.text("'"), Doc.text(txt)})
+ printComments(doc, cmtTbl, loc)
+ }, stringLocs)), Doc.dot, Doc.space, printTypExpr(typ, cmtTbl)})
+ | Ptyp_package(packageType) =>
+ printPackageType(~printModuleKeywordAndParens=true, packageType, cmtTbl)
+ | Ptyp_class(_) => Doc.text("classes are not supported in types")
+ | Ptyp_variant(rowFields, closedFlag, labelsOpt) =>
+ let printRowField = x =>
+ switch x {
+ | Parsetree.Rtag({txt}, attrs, true, list{}) =>
+ Doc.concat(list{
+ printAttributes(attrs),
+ Doc.concat(list{Doc.text("#"), printIdentLike(~allowUident=true, txt)}),
+ })
+ | Rtag({txt}, attrs, truth, types) =>
+ let doType = t =>
+ switch t.Parsetree.ptyp_desc {
+ | Ptyp_tuple(_) => printTypExpr(t, cmtTbl)
+ | _ => Doc.concat(list{Doc.lparen, printTypExpr(t, cmtTbl), Doc.rparen})
+ }
+
+ let printedTypes = List.map(doType, types)
+ let cases = Doc.join(~sep=Doc.concat(list{Doc.line, Doc.text("& ")}), printedTypes)
+ let cases = if truth {
+ Doc.concat(list{Doc.line, Doc.text("& "), cases})
+ } else {
+ cases
+ }
+ Doc.group(
+ Doc.concat(list{
+ printAttributes(attrs),
+ Doc.concat(list{Doc.text("#"), printIdentLike(~allowUident=true, txt)}),
+ cases,
+ }),
+ )
+ | Rinherit(coreType) => printTypExpr(coreType, cmtTbl)
+ }
+
+ let docs = List.map(printRowField, rowFields)
+ let cases = Doc.join(~sep=Doc.concat(list{Doc.line, Doc.text("| ")}), docs)
+ let cases = if docs == list{} {
+ cases
+ } else {
+ Doc.concat(list{Doc.text("| "), cases})
+ }
+ let openingSymbol = if closedFlag == Open {
+ Doc.greaterThan
+ } else if labelsOpt == None {
+ Doc.nil
+ } else {
+ Doc.lessThan
+ }
+ let hasLabels = labelsOpt != None && labelsOpt != Some(list{})
+ let labels = switch labelsOpt {
+ | None | Some(list{}) => Doc.nil
+ | Some(labels) =>
+ Doc.concat(
+ List.map(
+ label =>
+ Doc.concat(list{Doc.line, Doc.text("#"), printIdentLike(~allowUident=true, label)}),
+ labels,
+ ),
+ )
+ }
+
+ let closingSymbol = if hasLabels {
+ Doc.text(" >")
+ } else {
+ Doc.nil
+ }
+ Doc.group(
+ Doc.concat(list{
+ Doc.lbracket,
+ openingSymbol,
+ Doc.line,
+ cases,
+ closingSymbol,
+ labels,
+ Doc.line,
+ Doc.rbracket,
+ }),
+ )
+ }
+
+ let shouldPrintItsOwnAttributes = switch typExpr.ptyp_desc {
+ | Ptyp_arrow(_) /* es6 arrow types print their own attributes */
+ | Ptyp_constr({txt: Longident.Ldot(Longident.Lident("Js"), "t")}, _) => true
+ | _ => false
+ }
+
+ let doc = switch typExpr.ptyp_attributes {
+ | list{_, ..._} as attrs when !shouldPrintItsOwnAttributes =>
+ Doc.group(Doc.concat(list{printAttributes(attrs), renderedType}))
+ | _ => renderedType
+ }
+
+ printComments(doc, cmtTbl, typExpr.ptyp_loc)
+ }
+
+ /* Prints the "BuckleScript object" sugar for a Ptyp_object node,
+  * e.g. {"age": int} (closed) or {.. "age": int} (open).
+  * An empty field list renders only the open/closed marker inside braces:
+  * {.} (closed) or {..} (open).
+  * With ~inline=true the doc is returned ungrouped so the caller can embed
+  * it inside its own group (e.g. the Js.t constructor sugar in printTypExpr). */
+ and printBsObjectSugar = (~inline, fields, openFlag, cmtTbl) => {
+   let doc = switch fields {
+   | list{} =>
+     /* no fields: only the braces and the open/closed dot marker */
+     Doc.concat(list{
+       Doc.lbrace,
+       switch openFlag {
+       | Asttypes.Closed => Doc.dot
+       | Open => Doc.dotdot
+       },
+       Doc.rbrace,
+     })
+   | fields =>
+     Doc.concat(list{
+       Doc.lbrace,
+       /* an open object gets a leading ".." before the first field */
+       switch openFlag {
+       | Asttypes.Closed => Doc.nil
+       | Open => Doc.dotdot
+       },
+       Doc.indent(
+         Doc.concat(list{
+           Doc.softLine,
+           Doc.join(
+             ~sep=Doc.concat(list{Doc.comma, Doc.line}),
+             List.map(field => printObjectField(field, cmtTbl), fields),
+           ),
+         }),
+       ),
+       Doc.trailingComma,
+       Doc.softLine,
+       Doc.rbrace,
+     })
+   }
+
+   if inline {
+     doc
+   } else {
+     Doc.group(doc)
+   }
+ }
+
+ /* Prints a tuple type, e.g. (int, string).
+  * ~inline=true leaves the doc ungrouped so the caller can group it itself
+  * (used when the tuple is sugared into a type-constructor argument list).
+  * Fix: the `types` parameter was annotated with a bare `list`, which is
+  * invalid ReScript (`list` requires a type argument); restored the full
+  * `list<Parsetree.core_type>` annotation. Also replaced the inverted
+  * `inline === false` test with a direct `if inline`. */
+ and printTupleType = (~inline, types: list<Parsetree.core_type>, cmtTbl) => {
+   let tuple = Doc.concat(list{
+     Doc.lparen,
+     Doc.indent(
+       Doc.concat(list{
+         Doc.softLine,
+         Doc.join(
+           ~sep=Doc.concat(list{Doc.comma, Doc.line}),
+           List.map(typexpr => printTypExpr(typexpr, cmtTbl), types),
+         ),
+       }),
+     ),
+     Doc.trailingComma,
+     Doc.softLine,
+     Doc.rparen,
+   })
+
+   if inline {
+     tuple
+   } else {
+     Doc.group(tuple)
+   }
+ }
+
+ /* Prints one field of an object type, e.g. "age": int.
+  * Labels are always printed double-quoted. The catch-all branch prints
+  * nothing — presumably it covers Oinherit fields, which this printer
+  * does not support (NOTE(review): confirm against Parsetree.object_field). */
+ and printObjectField = (field: Parsetree.object_field, cmtTbl) =>
+   switch field {
+   | Otag(labelLoc, attrs, typ) =>
+     let lbl = {
+       let doc = Doc.text("\"" ++ (labelLoc.txt ++ "\""))
+       printComments(doc, cmtTbl, labelLoc.loc)
+     }
+
+     let doc = Doc.concat(list{
+       printAttributes(~loc=labelLoc.loc, attrs),
+       lbl,
+       Doc.text(": "),
+       printTypExpr(typ, cmtTbl),
+     })
+     /* attach comments over the whole label..type span */
+     let cmtLoc = {...labelLoc.loc, loc_end: typ.ptyp_loc.loc_end}
+     printComments(doc, cmtTbl, cmtLoc)
+   | _ => Doc.nil
+   }
+
+ /* es6 arrow type arg
+  * type t = (~foo: string, ~bar: float=?, unit) => unit
+  * i.e. ~foo: string, ~bar: float */
+ and printTypeParameter = ((attrs, lbl, typ), cmtTbl) => {
+   /* the uncurried attribute is rendered as a leading ". " rather than
+    * as a regular printed attribute */
+   let (isUncurried, attrs) = ParsetreeViewer.processUncurriedAttribute(attrs)
+   let uncurried = if isUncurried {
+     Doc.concat(list{Doc.dot, Doc.space})
+   } else {
+     Doc.nil
+   }
+   let attrs = switch attrs {
+   | list{} => Doc.nil
+   | attrs => Doc.concat(list{Doc.join(~sep=Doc.line, List.map(printAttribute, attrs)), Doc.line})
+   }
+   /* labelled and optional args share the "~name: " prefix; optionality is
+    * marked separately by the "=?" suffix below */
+   let label = switch lbl {
+   | Asttypes.Nolabel => Doc.nil
+   | Labelled(lbl) => Doc.concat(list{Doc.text("~"), printIdentLike(lbl), Doc.text(": ")})
+   | Optional(lbl) => Doc.concat(list{Doc.text("~"), printIdentLike(lbl), Doc.text(": ")})
+   }
+
+   let optionalIndicator = switch lbl {
+   | Asttypes.Nolabel | Labelled(_) => Doc.nil
+   | Optional(_lbl) => Doc.text("=?")
+   }
+
+   let doc = Doc.group(
+     Doc.concat(list{uncurried, attrs, label, printTypExpr(typ, cmtTbl), optionalIndicator}),
+   )
+   printComments(doc, cmtTbl, typ.ptyp_loc)
+ }
+
+ /* Prints a single value binding of a let/and group.
+  * ~recFlag is a pre-rendered doc ("rec " or nil); i is the binding's index
+  * within the group: index 0 prints "let " (or "export " when a genType
+  * attribute is present), later indices print "and ".
+  * The first switch case handles the locally-abstract-type encoding
+  * `let f: type t. ... = ...` (Ppat_constraint with Ptyp_poly over a
+  * Pexp_newtype body); everything else takes the generic path. */
+ and printValueBinding = (~recFlag, vb, cmtTbl, i) => {
+   let (hasGenType, attrs) = ParsetreeViewer.splitGenTypeAttr(vb.pvb_attributes)
+   let attrs = printAttributes(~loc=vb.pvb_pat.ppat_loc, attrs)
+   let header = if i === 0 {
+     Doc.concat(list{
+       if hasGenType {
+         Doc.text("export ")
+       } else {
+         Doc.text("let ")
+       },
+       recFlag,
+     })
+   } else {
+     Doc.concat(list{
+       Doc.text("and "),
+       if hasGenType {
+         Doc.text("export ")
+       } else {
+         Doc.nil
+       },
+     })
+   }
+
+   switch vb {
+   | {
+       pvb_pat: {ppat_desc: Ppat_constraint(pattern, {ptyp_desc: Ptyp_poly(_)})},
+       pvb_expr: {pexp_desc: Pexp_newtype(_)} as expr,
+     } =>
+     let (_attrs, parameters, returnExpr) = ParsetreeViewer.funExpr(expr)
+     /* "type a b." prefix naming the locally abstract types */
+     let abstractType = switch parameters {
+     | list{NewTypes({locs: vars})} =>
+       Doc.concat(list{
+         Doc.text("type "),
+         Doc.join(~sep=Doc.space, List.map(var => Doc.text(var.Asttypes.txt), vars)),
+         Doc.dot,
+       })
+     | _ => Doc.nil
+     }
+
+     switch returnExpr.pexp_desc {
+     | Pexp_constraint(expr, typ) =>
+       Doc.group(
+         Doc.concat(list{
+           attrs,
+           header,
+           printPattern(pattern, cmtTbl),
+           Doc.text(":"),
+           Doc.indent(
+             Doc.concat(list{
+               Doc.line,
+               abstractType,
+               Doc.space,
+               printTypExpr(typ, cmtTbl),
+               Doc.text(" ="),
+               Doc.concat(list{Doc.line, printExpressionWithComments(expr, cmtTbl)}),
+             }),
+           ),
+         }),
+       )
+     | _ => Doc.nil
+     }
+   | _ =>
+     let (optBraces, expr) = ParsetreeViewer.processBracesAttr(vb.pvb_expr)
+     let printedExpr = {
+       let doc = printExpressionWithComments(vb.pvb_expr, cmtTbl)
+       switch Parens.expr(vb.pvb_expr) {
+       | Parens.Parenthesized => addParens(doc)
+       | Braced(braces) => printBraces(doc, expr, braces)
+       | Nothing => doc
+       }
+     }
+
+     if ParsetreeViewer.isPipeExpr(vb.pvb_expr) {
+       /* pipe chains get two candidate layouts: same-line, or broken onto
+        * an indented next line; customLayout picks the first that fits */
+       Doc.customLayout(list{
+         Doc.group(
+           Doc.concat(list{
+             attrs,
+             header,
+             printPattern(vb.pvb_pat, cmtTbl),
+             Doc.text(" ="),
+             Doc.space,
+             printedExpr,
+           }),
+         ),
+         Doc.group(
+           Doc.concat(list{
+             attrs,
+             header,
+             printPattern(vb.pvb_pat, cmtTbl),
+             Doc.text(" ="),
+             Doc.indent(Doc.concat(list{Doc.line, printedExpr})),
+           }),
+         ),
+       })
+     } else {
+       /* decide whether the rhs should be indented on its own line when
+        * the group breaks (binary exprs, attributed ternaries, etc.) */
+       let shouldIndent = switch optBraces {
+       | Some(_) => false
+       | _ =>
+         ParsetreeViewer.isBinaryExpression(expr) ||
+         switch vb.pvb_expr {
+         | {
+             pexp_attributes: list{({Location.txt: "ns.ternary"}, _)},
+             pexp_desc: Pexp_ifthenelse(ifExpr, _, _),
+           } =>
+           ParsetreeViewer.isBinaryExpression(ifExpr) ||
+           ParsetreeViewer.hasAttributes(ifExpr.pexp_attributes)
+         | {pexp_desc: Pexp_newtype(_)} => false
+         | e =>
+           ParsetreeViewer.hasAttributes(e.pexp_attributes) || ParsetreeViewer.isArrayAccess(e)
+         }
+       }
+
+       Doc.group(
+         Doc.concat(list{
+           attrs,
+           header,
+           printPattern(vb.pvb_pat, cmtTbl),
+           Doc.text(" ="),
+           if shouldIndent {
+             Doc.indent(Doc.concat(list{Doc.line, printedExpr}))
+           } else {
+             Doc.concat(list{Doc.space, printedExpr})
+           },
+         }),
+       )
+     }
+   }
+ }
+
+ /* Prints a first-class-module (package) type, e.g. module(S with type t = int).
+  * ~printModuleKeywordAndParens=false omits the "module(...)" wrapper, used
+  * when the caller already printed it (e.g. Ppat_unpack constraints). */
+ and printPackageType = (
+   ~printModuleKeywordAndParens,
+   packageType: Parsetree.package_type,
+   cmtTbl,
+ ) => {
+   let doc = switch packageType {
+   | (longidentLoc, list{}) =>
+     /* no "with type ..." constraints: just the module type path */
+     Doc.group(Doc.concat(list{printLongidentLocation(longidentLoc, cmtTbl)}))
+   | (longidentLoc, packageConstraints) =>
+     Doc.group(
+       Doc.concat(list{
+         printLongidentLocation(longidentLoc, cmtTbl),
+         printPackageConstraints(packageConstraints, cmtTbl),
+         Doc.softLine,
+       }),
+     )
+   }
+
+   if printModuleKeywordAndParens {
+     Doc.concat(list{Doc.text("module("), doc, Doc.rparen})
+   } else {
+     doc
+   }
+ }
+
+ /* Prints the " with type t = ... and type u = ..." tail of a package type.
+  * Each constraint's comments are attached over the span from its longident
+  * to the end of its type expression. */
+ and printPackageConstraints = (packageConstraints, cmtTbl) =>
+   Doc.concat(list{
+     Doc.text(" with"),
+     Doc.indent(Doc.concat(list{Doc.line, Doc.join(~sep=Doc.line, List.mapi((i, pc) => {
+       let (longident, typexpr) = pc
+       let cmtLoc = {
+         ...longident.Asttypes.loc,
+         loc_end: typexpr.Parsetree.ptyp_loc.loc_end,
+       }
+       /* the index tells printPackageConstraint whether to emit "type" or "and type" */
+       let doc = printPackageConstraint(i, cmtTbl, pc)
+       printComments(doc, cmtTbl, cmtLoc)
+     }, packageConstraints))})),
+   })
+
+ /* Prints one package-type constraint: "type <path> = <typ>".
+  * The first constraint (i = 0) is introduced with "type ", every
+  * following one with "and type ". */
+ and printPackageConstraint = (i, cmtTbl, (longidentLoc, typ)) => {
+   let keyword = if i === 0 {
+     "type "
+   } else {
+     "and type "
+   }
+   Doc.concat(list{
+     Doc.text(keyword),
+     printLongidentLocation(longidentLoc, cmtTbl),
+     Doc.text(" = "),
+     printTypExpr(typ, cmtTbl),
+   })
+ }
+
+ /* Prints an extension point: %name(...) or, with ~atModuleLvl=true, %%name(...).
+  * Only a single-structure-item Pstr_eval payload is printed as an argument;
+  * any other payload prints just the extension name. When the eval item
+  * carries attributes, the expression is wrapped in an extra pair of parens
+  * inside the outer ones so the attributes visibly apply to it. */
+ and printExtensionWithComments = (~atModuleLvl, (stringLoc, payload), cmtTbl) => {
+   let extName = {
+     let doc = Doc.concat(list{
+       Doc.text("%"),
+       if atModuleLvl {
+         Doc.text("%")
+       } else {
+         Doc.nil
+       },
+       Doc.text(stringLoc.Location.txt),
+     })
+     printComments(doc, cmtTbl, stringLoc.Location.loc)
+   }
+
+   switch payload {
+   | Parsetree.PStr(list{{pstr_desc: Pstr_eval(expr, attrs)}}) =>
+     let exprDoc = printExpressionWithComments(expr, cmtTbl)
+     let needsParens = switch attrs {
+     | list{} => false
+     | _ => true
+     }
+     Doc.group(
+       Doc.concat(list{
+         extName,
+         addParens(
+           Doc.concat(list{
+             printAttributes(attrs),
+             if needsParens {
+               addParens(exprDoc)
+             } else {
+               exprDoc
+             },
+           }),
+         ),
+       }),
+     )
+   | _ => extName
+   }
+ }
+
+ /* Prints a pattern (Parsetree.pattern) as a Doc.
+  * Every ppat_desc case is handled in the switch below. The pattern's own
+  * attributes, if any, are printed in front of it, and comments recorded in
+  * cmtTbl for the pattern's location are attached at the very end. */
+ and printPattern = (p: Parsetree.pattern, cmtTbl) => {
+   let patternWithoutAttributes = switch p.ppat_desc {
+   | Ppat_any => Doc.text("_")
+   | Ppat_var(var) => printIdentLike(var.txt)
+   | Ppat_constant(c) => printConstant(c)
+   | Ppat_tuple(patterns) =>
+     Doc.group(
+       Doc.concat(list{
+         Doc.lparen,
+         Doc.indent(
+           Doc.concat(list{
+             Doc.softLine,
+             Doc.join(
+               ~sep=Doc.concat(list{Doc.text(","), Doc.line}),
+               List.map(pat => printPattern(pat, cmtTbl), patterns),
+             ),
+           }),
+         ),
+         Doc.trailingComma,
+         Doc.softLine,
+         Doc.rparen,
+       }),
+     )
+   | Ppat_array(list{}) =>
+     /* empty array: keep any comments sitting between the brackets */
+     Doc.concat(list{Doc.lbracket, printCommentsInside(cmtTbl, p.ppat_loc), Doc.rbracket})
+   | Ppat_array(patterns) =>
+     Doc.group(
+       Doc.concat(list{
+         Doc.text("["),
+         Doc.indent(
+           Doc.concat(list{
+             Doc.softLine,
+             Doc.join(
+               ~sep=Doc.concat(list{Doc.text(","), Doc.line}),
+               List.map(pat => printPattern(pat, cmtTbl), patterns),
+             ),
+           }),
+         ),
+         Doc.trailingComma,
+         Doc.softLine,
+         Doc.text("]"),
+       }),
+     )
+   | Ppat_construct({txt: Longident.Lident("()")}, _) =>
+     /* unit pattern */
+     Doc.concat(list{Doc.lparen, printCommentsInside(cmtTbl, p.ppat_loc), Doc.rparen})
+   | Ppat_construct({txt: Longident.Lident("[]")}, _) =>
+     /* empty list pattern: list[] */
+     Doc.concat(list{Doc.text("list["), printCommentsInside(cmtTbl, p.ppat_loc), Doc.rbracket})
+   | Ppat_construct({txt: Longident.Lident("::")}, _) =>
+     /* cons chain: flatten a :: b :: rest into list[a, b, ...rest] */
+     let (patterns, tail) = ParsetreeViewer.collectPatternsFromListConstruct(list{}, p)
+     let shouldHug = switch (patterns, tail) {
+     | (list{pat}, {ppat_desc: Ppat_construct({txt: Longident.Lident("[]")}, _)})
+       when ParsetreeViewer.isHuggablePattern(pat) => true
+     | _ => false
+     }
+
+     let children = Doc.concat(list{
+       if shouldHug {
+         Doc.nil
+       } else {
+         Doc.softLine
+       },
+       Doc.join(
+         ~sep=Doc.concat(list{Doc.text(","), Doc.line}),
+         List.map(pat => printPattern(pat, cmtTbl), patterns),
+       ),
+       switch tail.Parsetree.ppat_desc {
+       | Ppat_construct({txt: Longident.Lident("[]")}, _) => Doc.nil
+       | _ =>
+         /* non-empty tail prints as a spread: ...rest */
+         let doc = Doc.concat(list{Doc.text("..."), printPattern(tail, cmtTbl)})
+         let tail = printComments(doc, cmtTbl, tail.ppat_loc)
+         Doc.concat(list{Doc.text(","), Doc.line, tail})
+       },
+     })
+     Doc.group(
+       Doc.concat(list{
+         Doc.text("list["),
+         if shouldHug {
+           children
+         } else {
+           Doc.concat(list{
+             Doc.indent(children),
+             Doc.ifBreaks(Doc.text(","), Doc.nil),
+             Doc.softLine,
+           })
+         },
+         Doc.rbracket,
+       }),
+     )
+   | Ppat_construct(constrName, constructorArgs) =>
+     let constrName = printLongident(constrName.txt)
+     let argsDoc = switch constructorArgs {
+     | None => Doc.nil
+     | Some({ppat_loc, ppat_desc: Ppat_construct({txt: Longident.Lident("()")}, _)}) =>
+       Doc.concat(list{Doc.lparen, printCommentsInside(cmtTbl, ppat_loc), Doc.rparen})
+     | Some({ppat_desc: Ppat_tuple(list{}), ppat_loc: loc}) =>
+       Doc.concat(list{Doc.lparen, Doc.softLine, printCommentsInside(cmtTbl, loc), Doc.rparen})
+     /* Some((1, 2)) */
+     | Some({ppat_desc: Ppat_tuple(list{{ppat_desc: Ppat_tuple(_)} as arg})}) =>
+       Doc.concat(list{Doc.lparen, printPattern(arg, cmtTbl), Doc.rparen})
+     | Some({ppat_desc: Ppat_tuple(patterns)}) =>
+       Doc.concat(list{
+         Doc.lparen,
+         Doc.indent(
+           Doc.concat(list{
+             Doc.softLine,
+             Doc.join(
+               ~sep=Doc.concat(list{Doc.comma, Doc.line}),
+               List.map(pat => printPattern(pat, cmtTbl), patterns),
+             ),
+           }),
+         ),
+         Doc.trailingComma,
+         Doc.softLine,
+         Doc.rparen,
+       })
+     | Some(arg) =>
+       let argDoc = printPattern(arg, cmtTbl)
+       /* huggable args (records, tuples, ...) stay glued to the parens */
+       let shouldHug = ParsetreeViewer.isHuggablePattern(arg)
+       Doc.concat(list{
+         Doc.lparen,
+         if shouldHug {
+           argDoc
+         } else {
+           Doc.concat(list{
+             Doc.indent(Doc.concat(list{Doc.softLine, argDoc})),
+             Doc.trailingComma,
+             Doc.softLine,
+           })
+         },
+         Doc.rparen,
+       })
+     }
+
+     Doc.group(Doc.concat(list{constrName, argsDoc}))
+   | Ppat_variant(label, None) =>
+     Doc.concat(list{Doc.text("#"), printIdentLike(~allowUident=true, label)})
+   | Ppat_variant(label, variantArgs) =>
+     /* polymorphic variant with payload; arg handling mirrors Ppat_construct */
+     let variantName = Doc.concat(list{Doc.text("#"), printIdentLike(~allowUident=true, label)})
+     let argsDoc = switch variantArgs {
+     | None => Doc.nil
+     | Some({ppat_desc: Ppat_construct({txt: Longident.Lident("()")}, _)}) => Doc.text("()")
+     | Some({ppat_desc: Ppat_tuple(list{}), ppat_loc: loc}) =>
+       Doc.concat(list{Doc.lparen, Doc.softLine, printCommentsInside(cmtTbl, loc), Doc.rparen})
+     /* Some((1, 2)) */
+     | Some({ppat_desc: Ppat_tuple(list{{ppat_desc: Ppat_tuple(_)} as arg})}) =>
+       Doc.concat(list{Doc.lparen, printPattern(arg, cmtTbl), Doc.rparen})
+     | Some({ppat_desc: Ppat_tuple(patterns)}) =>
+       Doc.concat(list{
+         Doc.lparen,
+         Doc.indent(
+           Doc.concat(list{
+             Doc.softLine,
+             Doc.join(
+               ~sep=Doc.concat(list{Doc.comma, Doc.line}),
+               List.map(pat => printPattern(pat, cmtTbl), patterns),
+             ),
+           }),
+         ),
+         Doc.trailingComma,
+         Doc.softLine,
+         Doc.rparen,
+       })
+     | Some(arg) =>
+       let argDoc = printPattern(arg, cmtTbl)
+       let shouldHug = ParsetreeViewer.isHuggablePattern(arg)
+       Doc.concat(list{
+         Doc.lparen,
+         if shouldHug {
+           argDoc
+         } else {
+           Doc.concat(list{
+             Doc.indent(Doc.concat(list{Doc.softLine, argDoc})),
+             Doc.trailingComma,
+             Doc.softLine,
+           })
+         },
+         Doc.rparen,
+       })
+     }
+
+     Doc.group(Doc.concat(list{variantName, argsDoc}))
+   | Ppat_type(ident) => Doc.concat(list{Doc.text("##"), printIdentPath(ident, cmtTbl)})
+   | Ppat_record(rows, openFlag) =>
+     Doc.group(
+       Doc.concat(list{
+         Doc.lbrace,
+         Doc.indent(
+           Doc.concat(list{
+             Doc.softLine,
+             Doc.join(
+               ~sep=Doc.concat(list{Doc.text(","), Doc.line}),
+               List.map(row => printPatternRecordRow(row, cmtTbl), rows),
+             ),
+             /* an open record pattern ends with ", _" */
+             switch openFlag {
+             | Open => Doc.concat(list{Doc.text(","), Doc.line, Doc.text("_")})
+             | Closed => Doc.nil
+             },
+           }),
+         ),
+         Doc.ifBreaks(Doc.text(","), Doc.nil),
+         Doc.softLine,
+         Doc.rbrace,
+       }),
+     )
+
+   | Ppat_exception(p) =>
+     /* or- and alias-patterns bind looser, so parenthesize them */
+     let needsParens = switch p.ppat_desc {
+     | Ppat_or(_, _) | Ppat_alias(_, _) => true
+     | _ => false
+     }
+
+     let pat = {
+       let p = printPattern(p, cmtTbl)
+       if needsParens {
+         Doc.concat(list{Doc.text("("), p, Doc.text(")")})
+       } else {
+         p
+       }
+     }
+
+     Doc.group(Doc.concat(list{Doc.text("exception"), Doc.line, pat}))
+   | Ppat_or(_) =>
+     /* Blue | Red | Green -> [Blue; Red; Green] */
+     let orChain = ParsetreeViewer.collectOrPatternChain(p)
+     let docs = List.mapi((i, pat) => {
+       let patternDoc = printPattern(pat, cmtTbl)
+       Doc.concat(list{
+         if i === 0 {
+           Doc.nil
+         } else {
+           Doc.concat(list{Doc.line, Doc.text("| ")})
+         },
+         switch pat.ppat_desc {
+         /* (Blue | Red) | (Green | Black) | White */
+         | Ppat_or(_) => addParens(patternDoc)
+         | _ => patternDoc
+         },
+       })
+     }, orChain)
+     Doc.group(Doc.concat(docs))
+   | Ppat_extension(ext) => printExtensionWithComments(~atModuleLvl=false, ext, cmtTbl)
+   | Ppat_lazy(p) =>
+     let needsParens = switch p.ppat_desc {
+     | Ppat_or(_, _) | Ppat_alias(_, _) => true
+     | _ => false
+     }
+
+     let pat = {
+       let p = printPattern(p, cmtTbl)
+       if needsParens {
+         Doc.concat(list{Doc.text("("), p, Doc.text(")")})
+       } else {
+         p
+       }
+     }
+
+     Doc.concat(list{Doc.text("lazy "), pat})
+   | Ppat_alias(p, aliasLoc) =>
+     let needsParens = switch p.ppat_desc {
+     | Ppat_or(_, _) | Ppat_alias(_, _) => true
+     | _ => false
+     }
+
+     let renderedPattern = {
+       let p = printPattern(p, cmtTbl)
+       if needsParens {
+         Doc.concat(list{Doc.text("("), p, Doc.text(")")})
+       } else {
+         p
+       }
+     }
+
+     Doc.concat(list{renderedPattern, Doc.text(" as "), printStringLoc(aliasLoc, cmtTbl)})
+
+   /* Note: module(P : S) is represented as */
+   /* Ppat_constraint(Ppat_unpack, Ptyp_package) */
+   | Ppat_constraint(
+       {ppat_desc: Ppat_unpack(stringLoc)},
+       {ptyp_desc: Ptyp_package(packageType), ptyp_loc},
+     ) =>
+     Doc.concat(list{
+       Doc.text("module("),
+       printComments(Doc.text(stringLoc.txt), cmtTbl, stringLoc.loc),
+       Doc.text(": "),
+       printComments(
+         printPackageType(~printModuleKeywordAndParens=false, packageType, cmtTbl),
+         cmtTbl,
+         ptyp_loc,
+       ),
+       Doc.rparen,
+     })
+   | Ppat_constraint(pattern, typ) =>
+     Doc.concat(list{printPattern(pattern, cmtTbl), Doc.text(": "), printTypExpr(typ, cmtTbl)})
+
+   /* Note: module(P : S) is represented as */
+   /* Ppat_constraint(Ppat_unpack, Ptyp_package) */
+   | Ppat_unpack(stringLoc) =>
+     Doc.concat(list{
+       Doc.text("module("),
+       printComments(Doc.text(stringLoc.txt), cmtTbl, stringLoc.loc),
+       Doc.rparen,
+     })
+   | Ppat_interval(a, b) => Doc.concat(list{printConstant(a), Doc.text(" .. "), printConstant(b)})
+   | Ppat_open(_) => Doc.nil
+   }
+
+   let doc = switch p.ppat_attributes {
+   | list{} => patternWithoutAttributes
+   | attrs => Doc.group(Doc.concat(list{printAttributes(attrs), patternWithoutAttributes}))
+   }
+
+   printComments(doc, cmtTbl, p.ppat_loc)
+ }
+
+ /* Prints one row of a record pattern. A row whose label and bound variable
+  * share the same name is printed punned ({x} instead of {x: x}); otherwise
+  * it prints "label: pattern" with comments attached over the full span. */
+ and printPatternRecordRow = (row, cmtTbl) =>
+   switch row {
+   /* punned {x} */
+   | (
+       {Location.txt: Longident.Lident(ident)} as longident,
+       {Parsetree.ppat_desc: Ppat_var({txt, _})},
+     ) when ident == txt =>
+     printLidentPath(longident, cmtTbl)
+   | (longident, pattern) =>
+     let locForComments = {
+       ...longident.loc,
+       loc_end: pattern.Parsetree.ppat_loc.loc_end,
+     }
+     let doc = Doc.group(
+       Doc.concat(list{
+         printLidentPath(longident, cmtTbl),
+         Doc.text(": "),
+         Doc.indent(Doc.concat(list{Doc.softLine, printPattern(pattern, cmtTbl)})),
+       }),
+     )
+     printComments(doc, cmtTbl, locForComments)
+   }
+
+ /* Prints an expression and attaches any comments recorded in cmtTbl
+  * for the expression's location. */
+ and printExpressionWithComments = (expr, cmtTbl) =>
+   printComments(printExpression(expr, cmtTbl), cmtTbl, expr.Parsetree.pexp_loc)
+
+ and printExpression = (e: Parsetree.expression, cmtTbl) => {
+ let printedExpression = switch e.pexp_desc {
+ | Parsetree.Pexp_constant(c) => printConstant(c)
+ | Pexp_construct(_) when ParsetreeViewer.hasJsxAttribute(e.pexp_attributes) =>
+ printJsxFragment(e, cmtTbl)
+ | Pexp_construct({txt: Longident.Lident("()")}, _) => Doc.text("()")
+ | Pexp_construct({txt: Longident.Lident("[]")}, _) =>
+ Doc.concat(list{Doc.text("list["), printCommentsInside(cmtTbl, e.pexp_loc), Doc.rbracket})
+ | Pexp_construct({txt: Longident.Lident("::")}, _) =>
+ let (expressions, spread) = ParsetreeViewer.collectListExpressions(e)
+ let spreadDoc = switch spread {
+ | Some(expr) =>
+ Doc.concat(list{
+ Doc.text(","),
+ Doc.line,
+ Doc.dotdotdot,
+ {
+ let doc = printExpressionWithComments(expr, cmtTbl)
+ switch Parens.expr(expr) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, expr, braces)
+ | Nothing => doc
+ }
+ },
+ })
+ | None => Doc.nil
+ }
+
+ Doc.group(
+ Doc.concat(list{
+ Doc.text("list["),
+ Doc.indent(
+ Doc.concat(list{
+ Doc.softLine,
+ Doc.join(~sep=Doc.concat(list{Doc.text(","), Doc.line}), List.map(expr => {
+ let doc = printExpressionWithComments(expr, cmtTbl)
+ switch Parens.expr(expr) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, expr, braces)
+ | Nothing => doc
+ }
+ }, expressions)),
+ spreadDoc,
+ }),
+ ),
+ Doc.trailingComma,
+ Doc.softLine,
+ Doc.rbracket,
+ }),
+ )
+ | Pexp_construct(longidentLoc, args) =>
+ let constr = printLongidentLocation(longidentLoc, cmtTbl)
+ let args = switch args {
+ | None => Doc.nil
+ | Some({pexp_desc: Pexp_construct({txt: Longident.Lident("()")}, _)}) => Doc.text("()")
+ /* Some((1, 2)) */
+ | Some({pexp_desc: Pexp_tuple(list{{pexp_desc: Pexp_tuple(_)} as arg})}) =>
+ Doc.concat(list{
+ Doc.lparen,
+ {
+ let doc = printExpressionWithComments(arg, cmtTbl)
+ switch Parens.expr(arg) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, arg, braces)
+ | Nothing => doc
+ }
+ },
+ Doc.rparen,
+ })
+ | Some({pexp_desc: Pexp_tuple(args)}) =>
+ Doc.concat(list{
+ Doc.lparen,
+ Doc.indent(
+ Doc.concat(list{
+ Doc.softLine,
+ Doc.join(~sep=Doc.concat(list{Doc.comma, Doc.line}), List.map(expr => {
+ let doc = printExpressionWithComments(expr, cmtTbl)
+ switch Parens.expr(expr) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, expr, braces)
+ | Nothing => doc
+ }
+ }, args)),
+ }),
+ ),
+ Doc.trailingComma,
+ Doc.softLine,
+ Doc.rparen,
+ })
+ | Some(arg) =>
+ let argDoc = {
+ let doc = printExpressionWithComments(arg, cmtTbl)
+ switch Parens.expr(arg) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, arg, braces)
+ | Nothing => doc
+ }
+ }
+
+ let shouldHug = ParsetreeViewer.isHuggableExpression(arg)
+ Doc.concat(list{
+ Doc.lparen,
+ if shouldHug {
+ argDoc
+ } else {
+ Doc.concat(list{
+ Doc.indent(Doc.concat(list{Doc.softLine, argDoc})),
+ Doc.trailingComma,
+ Doc.softLine,
+ })
+ },
+ Doc.rparen,
+ })
+ }
+
+ Doc.group(Doc.concat(list{constr, args}))
+ | Pexp_ident(path) => printLidentPath(path, cmtTbl)
+ | Pexp_tuple(exprs) =>
+ Doc.group(
+ Doc.concat(list{
+ Doc.lparen,
+ Doc.indent(
+ Doc.concat(list{
+ Doc.softLine,
+ Doc.join(~sep=Doc.concat(list{Doc.text(","), Doc.line}), List.map(expr => {
+ let doc = printExpressionWithComments(expr, cmtTbl)
+ switch Parens.expr(expr) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, expr, braces)
+ | Nothing => doc
+ }
+ }, exprs)),
+ }),
+ ),
+ Doc.ifBreaks(Doc.text(","), Doc.nil),
+ Doc.softLine,
+ Doc.rparen,
+ }),
+ )
+ | Pexp_array(list{}) =>
+ Doc.concat(list{Doc.lbracket, printCommentsInside(cmtTbl, e.pexp_loc), Doc.rbracket})
+ | Pexp_array(exprs) =>
+ Doc.group(
+ Doc.concat(list{
+ Doc.lbracket,
+ Doc.indent(
+ Doc.concat(list{
+ Doc.softLine,
+ Doc.join(~sep=Doc.concat(list{Doc.text(","), Doc.line}), List.map(expr => {
+ let doc = printExpressionWithComments(expr, cmtTbl)
+ switch Parens.expr(expr) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, expr, braces)
+ | Nothing => doc
+ }
+ }, exprs)),
+ }),
+ ),
+ Doc.trailingComma,
+ Doc.softLine,
+ Doc.rbracket,
+ }),
+ )
+ | Pexp_variant(label, args) =>
+ let variantName = Doc.concat(list{Doc.text("#"), printIdentLike(~allowUident=true, label)})
+ let args = switch args {
+ | None => Doc.nil
+ | Some({pexp_desc: Pexp_construct({txt: Longident.Lident("()")}, _)}) => Doc.text("()")
+ /* #poly((1, 2) */
+ | Some({pexp_desc: Pexp_tuple(list{{pexp_desc: Pexp_tuple(_)} as arg})}) =>
+ Doc.concat(list{
+ Doc.lparen,
+ {
+ let doc = printExpressionWithComments(arg, cmtTbl)
+ switch Parens.expr(arg) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, arg, braces)
+ | Nothing => doc
+ }
+ },
+ Doc.rparen,
+ })
+ | Some({pexp_desc: Pexp_tuple(args)}) =>
+ Doc.concat(list{
+ Doc.lparen,
+ Doc.indent(
+ Doc.concat(list{
+ Doc.softLine,
+ Doc.join(~sep=Doc.concat(list{Doc.comma, Doc.line}), List.map(expr => {
+ let doc = printExpressionWithComments(expr, cmtTbl)
+ switch Parens.expr(expr) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, expr, braces)
+ | Nothing => doc
+ }
+ }, args)),
+ }),
+ ),
+ Doc.trailingComma,
+ Doc.softLine,
+ Doc.rparen,
+ })
+ | Some(arg) =>
+ let argDoc = {
+ let doc = printExpressionWithComments(arg, cmtTbl)
+ switch Parens.expr(arg) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, arg, braces)
+ | Nothing => doc
+ }
+ }
+
+ let shouldHug = ParsetreeViewer.isHuggableExpression(arg)
+ Doc.concat(list{
+ Doc.lparen,
+ if shouldHug {
+ argDoc
+ } else {
+ Doc.concat(list{
+ Doc.indent(Doc.concat(list{Doc.softLine, argDoc})),
+ Doc.trailingComma,
+ Doc.softLine,
+ })
+ },
+ Doc.rparen,
+ })
+ }
+
+ Doc.group(Doc.concat(list{variantName, args}))
+ | Pexp_record(rows, spreadExpr) =>
+ let spread = switch spreadExpr {
+ | None => Doc.nil
+ | Some(expr) =>
+ Doc.concat(list{
+ Doc.dotdotdot,
+ {
+ let doc = printExpressionWithComments(expr, cmtTbl)
+ switch Parens.expr(expr) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, expr, braces)
+ | Nothing => doc
+ }
+ },
+ Doc.comma,
+ Doc.line,
+ })
+ }
+
+ /* If the record is written over multiple lines, break automatically
+ * `let x = {a: 1, b: 3}` -> same line, break when line-width exceeded
+ * `let x = {
+ * a: 1,
+ * b: 2,
+ * }` -> record is written on multiple lines, break the group */
+ let forceBreak = e.pexp_loc.loc_start.pos_lnum < e.pexp_loc.loc_end.pos_lnum
+
+ Doc.breakableGroup(
+ ~forceBreak,
+ Doc.concat(list{
+ Doc.lbrace,
+ Doc.indent(
+ Doc.concat(list{
+ Doc.softLine,
+ spread,
+ Doc.join(
+ ~sep=Doc.concat(list{Doc.text(","), Doc.line}),
+ List.map(row => printRecordRow(row, cmtTbl), rows),
+ ),
+ }),
+ ),
+ Doc.trailingComma,
+ Doc.softLine,
+ Doc.rbrace,
+ }),
+ )
+ | Pexp_extension(extension) =>
+ switch extension {
+ | (
+ {txt: "bs.obj"},
+ PStr(list{{
+ pstr_loc: loc,
+ pstr_desc: Pstr_eval({pexp_desc: Pexp_record(rows, _)}, list{}),
+ }}),
+ ) =>
+ /* If the object is written over multiple lines, break automatically
+ * `let x = {"a": 1, "b": 3}` -> same line, break when line-width exceeded
+ * `let x = {
+ * "a": 1,
+ * "b": 2,
+ * }` -> object is written on multiple lines, break the group */
+ let forceBreak = loc.loc_start.pos_lnum < loc.loc_end.pos_lnum
+
+ Doc.breakableGroup(
+ ~forceBreak,
+ Doc.concat(list{
+ Doc.lbrace,
+ Doc.indent(
+ Doc.concat(list{
+ Doc.softLine,
+ Doc.join(
+ ~sep=Doc.concat(list{Doc.text(","), Doc.line}),
+ List.map(row => printBsObjectRow(row, cmtTbl), rows),
+ ),
+ }),
+ ),
+ Doc.trailingComma,
+ Doc.softLine,
+ Doc.rbrace,
+ }),
+ )
+ | extension => printExtensionWithComments(~atModuleLvl=false, extension, cmtTbl)
+ }
+ | Pexp_apply(_) =>
+ if ParsetreeViewer.isUnaryExpression(e) {
+ printUnaryExpression(e, cmtTbl)
+ } else if ParsetreeViewer.isTemplateLiteral(e) {
+ printTemplateLiteral(e, cmtTbl)
+ } else if ParsetreeViewer.isBinaryExpression(e) {
+ printBinaryExpression(e, cmtTbl)
+ } else {
+ printPexpApply(e, cmtTbl)
+ }
+ | Pexp_unreachable => Doc.dot
+ | Pexp_field(expr, longidentLoc) =>
+ let lhs = {
+ let doc = printExpressionWithComments(expr, cmtTbl)
+ switch Parens.fieldExpr(expr) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, expr, braces)
+ | Nothing => doc
+ }
+ }
+
+ Doc.concat(list{lhs, Doc.dot, printLidentPath(longidentLoc, cmtTbl)})
+ | Pexp_setfield(expr1, longidentLoc, expr2) =>
+ printSetFieldExpr(e.pexp_attributes, expr1, longidentLoc, expr2, e.pexp_loc, cmtTbl)
+ | Pexp_ifthenelse(_ifExpr, _thenExpr, _elseExpr) =>
+ if ParsetreeViewer.isTernaryExpr(e) {
+ let (parts, alternate) = ParsetreeViewer.collectTernaryParts(e)
+ let ternaryDoc = switch parts {
+ | list{(condition1, consequent1), ...rest} =>
+ Doc.group(
+ Doc.concat(list{
+ printTernaryOperand(condition1, cmtTbl),
+ Doc.indent(
+ Doc.concat(list{
+ Doc.line,
+ Doc.indent(
+ Doc.concat(list{Doc.text("? "), printTernaryOperand(consequent1, cmtTbl)}),
+ ),
+ Doc.concat(
+ List.map(
+ ((condition, consequent)) =>
+ Doc.concat(list{
+ Doc.line,
+ Doc.text(": "),
+ printTernaryOperand(condition, cmtTbl),
+ Doc.line,
+ Doc.text("? "),
+ printTernaryOperand(consequent, cmtTbl),
+ }),
+ rest,
+ ),
+ ),
+ Doc.line,
+ Doc.text(": "),
+ Doc.indent(printTernaryOperand(alternate, cmtTbl)),
+ }),
+ ),
+ }),
+ )
+ | _ => Doc.nil
+ }
+
+ let attrs = ParsetreeViewer.filterTernaryAttributes(e.pexp_attributes)
+ let needsParens = switch ParsetreeViewer.filterParsingAttrs(attrs) {
+ | list{} => false
+ | _ => true
+ }
+
+ Doc.concat(list{
+ printAttributes(attrs),
+ if needsParens {
+ addParens(ternaryDoc)
+ } else {
+ ternaryDoc
+ },
+ })
+ } else {
+ let (ifs, elseExpr) = ParsetreeViewer.collectIfExpressions(e)
+ let ifDocs = Doc.join(~sep=Doc.space, List.mapi((i, (ifExpr, thenExpr)) => {
+ let ifTxt = if i > 0 {
+ Doc.text("else if ")
+ } else {
+ Doc.text("if ")
+ }
+ let condition = if ParsetreeViewer.isBlockExpr(ifExpr) {
+ printExpressionBlock(~braces=true, ifExpr, cmtTbl)
+ } else {
+ let doc = printExpressionWithComments(ifExpr, cmtTbl)
+ switch Parens.expr(ifExpr) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, ifExpr, braces)
+ | Nothing => Doc.ifBreaks(addParens(doc), doc)
+ }
+ }
+
+ Doc.concat(list{
+ ifTxt,
+ Doc.group(condition),
+ Doc.space,
+ {
+ let thenExpr = switch ParsetreeViewer.processBracesAttr(thenExpr) {
+ /* This case only happens when coming from Reason, we strip braces */
+ | (Some(_), expr) => expr
+ | _ => thenExpr
+ }
+
+ printExpressionBlock(~braces=true, thenExpr, cmtTbl)
+ },
+ })
+ }, ifs))
+ let elseDoc = switch elseExpr {
+ | None => Doc.nil
+ | Some(expr) =>
+ Doc.concat(list{Doc.text(" else "), printExpressionBlock(~braces=true, expr, cmtTbl)})
+ }
+
+ Doc.concat(list{printAttributes(e.pexp_attributes), ifDocs, elseDoc})
+ }
+ | Pexp_while(expr1, expr2) =>
+ let condition = {
+ let doc = printExpressionWithComments(expr1, cmtTbl)
+ switch Parens.expr(expr1) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, expr1, braces)
+ | Nothing => doc
+ }
+ }
+
+ Doc.breakableGroup(
+ ~forceBreak=true,
+ Doc.concat(list{
+ Doc.text("while "),
+ if ParsetreeViewer.isBlockExpr(expr1) {
+ condition
+ } else {
+ Doc.group(Doc.ifBreaks(addParens(condition), condition))
+ },
+ Doc.space,
+ printExpressionBlock(~braces=true, expr2, cmtTbl),
+ }),
+ )
+ | Pexp_for(pattern, fromExpr, toExpr, directionFlag, body) =>
+ Doc.breakableGroup(
+ ~forceBreak=true,
+ Doc.concat(list{
+ Doc.text("for "),
+ printPattern(pattern, cmtTbl),
+ Doc.text(" in "),
+ {
+ let doc = printExpressionWithComments(fromExpr, cmtTbl)
+ switch Parens.expr(fromExpr) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, fromExpr, braces)
+ | Nothing => doc
+ }
+ },
+ printDirectionFlag(directionFlag),
+ {
+ let doc = printExpressionWithComments(toExpr, cmtTbl)
+ switch Parens.expr(toExpr) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, toExpr, braces)
+ | Nothing => doc
+ }
+ },
+ Doc.space,
+ printExpressionBlock(~braces=true, body, cmtTbl),
+ }),
+ )
+ | Pexp_constraint(
+ {pexp_desc: Pexp_pack(modExpr)},
+ {ptyp_desc: Ptyp_package(packageType), ptyp_loc},
+ ) =>
+ Doc.group(
+ Doc.concat(list{
+ Doc.text("module("),
+ Doc.indent(
+ Doc.concat(list{
+ Doc.softLine,
+ printModExpr(modExpr, cmtTbl),
+ Doc.text(": "),
+ printComments(
+ printPackageType(~printModuleKeywordAndParens=false, packageType, cmtTbl),
+ cmtTbl,
+ ptyp_loc,
+ ),
+ }),
+ ),
+ Doc.softLine,
+ Doc.rparen,
+ }),
+ )
+
+ | Pexp_constraint(expr, typ) =>
+ let exprDoc = {
+ let doc = printExpressionWithComments(expr, cmtTbl)
+ switch Parens.expr(expr) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, expr, braces)
+ | Nothing => doc
+ }
+ }
+
+ Doc.concat(list{exprDoc, Doc.text(": "), printTypExpr(typ, cmtTbl)})
+ | Pexp_letmodule({txt: _modName}, _modExpr, _expr) =>
+ printExpressionBlock(~braces=true, e, cmtTbl)
+ | Pexp_letexception(_extensionConstructor, _expr) =>
+ printExpressionBlock(~braces=true, e, cmtTbl)
+ | Pexp_assert(expr) =>
+ let rhs = {
+ let doc = printExpressionWithComments(expr, cmtTbl)
+ switch Parens.lazyOrAssertExprRhs(expr) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, expr, braces)
+ | Nothing => doc
+ }
+ }
+
+ Doc.concat(list{Doc.text("assert "), rhs})
+ | Pexp_lazy(expr) =>
+ let rhs = {
+ let doc = printExpressionWithComments(expr, cmtTbl)
+ switch Parens.lazyOrAssertExprRhs(expr) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, expr, braces)
+ | Nothing => doc
+ }
+ }
+
+ Doc.group(Doc.concat(list{Doc.text("lazy "), rhs}))
+ | Pexp_open(_overrideFlag, _longidentLoc, _expr) =>
+ printExpressionBlock(~braces=true, e, cmtTbl)
+ | Pexp_pack(modExpr) =>
+ Doc.group(
+ Doc.concat(list{
+ Doc.text("module("),
+ Doc.indent(Doc.concat(list{Doc.softLine, printModExpr(modExpr, cmtTbl)})),
+ Doc.softLine,
+ Doc.rparen,
+ }),
+ )
+ | Pexp_sequence(_) => printExpressionBlock(~braces=true, e, cmtTbl)
+ | Pexp_let(_) => printExpressionBlock(~braces=true, e, cmtTbl)
+ | Pexp_fun(Nolabel, None, {ppat_desc: Ppat_var({txt: "__x"})}, {pexp_desc: Pexp_apply(_)}) =>
+ /* (__x) => f(a, __x, c) -----> f(a, _, c) */
+ printExpressionWithComments(ParsetreeViewer.rewriteUnderscoreApply(e), cmtTbl)
+ | Pexp_fun(_) | Pexp_newtype(_) =>
+ let (attrsOnArrow, parameters, returnExpr) = ParsetreeViewer.funExpr(e)
+ let (uncurried, attrs) = ParsetreeViewer.processUncurriedAttribute(attrsOnArrow)
+
+ let (returnExpr, typConstraint) = switch returnExpr.pexp_desc {
+ | Pexp_constraint(expr, typ) => (
+ {
+ ...expr,
+ pexp_attributes: List.concat(list{expr.pexp_attributes, returnExpr.pexp_attributes}),
+ },
+ Some(typ),
+ )
+ | _ => (returnExpr, None)
+ }
+
+ let hasConstraint = switch typConstraint {
+ | Some(_) => true
+ | None => false
+ }
+ let parametersDoc = printExprFunParameters(
+ ~inCallback=false,
+ ~uncurried,
+ ~hasConstraint,
+ parameters,
+ cmtTbl,
+ )
+
+ let returnExprDoc = {
+ let (optBraces, _) = ParsetreeViewer.processBracesAttr(returnExpr)
+ let shouldInline = switch (returnExpr.pexp_desc, optBraces) {
+ | (_, Some(_)) => true
+ | (Pexp_array(_) | Pexp_tuple(_) | Pexp_construct(_, Some(_)) | Pexp_record(_), _) => true
+ | _ => false
+ }
+
+ let shouldIndent = switch returnExpr.pexp_desc {
+ | Pexp_sequence(_)
+ | Pexp_let(_)
+ | Pexp_letmodule(_)
+ | Pexp_letexception(_)
+ | Pexp_open(_) => false
+ | _ => true
+ }
+
+ let returnDoc = {
+ let doc = printExpressionWithComments(returnExpr, cmtTbl)
+ switch Parens.expr(returnExpr) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, returnExpr, braces)
+ | Nothing => doc
+ }
+ }
+
+ if shouldInline {
+ Doc.concat(list{Doc.space, returnDoc})
+ } else {
+ Doc.group(
+ if shouldIndent {
+ Doc.indent(Doc.concat(list{Doc.line, returnDoc}))
+ } else {
+ Doc.concat(list{Doc.space, returnDoc})
+ },
+ )
+ }
+ }
+
+ let typConstraintDoc = switch typConstraint {
+ | Some(typ) => Doc.concat(list{Doc.text(": "), printTypExpr(typ, cmtTbl)})
+ | _ => Doc.nil
+ }
+
+ let attrs = printAttributes(attrs)
+ Doc.group(
+ Doc.concat(list{attrs, parametersDoc, typConstraintDoc, Doc.text(" =>"), returnExprDoc}),
+ )
+ | Pexp_try(expr, cases) =>
+ let exprDoc = {
+ let doc = printExpressionWithComments(expr, cmtTbl)
+ switch Parens.expr(expr) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, expr, braces)
+ | Nothing => doc
+ }
+ }
+
+ Doc.concat(list{Doc.text("try "), exprDoc, Doc.text(" catch "), printCases(cases, cmtTbl)})
+ | Pexp_match(expr, cases) =>
+ let exprDoc = {
+ let doc = printExpressionWithComments(expr, cmtTbl)
+ switch Parens.expr(expr) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, expr, braces)
+ | Nothing => doc
+ }
+ }
+
+ Doc.concat(list{Doc.text("switch "), exprDoc, Doc.space, printCases(cases, cmtTbl)})
+ | Pexp_function(cases) =>
+ Doc.concat(list{Doc.text("x => switch x "), printCases(cases, cmtTbl)})
+ | Pexp_coerce(expr, typOpt, typ) =>
+ let docExpr = printExpressionWithComments(expr, cmtTbl)
+ let docTyp = printTypExpr(typ, cmtTbl)
+ let ofType = switch typOpt {
+ | None => Doc.nil
+ | Some(typ1) => Doc.concat(list{Doc.text(": "), printTypExpr(typ1, cmtTbl)})
+ }
+
+ Doc.concat(list{Doc.lparen, docExpr, ofType, Doc.text(" :> "), docTyp, Doc.rparen})
+ | Pexp_send(_) => Doc.text("Pexp_send not implemented in printer")
+ | Pexp_new(_) => Doc.text("Pexp_new not implemented in printer")
+ | Pexp_setinstvar(_) => Doc.text("Pexp_setinstvar not implemented in printer")
+ | Pexp_override(_) => Doc.text("Pexp_override not implemented in printer")
+ | Pexp_poly(_) => Doc.text("Pexp_poly not implemented in printer")
+ | Pexp_object(_) => Doc.text("Pexp_object not implemented in printer")
+ }
+
+ let shouldPrintItsOwnAttributes = switch e.pexp_desc {
+ | Pexp_apply(_) | Pexp_fun(_) | Pexp_newtype(_) | Pexp_setfield(_) | Pexp_ifthenelse(_) => true
+ | Pexp_construct(_) when ParsetreeViewer.hasJsxAttribute(e.pexp_attributes) => true
+ | _ => false
+ }
+
+ switch e.pexp_attributes {
+ | list{} => printedExpression
+ | attrs when !shouldPrintItsOwnAttributes =>
+ Doc.group(Doc.concat(list{printAttributes(attrs), printedExpression}))
+ | _ => printedExpression
+ }
+ }
+
+ and printPexpFun = (~inCallback, e, cmtTbl) => {
+ let (attrsOnArrow, parameters, returnExpr) = ParsetreeViewer.funExpr(e)
+ let (uncurried, attrs) = ParsetreeViewer.processUncurriedAttribute(attrsOnArrow)
+
+ let (returnExpr, typConstraint) = switch returnExpr.pexp_desc {
+ | Pexp_constraint(expr, typ) => (
+ {
+ ...expr,
+ pexp_attributes: List.concat(list{expr.pexp_attributes, returnExpr.pexp_attributes}),
+ },
+ Some(typ),
+ )
+ | _ => (returnExpr, None)
+ }
+
+ let parametersDoc = printExprFunParameters(
+ ~inCallback,
+ ~uncurried,
+ ~hasConstraint=switch typConstraint {
+ | Some(_) => true
+ | None => false
+ },
+ parameters,
+ cmtTbl,
+ )
+ let returnShouldIndent = switch returnExpr.pexp_desc {
+ | Pexp_sequence(_)
+ | Pexp_let(_)
+ | Pexp_letmodule(_)
+ | Pexp_letexception(_)
+ | Pexp_open(_) => false
+ | _ => true
+ }
+
+ let returnExprDoc = {
+ let (optBraces, _) = ParsetreeViewer.processBracesAttr(returnExpr)
+ let shouldInline = switch (returnExpr.pexp_desc, optBraces) {
+ | (_, Some(_)) => true
+ | (Pexp_array(_) | Pexp_tuple(_) | Pexp_construct(_, Some(_)) | Pexp_record(_), _) => true
+ | _ => false
+ }
+
+ let returnDoc = {
+ let doc = printExpressionWithComments(returnExpr, cmtTbl)
+ switch Parens.expr(returnExpr) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, returnExpr, braces)
+ | Nothing => doc
+ }
+ }
+
+ if shouldInline {
+ Doc.concat(list{Doc.space, returnDoc})
+ } else {
+ Doc.group(
+ if returnShouldIndent {
+ Doc.concat(list{
+ Doc.indent(Doc.concat(list{Doc.line, returnDoc})),
+ if inCallback {
+ Doc.softLine
+ } else {
+ Doc.nil
+ },
+ })
+ } else {
+ Doc.concat(list{Doc.space, returnDoc})
+ },
+ )
+ }
+ }
+
+ let typConstraintDoc = switch typConstraint {
+ | Some(typ) => Doc.concat(list{Doc.text(": "), printTypExpr(typ, cmtTbl)})
+ | _ => Doc.nil
+ }
+
+ Doc.group(
+ Doc.concat(list{
+ printAttributes(attrs),
+ parametersDoc,
+ typConstraintDoc,
+ Doc.text(" =>"),
+ returnExprDoc,
+ }),
+ )
+ }
+
+ and printTernaryOperand = (expr, cmtTbl) => {
+ let doc = printExpressionWithComments(expr, cmtTbl)
+ switch Parens.ternaryOperand(expr) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, expr, braces)
+ | Nothing => doc
+ }
+ }
+
+ and printSetFieldExpr = (attrs, lhs, longidentLoc, rhs, loc, cmtTbl) => {
+ let rhsDoc = {
+ let doc = printExpressionWithComments(rhs, cmtTbl)
+ switch Parens.setFieldExprRhs(rhs) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, rhs, braces)
+ | Nothing => doc
+ }
+ }
+
+ let lhsDoc = {
+ let doc = printExpressionWithComments(lhs, cmtTbl)
+ switch Parens.fieldExpr(lhs) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, lhs, braces)
+ | Nothing => doc
+ }
+ }
+
+ let shouldIndent = ParsetreeViewer.isBinaryExpression(rhs)
+ let doc = Doc.group(
+ Doc.concat(list{
+ lhsDoc,
+ Doc.dot,
+ printLidentPath(longidentLoc, cmtTbl),
+ Doc.text(" ="),
+ if shouldIndent {
+ Doc.group(Doc.indent(Doc.concat(list{Doc.line, rhsDoc})))
+ } else {
+ Doc.concat(list{Doc.space, rhsDoc})
+ },
+ }),
+ )
+ let doc = switch attrs {
+ | list{} => doc
+ | attrs => Doc.group(Doc.concat(list{printAttributes(attrs), doc}))
+ }
+
+ printComments(doc, cmtTbl, loc)
+ }
+
+ and printTemplateLiteral = (expr, cmtTbl) => {
+ let tag = ref("j")
+ let rec walkExpr = expr => {
+ open Parsetree
+ switch expr.pexp_desc {
+ | Pexp_apply(
+ {pexp_desc: Pexp_ident({txt: Longident.Lident("^")})},
+ list{(Nolabel, arg1), (Nolabel, arg2)},
+ ) =>
+ let lhs = walkExpr(arg1)
+ let rhs = walkExpr(arg2)
+ Doc.concat(list{lhs, rhs})
+ | Pexp_constant(Pconst_string(txt, Some(prefix))) =>
+ tag := prefix
+ Doc.text(txt)
+ | _ =>
+ let doc = printExpressionWithComments(expr, cmtTbl)
+ Doc.concat(list{Doc.text("${"), doc, Doc.rbrace})
+ }
+ }
+
+ let content = walkExpr(expr)
+ Doc.concat(list{
+ if tag.contents == "j" {
+ Doc.nil
+ } else {
+ Doc.text(tag.contents)
+ },
+ Doc.text("`"),
+ content,
+ Doc.text("`"),
+ })
+ }
+
+ and printUnaryExpression = (expr, cmtTbl) => {
+ let printUnaryOperator = op =>
+ Doc.text(
+ switch op {
+ | "~+" => "+"
+ | "~+." => "+."
+ | "~-" => "-"
+ | "~-." => "-."
+ | "not" => "!"
+ | _ => assert false
+ },
+ )
+ switch expr.pexp_desc {
+ | Pexp_apply(
+ {pexp_desc: Pexp_ident({txt: Longident.Lident(operator)})},
+ list{(Nolabel, operand)},
+ ) =>
+ let printedOperand = {
+ let doc = printExpressionWithComments(operand, cmtTbl)
+ switch Parens.unaryExprOperand(operand) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, operand, braces)
+ | Nothing => doc
+ }
+ }
+
+ let doc = Doc.concat(list{printUnaryOperator(operator), printedOperand})
+ printComments(doc, cmtTbl, expr.pexp_loc)
+ | _ => assert false
+ }
+ }
+
+ and printBinaryExpression = (expr: Parsetree.expression, cmtTbl) => {
+ let printBinaryOperator = (~inlineRhs, operator) => {
+ let operatorTxt = switch operator {
+ | "|." => "->"
+ | "^" => "++"
+ | "=" => "=="
+ | "==" => "==="
+ | "<>" => "!="
+ | "!=" => "!=="
+ | txt => txt
+ }
+
+ let spacingBeforeOperator = if operator == "|." {
+ Doc.softLine
+ } else if operator == "|>" {
+ Doc.line
+ } else {
+ Doc.space
+ }
+
+ let spacingAfterOperator = if operator == "|." {
+ Doc.nil
+ } else if operator == "|>" {
+ Doc.space
+ } else if inlineRhs {
+ Doc.space
+ } else {
+ Doc.line
+ }
+
+ Doc.concat(list{spacingBeforeOperator, Doc.text(operatorTxt), spacingAfterOperator})
+ }
+
+ let printOperand = (~isLhs, expr, parentOperator) => {
+ let rec flatten = (~isLhs, expr, parentOperator) =>
+ if ParsetreeViewer.isBinaryExpression(expr) {
+ switch expr {
+ | {
+ pexp_desc:
+ Pexp_apply(
+ {pexp_desc: Pexp_ident({txt: Longident.Lident(operator)})},
+ list{(_, left), (_, right)},
+ ),
+ } =>
+ if (
+ ParsetreeViewer.flattenableOperators(parentOperator, operator) &&
+ !ParsetreeViewer.hasAttributes(expr.pexp_attributes)
+ ) {
+ let leftPrinted = flatten(~isLhs=true, left, operator)
+ let rightPrinted = {
+ let (_, rightAttrs) = ParsetreeViewer.partitionPrinteableAttributes(
+ right.pexp_attributes,
+ )
+
+ let doc = printExpressionWithComments(
+ {...right, pexp_attributes: rightAttrs},
+ cmtTbl,
+ )
+
+ let doc = if Parens.flattenOperandRhs(parentOperator, right) {
+ Doc.concat(list{Doc.lparen, doc, Doc.rparen})
+ } else {
+ doc
+ }
+
+ let printeableAttrs = ParsetreeViewer.filterPrinteableAttributes(
+ right.pexp_attributes,
+ )
+
+ Doc.concat(list{printAttributes(printeableAttrs), doc})
+ }
+
+ let doc = Doc.concat(list{
+ leftPrinted,
+ printBinaryOperator(~inlineRhs=false, operator),
+ rightPrinted,
+ })
+ let doc = if !isLhs && Parens.rhsBinaryExprOperand(operator, expr) {
+ Doc.concat(list{Doc.lparen, doc, Doc.rparen})
+ } else {
+ doc
+ }
+
+ printComments(doc, cmtTbl, expr.pexp_loc)
+ } else {
+ let doc = printExpressionWithComments({...expr, pexp_attributes: list{}}, cmtTbl)
+ let doc = if (
+ Parens.subBinaryExprOperand(parentOperator, operator) ||
+ (expr.pexp_attributes != list{} &&
+ (ParsetreeViewer.isBinaryExpression(expr) || ParsetreeViewer.isTernaryExpr(expr)))
+ ) {
+ Doc.concat(list{Doc.lparen, doc, Doc.rparen})
+ } else {
+ doc
+ }
+ Doc.concat(list{printAttributes(expr.pexp_attributes), doc})
+ }
+ | _ => assert false
+ }
+ } else {
+ switch expr.pexp_desc {
+ | Pexp_setfield(lhs, field, rhs) =>
+ let doc = printSetFieldExpr(
+ expr.pexp_attributes,
+ lhs,
+ field,
+ rhs,
+ expr.pexp_loc,
+ cmtTbl,
+ )
+ if isLhs {
+ addParens(doc)
+ } else {
+ doc
+ }
+ | Pexp_apply(
+ {pexp_desc: Pexp_ident({txt: Longident.Lident("#=")})},
+ list{(Nolabel, lhs), (Nolabel, rhs)},
+ ) =>
+ let rhsDoc = printExpressionWithComments(rhs, cmtTbl)
+ let lhsDoc = printExpressionWithComments(lhs, cmtTbl)
+ /* TODO: unify indentation of "=" */
+ let shouldIndent = ParsetreeViewer.isBinaryExpression(rhs)
+ let doc = Doc.group(
+ Doc.concat(list{
+ lhsDoc,
+ Doc.text(" ="),
+ if shouldIndent {
+ Doc.group(Doc.indent(Doc.concat(list{Doc.line, rhsDoc})))
+ } else {
+ Doc.concat(list{Doc.space, rhsDoc})
+ },
+ }),
+ )
+ let doc = switch expr.pexp_attributes {
+ | list{} => doc
+ | attrs => Doc.group(Doc.concat(list{printAttributes(attrs), doc}))
+ }
+
+ if isLhs {
+ addParens(doc)
+ } else {
+ doc
+ }
+ | _ =>
+ let doc = printExpressionWithComments(expr, cmtTbl)
+ switch Parens.binaryExprOperand(~isLhs, expr) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, expr, braces)
+ | Nothing => doc
+ }
+ }
+ }
+
+ flatten(~isLhs, expr, parentOperator)
+ }
+
+ switch expr.pexp_desc {
+ | Pexp_apply(
+ {pexp_desc: Pexp_ident({txt: Longident.Lident(("|." | "|>") as op)})},
+ list{(Nolabel, lhs), (Nolabel, rhs)},
+ )
+ when !(ParsetreeViewer.isBinaryExpression(lhs) || ParsetreeViewer.isBinaryExpression(rhs)) =>
+ let lhsDoc = printOperand(~isLhs=true, lhs, op)
+ let rhsDoc = printOperand(~isLhs=false, rhs, op)
+ Doc.group(
+ Doc.concat(list{
+ lhsDoc,
+ switch op {
+ | "|." => Doc.text("->")
+ | "|>" => Doc.text(" |> ")
+ | _ => assert false
+ },
+ rhsDoc,
+ }),
+ )
+ | Pexp_apply(
+ {pexp_desc: Pexp_ident({txt: Longident.Lident(operator)})},
+ list{(Nolabel, lhs), (Nolabel, rhs)},
+ ) =>
+ let right = {
+ let operatorWithRhs = {
+ let rhsDoc = printOperand(~isLhs=false, rhs, operator)
+ Doc.concat(list{
+ printBinaryOperator(
+ ~inlineRhs=ParsetreeViewer.shouldInlineRhsBinaryExpr(rhs),
+ operator,
+ ),
+ rhsDoc,
+ })
+ }
+ if ParsetreeViewer.shouldIndentBinaryExpr(expr) {
+ Doc.group(Doc.indent(operatorWithRhs))
+ } else {
+ operatorWithRhs
+ }
+ }
+
+ let doc = Doc.group(Doc.concat(list{printOperand(~isLhs=true, lhs, operator), right}))
+ Doc.group(
+ Doc.concat(list{
+ printAttributes(expr.pexp_attributes),
+ switch Parens.binaryExpr({
+ ...expr,
+ pexp_attributes: List.filter(attr =>
+ switch attr {
+ | ({Location.txt: "ns.braces"}, _) => false
+ | _ => true
+ }
+ , expr.pexp_attributes),
+ }) {
+ | Braced(bracesLoc) => printBraces(doc, expr, bracesLoc)
+ | Parenthesized => addParens(doc)
+ | Nothing => doc
+ },
+ }),
+ )
+ | _ => Doc.nil
+ }
+ }
+
+ /* callExpr(arg1, arg2) */
+ and printPexpApply = (expr, cmtTbl) =>
+ switch expr.pexp_desc {
+ | Pexp_apply(
+ {pexp_desc: Pexp_ident({txt: Longident.Lident("##")})},
+ list{(Nolabel, parentExpr), (Nolabel, memberExpr)},
+ ) =>
+ let parentDoc = {
+ let doc = printExpressionWithComments(parentExpr, cmtTbl)
+ switch Parens.unaryExprOperand(parentExpr) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, parentExpr, braces)
+ | Nothing => doc
+ }
+ }
+
+ let member = {
+ let memberDoc = switch memberExpr.pexp_desc {
+ | Pexp_ident(lident) =>
+ printComments(printLongident(lident.txt), cmtTbl, memberExpr.pexp_loc)
+ | _ => printExpressionWithComments(memberExpr, cmtTbl)
+ }
+
+ Doc.concat(list{Doc.text("\""), memberDoc, Doc.text("\"")})
+ }
+
+ Doc.group(
+ Doc.concat(list{
+ printAttributes(expr.pexp_attributes),
+ parentDoc,
+ Doc.lbracket,
+ member,
+ Doc.rbracket,
+ }),
+ )
+ | Pexp_apply(
+ {pexp_desc: Pexp_ident({txt: Longident.Lident("#=")})},
+ list{(Nolabel, lhs), (Nolabel, rhs)},
+ ) =>
+ let rhsDoc = {
+ let doc = printExpressionWithComments(rhs, cmtTbl)
+ switch Parens.expr(rhs) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, rhs, braces)
+ | Nothing => doc
+ }
+ }
+
+ /* TODO: unify indentation of "=" */
+ let shouldIndent =
+ !ParsetreeViewer.isBracedExpr(rhs) && ParsetreeViewer.isBinaryExpression(rhs)
+ let doc = Doc.group(
+ Doc.concat(list{
+ printExpressionWithComments(lhs, cmtTbl),
+ Doc.text(" ="),
+ if shouldIndent {
+ Doc.group(Doc.indent(Doc.concat(list{Doc.line, rhsDoc})))
+ } else {
+ Doc.concat(list{Doc.space, rhsDoc})
+ },
+ }),
+ )
+ switch expr.pexp_attributes {
+ | list{} => doc
+ | attrs => Doc.group(Doc.concat(list{printAttributes(attrs), doc}))
+ }
+ | Pexp_apply(
+ {pexp_desc: Pexp_ident({txt: Longident.Ldot(Lident("Array"), "get")})},
+ list{(Nolabel, parentExpr), (Nolabel, memberExpr)},
+ ) =>
+ let member = {
+ let memberDoc = {
+ let doc = printExpressionWithComments(memberExpr, cmtTbl)
+ switch Parens.expr(memberExpr) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, memberExpr, braces)
+ | Nothing => doc
+ }
+ }
+
+ let shouldInline = switch memberExpr.pexp_desc {
+ | Pexp_constant(_) | Pexp_ident(_) => true
+ | _ => false
+ }
+
+ if shouldInline {
+ memberDoc
+ } else {
+ Doc.concat(list{Doc.indent(Doc.concat(list{Doc.softLine, memberDoc})), Doc.softLine})
+ }
+ }
+
+ let parentDoc = {
+ let doc = printExpressionWithComments(parentExpr, cmtTbl)
+ switch Parens.unaryExprOperand(parentExpr) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, parentExpr, braces)
+ | Nothing => doc
+ }
+ }
+
+ Doc.group(
+ Doc.concat(list{
+ printAttributes(expr.pexp_attributes),
+ parentDoc,
+ Doc.lbracket,
+ member,
+ Doc.rbracket,
+ }),
+ )
+ | Pexp_apply(
+ {pexp_desc: Pexp_ident({txt: Longident.Ldot(Lident("Array"), "set")})},
+ list{(Nolabel, parentExpr), (Nolabel, memberExpr), (Nolabel, targetExpr)},
+ ) =>
+ let member = {
+ let memberDoc = {
+ let doc = printExpressionWithComments(memberExpr, cmtTbl)
+ switch Parens.expr(memberExpr) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, memberExpr, braces)
+ | Nothing => doc
+ }
+ }
+
+ let shouldInline = switch memberExpr.pexp_desc {
+ | Pexp_constant(_) | Pexp_ident(_) => true
+ | _ => false
+ }
+
+ if shouldInline {
+ memberDoc
+ } else {
+ Doc.concat(list{Doc.indent(Doc.concat(list{Doc.softLine, memberDoc})), Doc.softLine})
+ }
+ }
+
+ let shouldIndentTargetExpr = if ParsetreeViewer.isBracedExpr(targetExpr) {
+ false
+ } else {
+ ParsetreeViewer.isBinaryExpression(targetExpr) ||
+ switch targetExpr {
+ | {
+ pexp_attributes: list{({Location.txt: "ns.ternary"}, _)},
+ pexp_desc: Pexp_ifthenelse(ifExpr, _, _),
+ } =>
+ ParsetreeViewer.isBinaryExpression(ifExpr) ||
+ ParsetreeViewer.hasAttributes(ifExpr.pexp_attributes)
+ | {pexp_desc: Pexp_newtype(_)} => false
+ | e => ParsetreeViewer.hasAttributes(e.pexp_attributes) || ParsetreeViewer.isArrayAccess(e)
+ }
+ }
+
+ let targetExpr = {
+ let doc = printExpressionWithComments(targetExpr, cmtTbl)
+ switch Parens.expr(targetExpr) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, targetExpr, braces)
+ | Nothing => doc
+ }
+ }
+
+ let parentDoc = {
+ let doc = printExpressionWithComments(parentExpr, cmtTbl)
+ switch Parens.unaryExprOperand(parentExpr) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, parentExpr, braces)
+ | Nothing => doc
+ }
+ }
+
+ Doc.group(
+ Doc.concat(list{
+ printAttributes(expr.pexp_attributes),
+ parentDoc,
+ Doc.lbracket,
+ member,
+ Doc.rbracket,
+ Doc.text(" ="),
+ if shouldIndentTargetExpr {
+ Doc.indent(Doc.concat(list{Doc.line, targetExpr}))
+ } else {
+ Doc.concat(list{Doc.space, targetExpr})
+ },
+ }),
+ )
+ /* TODO: cleanup, are those branches even remotely performant? */
+ | Pexp_apply({pexp_desc: Pexp_ident(lident)}, args)
+ when ParsetreeViewer.isJsxExpression(expr) =>
+ printJsxExpression(lident, args, cmtTbl)
+ | Pexp_apply(callExpr, args) =>
+ let args = List.map(((lbl, arg)) => (lbl, ParsetreeViewer.rewriteUnderscoreApply(arg)), args)
+
+ let (uncurried, attrs) = ParsetreeViewer.processUncurriedAttribute(expr.pexp_attributes)
+
+ let callExprDoc = {
+ let doc = printExpressionWithComments(callExpr, cmtTbl)
+ switch Parens.callExpr(callExpr) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, callExpr, braces)
+ | Nothing => doc
+ }
+ }
+
+ if ParsetreeViewer.requiresSpecialCallbackPrintingFirstArg(args) {
+ let argsDoc = printArgumentsWithCallbackInFirstPosition(~uncurried, args, cmtTbl)
+
+ Doc.concat(list{printAttributes(attrs), callExprDoc, argsDoc})
+ } else if ParsetreeViewer.requiresSpecialCallbackPrintingLastArg(args) {
+ let argsDoc = printArgumentsWithCallbackInLastPosition(~uncurried, args, cmtTbl)
+
+ Doc.concat(list{printAttributes(attrs), callExprDoc, argsDoc})
+ } else {
+ let argsDoc = printArguments(~uncurried, args, cmtTbl)
+ Doc.concat(list{printAttributes(attrs), callExprDoc, argsDoc})
+ }
+ | _ => assert false
+ }
+
+ and printJsxExpression = (lident, args, cmtTbl) => {
+ let name = printJsxName(lident)
+ let (formattedProps, children) = printJsxProps(args, cmtTbl)
+ /* self-closing tag, e.g. <div className="test" /> */
+ let isSelfClosing = switch children {
+ | list{} => true
+ | _ => false
+ }
+ Doc.group(
+ Doc.concat(list{
+ Doc.group(
+ Doc.concat(list{
+ printComments(Doc.concat(list{Doc.lessThan, name}), cmtTbl, lident.Asttypes.loc),
+ formattedProps,
+ if isSelfClosing {
+ Doc.concat(list{Doc.line, Doc.text("/>")})
+ } else {
+ Doc.nil
+ },
+ }),
+ ),
+ if isSelfClosing {
+ Doc.nil
+ } else {
+ Doc.concat(list{
+ Doc.greaterThan,
+ Doc.indent(Doc.concat(list{Doc.line, printJsxChildren(children, cmtTbl)})),
+ Doc.line,
+ Doc.text("</"),
+ name,
+ Doc.greaterThan,
+ })
+ },
+ }),
+ )
+ }
+
+ and printJsxFragment = (expr, cmtTbl) => {
+ let opening = Doc.text("<>")
+ let closing = Doc.text("</>")
+ let (children, _) = ParsetreeViewer.collectListExpressions(expr)
+ Doc.group(
+ Doc.concat(list{
+ opening,
+ switch children {
+ | list{} => Doc.nil
+ | children => Doc.indent(Doc.concat(list{Doc.line, printJsxChildren(children, cmtTbl)}))
+ },
+ Doc.line,
+ closing,
+ }),
+ )
+ }
+
+ and printJsxChildren = (children: list<Parsetree.expression>, cmtTbl) =>
+ Doc.group(Doc.join(~sep=Doc.line, List.map(expr => {
+ let exprDoc = printExpressionWithComments(expr, cmtTbl)
+ switch Parens.jsxChildExpr(expr) {
+ | Parenthesized | Braced(_) =>
+ /* {(20: int)} make sure that we also protect the expression inside */
+ addBraces(
+ if Parens.bracedExpr(expr) {
+ addParens(exprDoc)
+ } else {
+ exprDoc
+ },
+ )
+ | Nothing => exprDoc
+ }
+ }, children)))
+
+ and printJsxProps = (args, cmtTbl) => {
+ let rec loop = (props, args) =>
+ switch args {
+ | list{} => (Doc.nil, list{})
+ | list{
+ (Asttypes.Labelled("children"), children),
+ (
+ Asttypes.Nolabel,
+ {Parsetree.pexp_desc: Pexp_construct({txt: Longident.Lident("()")}, None)},
+ ),
+ } =>
+ let formattedProps = Doc.indent(
+ switch props {
+ | list{} => Doc.nil
+ | props =>
+ Doc.concat(list{Doc.line, Doc.group(Doc.join(~sep=Doc.line, props |> List.rev))})
+ },
+ )
+ let (children, _) = ParsetreeViewer.collectListExpressions(children)
+ (formattedProps, children)
+ | list{arg, ...args} =>
+ let propDoc = printJsxProp(arg, cmtTbl)
+ loop(list{propDoc, ...props}, args)
+ }
+
+ loop(list{}, args)
+ }
+
+ and printJsxProp = (arg, cmtTbl) =>
+ switch arg {
+ | (
+ (Asttypes.Labelled(lblTxt) | Optional(lblTxt)) as lbl,
+ {
+ Parsetree.pexp_attributes: list{({Location.txt: "ns.namedArgLoc", loc: argLoc}, _)},
+ pexp_desc: Pexp_ident({txt: Longident.Lident(ident)}),
+ },
+ ) when lblTxt == ident /* jsx punning */ =>
+ switch lbl {
+ | Nolabel => Doc.nil
+ | Labelled(_lbl) => printComments(printIdentLike(ident), cmtTbl, argLoc)
+ | Optional(_lbl) =>
+ let doc = Doc.concat(list{Doc.question, printIdentLike(ident)})
+ printComments(doc, cmtTbl, argLoc)
+ }
+ | (
+ (Asttypes.Labelled(lblTxt) | Optional(lblTxt)) as lbl,
+ {Parsetree.pexp_attributes: list{}, pexp_desc: Pexp_ident({txt: Longident.Lident(ident)})},
+ ) when lblTxt == ident /* jsx punning when printing from Reason */ =>
+ switch lbl {
+ | Nolabel => Doc.nil
+ | Labelled(_lbl) => printIdentLike(ident)
+ | Optional(_lbl) => Doc.concat(list{Doc.question, printIdentLike(ident)})
+ }
+ | (lbl, expr) =>
+ let (argLoc, expr) = switch expr.pexp_attributes {
+ | list{({Location.txt: "ns.namedArgLoc", loc}, _), ...attrs} => (
+ loc,
+ {...expr, pexp_attributes: attrs},
+ )
+ | _ => (Location.none, expr)
+ }
+
+ let lblDoc = switch lbl {
+ | Asttypes.Labelled(lbl) =>
+ let lbl = printComments(printIdentLike(lbl), cmtTbl, argLoc)
+ Doc.concat(list{lbl, Doc.equal})
+ | Asttypes.Optional(lbl) =>
+ let lbl = printComments(printIdentLike(lbl), cmtTbl, argLoc)
+ Doc.concat(list{lbl, Doc.equal, Doc.question})
+ | Nolabel => Doc.nil
+ }
+
+ let exprDoc = {
+ let doc = printExpression(expr, cmtTbl)
+ switch Parens.jsxPropExpr(expr) {
+ | Parenthesized | Braced(_) =>
+ /* {(20: int)} make sure that we also protect the expression inside */
+ addBraces(
+ if Parens.bracedExpr(expr) {
+ addParens(doc)
+ } else {
+ doc
+ },
+ )
+ | _ => doc
+ }
+ }
+
+ let fullLoc = {...argLoc, loc_end: expr.pexp_loc.loc_end}
+ printComments(Doc.concat(list{lblDoc, exprDoc}), cmtTbl, fullLoc)
+ }
+
+ /* div -> div.
+ * Navbar.createElement -> Navbar
+ * Staff.Users.createElement -> Staff.Users */
+ and printJsxName = ({txt: lident}) => {
+ let rec flatten = (acc, lident) =>
+ switch lident {
+ | Longident.Lident(txt) => list{txt, ...acc}
+ | Ldot(lident, txt) =>
+ let acc = if txt == "createElement" {
+ acc
+ } else {
+ list{txt, ...acc}
+ }
+ flatten(acc, lident)
+ | _ => acc
+ }
+
+ switch lident {
+ | Longident.Lident(txt) => Doc.text(txt)
+ | _ as lident =>
+ let segments = flatten(list{}, lident)
+ Doc.join(~sep=Doc.dot, List.map(Doc.text, segments))
+ }
+ }
+
+ and printArgumentsWithCallbackInFirstPosition = (~uncurried, args, cmtTbl) => {
+ let (callback, printedArgs) = switch args {
+ | list{(lbl, expr), ...args} =>
+ let lblDoc = switch lbl {
+ | Asttypes.Nolabel => Doc.nil
+ | Asttypes.Labelled(txt) => Doc.concat(list{Doc.tilde, printIdentLike(txt), Doc.equal})
+ | Asttypes.Optional(txt) =>
+ Doc.concat(list{Doc.tilde, printIdentLike(txt), Doc.equal, Doc.question})
+ }
+
+ let callback = Doc.concat(list{lblDoc, printPexpFun(~inCallback=true, expr, cmtTbl)})
+ let printedArgs =
+ List.map(arg => printArgument(arg, cmtTbl), args) |> Doc.join(
+ ~sep=Doc.concat(list{Doc.comma, Doc.line}),
+ )
+
+ (callback, printedArgs)
+ | _ => assert false
+ }
+
+ /* Thing.map((arg1, arg2) => MyModuleBlah.toList(argument), foo) */
+ /* Thing.map((arg1, arg2) => {
+ * MyModuleBlah.toList(argument)
+ * }, longArgumet, veryLooooongArgument)
+ */
+ let fitsOnOneLine = Doc.concat(list{
+ if uncurried {
+ Doc.text("(. ")
+ } else {
+ Doc.lparen
+ },
+ callback,
+ Doc.comma,
+ Doc.line,
+ printedArgs,
+ Doc.rparen,
+ })
+
+ /* Thing.map(
+ * (param1, parm2) => doStuff(param1, parm2),
+ * arg1,
+ * arg2,
+ * arg3,
+ * )
+ */
+ let breakAllArgs = printArguments(~uncurried, args, cmtTbl)
+ Doc.customLayout(list{fitsOnOneLine, breakAllArgs})
+ }
+
+ and printArgumentsWithCallbackInLastPosition = (~uncurried, args, cmtTbl) => {
+ let rec loop = (acc, args) =>
+ switch args {
+ | list{} => (Doc.nil, Doc.nil)
+ | list{(lbl, expr)} =>
+ let lblDoc = switch lbl {
+ | Asttypes.Nolabel => Doc.nil
+ | Asttypes.Labelled(txt) => Doc.concat(list{Doc.tilde, printIdentLike(txt), Doc.equal})
+ | Asttypes.Optional(txt) =>
+ Doc.concat(list{Doc.tilde, printIdentLike(txt), Doc.equal, Doc.question})
+ }
+
+ let callback = printPexpFun(~inCallback=true, expr, cmtTbl)
+ (Doc.concat(List.rev(acc)), Doc.concat(list{lblDoc, callback}))
+ | list{arg, ...args} =>
+ let argDoc = printArgument(arg, cmtTbl)
+ loop(list{Doc.line, Doc.comma, argDoc, ...acc}, args)
+ }
+
+ let (printedArgs, callback) = loop(list{}, args)
+
+ /* Thing.map(foo, (arg1, arg2) => MyModuleBlah.toList(argument)) */
+ let fitsOnOneLine = Doc.concat(list{
+ if uncurried {
+ Doc.text("(.")
+ } else {
+ Doc.lparen
+ },
+ printedArgs,
+ callback,
+ Doc.rparen,
+ })
+
+ /* Thing.map(longArgumet, veryLooooongArgument, (arg1, arg2) =>
+ * MyModuleBlah.toList(argument)
+ * )
+ */
+ let arugmentsFitOnOneLine = Doc.concat(list{
+ if uncurried {
+ Doc.text("(.")
+ } else {
+ Doc.lparen
+ },
+ Doc.softLine,
+ printedArgs,
+ Doc.breakableGroup(~forceBreak=true, callback),
+ Doc.softLine,
+ Doc.rparen,
+ })
+
+ /* Thing.map(
+ * arg1,
+ * arg2,
+ * arg3,
+ * (param1, param2) => doStuff(param1, param2)
+ * )
+ */
+ let breakAllArgs = printArguments(~uncurried, args, cmtTbl)
+ Doc.customLayout(list{fitsOnOneLine, arugmentsFitOnOneLine, breakAllArgs})
+ }
+
+ and printArguments = (
+ ~uncurried,
+ args: list<(Asttypes.arg_label, Parsetree.expression)>,
+ cmtTbl,
+ ) =>
+ switch args {
+ | list{(Nolabel, {pexp_desc: Pexp_construct({txt: Longident.Lident("()")}, _)})} =>
+ if uncurried {
+ Doc.text("(.)")
+ } else {
+ Doc.text("()")
+ }
+ | list{(Nolabel, arg)} when ParsetreeViewer.isHuggableExpression(arg) =>
+ let argDoc = {
+ let doc = printExpressionWithComments(arg, cmtTbl)
+ switch Parens.expr(arg) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, arg, braces)
+ | Nothing => doc
+ }
+ }
+
+ Doc.concat(list{
+ if uncurried {
+ Doc.text("(.")
+ } else {
+ Doc.lparen
+ },
+ argDoc,
+ Doc.rparen,
+ })
+ | args =>
+ Doc.group(
+ Doc.concat(list{
+ if uncurried {
+ Doc.text("(.")
+ } else {
+ Doc.lparen
+ },
+ Doc.indent(
+ Doc.concat(list{
+ if uncurried {
+ Doc.line
+ } else {
+ Doc.softLine
+ },
+ Doc.join(
+ ~sep=Doc.concat(list{Doc.comma, Doc.line}),
+ List.map(arg => printArgument(arg, cmtTbl), args),
+ ),
+ }),
+ ),
+ Doc.trailingComma,
+ Doc.softLine,
+ Doc.rparen,
+ }),
+ )
+ }
+
+ /*
+ * argument ::=
+ * | _ (* syntax sugar *)
+ * | expr
+ * | expr : type
+ * | ~ label-name
+ * | ~ label-name
+ * | ~ label-name ?
+ * | ~ label-name = expr
+ * | ~ label-name = _ (* syntax sugar *)
+ * | ~ label-name = expr : type
+ * | ~ label-name = ? expr
+ * | ~ label-name = ? _ (* syntax sugar *)
+ * | ~ label-name = ? expr : type */
+ and printArgument = ((argLbl, arg), cmtTbl) =>
+ switch (argLbl, arg) {
+ /* ~a (punned) */
+ | (
+ Asttypes.Labelled(lbl),
+ {
+ pexp_desc: Pexp_ident({txt: Longident.Lident(name)}),
+ pexp_attributes: list{} | list{({Location.txt: "ns.namedArgLoc"}, _)},
+ } as argExpr,
+ ) when lbl == name && !ParsetreeViewer.isBracedExpr(argExpr) =>
+ let loc = switch arg.pexp_attributes {
+ | list{({Location.txt: "ns.namedArgLoc", loc}, _), ..._} => loc
+ | _ => arg.pexp_loc
+ }
+
+ let doc = Doc.concat(list{Doc.tilde, printIdentLike(lbl)})
+ printComments(doc, cmtTbl, loc)
+
+ /* ~a: int (punned) */
+ | (
+ Asttypes.Labelled(lbl),
+ {
+ pexp_desc:
+ Pexp_constraint({pexp_desc: Pexp_ident({txt: Longident.Lident(name)})} as argExpr, typ),
+ pexp_loc,
+ pexp_attributes: (list{} | list{({Location.txt: "ns.namedArgLoc"}, _)}) as attrs,
+ },
+ ) when lbl == name && !ParsetreeViewer.isBracedExpr(argExpr) =>
+ let loc = switch attrs {
+ | list{({Location.txt: "ns.namedArgLoc", loc}, _), ..._} => {
+ ...loc,
+ loc_end: pexp_loc.loc_end,
+ }
+ | _ => arg.pexp_loc
+ }
+
+ let doc = Doc.concat(list{
+ Doc.tilde,
+ printIdentLike(lbl),
+ Doc.text(": "),
+ printTypExpr(typ, cmtTbl),
+ })
+ printComments(doc, cmtTbl, loc)
+ /* ~a? (optional lbl punned) */
+ | (
+ Asttypes.Optional(lbl),
+ {
+ pexp_desc: Pexp_ident({txt: Longident.Lident(name)}),
+ pexp_attributes: list{} | list{({Location.txt: "ns.namedArgLoc"}, _)},
+ },
+ ) when lbl == name =>
+ let loc = switch arg.pexp_attributes {
+ | list{({Location.txt: "ns.namedArgLoc", loc}, _), ..._} => loc
+ | _ => arg.pexp_loc
+ }
+
+ let doc = Doc.concat(list{Doc.tilde, printIdentLike(lbl), Doc.question})
+ printComments(doc, cmtTbl, loc)
+ | (_lbl, expr) =>
+ let (argLoc, expr) = switch expr.pexp_attributes {
+ | list{({Location.txt: "ns.namedArgLoc", loc}, _), ...attrs} => (
+ loc,
+ {...expr, pexp_attributes: attrs},
+ )
+ | _ => (expr.pexp_loc, expr)
+ }
+
+ let printedLbl = switch argLbl {
+ | Asttypes.Nolabel => Doc.nil
+ | Asttypes.Labelled(lbl) =>
+ let doc = Doc.concat(list{Doc.tilde, printIdentLike(lbl), Doc.equal})
+ printComments(doc, cmtTbl, argLoc)
+ | Asttypes.Optional(lbl) =>
+ let doc = Doc.concat(list{Doc.tilde, printIdentLike(lbl), Doc.equal, Doc.question})
+ printComments(doc, cmtTbl, argLoc)
+ }
+
+ let printedExpr = {
+ let doc = printExpressionWithComments(expr, cmtTbl)
+ switch Parens.expr(expr) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, expr, braces)
+ | Nothing => doc
+ }
+ }
+
+ let loc = {...argLoc, loc_end: expr.pexp_loc.loc_end}
+ let doc = Doc.concat(list{printedLbl, printedExpr})
+ printComments(doc, cmtTbl, loc)
+ }
+
+ and printCases = (cases: list, cmtTbl) =>
+ Doc.breakableGroup(
+ ~forceBreak=true,
+ Doc.concat(list{Doc.lbrace, Doc.concat(list{Doc.line, printList(~getLoc=n => {
+ ...n.Parsetree.pc_lhs.ppat_loc,
+ loc_end: switch ParsetreeViewer.processBracesAttr(n.Parsetree.pc_rhs) {
+ | (None, _) => n.pc_rhs.pexp_loc.loc_end
+ | (Some({loc}, _), _) => loc.Location.loc_end
+ },
+ }, ~print=printCase, ~nodes=cases, cmtTbl)}), Doc.line, Doc.rbrace}),
+ )
+
+ and printCase = (case: Parsetree.case, cmtTbl) => {
+ let rhs = switch case.pc_rhs.pexp_desc {
+ | Pexp_let(_) | Pexp_letmodule(_) | Pexp_letexception(_) | Pexp_open(_) | Pexp_sequence(_) =>
+ printExpressionBlock(~braces=ParsetreeViewer.isBracedExpr(case.pc_rhs), case.pc_rhs, cmtTbl)
+ | _ =>
+ let doc = printExpressionWithComments(case.pc_rhs, cmtTbl)
+ switch Parens.expr(case.pc_rhs) {
+ | Parenthesized => addParens(doc)
+ | _ => doc
+ }
+ }
+
+ let guard = switch case.pc_guard {
+ | None => Doc.nil
+ | Some(expr) =>
+ Doc.group(
+ Doc.concat(list{Doc.line, Doc.text("when "), printExpressionWithComments(expr, cmtTbl)}),
+ )
+ }
+
+ let shouldInlineRhs = switch case.pc_rhs.pexp_desc {
+ | Pexp_construct({txt: Longident.Lident("()" | "true" | "false")}, _)
+ | Pexp_constant(_)
+ | Pexp_ident(_) => true
+ | _ when ParsetreeViewer.isHuggableRhs(case.pc_rhs) => true
+ | _ => false
+ }
+
+ let shouldIndentPattern = switch case.pc_lhs.ppat_desc {
+ | Ppat_or(_) => false
+ | _ => true
+ }
+
+ let patternDoc = {
+ let doc = printPattern(case.pc_lhs, cmtTbl)
+ switch case.pc_lhs.ppat_desc {
+ | Ppat_constraint(_) => addParens(doc)
+ | _ => doc
+ }
+ }
+
+ let content = Doc.concat(list{
+ if shouldIndentPattern {
+ Doc.indent(patternDoc)
+ } else {
+ patternDoc
+ },
+ Doc.indent(guard),
+ Doc.text(" =>"),
+ Doc.indent(
+ Doc.concat(list{
+ if shouldInlineRhs {
+ Doc.space
+ } else {
+ Doc.line
+ },
+ rhs,
+ }),
+ ),
+ })
+ Doc.group(Doc.concat(list{Doc.text("| "), content}))
+ }
+
+ and printExprFunParameters = (~inCallback, ~uncurried, ~hasConstraint, parameters, cmtTbl) =>
+ switch parameters {
+ /* let f = _ => () */
+ | list{ParsetreeViewer.Parameter({
+ attrs: list{},
+ lbl: Asttypes.Nolabel,
+ defaultExpr: None,
+ pat: {Parsetree.ppat_desc: Ppat_any},
+ })} when !uncurried =>
+ if hasConstraint {
+ Doc.text("(_)")
+ } else {
+ Doc.text("_")
+ }
+ /* let f = a => () */
+ | list{ParsetreeViewer.Parameter({
+ attrs: list{},
+ lbl: Asttypes.Nolabel,
+ defaultExpr: None,
+ pat: {Parsetree.ppat_desc: Ppat_var(stringLoc)},
+ })} when !uncurried =>
+ let txtDoc = {
+ let var = printIdentLike(stringLoc.txt)
+ if hasConstraint {
+ addParens(var)
+ } else {
+ var
+ }
+ }
+
+ printComments(txtDoc, cmtTbl, stringLoc.loc)
+ /* let f = () => () */
+ | list{ParsetreeViewer.Parameter({
+ attrs: list{},
+ lbl: Asttypes.Nolabel,
+ defaultExpr: None,
+ pat: {ppat_desc: Ppat_construct({txt: Longident.Lident("()")}, None)},
+ })} when !uncurried =>
+ Doc.text("()")
+ /* let f = (~greeting, ~from as hometown, ~x=?) => () */
+ | parameters =>
+ let lparen = if uncurried {
+ Doc.text("(. ")
+ } else {
+ Doc.lparen
+ }
+ let shouldHug = ParsetreeViewer.parametersShouldHug(parameters)
+ let printedParamaters = Doc.concat(list{
+ if shouldHug || inCallback {
+ Doc.nil
+ } else {
+ Doc.softLine
+ },
+ Doc.join(
+ ~sep=Doc.concat(list{
+ Doc.comma,
+ if inCallback {
+ Doc.space
+ } else {
+ Doc.line
+ },
+ }),
+ List.map(p => printExpFunParameter(p, cmtTbl), parameters),
+ ),
+ })
+ Doc.group(
+ Doc.concat(list{
+ lparen,
+ if shouldHug || inCallback {
+ printedParamaters
+ } else {
+ Doc.indent(printedParamaters)
+ },
+ if shouldHug || inCallback {
+ Doc.nil
+ } else {
+ Doc.concat(list{Doc.trailingComma, Doc.softLine})
+ },
+ Doc.rparen,
+ }),
+ )
+ }
+
+ and printExpFunParameter = (parameter, cmtTbl) =>
+ switch parameter {
+ | ParsetreeViewer.NewTypes({attrs, locs: lbls}) =>
+ Doc.group(
+ Doc.concat(list{
+ printAttributes(attrs),
+ Doc.text("type "),
+ Doc.join(
+ ~sep=Doc.space,
+ List.map(
+ lbl => printComments(printIdentLike(lbl.Asttypes.txt), cmtTbl, lbl.Asttypes.loc),
+ lbls,
+ ),
+ ),
+ }),
+ )
+ | Parameter({attrs, lbl, defaultExpr, pat: pattern}) =>
+ let (isUncurried, attrs) = ParsetreeViewer.processUncurriedAttribute(attrs)
+ let uncurried = if isUncurried {
+ Doc.concat(list{Doc.dot, Doc.space})
+ } else {
+ Doc.nil
+ }
+ let attrs = printAttributes(attrs)
+ /* =defaultValue */
+ let defaultExprDoc = switch defaultExpr {
+ | Some(expr) => Doc.concat(list{Doc.text("="), printExpressionWithComments(expr, cmtTbl)})
+ | None => Doc.nil
+ }
+
+ /* ~from as hometown
+ * ~from -> punning */
+ let labelWithPattern = switch (lbl, pattern) {
+ | (Asttypes.Nolabel, pattern) => printPattern(pattern, cmtTbl)
+ | (
+ Asttypes.Labelled(lbl) | Optional(lbl),
+ {
+ ppat_desc: Ppat_var(stringLoc),
+ ppat_attributes: list{} | list{({Location.txt: "ns.namedArgLoc"}, _)},
+ },
+ ) when lbl == stringLoc.txt =>
+ /* ~d */
+ Doc.concat(list{Doc.text("~"), printIdentLike(lbl)})
+ | (
+ Asttypes.Labelled(lbl) | Optional(lbl),
+ {
+ ppat_desc: Ppat_constraint({ppat_desc: Ppat_var({txt})}, typ),
+ ppat_attributes: list{} | list{({Location.txt: "ns.namedArgLoc"}, _)},
+ },
+ ) when lbl == txt =>
+ /* ~d: e */
+ Doc.concat(list{
+ Doc.text("~"),
+ printIdentLike(lbl),
+ Doc.text(": "),
+ printTypExpr(typ, cmtTbl),
+ })
+ | (Asttypes.Labelled(lbl) | Optional(lbl), pattern) =>
+ /* ~b as c */
+ Doc.concat(list{
+ Doc.text("~"),
+ printIdentLike(lbl),
+ Doc.text(" as "),
+ printPattern(pattern, cmtTbl),
+ })
+ }
+
+ let optionalLabelSuffix = switch (lbl, defaultExpr) {
+ | (Asttypes.Optional(_), None) => Doc.text("=?")
+ | _ => Doc.nil
+ }
+
+ let doc = Doc.group(
+ Doc.concat(list{uncurried, attrs, labelWithPattern, defaultExprDoc, optionalLabelSuffix}),
+ )
+ let cmtLoc = switch defaultExpr {
+ | None =>
+ switch pattern.ppat_attributes {
+ | list{({Location.txt: "ns.namedArgLoc", loc}, _), ..._} => {
+ ...loc,
+ loc_end: pattern.ppat_loc.loc_end,
+ }
+ | _ => pattern.ppat_loc
+ }
+ | Some(expr) =>
+ let startPos = switch pattern.ppat_attributes {
+ | list{({Location.txt: "ns.namedArgLoc", loc}, _), ..._} => loc.loc_start
+ | _ => pattern.ppat_loc.loc_start
+ }
+ {
+ ...pattern.ppat_loc,
+ loc_start: startPos,
+ loc_end: expr.pexp_loc.loc_end,
+ }
+ }
+
+ printComments(doc, cmtTbl, cmtLoc)
+ }
+
+ and printExpressionBlock = (~braces, expr, cmtTbl) => {
+ let rec collectRows = (acc, expr) =>
+ switch expr.Parsetree.pexp_desc {
+ | Parsetree.Pexp_letmodule(modName, modExpr, expr2) =>
+ let name = {
+ let doc = Doc.text(modName.txt)
+ printComments(doc, cmtTbl, modName.loc)
+ }
+
+ let letModuleDoc = Doc.concat(list{
+ Doc.text("module "),
+ name,
+ Doc.text(" = "),
+ printModExpr(modExpr, cmtTbl),
+ })
+ let loc = {...expr.pexp_loc, loc_end: modExpr.pmod_loc.loc_end}
+ collectRows(list{(loc, letModuleDoc), ...acc}, expr2)
+ | Pexp_letexception(extensionConstructor, expr2) =>
+ let loc = {
+ let loc = {...expr.pexp_loc, loc_end: extensionConstructor.pext_loc.loc_end}
+ switch getFirstLeadingComment(cmtTbl, loc) {
+ | None => loc
+ | Some(comment) =>
+ let cmtLoc = Comment.loc(comment)
+ {...cmtLoc, loc_end: loc.loc_end}
+ }
+ }
+
+ let letExceptionDoc = printExceptionDef(extensionConstructor, cmtTbl)
+ collectRows(list{(loc, letExceptionDoc), ...acc}, expr2)
+ | Pexp_open(overrideFlag, longidentLoc, expr2) =>
+ let openDoc = Doc.concat(list{
+ Doc.text("open"),
+ printOverrideFlag(overrideFlag),
+ Doc.space,
+ printLongidentLocation(longidentLoc, cmtTbl),
+ })
+ let loc = {...expr.pexp_loc, loc_end: longidentLoc.loc.loc_end}
+ collectRows(list{(loc, openDoc), ...acc}, expr2)
+ | Pexp_sequence(expr1, expr2) =>
+ let exprDoc = {
+ let doc = printExpression(expr1, cmtTbl)
+ switch Parens.expr(expr1) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, expr1, braces)
+ | Nothing => doc
+ }
+ }
+
+ let loc = expr1.pexp_loc
+ collectRows(list{(loc, exprDoc), ...acc}, expr2)
+ | Pexp_let(recFlag, valueBindings, expr2) =>
+ let loc = {
+ let loc = switch (valueBindings, List.rev(valueBindings)) {
+ | (list{vb, ..._}, list{lastVb, ..._}) => {...vb.pvb_loc, loc_end: lastVb.pvb_loc.loc_end}
+ | _ => Location.none
+ }
+
+ switch getFirstLeadingComment(cmtTbl, loc) {
+ | None => loc
+ | Some(comment) =>
+ let cmtLoc = Comment.loc(comment)
+ {...cmtLoc, loc_end: loc.loc_end}
+ }
+ }
+
+ let recFlag = switch recFlag {
+ | Asttypes.Nonrecursive => Doc.nil
+ | Asttypes.Recursive => Doc.text("rec ")
+ }
+
+ let letDoc = printValueBindings(~recFlag, valueBindings, cmtTbl)
+ /* let () = {
+ * let () = foo()
+ * ()
+ * }
+ * We don't need to print the () on the last line of the block
+ */
+ switch expr2.pexp_desc {
+ | Pexp_construct({txt: Longident.Lident("()")}, _) => List.rev(list{(loc, letDoc), ...acc})
+ | _ => collectRows(list{(loc, letDoc), ...acc}, expr2)
+ }
+ | _ =>
+ let exprDoc = {
+ let doc = printExpression(expr, cmtTbl)
+ switch Parens.expr(expr) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, expr, braces)
+ | Nothing => doc
+ }
+ }
+
+ List.rev(list{(expr.pexp_loc, exprDoc), ...acc})
+ }
+
+ let rows = collectRows(list{}, expr)
+ let block = printList(
+ ~getLoc=fst,
+ ~nodes=rows,
+ ~print=((_, doc), _) => doc,
+ ~forceBreak=true,
+ cmtTbl,
+ )
+
+ Doc.breakableGroup(
+ ~forceBreak=true,
+ if braces {
+ Doc.concat(list{
+ Doc.lbrace,
+ Doc.indent(Doc.concat(list{Doc.line, block})),
+ Doc.line,
+ Doc.rbrace,
+ })
+ } else {
+ block
+ },
+ )
+ }
+
+ /*
+ * // user types:
+ * let f = (a, b) => { a + b }
+ *
+ * // printer: everything is on one line
+ * let f = (a, b) => { a + b }
+ *
+ * // user types: over multiple lines
+ * let f = (a, b) => {
+ * a + b
+ * }
+ *
+ * // printer: over multiple lines
+ * let f = (a, b) => {
+ * a + b
+ * }
+ */
+ and printBraces = (doc, expr, bracesLoc) => {
+ let overMultipleLines = {
+ open Location
+ bracesLoc.loc_end.pos_lnum > bracesLoc.loc_start.pos_lnum
+ }
+
+ switch expr.Parsetree.pexp_desc {
+ | Pexp_letmodule(_)
+ | Pexp_letexception(_)
+ | Pexp_let(_)
+ | Pexp_open(_)
+ | Pexp_sequence(_) => /* already has braces */
+ doc
+ | _ =>
+ Doc.breakableGroup(
+ ~forceBreak=overMultipleLines,
+ Doc.concat(list{
+ Doc.lbrace,
+ Doc.indent(
+ Doc.concat(list{
+ Doc.softLine,
+ if Parens.bracedExpr(expr) {
+ addParens(doc)
+ } else {
+ doc
+ },
+ }),
+ ),
+ Doc.softLine,
+ Doc.rbrace,
+ }),
+ )
+ }
+ }
+
+ and printOverrideFlag = overrideFlag =>
+ switch overrideFlag {
+ | Asttypes.Override => Doc.text("!")
+ | Fresh => Doc.nil
+ }
+
+ and printDirectionFlag = flag =>
+ switch flag {
+ | Asttypes.Downto => Doc.text(" downto ")
+ | Asttypes.Upto => Doc.text(" to ")
+ }
+
+ and printRecordRow = ((lbl, expr), cmtTbl) => {
+ let cmtLoc = {...lbl.loc, loc_end: expr.pexp_loc.loc_end}
+ let doc = Doc.group(
+ Doc.concat(list{
+ printLidentPath(lbl, cmtTbl),
+ Doc.text(": "),
+ {
+ let doc = printExpressionWithComments(expr, cmtTbl)
+ switch Parens.expr(expr) {
+ | Parens.Parenthesized => addParens(doc)
+ | Braced(braces) => printBraces(doc, expr, braces)
+ | Nothing => doc
+ }
+ },
+ }),
+ )
+ printComments(doc, cmtTbl, cmtLoc)
+ }
+
+ and printBsObjectRow = ((lbl, expr), cmtTbl) => {
+ let cmtLoc = {...lbl.loc, loc_end: expr.pexp_loc.loc_end}
+ let lblDoc = {
+ let doc = Doc.concat(list{Doc.text("\""), printLongident(lbl.txt), Doc.text("\"")})
+ printComments(doc, cmtTbl, lbl.loc)
+ }
+
+ let doc = Doc.concat(list{lblDoc, Doc.text(": "), printExpressionWithComments(expr, cmtTbl)})
+ printComments(doc, cmtTbl, cmtLoc)
+ }
+
+ /* The optional loc indicates whether we need to print the attributes in
+ * relation to some location. In practice this means the following:
+ * `@attr type t = string` -> on the same line, print on the same line
+ * `@attr
+ * type t = string` -> attr is on prev line, print the attributes
+ * with a line break between, we respect the users' original layout */
+ and printAttributes = (~loc=?, attrs: Parsetree.attributes) =>
+ switch ParsetreeViewer.filterParsingAttrs(attrs) {
+ | list{} => Doc.nil
+ | attrs =>
+ let lineBreak = switch loc {
+ | None => Doc.line
+ | Some(loc) =>
+ switch List.rev(attrs) {
+ | list{({loc: firstLoc}, _), ..._}
+ when loc.loc_start.pos_lnum > firstLoc.loc_end.pos_lnum => Doc.hardLine
+ | _ => Doc.line
+ }
+ }
+
+ Doc.concat(list{
+ Doc.group(Doc.join(~sep=Doc.line, List.map(printAttribute, attrs))),
+ lineBreak,
+ })
+ }
+
+ and printAttribute = ((id, payload): Parsetree.attribute) => {
+ let attrName = Doc.concat(list{Doc.text("@"), Doc.text(id.txt)})
+ switch payload {
+ | PStr(list{{pstr_desc: Pstr_eval(expr, attrs)}}) =>
+ let exprDoc = printExpression(expr, CommentTable.empty)
+ let needsParens = switch attrs {
+ | list{} => false
+ | _ => true
+ }
+ Doc.group(
+ Doc.concat(list{
+ attrName,
+ addParens(
+ Doc.concat(list{
+ printAttributes(attrs),
+ if needsParens {
+ addParens(exprDoc)
+ } else {
+ exprDoc
+ },
+ }),
+ ),
+ }),
+ )
+ | PTyp(typ) =>
+ Doc.group(
+ Doc.concat(list{
+ attrName,
+ Doc.lparen,
+ Doc.indent(
+ Doc.concat(list{Doc.softLine, Doc.text(": "), printTypExpr(typ, CommentTable.empty)}),
+ ),
+ Doc.softLine,
+ Doc.rparen,
+ }),
+ )
+ | _ => attrName
+ }
+ }
+
+ and printAttributeWithComments = ((id, payload): Parsetree.attribute, cmtTbl) => {
+ let attrName = Doc.text("@" ++ id.txt)
+ switch payload {
+ | PStr(list{{pstr_desc: Pstr_eval(expr, attrs)}}) =>
+ let exprDoc = printExpressionWithComments(expr, cmtTbl)
+ let needsParens = switch attrs {
+ | list{} => false
+ | _ => true
+ }
+ Doc.group(
+ Doc.concat(list{
+ attrName,
+ addParens(
+ Doc.concat(list{
+ printAttributes(attrs),
+ if needsParens {
+ addParens(exprDoc)
+ } else {
+ exprDoc
+ },
+ }),
+ ),
+ }),
+ )
+ | _ => attrName
+ }
+ }
+
+ and printModExpr = (modExpr, cmtTbl) => {
+ let doc = switch modExpr.pmod_desc {
+ | Pmod_ident(longidentLoc) => printLongidentLocation(longidentLoc, cmtTbl)
+ | Pmod_structure(structure) =>
+ Doc.breakableGroup(
+ ~forceBreak=true,
+ Doc.concat(list{
+ Doc.lbrace,
+ Doc.indent(Doc.concat(list{Doc.softLine, printStructure(structure, cmtTbl)})),
+ Doc.softLine,
+ Doc.rbrace,
+ }),
+ )
+ | Pmod_unpack(expr) =>
+ let shouldHug = switch expr.pexp_desc {
+ | Pexp_let(_) => true
+ | Pexp_constraint({pexp_desc: Pexp_let(_)}, {ptyp_desc: Ptyp_package(_packageType)}) => true
+ | _ => false
+ }
+
+ let (expr, moduleConstraint) = switch expr.pexp_desc {
+ | Pexp_constraint(expr, {ptyp_desc: Ptyp_package(packageType), ptyp_loc}) =>
+ let packageDoc = {
+ let doc = printPackageType(~printModuleKeywordAndParens=false, packageType, cmtTbl)
+ printComments(doc, cmtTbl, ptyp_loc)
+ }
+
+ let typeDoc = Doc.group(
+ Doc.concat(list{Doc.text(":"), Doc.indent(Doc.concat(list{Doc.line, packageDoc}))}),
+ )
+ (expr, typeDoc)
+ | _ => (expr, Doc.nil)
+ }
+
+ let unpackDoc = Doc.group(
+ Doc.concat(list{printExpressionWithComments(expr, cmtTbl), moduleConstraint}),
+ )
+ Doc.group(
+ Doc.concat(list{
+ Doc.text("unpack("),
+ if shouldHug {
+ unpackDoc
+ } else {
+ Doc.concat(list{Doc.indent(Doc.concat(list{Doc.softLine, unpackDoc})), Doc.softLine})
+ },
+ Doc.rparen,
+ }),
+ )
+ | Pmod_extension(extension) => printExtensionWithComments(~atModuleLvl=false, extension, cmtTbl)
+ | Pmod_apply(_) =>
+ let (args, callExpr) = ParsetreeViewer.modExprApply(modExpr)
+ let isUnitSugar = switch args {
+ | list{{pmod_desc: Pmod_structure(list{})}} => true
+ | _ => false
+ }
+
+ let shouldHug = switch args {
+ | list{{pmod_desc: Pmod_structure(_)}} => true
+ | _ => false
+ }
+
+ Doc.group(
+ Doc.concat(list{
+ printModExpr(callExpr, cmtTbl),
+ if isUnitSugar {
+ printModApplyArg(@doesNotRaise List.hd(args), cmtTbl)
+ } else {
+ Doc.concat(list{
+ Doc.lparen,
+ if shouldHug {
+ printModApplyArg(@doesNotRaise List.hd(args), cmtTbl)
+ } else {
+ Doc.indent(
+ Doc.concat(list{
+ Doc.softLine,
+ Doc.join(
+ ~sep=Doc.concat(list{Doc.comma, Doc.line}),
+ List.map(modArg => printModApplyArg(modArg, cmtTbl), args),
+ ),
+ }),
+ )
+ },
+ if !shouldHug {
+ Doc.concat(list{Doc.trailingComma, Doc.softLine})
+ } else {
+ Doc.nil
+ },
+ Doc.rparen,
+ })
+ },
+ }),
+ )
+ | Pmod_constraint(modExpr, modType) =>
+ Doc.concat(list{printModExpr(modExpr, cmtTbl), Doc.text(": "), printModType(modType, cmtTbl)})
+ | Pmod_functor(_) => printModFunctor(modExpr, cmtTbl)
+ }
+
+ printComments(doc, cmtTbl, modExpr.pmod_loc)
+ }
+
+ and printModFunctor = (modExpr, cmtTbl) => {
+ let (parameters, returnModExpr) = ParsetreeViewer.modExprFunctor(modExpr)
+ /* let shouldInline = match returnModExpr.pmod_desc with */
+ /* | Pmod_structure _ | Pmod_ident _ -> true */
+ /* | Pmod_constraint ({pmod_desc = Pmod_structure _}, _) -> true */
+ /* | _ -> false */
+ /* in */
+ let (returnConstraint, returnModExpr) = switch returnModExpr.pmod_desc {
+ | Pmod_constraint(modExpr, modType) =>
+ let constraintDoc = {
+ let doc = printModType(modType, cmtTbl)
+ if Parens.modExprFunctorConstraint(modType) {
+ addParens(doc)
+ } else {
+ doc
+ }
+ }
+
+ let modConstraint = Doc.concat(list{Doc.text(": "), constraintDoc})
+ (modConstraint, printModExpr(modExpr, cmtTbl))
+ | _ => (Doc.nil, printModExpr(returnModExpr, cmtTbl))
+ }
+
+ let parametersDoc = switch parameters {
+ | list{(attrs, {txt: "*"}, None)} =>
+ let attrs = switch attrs {
+ | list{} => Doc.nil
+ | attrs =>
+ Doc.concat(list{Doc.join(~sep=Doc.line, List.map(printAttribute, attrs)), Doc.line})
+ }
+ Doc.group(Doc.concat(list{attrs, Doc.text("()")}))
+ | list{(list{}, {txt: lbl}, None)} => Doc.text(lbl)
+ | parameters =>
+ Doc.group(
+ Doc.concat(list{
+ Doc.lparen,
+ Doc.indent(
+ Doc.concat(list{
+ Doc.softLine,
+ Doc.join(
+ ~sep=Doc.concat(list{Doc.comma, Doc.line}),
+ List.map(param => printModFunctorParam(param, cmtTbl), parameters),
+ ),
+ }),
+ ),
+ Doc.trailingComma,
+ Doc.softLine,
+ Doc.rparen,
+ }),
+ )
+ }
+
+ Doc.group(Doc.concat(list{parametersDoc, returnConstraint, Doc.text(" => "), returnModExpr}))
+ }
+
+ and printModFunctorParam = ((attrs, lbl, optModType), cmtTbl) => {
+ let cmtLoc = switch optModType {
+ | None => lbl.Asttypes.loc
+ | Some(modType) => {
+ ...lbl.loc,
+ loc_end: modType.Parsetree.pmty_loc.loc_end,
+ }
+ }
+
+ let attrs = switch attrs {
+ | list{} => Doc.nil
+ | attrs => Doc.concat(list{Doc.join(~sep=Doc.line, List.map(printAttribute, attrs)), Doc.line})
+ }
+ let lblDoc = {
+ let doc = Doc.text(lbl.txt)
+ printComments(doc, cmtTbl, lbl.loc)
+ }
+
+ let doc = Doc.group(
+ Doc.concat(list{
+ attrs,
+ lblDoc,
+ switch optModType {
+ | None => Doc.nil
+ | Some(modType) => Doc.concat(list{Doc.text(": "), printModType(modType, cmtTbl)})
+ },
+ }),
+ )
+ printComments(doc, cmtTbl, cmtLoc)
+ }
+
+ and printModApplyArg = (modExpr, cmtTbl) =>
+ switch modExpr.pmod_desc {
+ | Pmod_structure(list{}) => Doc.text("()")
+ | _ => printModExpr(modExpr, cmtTbl)
+ }
+
+ and printExceptionDef = (constr: Parsetree.extension_constructor, cmtTbl) => {
+ let kind = switch constr.pext_kind {
+ | Pext_rebind(longident) =>
+ Doc.indent(
+ Doc.concat(list{Doc.text(" ="), Doc.line, printLongidentLocation(longident, cmtTbl)}),
+ )
+ | Pext_decl(Pcstr_tuple(list{}), None) => Doc.nil
+ | Pext_decl(args, gadt) =>
+ let gadtDoc = switch gadt {
+ | Some(typ) => Doc.concat(list{Doc.text(": "), printTypExpr(typ, cmtTbl)})
+ | None => Doc.nil
+ }
+
+ Doc.concat(list{printConstructorArguments(~indent=false, args, cmtTbl), gadtDoc})
+ }
+
+ let name = printComments(Doc.text(constr.pext_name.txt), cmtTbl, constr.pext_name.loc)
+
+ let doc = Doc.group(
+ Doc.concat(list{printAttributes(constr.pext_attributes), Doc.text("exception "), name, kind}),
+ )
+ printComments(doc, cmtTbl, constr.pext_loc)
+ }
+
+ and printExtensionConstructor = (constr: Parsetree.extension_constructor, cmtTbl, i) => {
+ let attrs = printAttributes(constr.pext_attributes)
+ let bar = if i > 0 {
+ Doc.text("| ")
+ } else {
+ Doc.ifBreaks(Doc.text("| "), Doc.nil)
+ }
+
+ let kind = switch constr.pext_kind {
+ | Pext_rebind(longident) =>
+ Doc.indent(
+ Doc.concat(list{Doc.text(" ="), Doc.line, printLongidentLocation(longident, cmtTbl)}),
+ )
+ | Pext_decl(Pcstr_tuple(list{}), None) => Doc.nil
+ | Pext_decl(args, gadt) =>
+ let gadtDoc = switch gadt {
+ | Some(typ) => Doc.concat(list{Doc.text(": "), printTypExpr(typ, cmtTbl)})
+ | None => Doc.nil
+ }
+
+ Doc.concat(list{printConstructorArguments(~indent=false, args, cmtTbl), gadtDoc})
+ }
+
+ let name = printComments(Doc.text(constr.pext_name.txt), cmtTbl, constr.pext_name.loc)
+
+ Doc.concat(list{bar, Doc.group(Doc.concat(list{attrs, name, kind}))})
+ }
+
+ let printImplementation = (~width, s: Parsetree.structure, comments) => {
+ let cmtTbl = CommentTable.make()
+ CommentTable.walkStructure(s, cmtTbl, comments)
+ /* CommentTable.log cmtTbl; */
+ let doc = printStructure(s, cmtTbl)
+ /* Doc.debug doc; */
+ let stringDoc = Doc.toString(~width, doc)
+ print_string(stringDoc)
+ }
+
+ let printInterface = (~width, s: Parsetree.signature, comments) => {
+ let cmtTbl = CommentTable.make()
+ CommentTable.walkSignature(s, cmtTbl, comments)
+ let stringDoc = Doc.toString(~width, printSignature(s, cmtTbl))
+ print_string(stringDoc)
+ }
+}
+
+module Scanner = {
+ type mode = Template | Jsx | Diamond
+
+ type t = {
+ filename: string,
+ src: bytes,
+ mutable err: (
+ ~startPos: Lexing.position,
+ ~endPos: Lexing.position,
+ Diagnostics.category,
+ ) => unit,
+ mutable ch: int /* current character */,
+ mutable offset: int /* character offset */,
+ mutable rdOffset: int /* reading offset (position after current character) */,
+ mutable lineOffset: int /* current line offset */,
+ mutable lnum: int /* current line number */,
+ mutable mode: list,
+ }
+
+ let setDiamondMode = scanner => scanner.mode = list{Diamond, ...scanner.mode}
+
+ let setTemplateMode = scanner => scanner.mode = list{Template, ...scanner.mode}
+
+ let setJsxMode = scanner => scanner.mode = list{Jsx, ...scanner.mode}
+
+ let popMode = (scanner, mode) =>
+ switch scanner.mode {
+ | list{m, ...ms} when m == mode => scanner.mode = ms
+ | _ => ()
+ }
+
+ let inDiamondMode = scanner =>
+ switch scanner.mode {
+ | list{Diamond, ..._} => true
+ | _ => false
+ }
+
+ let inJsxMode = scanner =>
+ switch scanner.mode {
+ | list{Jsx, ..._} => true
+ | _ => false
+ }
+
+ let inTemplateMode = scanner =>
+ switch scanner.mode {
+ | list{Template, ..._} => true
+ | _ => false
+ }
+
+ let position = scanner => {
+ open Lexing
+ {
+ pos_fname: scanner.filename,
+ /* line number */
+ pos_lnum: scanner.lnum,
+ /* offset of the beginning of the line (number
+ of characters between the beginning of the scanner and the beginning
+ of the line) */
+ pos_bol: scanner.lineOffset,
+ /* [pos_cnum] is the offset of the position (number of
+ characters between the beginning of the scanner and the position). */
+ pos_cnum: scanner.offset,
+ }
+ }
+
+ let next = scanner =>
+ if scanner.rdOffset < Bytes.length(scanner.src) {
+ scanner.offset = scanner.rdOffset
+ let ch = (@doesNotRaise Bytes.get)(scanner.src, scanner.rdOffset)
+ scanner.rdOffset = scanner.rdOffset + 1
+ scanner.ch = int_of_char(ch)
+ } else {
+ scanner.offset = Bytes.length(scanner.src)
+ scanner.ch = -1
+ }
+
+ let peek = scanner =>
+ if scanner.rdOffset < Bytes.length(scanner.src) {
+ int_of_char(Bytes.unsafe_get(scanner.src, scanner.rdOffset))
+ } else {
+ -1
+ }
+
+ let make = (b, filename) => {
+ let scanner = {
+ filename: filename,
+ src: b,
+ err: (~startPos as _, ~endPos as _, _) => (),
+ ch: CharacterCodes.space,
+ offset: 0,
+ rdOffset: 0,
+ lineOffset: 0,
+ lnum: 1,
+ mode: list{},
+ }
+ next(scanner)
+ scanner
+ }
+
+ let skipWhitespace = scanner => {
+ let rec scan = () =>
+ if scanner.ch === CharacterCodes.space || scanner.ch === CharacterCodes.tab {
+ next(scanner)
+ scan()
+ } else if CharacterCodes.isLineBreak(scanner.ch) {
+ scanner.lineOffset = scanner.offset + 1
+ scanner.lnum = scanner.lnum + 1
+ next(scanner)
+ scan()
+ } else {
+ ()
+ }
+
+ scan()
+ }
+
+ let scanIdentifier = scanner => {
+ let startOff = scanner.offset
+ while (
+ CharacterCodes.isLetter(scanner.ch) ||
+ (CharacterCodes.isDigit(scanner.ch) ||
+ (CharacterCodes.underscore === scanner.ch || CharacterCodes.singleQuote === scanner.ch))
+ ) {
+ next(scanner)
+ }
+ let str = Bytes.sub_string(scanner.src, startOff, scanner.offset - startOff)
+ Token.lookupKeyword(str)
+ }
+
+ let scanDigits = (scanner, ~base) =>
+ if base <= 10 {
+ while CharacterCodes.isDigit(scanner.ch) || scanner.ch === CharacterCodes.underscore {
+ next(scanner)
+ }
+ } else {
+ while CharacterCodes.isHex(scanner.ch) || scanner.ch === CharacterCodes.underscore {
+ next(scanner)
+ }
+ }
+
+ /* float: (0…9) { 0…9∣ _ } [. { 0…9∣ _ }] [(e∣ E) [+∣ -] (0…9) { 0…9∣ _ }] */
+ let scanNumber = scanner => {
+ let startOff = scanner.offset
+
+ /* integer part */
+ let (base, _prefix) = if scanner.ch !== CharacterCodes.dot {
+ if scanner.ch === CharacterCodes._0 {
+ next(scanner)
+ let ch = CharacterCodes.lower(scanner.ch)
+ if ch === CharacterCodes.Lower.x {
+ next(scanner)
+ (16, 'x')
+ } else if ch === CharacterCodes.Lower.o {
+ next(scanner)
+ (8, 'o')
+ } else if ch === CharacterCodes.Lower.b {
+ next(scanner)
+ (2, 'b')
+ } else {
+ (8, '0')
+ }
+ } else {
+ (10, ' ')
+ }
+ } else {
+ (10, ' ')
+ }
+
+ scanDigits(scanner, ~base)
+
+ /* */
+ let isFloat = if CharacterCodes.dot === scanner.ch {
+ next(scanner)
+ scanDigits(scanner, ~base)
+ true
+ } else {
+ false
+ }
+
+ /* exponent part */
+ let isFloat = if {
+ let exp = CharacterCodes.lower(scanner.ch)
+ exp === CharacterCodes.Lower.e || exp === CharacterCodes.Lower.p
+ } {
+ next(scanner)
+ if scanner.ch === CharacterCodes.plus || scanner.ch === CharacterCodes.minus {
+ next(scanner)
+ }
+ scanDigits(scanner, ~base)
+ true
+ } else {
+ isFloat
+ }
+
+ let literal = Bytes.sub_string(scanner.src, startOff, scanner.offset - startOff)
+
+ /* suffix */
+ let suffix = if (
+ (scanner.ch >= CharacterCodes.Lower.g && scanner.ch <= CharacterCodes.Lower.z) ||
+ (scanner.ch >= CharacterCodes.Upper.g && scanner.ch <= CharacterCodes.Upper.z)
+ ) {
+ let ch = scanner.ch
+ next(scanner)
+ Some(Char.unsafe_chr(ch))
+ } else {
+ None
+ }
+
+ if isFloat {
+ Token.Float({f: literal, suffix: suffix})
+ } else {
+ Token.Int({i: literal, suffix: suffix})
+ }
+ }
+
+ let scanExoticIdentifier = scanner => {
+ next(scanner)
+ let buffer = Buffer.create(20)
+ let startPos = position(scanner)
+
+ let rec scan = () =>
+ if scanner.ch === CharacterCodes.eof {
+ let endPos = position(scanner)
+ scanner.err(~startPos, ~endPos, Diagnostics.message("Did you forget a \" here?"))
+ } else if scanner.ch === CharacterCodes.doubleQuote {
+ next(scanner)
+ } else if CharacterCodes.isLineBreak(scanner.ch) {
+ scanner.lineOffset = scanner.offset + 1
+ scanner.lnum = scanner.lnum + 1
+ let endPos = position(scanner)
+ scanner.err(~startPos, ~endPos, Diagnostics.message("Did you forget a \" here?"))
+ next(scanner)
+ } else {
+ Buffer.add_char(buffer, (@doesNotRaise Char.chr)(scanner.ch))
+ next(scanner)
+ scan()
+ }
+
+ scan()
+ Token.Lident(Buffer.contents(buffer))
+ }
+
+ let scanStringEscapeSequence = (~startPos, scanner) =>
+ /* \ already consumed */
+ if (
+ CharacterCodes.Lower.n === scanner.ch ||
+ (CharacterCodes.Lower.t === scanner.ch ||
+ (CharacterCodes.Lower.b === scanner.ch ||
+ (CharacterCodes.Lower.r === scanner.ch ||
+ (CharacterCodes.backslash === scanner.ch ||
+ (CharacterCodes.space === scanner.ch ||
+ (CharacterCodes.singleQuote === scanner.ch ||
+ CharacterCodes.doubleQuote === scanner.ch))))))
+ ) {
+ next(scanner)
+ } else {
+ let (n, base, max) = if CharacterCodes.isDigit(scanner.ch) {
+ /* decimal */
+ (3, 10, 255)
+ } else if scanner.ch === CharacterCodes.Lower.o {
+ /* octal */
+ let () = next(scanner)
+ (3, 8, 255)
+ } else if scanner.ch === CharacterCodes.Lower.x {
+ /* hex */
+ let () = next(scanner)
+ (2, 16, 255)
+ } else {
+ /* unknown escape sequence
+ * TODO: we should warn the user here. Let's not make it a hard error for now, for reason compat */
+ /* let pos = position scanner in */
+ /* let () = */
+ /* let msg = if scanner.ch == -1 then */
+ /* "unclosed escape sequence" */
+ /* else "unknown escape sequence" */
+ /* in */
+ /* scanner.err ~startPos ~endPos:pos (Diagnostics.message msg) */
+ /* in */
+ (-1, -1, -1)
+ }
+
+ if n < 0 {
+ ()
+ } else {
+ let rec while_ = (n, x) =>
+ if n === 0 {
+ x
+ } else {
+ let d = CharacterCodes.digitValue(scanner.ch)
+ if d >= base {
+ let pos = position(scanner)
+ let msg = if scanner.ch === -1 {
+ "unclosed escape sequence"
+ } else {
+ "unknown escape sequence"
+ }
+
+ scanner.err(~startPos, ~endPos=pos, Diagnostics.message(msg))
+ -1
+ } else {
+ let () = next(scanner)
+ while_(n - 1, x * base + d)
+ }
+ }
+
+ let x = while_(n, 0)
+ if x > max {
+ let pos = position(scanner)
+ let msg = "invalid escape sequence (value too high)"
+ scanner.err(~startPos, ~endPos=pos, Diagnostics.message(msg))
+ ()
+ }
+ }
+ }
+
+ let scanString = scanner => {
+ let offs = scanner.offset
+
+ let startPos = position(scanner)
+ let rec scan = () =>
+ if scanner.ch === CharacterCodes.eof {
+ let endPos = position(scanner)
+ scanner.err(~startPos, ~endPos, Diagnostics.unclosedString)
+ } else if scanner.ch === CharacterCodes.doubleQuote {
+ next(scanner)
+ } else if scanner.ch === CharacterCodes.backslash {
+ let startPos = position(scanner)
+ next(scanner)
+ scanStringEscapeSequence(~startPos, scanner)
+ scan()
+ } else if CharacterCodes.isLineBreak(scanner.ch) {
+ scanner.lineOffset = scanner.offset + 1
+ scanner.lnum = scanner.lnum + 1
+ next(scanner)
+ scan()
+ } else {
+ next(scanner)
+ scan()
+ }
+
+ scan()
+ Token.String(Bytes.sub_string(scanner.src, offs, scanner.offset - offs - 1))
+ }
+
+ /* I wonder if this gets inlined */
+ let convertNumber = (scanner, ~n, ~base) => {
+ let x = ref(0)
+ for _ in n downto 1 {
+ let d = CharacterCodes.digitValue(scanner.ch)
+ x := x.contents * base + d
+ next(scanner)
+ }
+ x.contents
+ }
+
+ let scanEscape = scanner => {
+ /* let offset = scanner.offset in */
+ let c = switch scanner.ch {
+ | 98 /* b */ =>
+ next(scanner)
+ '\b'
+ | 110 /* n */ =>
+ next(scanner)
+ '\n'
+ | 114 /* r */ =>
+ next(scanner)
+ '\r'
+ | 116 /* t */ =>
+ next(scanner)
+ '\t'
+ | ch when CharacterCodes.isDigit(ch) =>
+ let x = convertNumber(scanner, ~n=3, ~base=10)
+ (@doesNotRaise Char.chr)(x)
+ | ch when ch === CharacterCodes.Lower.x =>
+ next(scanner)
+ let x = convertNumber(scanner, ~n=2, ~base=16)
+ (@doesNotRaise Char.chr)(x)
+ | ch when ch === CharacterCodes.Lower.o =>
+ next(scanner)
+ let x = convertNumber(scanner, ~n=3, ~base=8)
+ (@doesNotRaise Char.chr)(x)
+ | ch =>
+ next(scanner)
+ (@doesNotRaise Char.chr)(ch)
+ }
+
+ next(scanner) /* Consume \' */
+ Token.Character(c)
+ }
+
+ let scanSingleLineComment = scanner => {
+ let startOff = scanner.offset
+ let startPos = position(scanner)
+ while !CharacterCodes.isLineBreak(scanner.ch) && scanner.ch >= 0 {
+ next(scanner)
+ }
+ let endPos = position(scanner)
+ Token.Comment(
+ Comment.makeSingleLineComment(
+ ~loc={
+ open Location
+ {loc_start: startPos, loc_end: endPos, loc_ghost: false}
+ },
+ Bytes.sub_string(scanner.src, startOff, scanner.offset - startOff),
+ ),
+ )
+ }
+
+ let scanMultiLineComment = scanner => {
+ let startOff = scanner.offset
+ let startPos = position(scanner)
+ let rec scan = (~depth, ()) =>
+ if scanner.ch === CharacterCodes.asterisk && peek(scanner) === CharacterCodes.forwardslash {
+ next(scanner)
+ next(scanner)
+ if depth > 0 {
+ scan(~depth=depth - 1, ())
+ } else {
+ ()
+ }
+ } else if scanner.ch === CharacterCodes.eof {
+ let endPos = position(scanner)
+ scanner.err(~startPos, ~endPos, Diagnostics.unclosedComment)
+ } else if (
+ scanner.ch === CharacterCodes.forwardslash && peek(scanner) === CharacterCodes.asterisk
+ ) {
+ next(scanner)
+ next(scanner)
+ scan(~depth=depth + 1, ())
+ } else {
+ if CharacterCodes.isLineBreak(scanner.ch) {
+ scanner.lineOffset = scanner.offset + 1
+ scanner.lnum = scanner.lnum + 1
+ }
+ next(scanner)
+ scan(~depth, ())
+ }
+
+ scan(~depth=0, ())
+ Token.Comment(
+ Comment.makeMultiLineComment(
+ ~loc={
+ open Location
+ {loc_start: startPos, loc_end: position(scanner), loc_ghost: false}
+ },
+ Bytes.sub_string(scanner.src, startOff, scanner.offset - 2 - startOff),
+ ),
+ )
+ }
+
+ let scanTemplate = scanner => {
+ let startOff = scanner.offset
+ let startPos = position(scanner)
+
+ let rec scan = () =>
+ if scanner.ch === CharacterCodes.eof {
+ let endPos = position(scanner)
+ scanner.err(~startPos, ~endPos, Diagnostics.unclosedTemplate)
+ popMode(scanner, Template)
+ Token.TemplateTail(Bytes.sub_string(scanner.src, startOff, scanner.offset - 2 - startOff))
+ } else if scanner.ch === CharacterCodes.backslash {
+ next(scanner)
+ if (
+ scanner.ch === CharacterCodes.backtick ||
+ (scanner.ch === CharacterCodes.backslash ||
+ scanner.ch === CharacterCodes.dollar)
+ ) {
+ next(scanner)
+ }
+ scan()
+ } else if scanner.ch === CharacterCodes.backtick {
+ next(scanner)
+ let contents = Bytes.sub_string(scanner.src, startOff, scanner.offset - 1 - startOff)
+
+ popMode(scanner, Template)
+ Token.TemplateTail(contents)
+ } else if scanner.ch === CharacterCodes.dollar && peek(scanner) === CharacterCodes.lbrace {
+ next(scanner) /* consume $ */
+ next(scanner) /* consume { */
+ let contents = Bytes.sub_string(scanner.src, startOff, scanner.offset - 2 - startOff)
+
+ popMode(scanner, Template)
+ Token.TemplatePart(contents)
+ } else {
+ if CharacterCodes.isLineBreak(scanner.ch) {
+ scanner.lineOffset = scanner.offset + 1
+ scanner.lnum = scanner.lnum + 1
+ }
+ next(scanner)
+ scan()
+ }
+
+ scan()
+ }
+
+ let rec scan = scanner => {
+ if !inTemplateMode(scanner) {
+ skipWhitespace(scanner)
+ }
+ let startPos = position(scanner)
+ let ch = scanner.ch
+ let token = if inTemplateMode(scanner) {
+ scanTemplate(scanner)
+ } else if ch === CharacterCodes.underscore {
+ let nextCh = peek(scanner)
+ if (
+ nextCh === CharacterCodes.underscore ||
+ (CharacterCodes.isDigit(nextCh) ||
+ CharacterCodes.isLetter(nextCh))
+ ) {
+ scanIdentifier(scanner)
+ } else {
+ next(scanner)
+ Token.Underscore
+ }
+ } else if CharacterCodes.isLetter(ch) {
+ scanIdentifier(scanner)
+ } else if CharacterCodes.isDigit(ch) {
+ scanNumber(scanner)
+ } else {
+ next(scanner)
+ if ch === CharacterCodes.dot {
+ if scanner.ch === CharacterCodes.dot {
+ next(scanner)
+ if scanner.ch === CharacterCodes.dot {
+ next(scanner)
+ Token.DotDotDot
+ } else {
+ Token.DotDot
+ }
+ } else {
+ Token.Dot
+ }
+ } else if ch === CharacterCodes.doubleQuote {
+ scanString(scanner)
+ } else if ch === CharacterCodes.singleQuote {
+ if (
+ scanner.ch === CharacterCodes.backslash && !(peek(scanner) === CharacterCodes.doubleQuote)
+ ) {
+ /* start of exotic ident */
+
+ next(scanner)
+ scanEscape(scanner)
+ } else if peek(scanner) === CharacterCodes.singleQuote {
+ let ch = scanner.ch
+ next(scanner)
+ next(scanner)
+ Token.Character((@doesNotRaise Char.chr)(ch))
+ } else {
+ SingleQuote
+ }
+ } else if ch === CharacterCodes.bang {
+ if scanner.ch === CharacterCodes.equal {
+ next(scanner)
+ if scanner.ch === CharacterCodes.equal {
+ next(scanner)
+ Token.BangEqualEqual
+ } else {
+ Token.BangEqual
+ }
+ } else {
+ Token.Bang
+ }
+ } else if ch === CharacterCodes.semicolon {
+ Token.Semicolon
+ } else if ch === CharacterCodes.equal {
+ if scanner.ch === CharacterCodes.greaterThan {
+ next(scanner)
+ Token.EqualGreater
+ } else if scanner.ch === CharacterCodes.equal {
+ next(scanner)
+ if scanner.ch === CharacterCodes.equal {
+ next(scanner)
+ Token.EqualEqualEqual
+ } else {
+ Token.EqualEqual
+ }
+ } else {
+ Token.Equal
+ }
+ } else if ch === CharacterCodes.bar {
+ if scanner.ch === CharacterCodes.bar {
+ next(scanner)
+ Token.Lor
+ } else if scanner.ch === CharacterCodes.greaterThan {
+ next(scanner)
+ Token.BarGreater
+ } else {
+ Token.Bar
+ }
+ } else if ch === CharacterCodes.ampersand {
+ if scanner.ch === CharacterCodes.ampersand {
+ next(scanner)
+ Token.Land
+ } else {
+ Token.Band
+ }
+ } else if ch === CharacterCodes.lparen {
+ Token.Lparen
+ } else if ch === CharacterCodes.rparen {
+ Token.Rparen
+ } else if ch === CharacterCodes.lbracket {
+ Token.Lbracket
+ } else if ch === CharacterCodes.rbracket {
+ Token.Rbracket
+ } else if ch === CharacterCodes.lbrace {
+ Token.Lbrace
+ } else if ch === CharacterCodes.rbrace {
+ Token.Rbrace
+ } else if ch === CharacterCodes.comma {
+ Token.Comma
+ } else if ch === CharacterCodes.colon {
+ if scanner.ch === CharacterCodes.equal {
+ next(scanner)
+ Token.ColonEqual
+ } else if scanner.ch === CharacterCodes.greaterThan {
+ next(scanner)
+ Token.ColonGreaterThan
+ } else {
+ Token.Colon
+ }
+ } else if ch === CharacterCodes.backslash {
+ scanExoticIdentifier(scanner)
+ } else if ch === CharacterCodes.forwardslash {
+ if scanner.ch === CharacterCodes.forwardslash {
+ next(scanner)
+ scanSingleLineComment(scanner)
+ } else if scanner.ch === CharacterCodes.asterisk {
+ next(scanner)
+ scanMultiLineComment(scanner)
+ } else if scanner.ch === CharacterCodes.dot {
+ next(scanner)
+ Token.ForwardslashDot
+ } else {
+ Token.Forwardslash
+ }
+ } else if ch === CharacterCodes.minus {
+ if scanner.ch === CharacterCodes.dot {
+ next(scanner)
+ Token.MinusDot
+ } else if scanner.ch === CharacterCodes.greaterThan {
+ next(scanner)
+ Token.MinusGreater
+ } else {
+ Token.Minus
+ }
+ } else if ch === CharacterCodes.plus {
+ if scanner.ch === CharacterCodes.dot {
+ next(scanner)
+ Token.PlusDot
+ } else if scanner.ch === CharacterCodes.plus {
+ next(scanner)
+ Token.PlusPlus
+ } else if scanner.ch === CharacterCodes.equal {
+ next(scanner)
+ Token.PlusEqual
+ } else {
+ Token.Plus
+ }
+ } else if ch === CharacterCodes.greaterThan {
+ if scanner.ch === CharacterCodes.equal && !inDiamondMode(scanner) {
+ next(scanner)
+ Token.GreaterEqual
+ } else {
+ Token.GreaterThan
+ }
+ } else if ch === CharacterCodes.lessThan {
+ /* Imagine the following: <div><span>
+ * < indicates the start of a new jsx-element, the parser expects
+ * the name of a new element after the <
+ * Example: <div> <div
+ * But what if we have a / here: example </ in <div></div>
+ * This signals a closing element. To simulate the two-token lookahead,
+ * the </ is emitted as a single new token LessThanSlash */
+ if inJsxMode(scanner) {
+ skipWhitespace(scanner)
+ if scanner.ch === CharacterCodes.forwardslash {
+ let () = next(scanner)
+ Token.LessThanSlash
+ } else {
+ Token.LessThan
+ }
+ } else if scanner.ch === CharacterCodes.equal {
+ next(scanner)
+ Token.LessEqual
+ } else {
+ Token.LessThan
+ }
+ } else if ch === CharacterCodes.hash {
+ if scanner.ch === CharacterCodes.hash {
+ next(scanner)
+ Token.HashHash
+ } else if scanner.ch === CharacterCodes.equal {
+ next(scanner)
+ Token.HashEqual
+ } else {
+ Token.Hash
+ }
+ } else if ch === CharacterCodes.asterisk {
+ if scanner.ch === CharacterCodes.asterisk {
+ next(scanner)
+ Token.Exponentiation
+ } else if scanner.ch === CharacterCodes.dot {
+ next(scanner)
+ Token.AsteriskDot
+ } else {
+ Token.Asterisk
+ }
+ } else if ch === CharacterCodes.tilde {
+ Token.Tilde
+ } else if ch === CharacterCodes.question {
+ Token.Question
+ } else if ch === CharacterCodes.at {
+ if scanner.ch === CharacterCodes.at {
+ next(scanner)
+ Token.AtAt
+ } else {
+ Token.At
+ }
+ } else if ch === CharacterCodes.percent {
+ if scanner.ch === CharacterCodes.percent {
+ next(scanner)
+ Token.PercentPercent
+ } else {
+ Token.Percent
+ }
+ } else if ch === CharacterCodes.backtick {
+ Token.Backtick
+ } else if ch === -1 {
+ Token.Eof
+ } else {
+ /* if we arrive here, we're dealing with an unknown character,
+ * report the error and continue scanning… */
+ let endPos = position(scanner)
+ scanner.err(~startPos, ~endPos, Diagnostics.unknownUchar(ch))
+ let (_, _, token) = scan(scanner)
+ token
+ }
+ }
+ let endPos = position(scanner)
+ (startPos, endPos, token)
+ }
+
+ /* Imagine: <div> <
+ * is `<` the start of a jsx-child?
+ * Example: <div /> (a self-closing element)
+ * reconsiderLessThan peeks at the next token and
+ * determines the correct token to disambiguate */
+ let reconsiderLessThan = scanner => {
+ /* < consumed */
+ skipWhitespace(scanner)
+ if scanner.ch === CharacterCodes.forwardslash {
+ let () = next(scanner)
+ Token.LessThanSlash
+ } else {
+ Token.LessThan
+ }
+ }
+
+ /* If an operator has whitespace around both sides, it's a binary operator */
+ let isBinaryOp = (src, startCnum, endCnum) =>
+ if startCnum === 0 {
+ false
+ } else {
+ let leftOk = {
+ let c = startCnum - 1 |> (@doesNotRaise Bytes.get)(src) |> Char.code
+
+ c === CharacterCodes.space || (c === CharacterCodes.tab || CharacterCodes.isLineBreak(c))
+ }
+
+ let rightOk = {
+ let c = if endCnum === Bytes.length(src) {
+ -1
+ } else {
+ endCnum |> (@doesNotRaise Bytes.get)(src) |> Char.code
+ }
+
+ c === CharacterCodes.space ||
+ (c === CharacterCodes.tab ||
+ (CharacterCodes.isLineBreak(c) || c === CharacterCodes.eof))
+ }
+
+ leftOk && rightOk
+ }
+}
+
+/* AST for js externals */
+module JsFfi = {
+ type scope =
+ | Global
+ | Module(string) /* bs.module("path") */
+ | Scope(Longident.t) /* bs.scope(("window", "location")) */
+
+ type label_declaration = {
+ @live jld_attributes: Parsetree.attributes,
+ jld_name: string,
+ jld_alias: string,
+ jld_type: Parsetree.core_type,
+ jld_loc: Location.t,
+ }
+
+ type importSpec =
+ | Default(label_declaration)
+ | Spec(list)
+
+ type import_description = {
+ jid_loc: Location.t,
+ jid_spec: importSpec,
+ jid_scope: scope,
+ jid_attributes: Parsetree.attributes,
+ }
+
+ let decl = (~attrs, ~loc, ~name, ~alias, ~typ) => {
+ jld_loc: loc,
+ jld_attributes: attrs,
+ jld_name: name,
+ jld_alias: alias,
+ jld_type: typ,
+ }
+
+ let importDescr = (~attrs, ~scope, ~importSpec, ~loc) => {
+ jid_loc: loc,
+ jid_spec: importSpec,
+ jid_scope: scope,
+ jid_attributes: attrs,
+ }
+
+ let toParsetree = importDescr => {
+ let bsVal = (Location.mknoloc("bs.val"), Parsetree.PStr(list{}))
+ let attrs = switch importDescr.jid_scope {
+ | Global => list{bsVal}
+ /* @genType.import("./MyMath"),
+ * @genType.import(("./MyMath", "default")) */
+ | Module(s) =>
+ let structure = list{
+ Parsetree.Pconst_string(s, None) |> Ast_helper.Exp.constant |> Ast_helper.Str.eval,
+ }
+ let genType = (Location.mknoloc("genType.import"), Parsetree.PStr(structure))
+ list{genType}
+ | Scope(longident) =>
+ let structureItem = {
+ let expr = switch Longident.flatten(longident) |> List.map(s =>
+ Ast_helper.Exp.constant(Parsetree.Pconst_string(s, None))
+ ) {
+ | list{expr} => expr
+ | list{} as exprs | _ as exprs => exprs |> Ast_helper.Exp.tuple
+ }
+
+ Ast_helper.Str.eval(expr)
+ }
+
+ let bsScope = (Location.mknoloc("bs.scope"), Parsetree.PStr(list{structureItem}))
+ list{bsVal, bsScope}
+ }
+
+ let valueDescrs = switch importDescr.jid_spec {
+ | Default(decl) =>
+ let prim = list{decl.jld_name}
+ let allAttrs = List.concat(list{attrs, importDescr.jid_attributes}) |> List.map(attr =>
+ switch attr {
+ | (
+ {Location.txt: "genType.import"} as id,
+ Parsetree.PStr(list{{pstr_desc: Parsetree.Pstr_eval(moduleName, _)}}),
+ ) =>
+ let default = Parsetree.Pconst_string("default", None) |> Ast_helper.Exp.constant
+
+ let structureItem =
+ list{moduleName, default} |> Ast_helper.Exp.tuple |> Ast_helper.Str.eval
+
+ (id, Parsetree.PStr(list{structureItem}))
+ | attr => attr
+ }
+ )
+
+ list{
+ Ast_helper.Val.mk(
+ ~loc=importDescr.jid_loc,
+ ~prim,
+ ~attrs=allAttrs,
+ Location.mknoloc(decl.jld_alias),
+ decl.jld_type,
+ ) |> Ast_helper.Str.primitive,
+ }
+ | Spec(decls) => List.map(decl => {
+ let prim = list{decl.jld_name}
+ let allAttrs = List.concat(list{attrs, decl.jld_attributes})
+ Ast_helper.Val.mk(
+ ~loc=importDescr.jid_loc,
+ ~prim,
+ ~attrs=allAttrs,
+ Location.mknoloc(decl.jld_alias),
+ decl.jld_type,
+ ) |> Ast_helper.Str.primitive(~loc=decl.jld_loc)
+ }, decls)
+ }
+
+ let jsFfiAttr = (Location.mknoloc("ns.jsFfi"), Parsetree.PStr(list{}))
+ Ast_helper.Mod.structure(~loc=importDescr.jid_loc, valueDescrs)
+ |> Ast_helper.Incl.mk(~attrs=list{jsFfiAttr}, ~loc=importDescr.jid_loc)
+ |> Ast_helper.Str.include_(~loc=importDescr.jid_loc)
+ }
+}
+
+module ParsetreeCompatibility = {
+ let concatLongidents = (l1, l2) => {
+ let parts1 = Longident.flatten(l1)
+ let parts2 = Longident.flatten(l2)
+ switch List.concat(list{parts1, parts2}) |> Longident.unflatten {
+ | Some(longident) => longident
+ | None => l2
+ }
+ }
+
+ /* TODO: support nested open's ? */
+ let rec rewritePpatOpen = (longidentOpen, pat) => {
+ open Parsetree
+ switch pat.ppat_desc {
+ | Ppat_array(list{first, ...rest}) => /* Color.[Red, Blue, Green] -> [Color.Red, Blue, Green] */
+ {...pat, ppat_desc: Ppat_array(list{rewritePpatOpen(longidentOpen, first), ...rest})}
+ | Ppat_tuple(list{first, ...rest}) => /* Color.(Red, Blue, Green) -> (Color.Red, Blue, Green) */
+ {...pat, ppat_desc: Ppat_tuple(list{rewritePpatOpen(longidentOpen, first), ...rest})}
+ | Ppat_construct(
+ {txt: Longident.Lident("::")} as listConstructor,
+ Some({ppat_desc: Ppat_tuple(list{pat, ...rest})} as element),
+ ) => /* Color.(list{Red, Blue, Green}) -> list{Color.Red, Blue, Green} */
+ {
+ ...pat,
+ ppat_desc: Ppat_construct(
+ listConstructor,
+ Some({
+ ...element,
+ ppat_desc: Ppat_tuple(list{rewritePpatOpen(longidentOpen, pat), ...rest}),
+ }),
+ ),
+ }
+ | Ppat_construct(
+ {txt: constructor} as longidentLoc,
+ optPattern,
+ ) => /* Foo.(Bar(a)) -> Foo.Bar(a) */
+ {
+ ...pat,
+ ppat_desc: Ppat_construct(
+ {...longidentLoc, txt: concatLongidents(longidentOpen, constructor)},
+ optPattern,
+ ),
+ }
+ | Ppat_record(list{({txt: lbl} as longidentLoc, firstPat), ...rest}, flag) =>
+ /* Foo.{x} -> {Foo.x: x} */
+ let firstRow = ({...longidentLoc, txt: concatLongidents(longidentOpen, lbl)}, firstPat)
+ {...pat, ppat_desc: Ppat_record(list{firstRow, ...rest}, flag)}
+ | Ppat_or(pat1, pat2) => {
+ ...pat,
+ ppat_desc: Ppat_or(
+ rewritePpatOpen(longidentOpen, pat1),
+ rewritePpatOpen(longidentOpen, pat2),
+ ),
+ }
+ | Ppat_constraint(pattern, typ) => {
+ ...pat,
+ ppat_desc: Ppat_constraint(rewritePpatOpen(longidentOpen, pattern), typ),
+ }
+ | Ppat_type({txt: constructor} as longidentLoc) => {
+ ...pat,
+ ppat_desc: Ppat_type({
+ ...longidentLoc,
+ txt: concatLongidents(longidentOpen, constructor),
+ }),
+ }
+ | Ppat_lazy(p) => {...pat, ppat_desc: Ppat_lazy(rewritePpatOpen(longidentOpen, p))}
+ | Ppat_exception(p) => {...pat, ppat_desc: Ppat_exception(rewritePpatOpen(longidentOpen, p))}
+ | _ => pat
+ }
+ }
+
+ let rec rewriteReasonFastPipe = expr => {
+ open Parsetree
+ switch expr.pexp_desc {
+ | Pexp_apply(
+ {
+ pexp_desc:
+ Pexp_apply(
+ {pexp_desc: Pexp_ident({txt: Longident.Lident("|.")})} as op,
+ list{(Asttypes.Nolabel, lhs), (Nolabel, rhs)},
+ ),
+ pexp_attributes: subAttrs,
+ },
+ args,
+ ) =>
+ let rhsLoc = {...rhs.pexp_loc, loc_end: expr.pexp_loc.loc_end}
+ let newLhs = {
+ let expr = rewriteReasonFastPipe(lhs)
+ {...expr, pexp_attributes: subAttrs}
+ }
+
+ let allArgs = list{
+ (Asttypes.Nolabel, newLhs),
+ (Asttypes.Nolabel, Ast_helper.Exp.apply(~loc=rhsLoc, rhs, args)),
+ }
+
+ Ast_helper.Exp.apply(~attrs=expr.pexp_attributes, ~loc=expr.pexp_loc, op, allArgs)
+ | _ => expr
+ }
+ }
+
+ let makeReasonArityMapper = (~forPrinter) => {
+ open Ast_mapper
+ {
+ ...default_mapper,
+ expr: (mapper, expr) =>
+ switch expr {
+ /* Don't mind this case, Reason doesn't handle this. */
+ /* | {pexp_desc = Pexp_variant (lbl, args); pexp_loc; pexp_attributes} -> */
+ /* let newArgs = match args with */
+ /* | (Some {pexp_desc = Pexp_tuple [{pexp_desc = Pexp_tuple _ } as sp]}) as args-> */
+ /* if forPrinter then args else Some sp */
+ /* | Some {pexp_desc = Pexp_tuple [sp]} -> Some sp */
+ /* | _ -> args */
+ /* in */
+ /* default_mapper.expr mapper {pexp_desc=Pexp_variant(lbl, newArgs); pexp_loc; pexp_attributes} */
+ | {pexp_desc: Pexp_construct(lid, args), pexp_loc, pexp_attributes} =>
+ let newArgs = switch args {
+ | Some({pexp_desc: Pexp_tuple(list{{pexp_desc: Pexp_tuple(_)} as sp})}) as args =>
+ if forPrinter {
+ args
+ } else {
+ Some(sp)
+ }
+ | Some({pexp_desc: Pexp_tuple(list{sp})}) => Some(sp)
+ | _ => args
+ }
+
+ default_mapper.expr(
+ mapper,
+ {
+ pexp_desc: Pexp_construct(lid, newArgs),
+ pexp_loc: pexp_loc,
+ pexp_attributes: pexp_attributes,
+ },
+ )
+ | expr => default_mapper.expr(mapper, rewriteReasonFastPipe(expr))
+ },
+ pat: (mapper, pattern) =>
+ switch pattern {
+ /* Don't mind this case, Reason doesn't handle this. */
+ /* | {ppat_desc = Ppat_variant (lbl, args); ppat_loc; ppat_attributes} -> */
+ /* let newArgs = match args with */
+ /* | (Some {ppat_desc = Ppat_tuple [{ppat_desc = Ppat_tuple _} as sp]}) as args -> */
+ /* if forPrinter then args else Some sp */
+ /* | Some {ppat_desc = Ppat_tuple [sp]} -> Some sp */
+ /* | _ -> args */
+ /* in */
+ /* default_mapper.pat mapper {ppat_desc = Ppat_variant (lbl, newArgs); ppat_loc; ppat_attributes;} */
+ | {ppat_desc: Ppat_construct(lid, args), ppat_loc, ppat_attributes} =>
+ let new_args = switch args {
+ | Some({ppat_desc: Ppat_tuple(list{{ppat_desc: Ppat_tuple(_)} as sp})}) as args =>
+ if forPrinter {
+ args
+ } else {
+ Some(sp)
+ }
+ | Some({ppat_desc: Ppat_tuple(list{sp})}) => Some(sp)
+ | _ => args
+ }
+ default_mapper.pat(
+ mapper,
+ {
+ ppat_desc: Ppat_construct(lid, new_args),
+ ppat_loc: ppat_loc,
+ ppat_attributes: ppat_attributes,
+ },
+ )
+ | x => default_mapper.pat(mapper, x)
+ },
+ }
+ }
+
+ let escapeTemplateLiteral = s => {
+ let len = String.length(s)
+ let b = Buffer.create(len)
+ let i = ref(0)
+ while i.contents < len {
+ let c = (@doesNotRaise String.get)(s, i.contents)
+ if c == '`' {
+ Buffer.add_char(b, '\\')
+ Buffer.add_char(b, '`')
+ incr(i)
+ } else if c == '$' {
+ if i.contents + 1 < len {
+ let c2 = (@doesNotRaise String.get)(s, i.contents + 1)
+ if c2 == '{' {
+ Buffer.add_char(b, '\\')
+ Buffer.add_char(b, '$')
+ Buffer.add_char(b, '{')
+ } else {
+ Buffer.add_char(b, c)
+ Buffer.add_char(b, c2)
+ }
+ i := i.contents + 2
+ } else {
+ Buffer.add_char(b, c)
+ incr(i)
+ }
+ } else if c == '\\' {
+ Buffer.add_char(b, '\\')
+ Buffer.add_char(b, '\\')
+ incr(i)
+ } else {
+ Buffer.add_char(b, c)
+ incr(i)
+ }
+ }
+ Buffer.contents(b)
+ }
+
+ let escapeStringContents = s => {
+ let len = String.length(s)
+ let b = Buffer.create(len)
+
+ let i = ref(0)
+
+ while i.contents < len {
+ let c = String.unsafe_get(s, i.contents)
+ if c == '\\' {
+ incr(i)
+ Buffer.add_char(b, c)
+ let c = String.unsafe_get(s, i.contents)
+ if i.contents < len {
+ let () = Buffer.add_char(b, c)
+ incr(i)
+ } else {
+ ()
+ }
+ } else if c == '"' {
+ Buffer.add_char(b, '\\')
+ Buffer.add_char(b, c)
+ incr(i)
+ } else {
+ Buffer.add_char(b, c)
+ incr(i)
+ }
+ }
+ Buffer.contents(b)
+ }
+
+ let looksLikeRecursiveTypeDeclaration = typeDeclaration => {
+ open Parsetree
+ let name = typeDeclaration.ptype_name.txt
+ let rec checkKind = kind =>
+ switch kind {
+ | Ptype_abstract | Ptype_open => false
+ | Ptype_variant(constructorDeclarations) =>
+ List.exists(checkConstructorDeclaration, constructorDeclarations)
+ | Ptype_record(labelDeclarations) => List.exists(checkLabelDeclaration, labelDeclarations)
+ }
+
+ and checkConstructorDeclaration = constrDecl =>
+ checkConstructorArguments(constrDecl.pcd_args) ||
+ switch constrDecl.pcd_res {
+ | Some(typexpr) => checkTypExpr(typexpr)
+ | None => false
+ }
+
+ and checkLabelDeclaration = labelDeclaration => checkTypExpr(labelDeclaration.pld_type)
+
+ and checkConstructorArguments = constrArg =>
+ switch constrArg {
+ | Pcstr_tuple(types) => List.exists(checkTypExpr, types)
+ | Pcstr_record(labelDeclarations) => List.exists(checkLabelDeclaration, labelDeclarations)
+ }
+
+ and checkTypExpr = typ =>
+ switch typ.ptyp_desc {
+ | Ptyp_any => false
+ | Ptyp_var(_) => false
+ | Ptyp_object(_) => false
+ | Ptyp_class(_) => false
+ | Ptyp_package(_) => false
+ | Ptyp_extension(_) => false
+ | Ptyp_arrow(_lbl, typ1, typ2) => checkTypExpr(typ1) || checkTypExpr(typ2)
+ | Ptyp_tuple(types) => List.exists(checkTypExpr, types)
+ | Ptyp_constr({txt: longident}, types) =>
+ switch longident {
+ | Lident(ident) => ident == name
+ | _ => false
+ } ||
+ List.exists(checkTypExpr, types)
+ | Ptyp_alias(typ, _) => checkTypExpr(typ)
+ | Ptyp_variant(rowFields, _, _) => List.exists(checkRowFields, rowFields)
+ | Ptyp_poly(_, typ) => checkTypExpr(typ)
+ }
+
+ and checkRowFields = rowField =>
+ switch rowField {
+ | Rtag(_, _, _, types) => List.exists(checkTypExpr, types)
+ | Rinherit(typexpr) => checkTypExpr(typexpr)
+ }
+
+ checkKind(typeDeclaration.ptype_kind)
+ }
+
+ let filterReasonRawLiteral = attrs => List.filter(attr =>
+ switch attr {
+ | ({Location.txt: "reason.raw_literal"}, _) => false
+ | _ => true
+ }
+ , attrs)
+
+ let stringLiteralMapper = stringData => {
+ let isSameLocation = (l1, l2) => {
+ open Location
+ l1.loc_start.pos_cnum === l2.loc_start.pos_cnum
+ }
+
+ let remainingStringData = stringData
+ open Ast_mapper
+ {
+ ...default_mapper,
+ expr: (mapper, expr) =>
+ switch expr.pexp_desc {
+ | Pexp_constant(Pconst_string(_txt, None)) =>
+ switch List.find_opt(
+ ((_stringData, stringLoc)) => isSameLocation(stringLoc, expr.pexp_loc),
+ remainingStringData,
+ ) {
+ | Some(stringData, _) =>
+ let stringData = {
+ let attr = List.find_opt(attr =>
+ switch attr {
+ | ({Location.txt: "reason.raw_literal"}, _) => true
+ | _ => false
+ }
+ , expr.pexp_attributes)
+ switch attr {
+ | Some(
+ _,
+ PStr(list{{
+ pstr_desc: Pstr_eval({pexp_desc: Pexp_constant(Pconst_string(raw, _))}, _),
+ }}),
+ ) => raw
+ | _ => (@doesNotRaise String.sub)(stringData, 1, String.length(stringData) - 2)
+ }
+ }
+
+ {
+ ...expr,
+ pexp_attributes: filterReasonRawLiteral(expr.pexp_attributes),
+ pexp_desc: Pexp_constant(Pconst_string(stringData, None)),
+ }
+ | None => default_mapper.expr(mapper, expr)
+ }
+ | _ => default_mapper.expr(mapper, expr)
+ },
+ }
+ }
+
+ let normalize = {
+ open Ast_mapper
+ {
+ ...default_mapper,
+ attributes: (mapper, attrs) => attrs |> List.filter(attr =>
+ switch attr {
+ | (
+ {Location.txt: "reason.preserve_braces" | "explicit_arity" | "implicity_arity"},
+ _,
+ ) => false
+ | _ => true
+ }
+ ) |> default_mapper.attributes(mapper),
+ pat: (mapper, p) =>
+ switch p.ppat_desc {
+ | Ppat_open({txt: longidentOpen}, pattern) =>
+ let p = rewritePpatOpen(longidentOpen, pattern)
+ default_mapper.pat(mapper, p)
+ | _ => default_mapper.pat(mapper, p)
+ },
+ expr: (mapper, expr) =>
+ switch expr.pexp_desc {
+ | Pexp_constant(Pconst_string(txt, None)) =>
+ let raw = escapeStringContents(txt)
+ let s = Parsetree.Pconst_string(raw, None)
+ let expr = Ast_helper.Exp.constant(~attrs=expr.pexp_attributes, ~loc=expr.pexp_loc, s)
+
+ expr
+ | Pexp_constant(Pconst_string(txt, tag)) =>
+ let s = Parsetree.Pconst_string(escapeTemplateLiteral(txt), tag)
+ Ast_helper.Exp.constant(
+ ~attrs=mapper.attributes(mapper, expr.pexp_attributes),
+ ~loc=expr.pexp_loc,
+ s,
+ )
+ | Pexp_function(cases) =>
+ let loc = switch (cases, List.rev(cases)) {
+ | (list{first, ..._}, list{last, ..._}) => {
+ ...first.pc_lhs.ppat_loc,
+ loc_end: last.pc_rhs.pexp_loc.loc_end,
+ }
+ | _ => Location.none
+ }
+
+ Ast_helper.Exp.fun_(
+ ~loc,
+ Asttypes.Nolabel,
+ None,
+ Ast_helper.Pat.var(Location.mknoloc("x")),
+ Ast_helper.Exp.match_(
+ ~loc,
+ Ast_helper.Exp.ident(Location.mknoloc(Longident.Lident("x"))),
+ default_mapper.cases(mapper, cases),
+ ),
+ )
+ | Pexp_apply(
+ {pexp_desc: Pexp_ident({txt: Longident.Lident("!")})},
+ list{(Asttypes.Nolabel, operand)},
+ ) =>
+ /* turn `!foo` into `foo.contents` */
+ Ast_helper.Exp.field(
+ ~loc=expr.pexp_loc,
+ ~attrs=expr.pexp_attributes,
+ operand,
+ Location.mknoloc(Longident.Lident("contents")),
+ )
+ | Pexp_apply(
+ {pexp_desc: Pexp_ident({txt: Longident.Lident("##")})} as op,
+ list{
+ (Asttypes.Nolabel, lhs),
+ (Nolabel, {pexp_desc: Pexp_constant(Pconst_string(txt, None))} as stringExpr),
+ },
+ ) =>
+ let ident = Ast_helper.Exp.ident(
+ ~loc=stringExpr.pexp_loc,
+ Location.mkloc(Longident.Lident(txt), stringExpr.pexp_loc),
+ )
+
+ Ast_helper.Exp.apply(
+ ~loc=expr.pexp_loc,
+ ~attrs=expr.pexp_attributes,
+ op,
+ list{(Asttypes.Nolabel, lhs), (Nolabel, ident)},
+ )
+ | Pexp_apply(
+ {pexp_desc: Pexp_ident({txt: Longident.Lident("@@")})},
+ list{(Asttypes.Nolabel, callExpr), (Nolabel, argExpr)},
+ ) =>
+ Ast_helper.Exp.apply(
+ mapper.expr(mapper, callExpr),
+ list{(Asttypes.Nolabel, mapper.expr(mapper, argExpr))},
+ )
+ | Pexp_apply(
+ {pexp_desc: Pexp_ident({txt: Longident.Lident("@")})},
+ list{(Nolabel, arg1), (Nolabel, arg2)},
+ ) =>
+ let listConcat = Longident.Ldot(Longident.Lident("List"), "append")
+ Ast_helper.Exp.apply(
+ Ast_helper.Exp.ident(Location.mknoloc(listConcat)),
+ list{(Nolabel, mapper.expr(mapper, arg1)), (Nolabel, mapper.expr(mapper, arg2))},
+ )
+ | Pexp_match(
+ condition,
+ list{
+ {
+ pc_lhs: {ppat_desc: Ppat_construct({txt: Longident.Lident("true")}, None)},
+ pc_rhs: thenExpr,
+ },
+ {
+ pc_lhs: {ppat_desc: Ppat_construct({txt: Longident.Lident("false")}, None)},
+ pc_rhs: elseExpr,
+ },
+ },
+ ) =>
+ let ternaryMarker = (Location.mknoloc("ns.ternary"), Parsetree.PStr(list{}))
+ Ast_helper.Exp.ifthenelse(
+ ~loc=expr.pexp_loc,
+ ~attrs=list{ternaryMarker, ...expr.pexp_attributes},
+ default_mapper.expr(mapper, condition),
+ default_mapper.expr(mapper, thenExpr),
+ Some(default_mapper.expr(mapper, elseExpr)),
+ )
+ | _ => default_mapper.expr(mapper, expr)
+ },
+ structure_item: (mapper, structureItem) =>
+ switch structureItem.pstr_desc {
+ /* heuristic: if we have multiple type declarations, mark them recursive */
+ | Pstr_type(recFlag, typeDeclarations) =>
+ let flag = switch typeDeclarations {
+ | list{td} =>
+ if looksLikeRecursiveTypeDeclaration(td) {
+ Asttypes.Recursive
+ } else {
+ Asttypes.Nonrecursive
+ }
+ | _ => recFlag
+ }
+
+ {
+ ...structureItem,
+ pstr_desc: Pstr_type(
+ flag,
+ List.map(
+ typeDeclaration => default_mapper.type_declaration(mapper, typeDeclaration),
+ typeDeclarations,
+ ),
+ ),
+ }
+ | _ => default_mapper.structure_item(mapper, structureItem)
+ },
+ signature_item: (mapper, signatureItem) =>
+ switch signatureItem.psig_desc {
+ /* heuristic: if we have multiple type declarations, mark them recursive */
+ | Psig_type(recFlag, typeDeclarations) =>
+ let flag = switch typeDeclarations {
+ | list{td} =>
+ if looksLikeRecursiveTypeDeclaration(td) {
+ Asttypes.Recursive
+ } else {
+ Asttypes.Nonrecursive
+ }
+ | _ => recFlag
+ }
+
+ {
+ ...signatureItem,
+ psig_desc: Psig_type(
+ flag,
+ List.map(
+ typeDeclaration => default_mapper.type_declaration(mapper, typeDeclaration),
+ typeDeclarations,
+ ),
+ ),
+ }
+ | _ => default_mapper.signature_item(mapper, signatureItem)
+ },
+ value_binding: (mapper, vb) =>
+ switch vb {
+ | {
+ pvb_pat: {ppat_desc: Ppat_var(_)} as pat,
+ pvb_expr: {pexp_loc: expr_loc, pexp_desc: Pexp_constraint(expr, typ)},
+ } when expr_loc.loc_ghost =>
+ /* let t: t = (expr : t) -> let t: t = expr */
+ let typ = default_mapper.typ(mapper, typ)
+ let pat = default_mapper.pat(mapper, pat)
+ let expr = mapper.expr(mapper, expr)
+ let newPattern = Ast_helper.Pat.constraint_(
+ ~loc={...pat.ppat_loc, loc_end: typ.ptyp_loc.loc_end},
+ pat,
+ typ,
+ )
+ {
+ ...vb,
+ pvb_pat: newPattern,
+ pvb_expr: expr,
+ pvb_attributes: default_mapper.attributes(mapper, vb.pvb_attributes),
+ }
+ | {
+ pvb_pat: {ppat_desc: Ppat_constraint(pat, {ptyp_desc: Ptyp_poly(list{}, _)})},
+ pvb_expr: {pexp_loc: expr_loc, pexp_desc: Pexp_constraint(expr, typ)},
+ } when expr_loc.loc_ghost =>
+ /* let t: . t = (expr : t) -> let t: t = expr */
+ let typ = default_mapper.typ(mapper, typ)
+ let pat = default_mapper.pat(mapper, pat)
+ let expr = mapper.expr(mapper, expr)
+ let newPattern = Ast_helper.Pat.constraint_(
+ ~loc={...pat.ppat_loc, loc_end: typ.ptyp_loc.loc_end},
+ pat,
+ typ,
+ )
+ {
+ ...vb,
+ pvb_pat: newPattern,
+ pvb_expr: expr,
+ pvb_attributes: default_mapper.attributes(mapper, vb.pvb_attributes),
+ }
+ | _ => default_mapper.value_binding(mapper, vb)
+ },
+ }
+ }
+
+ let normalizeReasonArityStructure = (~forPrinter, s) => {
+ let mapper = makeReasonArityMapper(~forPrinter)
+ mapper.Ast_mapper.structure(mapper, s)
+ }
+
+ let normalizeReasonAritySignature = (~forPrinter, s) => {
+ let mapper = makeReasonArityMapper(~forPrinter)
+ mapper.Ast_mapper.signature(mapper, s)
+ }
+
+ let structure = s => normalize.Ast_mapper.structure(normalize, s)
+ let signature = s => normalize.Ast_mapper.signature(normalize, s)
+
+ let replaceStringLiteralStructure = (stringData, structure) => {
+ let mapper = stringLiteralMapper(stringData)
+ mapper.Ast_mapper.structure(mapper, structure)
+ }
+
+ let replaceStringLiteralSignature = (stringData, signature) => {
+ let mapper = stringLiteralMapper(stringData)
+ mapper.Ast_mapper.signature(mapper, signature)
+ }
+}
+
+module OcamlParser = Parser
+
+module Parser = {
+ type mode = ParseForTypeChecker | Default
+
+ type regionStatus = Report | Silent
+
+ type t = {
+ mode: mode,
+ mutable scanner: Scanner.t,
+ mutable token: Token.t,
+ mutable startPos: Lexing.position,
+ mutable endPos: Lexing.position,
+ mutable prevEndPos: Lexing.position,
+ mutable breadcrumbs: list<(Grammar.t, Lexing.position)>,
+ mutable errors: list<Reporting.parseError>,
+ mutable diagnostics: list<Diagnostics.t>,
+ mutable comments: list<Comment.t>,
+ mutable regions: list<ref<regionStatus>>,
+ }
+
+ let err = (~startPos=?, ~endPos=?, p, error) => {
+ let d = Diagnostics.make(
+ ~filename=p.scanner.filename,
+ ~startPos=switch startPos {
+ | Some(pos) => pos
+ | None => p.startPos
+ },
+ ~endPos=switch endPos {
+ | Some(pos) => pos
+ | None => p.endPos
+ },
+ error,
+ )
+
+ try if List.hd(p.regions).contents == Report {
+ p.diagnostics = list{d, ...p.diagnostics}
+ List.hd(p.regions) := Silent
+ } catch {
+ | Failure(_) => ()
+ }
+ }
+
+ let beginRegion = p => p.regions = list{ref(Report), ...p.regions}
+ let endRegion = p =>
+ try p.regions = List.tl(p.regions) catch {
+ | Failure(_) => ()
+ }
+
+ /* Advance to the next non-comment token and store any encountered comment
+ * in the parser's state. Every comment contains the end position of its
+ * previous token to facilitate comment interleaving */
+ let rec next = (~prevEndPos=?, p) => {
+ let prevEndPos = switch prevEndPos {
+ | Some(pos) => pos
+ | None => p.endPos
+ }
+ let (startPos, endPos, token) = Scanner.scan(p.scanner)
+ switch token {
+ | Comment(c) =>
+ Comment.setPrevTokEndPos(c, p.endPos)
+ p.comments = list{c, ...p.comments}
+ p.prevEndPos = p.endPos
+ p.endPos = endPos
+ next(~prevEndPos, p)
+ | _ =>
+ p.token = token
+
+ /* p.prevEndPos <- prevEndPos; */
+ p.prevEndPos = prevEndPos
+ p.startPos = startPos
+ p.endPos = endPos
+ }
+ }
+
+ let checkProgress = (~prevEndPos, ~result, p) =>
+ if p.endPos === prevEndPos {
+ None
+ } else {
+ Some(result)
+ }
+
+ let make = (~mode=ParseForTypeChecker, src, filename) => {
+ let scanner = Scanner.make(Bytes.of_string(src), filename)
+ let parserState = {
+ mode: mode,
+ scanner: scanner,
+ token: Token.Eof,
+ startPos: Lexing.dummy_pos,
+ prevEndPos: Lexing.dummy_pos,
+ endPos: Lexing.dummy_pos,
+ breadcrumbs: list{},
+ errors: list{},
+ diagnostics: list{},
+ comments: list{},
+ regions: list{ref(Report)},
+ }
+ parserState.scanner.err = (~startPos, ~endPos, error) => {
+ let diagnostic = Diagnostics.make(~filename, ~startPos, ~endPos, error)
+
+ parserState.diagnostics = list{diagnostic, ...parserState.diagnostics}
+ }
+ next(parserState)
+ parserState
+ }
+
+ let leaveBreadcrumb = (p, circumstance) => {
+ let crumb = (circumstance, p.startPos)
+ p.breadcrumbs = list{crumb, ...p.breadcrumbs}
+ }
+
+ let eatBreadcrumb = p =>
+ switch p.breadcrumbs {
+ | list{} => ()
+ | list{_, ...crumbs} => p.breadcrumbs = crumbs
+ }
+
+ let optional = (p, token) =>
+ if p.token == token {
+ let () = next(p)
+ true
+ } else {
+ false
+ }
+
+ let expect = (~grammar=?, token, p) =>
+ if p.token == token {
+ next(p)
+ } else {
+ let error = Diagnostics.expected(~grammar?, p.prevEndPos, token)
+ err(~startPos=p.prevEndPos, p, error)
+ }
+
+ /* Don't use immutable copies here, it trashes certain heuristics
+ * in the ocaml compiler, resulting in massive slowdowns of the parser */
+ let lookahead = (p, callback) => {
+ let err = p.scanner.err
+ let ch = p.scanner.ch
+ let offset = p.scanner.offset
+ let rdOffset = p.scanner.rdOffset
+ let lineOffset = p.scanner.lineOffset
+ let lnum = p.scanner.lnum
+ let mode = p.scanner.mode
+ let token = p.token
+ let startPos = p.startPos
+ let endPos = p.endPos
+ let prevEndPos = p.prevEndPos
+ let breadcrumbs = p.breadcrumbs
+ let errors = p.errors
+ let diagnostics = p.diagnostics
+ let comments = p.comments
+
+ let res = callback(p)
+
+ p.scanner.err = err
+ p.scanner.ch = ch
+ p.scanner.offset = offset
+ p.scanner.rdOffset = rdOffset
+ p.scanner.lineOffset = lineOffset
+ p.scanner.lnum = lnum
+ p.scanner.mode = mode
+ p.token = token
+ p.startPos = startPos
+ p.endPos = endPos
+ p.prevEndPos = prevEndPos
+ p.breadcrumbs = breadcrumbs
+ p.errors = errors
+ p.diagnostics = diagnostics
+ p.comments = comments
+
+ res
+ }
+}
+
+module NapkinScript = {
+ let mkLoc = (startLoc, endLoc) => {
+ open Location
+ {
+ loc_start: startLoc,
+ loc_end: endLoc,
+ loc_ghost: false,
+ }
+ }
+
+ module Recover = {
+ type action = option<unit> /* None is abort, Some () is retry */
+
+ let defaultExpr = () => {
+ let id = Location.mknoloc("napkinscript.exprhole")
+ Ast_helper.Exp.mk(Pexp_extension(id, PStr(list{})))
+ }
+
+ let defaultType = () => {
+ let id = Location.mknoloc("napkinscript.typehole")
+ Ast_helper.Typ.extension((id, PStr(list{})))
+ }
+
+ let defaultPattern = () => {
+ let id = Location.mknoloc("napkinscript.patternhole")
+ Ast_helper.Pat.extension((id, PStr(list{})))
+ }
+ /* Ast_helper.Pat.any () */
+
+ let defaultModuleExpr = () => Ast_helper.Mod.structure(list{})
+ let defaultModuleType = () => Ast_helper.Mty.signature(list{})
+
+ let recoverEqualGreater = p => {
+ Parser.expect(EqualGreater, p)
+ switch p.Parser.token {
+ | MinusGreater => Parser.next(p)
+ | _ => ()
+ }
+ }
+
+ let shouldAbortListParse = p => {
+ let rec check = breadcrumbs =>
+ switch breadcrumbs {
+ | list{} => false
+ | list{(grammar, _), ...rest} =>
+ if Grammar.isPartOfList(grammar, p.Parser.token) {
+ true
+ } else {
+ check(rest)
+ }
+ }
+
+ check(p.breadcrumbs)
+ }
+ }
+
+ module ErrorMessages = {
+ let listPatternSpread = "List pattern matches only supports one `...` spread, at the end.
+Explanation: a list spread at the tail is efficient, but a spread in the middle would create new list{s}; out of performance concern, our pattern matching currently guarantees to never create new intermediate data."
+
+ let recordPatternSpread = "Record's `...` spread is not supported in pattern matches.
+Explanation: you can't collect a subset of a record's field into its own record, since a record needs an explicit declaration and that subset wouldn't have one.
+Solution: you need to pull out each field you want explicitly."
+
+ @live let recordPatternUnderscore = "Record patterns only support one `_`, at the end."
+
+ let arrayPatternSpread = "Array's `...` spread is not supported in pattern matches.
+Explanation: such spread would create a subarray; out of performance concern, our pattern matching currently guarantees to never create new intermediate data.
+Solution: if it's to validate the first few elements, use a `when` clause + Array size check + `get` checks on the current pattern. If it's to obtain a subarray, use `Array.sub` or `Belt.Array.slice`."
+
+ let arrayExprSpread = "Arrays can't use the `...` spread currently. Please use `concat` or other Array helpers."
+
+ let recordExprSpread = "Records can only have one `...` spread, at the beginning.
+Explanation: since records have a known, fixed shape, a spread like `{a, ...b}` wouldn't make sense, as `b` would override every field of `a` anyway."
+
+ let listExprSpread = "Lists can only have one `...` spread, and at the end.
+Explanation: lists are singly-linked list, where a node contains a value and points to the next node. `list{a, ...bc}` efficiently creates a new item and links `bc` as its next nodes. `[...bc, a]` would be expensive, as it'd need to traverse `bc` and prepend each item to `a` one by one. We therefore disallow such syntax sugar.
+Solution: directly use `concat`."
+
+ let variantIdent = "A polymorphic variant (e.g. #id) must start with an alphabetical letter."
+ }
+
+ let jsxAttr = (Location.mknoloc("JSX"), Parsetree.PStr(list{}))
+ let uncurryAttr = (Location.mknoloc("bs"), Parsetree.PStr(list{}))
+ let ternaryAttr = (Location.mknoloc("ns.ternary"), Parsetree.PStr(list{}))
+ let makeBracesAttr = loc => (Location.mkloc("ns.braces", loc), Parsetree.PStr(list{}))
+
+ type typDefOrExt =
+ | TypeDef({recFlag: Asttypes.rec_flag, types: list<Parsetree.type_declaration>})
+ | TypeExt(Parsetree.type_extension)
+
+ type labelledParameter =
+ | TermParameter({
+ uncurried: bool,
+ attrs: Parsetree.attributes,
+ label: Asttypes.arg_label,
+ expr: option<Parsetree.expression>,
+ pat: Parsetree.pattern,
+ pos: Lexing.position,
+ })
+ | TypeParameter({
+ uncurried: bool,
+ attrs: Parsetree.attributes,
+ locs: list<Location.loc<string>>,
+ pos: Lexing.position,
+ })
+
+ type recordPatternItem =
+ | PatUnderscore
+ | PatField((Ast_helper.lid, Parsetree.pattern))
+
+ type context =
+ | OrdinaryExpr
+ | TernaryTrueBranchExpr
+ | WhenExpr
+
+ let getClosingToken = x =>
+ switch x {
+ | Token.Lparen => Token.Rparen
+ | Lbrace => Rbrace
+ | Lbracket => Rbracket
+ | _ => assert false
+ }
+
+ let rec goToClosing = (closingToken, state) =>
+ switch (state.Parser.token, closingToken) {
+ | (Rparen, Token.Rparen) | (Rbrace, Rbrace) | (Rbracket, Rbracket) =>
+ Parser.next(state)
+ ()
+ | ((Token.Lbracket | Lparen | Lbrace) as t, _) =>
+ Parser.next(state)
+ goToClosing(getClosingToken(t), state)
+ goToClosing(closingToken, state)
+ | (Rparen | Token.Rbrace | Rbracket | Eof, _) => () /* TODO: how do we report errors here? */
+ | _ =>
+ Parser.next(state)
+ goToClosing(closingToken, state)
+ }
+
+ /* Madness */
+ let isEs6ArrowExpression = (~inTernary, p) => Parser.lookahead(p, state =>
+ switch state.Parser.token {
+ | Lident(_) | List | Underscore =>
+ Parser.next(state)
+ switch state.Parser.token {
+ /* Don't think that this is valid
+ * Imagine: let x = (a: int)
+ * This is a parenthesized expression with a type constraint, wait for
+ * the arrow */
+ /* | Colon when not inTernary -> true */
+ | EqualGreater => true
+ | _ => false
+ }
+ | Lparen =>
+ let prevEndPos = state.prevEndPos
+ Parser.next(state)
+ switch state.token {
+ | Rparen =>
+ Parser.next(state)
+ switch state.Parser.token {
+ | Colon when !inTernary => true
+ | EqualGreater => true
+ | _ => false
+ }
+ | Dot /* uncurried */ => true
+ | Tilde => true
+ | Backtick => false /* (` always indicates the start of an expr, can't be es6 parameter */
+ | _ =>
+ goToClosing(Rparen, state)
+ switch state.Parser.token {
+ | EqualGreater => true
+ /* | Lbrace TODO: detect missing =>, is this possible? */
+ | Colon when !inTernary => true
+ | Rparen => /* imagine having something as :
+ * switch colour {
+ * | Red
+ * when l == l'
+ * || (&Clflags.classic && (l == Nolabel && !is_optional(l'))) => (t1, t2)
+ * We'll arrive at the outer rparen just before the =>.
+ * This is not an es6 arrow.
+ * */
+ false
+ | _ =>
+ Parser.next(state)
+ /* error recovery, peek at the next token,
+ * (elements, providerId] => {
+ * in the example above, we have an unbalanced ] here
+ */
+ switch state.Parser.token {
+ | EqualGreater when state.startPos.pos_lnum === prevEndPos.pos_lnum => true
+ | _ => false
+ }
+ }
+ }
+ | _ => false
+ }
+ )
+
+ let isEs6ArrowFunctor = p => Parser.lookahead(p, state =>
+ switch state.Parser.token {
+ /* | Uident _ | Underscore -> */
+ /* Parser.next state; */
+ /* begin match state.Parser.token with */
+ /* | EqualGreater -> true */
+ /* | _ -> false */
+ /* end */
+ | Lparen =>
+ Parser.next(state)
+ switch state.token {
+ | Rparen =>
+ Parser.next(state)
+ switch state.token {
+ | Colon | EqualGreater => true
+ | _ => false
+ }
+ | _ =>
+ goToClosing(Rparen, state)
+ switch state.Parser.token {
+ | EqualGreater | Lbrace => true
+ | Colon => true
+ | _ => false
+ }
+ }
+ | _ => false
+ }
+ )
+
+ let isEs6ArrowType = p => Parser.lookahead(p, state =>
+ switch state.Parser.token {
+ | Lparen =>
+ Parser.next(state)
+ switch state.Parser.token {
+ | Rparen =>
+ Parser.next(state)
+ switch state.Parser.token {
+ | EqualGreater => true
+ | _ => false
+ }
+ | Tilde | Dot => true
+ | _ =>
+ goToClosing(Rparen, state)
+ switch state.Parser.token {
+ | EqualGreater => true
+ | _ => false
+ }
+ }
+ | Tilde => true
+ | _ => false
+ }
+ )
+
+ let buildLongident = words =>
+ switch List.rev(words) {
+ | list{} => assert false
+ | list{hd, ...tl} => List.fold_left((p, s) => Longident.Ldot(p, s), Lident(hd), tl)
+ }
+
+ let makeInfixOperator = (p, token, startPos, endPos) => {
+ let stringifiedToken = if token == Token.MinusGreater {
+ "|."
+ } else if token == Token.PlusPlus {
+ "^"
+ } else if token == Token.BangEqual {
+ "<>"
+ } else if token == Token.BangEqualEqual {
+ "!="
+ } else if token == Token.Equal {
+ /* TODO: could have a totally different meaning like x->fooSet(y) */
+ Parser.err(~startPos, ~endPos, p, Diagnostics.message("Did you mean `==` here?"))
+ "="
+ } else if token == Token.EqualEqual {
+ "="
+ } else if token == Token.EqualEqualEqual {
+ "=="
+ } else {
+ Token.toString(token)
+ }
+
+ let loc = mkLoc(startPos, endPos)
+ let operator = Location.mkloc(Longident.Lident(stringifiedToken), loc)
+
+ Ast_helper.Exp.ident(~loc, operator)
+ }
+
+ let negateString = s =>
+ if String.length(s) > 0 && @doesNotRaise String.get(s, 0) == '-' {
+ (@doesNotRaise String.sub)(s, 1, String.length(s) - 1)
+ } else {
+ "-" ++ s
+ }
+
+ let makeUnaryExpr = (startPos, tokenEnd, token, operand) =>
+ switch (token, operand.Parsetree.pexp_desc) {
+ | (Token.Plus | PlusDot, Pexp_constant(Pconst_integer(_) | Pconst_float(_))) => operand
+ | (Minus, Pexp_constant(Pconst_integer(n, m))) => {
+ ...operand,
+ pexp_desc: Pexp_constant(Pconst_integer(negateString(n), m)),
+ }
+ | (Minus | MinusDot, Pexp_constant(Pconst_float(n, m))) => {
+ ...operand,
+ pexp_desc: Pexp_constant(Pconst_float(negateString(n), m)),
+ }
+ | (Token.Plus | PlusDot | Minus | MinusDot, _) =>
+ let tokenLoc = mkLoc(startPos, tokenEnd)
+ let operator = "~" ++ Token.toString(token)
+ Ast_helper.Exp.apply(
+ ~loc=mkLoc(startPos, operand.Parsetree.pexp_loc.loc_end),
+ Ast_helper.Exp.ident(~loc=tokenLoc, Location.mkloc(Longident.Lident(operator), tokenLoc)),
+ list{(Nolabel, operand)},
+ )
+ | (Token.Bang, _) =>
+ let tokenLoc = mkLoc(startPos, tokenEnd)
+ Ast_helper.Exp.apply(
+ ~loc=mkLoc(startPos, operand.Parsetree.pexp_loc.loc_end),
+ Ast_helper.Exp.ident(~loc=tokenLoc, Location.mkloc(Longident.Lident("not"), tokenLoc)),
+ list{(Nolabel, operand)},
+ )
+ | _ => operand
+ }
+
+ let makeListExpression = (loc, seq, extOpt) => {
+ let rec handleSeq = x =>
+ switch x {
+ | list{} =>
+ switch extOpt {
+ | Some(ext) => ext
+ | None =>
+ let loc = {...loc, Location.loc_ghost: true}
+ let nil = Location.mkloc(Longident.Lident("[]"), loc)
+ Ast_helper.Exp.construct(~loc, nil, None)
+ }
+ | list{e1, ...el} =>
+ let exp_el = handleSeq(el)
+ let loc = mkLoc(e1.Parsetree.pexp_loc.Location.loc_start, exp_el.pexp_loc.loc_end)
+
+ let arg = Ast_helper.Exp.tuple(~loc, list{e1, exp_el})
+ Ast_helper.Exp.construct(~loc, Location.mkloc(Longident.Lident("::"), loc), Some(arg))
+ }
+
+ let expr = handleSeq(seq)
+ {...expr, pexp_loc: loc}
+ }
+
+ let makeListPattern = (loc, seq, ext_opt) => {
+ let rec handle_seq = x =>
+ switch x {
+ | list{} =>
+ let base_case = switch ext_opt {
+ | Some(ext) => ext
+ | None =>
+ let loc = {...loc, Location.loc_ghost: true}
+ let nil = {Location.txt: Longident.Lident("[]"), loc: loc}
+ Ast_helper.Pat.construct(~loc, nil, None)
+ }
+
+ base_case
+ | list{p1, ...pl} =>
+ let pat_pl = handle_seq(pl)
+ let loc = mkLoc(p1.Parsetree.ppat_loc.loc_start, pat_pl.ppat_loc.loc_end)
+ let arg = Ast_helper.Pat.mk(~loc, Ppat_tuple(list{p1, pat_pl}))
+ Ast_helper.Pat.mk(
+ ~loc,
+ Ppat_construct(Location.mkloc(Longident.Lident("::"), loc), Some(arg)),
+ )
+ }
+
+ handle_seq(seq)
+ }
+
+ /* {"foo": bar} -> Js.t({. foo: bar})
+ * {.. "foo": bar} -> Js.t({.. foo: bar})
+ * {..} -> Js.t({..}) */
+ let makeBsObjType = (~attrs, ~loc, ~closed, rows) => {
+ let obj = Ast_helper.Typ.object_(~loc, rows, closed)
+ let jsDotTCtor = Location.mkloc(Longident.Ldot(Longident.Lident("Js"), "t"), loc)
+
+ Ast_helper.Typ.constr(~loc, ~attrs, jsDotTCtor, list{obj})
+ }
+
+ /* TODO: diagnostic reporting */
+ let lidentOfPath = longident =>
+ switch Longident.flatten(longident) |> List.rev {
+ | list{} => ""
+ | list{ident, ..._} => ident
+ }
+
+ let makeNewtypes = (~attrs, ~loc, newtypes, exp) => {
+ let expr = List.fold_right(
+ (newtype, exp) => Ast_helper.Exp.mk(~loc, Pexp_newtype(newtype, exp)),
+ newtypes,
+ exp,
+ )
+ {...expr, pexp_attributes: attrs}
+ }
+
+ /* locally abstract types syntax sugar
+ * Transforms
+ * let f: type t u v. = (foo : list<(t, u, v)>) => ...
+ * into
+ * let f = (type t u v. foo : list<(t, u, v)>) => ...
+ */
+ let wrapTypeAnnotation = (~loc, newtypes, core_type, body) => {
+ let exp = makeNewtypes(
+ ~attrs=list{},
+ ~loc,
+ newtypes,
+ Ast_helper.Exp.constraint_(~loc, body, core_type),
+ )
+
+ let typ = Ast_helper.Typ.poly(
+ ~loc,
+ newtypes,
+ Ast_helper.Typ.varify_constructors(newtypes, core_type),
+ )
+
+ (exp, typ)
+ }
+
+ @ocaml.doc(
+ "
+ * process the occurrence of _ in the arguments of a function application
+ * replace _ with a new variable, currently __x, in the arguments
+ * return a wrapping function that wraps ((__x) => ...) around an expression
+ * e.g. foo(_, 3) becomes (__x) => foo(__x, 3)
+ "
+ )
+ let processUnderscoreApplication = args => {
+ open Parsetree
+ let exp_question = ref(None)
+ let hidden_var = "__x"
+ let check_arg = ((lab, exp) as arg) =>
+ switch exp.pexp_desc {
+ | Pexp_ident({txt: Lident("_")} as id) =>
+ let new_id = Location.mkloc(Longident.Lident(hidden_var), id.loc)
+ let new_exp = Ast_helper.Exp.mk(Pexp_ident(new_id), ~loc=exp.pexp_loc)
+ exp_question := Some(new_exp)
+ (lab, new_exp)
+ | _ => arg
+ }
+
+ let args = List.map(check_arg, args)
+ let wrap = exp_apply =>
+ switch exp_question.contents {
+ | Some({pexp_loc: loc}) =>
+ let pattern = Ast_helper.Pat.mk(Ppat_var(Location.mkloc(hidden_var, loc)), ~loc)
+ Ast_helper.Exp.mk(Pexp_fun(Nolabel, None, pattern, exp_apply), ~loc)
+ | None => exp_apply
+ }
+
+ (args, wrap)
+ }
+
+ let rec parseLident = p => {
+ let recoverLident = p =>
+ if Token.isKeyword(p.Parser.token) && p.Parser.prevEndPos.pos_lnum === p.startPos.pos_lnum {
+ Parser.err(p, Diagnostics.lident(p.Parser.token))
+ Parser.next(p)
+ None
+ } else {
+ let rec loop = p =>
+ if !Recover.shouldAbortListParse(p) {
+ Parser.next(p)
+ loop(p)
+ }
+
+ Parser.next(p)
+ loop(p)
+ switch p.Parser.token {
+ | Lident(_) => Some()
+ | _ => None
+ }
+ }
+
+ let startPos = p.Parser.startPos
+ switch p.Parser.token {
+ | Lident(ident) =>
+ Parser.next(p)
+ let loc = mkLoc(startPos, p.prevEndPos)
+ (ident, loc)
+ | List =>
+ Parser.next(p)
+ let loc = mkLoc(startPos, p.prevEndPos)
+ ("list", loc)
+ | _ =>
+ switch recoverLident(p) {
+ | Some() => parseLident(p)
+ | None => ("_", mkLoc(startPos, p.prevEndPos))
+ }
+ }
+ }
+
+ let parseIdent = (~msg, ~startPos, p) =>
+ switch p.Parser.token {
+ | Lident(ident) | Uident(ident) =>
+ Parser.next(p)
+ let loc = mkLoc(startPos, p.prevEndPos)
+ (ident, loc)
+ | List =>
+ Parser.next(p)
+ let loc = mkLoc(startPos, p.prevEndPos)
+ ("list", loc)
+ | _token =>
+ Parser.err(p, Diagnostics.message(msg))
+ Parser.next(p)
+ ("_", mkLoc(startPos, p.prevEndPos))
+ }
+
+ let parseHashIdent = (~startPos, p) => {
+ Parser.expect(Hash, p)
+ parseIdent(~startPos, ~msg=ErrorMessages.variantIdent, p)
+ }
+
+ /* Ldot (Ldot (Lident "Foo", "Bar"), "baz") */
+ let parseValuePath = p => {
+ let startPos = p.Parser.startPos
+ let rec aux = (p, path) =>
+ switch p.Parser.token {
+ | List => Longident.Ldot(path, "list")
+ | Lident(ident) => Longident.Ldot(path, ident)
+ | Uident(uident) =>
+ Parser.next(p)
+ Parser.expect(Dot, p)
+ aux(p, Ldot(path, uident))
+ | token =>
+ Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs))
+ Longident.Lident("_")
+ }
+
+ let ident = switch p.Parser.token {
+ | List => Longident.Lident("list")
+ | Lident(ident) => Longident.Lident(ident)
+ | Uident(ident) =>
+ Parser.next(p)
+ Parser.expect(Dot, p)
+ aux(p, Lident(ident))
+ | token =>
+ Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs))
+ Longident.Lident("_")
+ }
+
+ Parser.next(p)
+ Location.mkloc(ident, mkLoc(startPos, p.prevEndPos))
+ }
+
+ let parseValuePathTail = (p, startPos, ident) => {
+ let rec loop = (p, path) =>
+ switch p.Parser.token {
+ | Lident(ident) =>
+ Parser.next(p)
+ Location.mkloc(Longident.Ldot(path, ident), mkLoc(startPos, p.prevEndPos))
+ | List =>
+ Parser.next(p)
+ Location.mkloc(Longident.Ldot(path, "list"), mkLoc(startPos, p.prevEndPos))
+ | Uident(ident) =>
+ Parser.next(p)
+ Parser.expect(Dot, p)
+ loop(p, Longident.Ldot(path, ident))
+ | token =>
+ Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs))
+ Location.mknoloc(path)
+ }
+
+ loop(p, ident)
+ }
+
+ let parseModuleLongIdentTail = (~lowercase, p, startPos, ident) => {
+ let rec loop = (p, acc) =>
+ switch p.Parser.token {
+ | List when lowercase =>
+ Parser.next(p)
+ let lident = Longident.Ldot(acc, "list")
+ Location.mkloc(lident, mkLoc(startPos, p.prevEndPos))
+ | Lident(ident) when lowercase =>
+ Parser.next(p)
+ let lident = Longident.Ldot(acc, ident)
+ Location.mkloc(lident, mkLoc(startPos, p.prevEndPos))
+ | Uident(ident) =>
+ Parser.next(p)
+ let endPos = p.prevEndPos
+ let lident = Longident.Ldot(acc, ident)
+ switch p.Parser.token {
+ | Dot =>
+ Parser.next(p)
+ loop(p, lident)
+ | _ => Location.mkloc(lident, mkLoc(startPos, endPos))
+ }
+ | t =>
+ Parser.err(p, Diagnostics.uident(t))
+ Location.mkloc(acc, mkLoc(startPos, p.prevEndPos))
+ }
+
+ loop(p, ident)
+ }
+
+ /* Parses module identifiers:
+ Foo
+ Foo.Bar */
+ let parseModuleLongIdent = (~lowercase, p) => {
+ /* Parser.leaveBreadcrumb p Reporting.ModuleLongIdent; */
+ let startPos = p.Parser.startPos
+ let moduleIdent = switch p.Parser.token {
+ | List when lowercase =>
+ let loc = mkLoc(startPos, p.endPos)
+ Parser.next(p)
+ Location.mkloc(Longident.Lident("list"), loc)
+ | Lident(ident) when lowercase =>
+ let loc = mkLoc(startPos, p.endPos)
+ let lident = Longident.Lident(ident)
+ Parser.next(p)
+ Location.mkloc(lident, loc)
+ | Uident(ident) =>
+ let lident = Longident.Lident(ident)
+ let endPos = p.endPos
+ Parser.next(p)
+ switch p.Parser.token {
+ | Dot =>
+ Parser.next(p)
+ parseModuleLongIdentTail(~lowercase, p, startPos, lident)
+ | _ => Location.mkloc(lident, mkLoc(startPos, endPos))
+ }
+ | t =>
+ Parser.err(p, Diagnostics.uident(t))
+ Location.mkloc(Longident.Lident("_"), mkLoc(startPos, p.prevEndPos))
+ }
+
+ /* Parser.eatBreadcrumb p; */
+ moduleIdent
+ }
+
+ /* `window.location` or `Math` or `Foo.Bar` */
+ let parseIdentPath = p => {
+ let rec loop = (p, acc) =>
+ switch p.Parser.token {
+ | Uident(ident) | Lident(ident) =>
+ Parser.next(p)
+ let lident = Longident.Ldot(acc, ident)
+ switch p.Parser.token {
+ | Dot =>
+ Parser.next(p)
+ loop(p, lident)
+ | _ => lident
+ }
+ | _t => acc
+ }
+
+ switch p.Parser.token {
+ | Lident(ident) | Uident(ident) =>
+ Parser.next(p)
+ switch p.Parser.token {
+ | Dot =>
+ Parser.next(p)
+ loop(p, Longident.Lident(ident))
+ | _ => Longident.Lident(ident)
+ }
+ | _ => Longident.Lident("_")
+ }
+ }
+
+ let verifyJsxOpeningClosingName = (p, nameExpr) => {
+ let closing = switch p.Parser.token {
+ | Lident(lident) =>
+ Parser.next(p)
+ Longident.Lident(lident)
+ | Uident(_) => parseModuleLongIdent(~lowercase=false, p).txt
+ | _ => Longident.Lident("")
+ }
+
+ switch nameExpr.Parsetree.pexp_desc {
+ | Pexp_ident(openingIdent) =>
+ let opening = {
+ let withoutCreateElement =
+ Longident.flatten(openingIdent.txt) |> List.filter(s => s != "createElement")
+
+ switch Longident.unflatten(withoutCreateElement) {
+ | Some(li) => li
+ | None => Longident.Lident("")
+ }
+ }
+
+ opening == closing
+ | _ => assert false
+ }
+ }
+
+ let string_of_pexp_ident = nameExpr =>
+ switch nameExpr.Parsetree.pexp_desc {
+ | Pexp_ident(openingIdent) =>
+ Longident.flatten(openingIdent.txt)
+ |> List.filter(s => s != "createElement")
+ |> String.concat(".")
+ | _ => ""
+ }
+
+ /* open-def ::=
+ * | open module-path
+ * | open! module-path */
+ let parseOpenDescription = (~attrs, p) => {
+ Parser.leaveBreadcrumb(p, Grammar.OpenDescription)
+ let startPos = p.Parser.startPos
+ Parser.expect(Open, p)
+ let override = if Parser.optional(p, Token.Bang) {
+ Asttypes.Override
+ } else {
+ Asttypes.Fresh
+ }
+
+ let modident = parseModuleLongIdent(~lowercase=false, p)
+ let loc = mkLoc(startPos, p.prevEndPos)
+ Parser.eatBreadcrumb(p)
+ Ast_helper.Opn.mk(~loc, ~attrs, ~override, modident)
+ }
+
+ let hexValue = x =>
+ switch x {
+ | '0' .. '9' => Char.code(x) - 48
+ | 'A' .. 'Z' => Char.code(x) - 55
+ | 'a' .. 'z' => Char.code(x) - 97
+ | _ => 16
+ }
+
+ let parseStringLiteral = s => {
+ let len = String.length(s)
+ let b = Buffer.create(String.length(s))
+
+ let rec loop = i =>
+ if i == len {
+ ()
+ } else {
+ let c = String.unsafe_get(s, i)
+ switch c {
+ | '\\' as c =>
+ let nextIx = i + 1
+ if nextIx < len {
+ let nextChar = String.unsafe_get(s, nextIx)
+ switch nextChar {
+ | 'n' =>
+ Buffer.add_char(b, '\n')
+ loop(nextIx + 1)
+ | 'r' =>
+ Buffer.add_char(b, '\r')
+ loop(nextIx + 1)
+ | 'b' =>
+ Buffer.add_char(b, '\b')
+ loop(nextIx + 1)
+ | 't' =>
+ Buffer.add_char(b, '\t')
+ loop(nextIx + 1)
+ | '\\' as c =>
+ Buffer.add_char(b, c)
+ loop(nextIx + 1)
+ | ' ' as c =>
+ Buffer.add_char(b, c)
+ loop(nextIx + 1)
+ | '\'' as c =>
+ Buffer.add_char(b, c)
+ loop(nextIx + 1)
+ | '"' as c =>
+ Buffer.add_char(b, c)
+ loop(nextIx + 1)
+ | '0' .. '9' =>
+ if nextIx + 2 < len {
+ let c0 = nextChar
+ let c1 = String.unsafe_get(s, nextIx + 1)
+ let c2 = String.unsafe_get(s, nextIx + 2)
+ let c =
+ 100 * (Char.code(c0) - 48) + 10 * (Char.code(c1) - 48) + (Char.code(c2) - 48)
+
+ if c < 0 || c > 255 {
+ Buffer.add_char(b, '\\')
+ Buffer.add_char(b, c0)
+ Buffer.add_char(b, c1)
+ Buffer.add_char(b, c2)
+ loop(nextIx + 3)
+ } else {
+ Buffer.add_char(b, Char.unsafe_chr(c))
+ loop(nextIx + 3)
+ }
+ } else {
+ Buffer.add_char(b, '\\')
+ Buffer.add_char(b, nextChar)
+ loop(nextIx + 1)
+ }
+ | 'o' =>
+ if nextIx + 3 < len {
+ let c0 = String.unsafe_get(s, nextIx + 1)
+ let c1 = String.unsafe_get(s, nextIx + 2)
+ let c2 = String.unsafe_get(s, nextIx + 3)
+ let c = 64 * (Char.code(c0) - 48) + 8 * (Char.code(c1) - 48) + (Char.code(c2) - 48)
+
+ if c < 0 || c > 255 {
+ Buffer.add_char(b, '\\')
+ Buffer.add_char(b, '0')
+ Buffer.add_char(b, c0)
+ Buffer.add_char(b, c1)
+ Buffer.add_char(b, c2)
+ loop(nextIx + 4)
+ } else {
+ Buffer.add_char(b, Char.unsafe_chr(c))
+ loop(nextIx + 4)
+ }
+ } else {
+ Buffer.add_char(b, '\\')
+ Buffer.add_char(b, nextChar)
+ loop(nextIx + 1)
+ }
+ | 'x' as c =>
+ if nextIx + 2 < len {
+ let c0 = String.unsafe_get(s, nextIx + 1)
+ let c1 = String.unsafe_get(s, nextIx + 2)
+ let c = 16 * hexValue(c0) + hexValue(c1)
+ if c < 0 || c > 255 {
+ Buffer.add_char(b, '\\')
+ Buffer.add_char(b, 'x')
+ Buffer.add_char(b, c0)
+ Buffer.add_char(b, c1)
+ loop(nextIx + 3)
+ } else {
+ Buffer.add_char(b, Char.unsafe_chr(c))
+ loop(nextIx + 3)
+ }
+ } else {
+ Buffer.add_char(b, '\\')
+ Buffer.add_char(b, c)
+ loop(nextIx + 2)
+ }
+ | _ =>
+ Buffer.add_char(b, c)
+ Buffer.add_char(b, nextChar)
+ loop(nextIx + 1)
+ }
+ } else {
+ Buffer.add_char(b, c)
+ ()
+ }
+ | c =>
+ Buffer.add_char(b, c)
+ loop(i + 1)
+ }
+ }
+
+ loop(0)
+ Buffer.contents(b)
+ }
+
+ let parseTemplateStringLiteral = s => {
+ let len = String.length(s)
+ let b = Buffer.create(len)
+
+ let rec loop = i =>
+ if i < len {
+ let c = String.unsafe_get(s, i)
+ switch c {
+ | '\\' as c =>
+ if i + 1 < len {
+ let nextChar = String.unsafe_get(s, i + 1)
+ switch nextChar {
+ | '\\' as c =>
+ Buffer.add_char(b, c)
+ loop(i + 2)
+ | '$' as c =>
+ Buffer.add_char(b, c)
+ loop(i + 2)
+ | '`' as c =>
+ Buffer.add_char(b, c)
+ loop(i + 2)
+ | c =>
+ Buffer.add_char(b, '\\')
+ Buffer.add_char(b, c)
+ loop(i + 2)
+ }
+ } else {
+ Buffer.add_char(b, c)
+ }
+
+ | c =>
+ Buffer.add_char(b, c)
+ loop(i + 1)
+ }
+ } else {
+ ()
+ }
+
+ loop(0)
+ Buffer.contents(b)
+ }
+
+ /* constant ::= integer-literal */
+ /* ∣ float-literal */
+ /* ∣ string-literal */
+ let parseConstant = p => {
+ let isNegative = switch p.Parser.token {
+ | Token.Minus =>
+ Parser.next(p)
+ true
+ | Plus =>
+ Parser.next(p)
+ false
+ | _ => false
+ }
+
+ let constant = switch p.Parser.token {
+ | Int({i, suffix}) =>
+ let intTxt = if isNegative {
+ "-" ++ i
+ } else {
+ i
+ }
+ Parsetree.Pconst_integer(intTxt, suffix)
+ | Float({f, suffix}) =>
+ let floatTxt = if isNegative {
+ "-" ++ f
+ } else {
+ f
+ }
+ Parsetree.Pconst_float(floatTxt, suffix)
+ | String(s) =>
+ let txt = if p.mode == ParseForTypeChecker {
+ parseStringLiteral(s)
+ } else {
+ s
+ }
+
+ Pconst_string(txt, None)
+ | Character(c) => Pconst_char(c)
+ | token =>
+ Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs))
+ Pconst_string("", None)
+ }
+
+ Parser.next(p)
+ constant
+ }
+
+ let parseCommaDelimitedRegion = (p, ~grammar, ~closing, ~f) => {
+ Parser.leaveBreadcrumb(p, grammar)
+ let rec loop = nodes =>
+ switch f(p) {
+ | Some(node) =>
+ switch p.Parser.token {
+ | Comma =>
+ Parser.next(p)
+ loop(list{node, ...nodes})
+ | token when token == closing || token == Eof => List.rev(list{node, ...nodes})
+ | _ =>
+ if !(p.token == Eof || (p.token == closing || Recover.shouldAbortListParse(p))) {
+ Parser.expect(Comma, p)
+ }
+ if p.token == Semicolon {
+ Parser.next(p)
+ }
+ loop(list{node, ...nodes})
+ }
+ | None =>
+ if p.token == Eof || (p.token == closing || Recover.shouldAbortListParse(p)) {
+ List.rev(nodes)
+ } else {
+ Parser.err(p, Diagnostics.unexpected(p.token, p.breadcrumbs))
+ Parser.next(p)
+ loop(nodes)
+ }
+ }
+
+ let nodes = loop(list{})
+ Parser.eatBreadcrumb(p)
+ nodes
+ }
+
+ let parseCommaDelimitedReversedList = (p, ~grammar, ~closing, ~f) => {
+ Parser.leaveBreadcrumb(p, grammar)
+ let rec loop = nodes =>
+ switch f(p) {
+ | Some(node) =>
+ switch p.Parser.token {
+ | Comma =>
+ Parser.next(p)
+ loop(list{node, ...nodes})
+ | token when token == closing || token == Eof => list{node, ...nodes}
+ | _ =>
+ if !(p.token == Eof || (p.token == closing || Recover.shouldAbortListParse(p))) {
+ Parser.expect(Comma, p)
+ }
+ if p.token == Semicolon {
+ Parser.next(p)
+ }
+ loop(list{node, ...nodes})
+ }
+ | None =>
+ if p.token == Eof || (p.token == closing || Recover.shouldAbortListParse(p)) {
+ nodes
+ } else {
+ Parser.err(p, Diagnostics.unexpected(p.token, p.breadcrumbs))
+ Parser.next(p)
+ loop(nodes)
+ }
+ }
+
+ let nodes = loop(list{})
+ Parser.eatBreadcrumb(p)
+ nodes
+ }
+
+ let parseDelimitedRegion = (p, ~grammar, ~closing, ~f) => {
+ Parser.leaveBreadcrumb(p, grammar)
+ let rec loop = nodes =>
+ switch f(p) {
+ | Some(node) => loop(list{node, ...nodes})
+ | None =>
+ if p.Parser.token == Token.Eof || (p.token == closing || Recover.shouldAbortListParse(p)) {
+ List.rev(nodes)
+ } else {
+ Parser.err(p, Diagnostics.unexpected(p.token, p.breadcrumbs))
+ Parser.next(p)
+ loop(nodes)
+ }
+ }
+
+ let nodes = loop(list{})
+ Parser.eatBreadcrumb(p)
+ nodes
+ }
+
+ let parseRegion = (p, ~grammar, ~f) => {
+ Parser.leaveBreadcrumb(p, grammar)
+ let rec loop = nodes =>
+ switch f(p) {
+ | Some(node) => loop(list{node, ...nodes})
+ | None =>
+ if p.Parser.token == Token.Eof || Recover.shouldAbortListParse(p) {
+ List.rev(nodes)
+ } else {
+ Parser.err(p, Diagnostics.unexpected(p.token, p.breadcrumbs))
+ Parser.next(p)
+ loop(nodes)
+ }
+ }
+
+ let nodes = loop(list{})
+ Parser.eatBreadcrumb(p)
+ nodes
+ }
+
+ /* let-binding ::= pattern = expr */
+ /* ∣ value-name { parameter } [: typexpr] [:> typexpr] = expr */
+ /* ∣ value-name : poly-typexpr = expr */
+
+ /* pattern ::= value-name */
+ /* ∣ _ */
+ /* ∣ constant */
+ /* ∣ pattern as value-name */
+ /* ∣ ( pattern ) */
+ /* ∣ ( pattern : typexpr ) */
+ /* ∣ pattern | pattern */
+ /* ∣ constr pattern */
+ /* ∣ #variant variant-pattern */
+ /* ∣ ##type */
+ /* ∣ / pattern { , pattern }+ / */
+ /* ∣ { field [: typexpr] [= pattern] { ; field [: typexpr] [= pattern] } [; _ ] [ ; ] } */
+ /* ∣ [ pattern { ; pattern } [ ; ] ] */
+ /* ∣ pattern :: pattern */
+ /* ∣ [| pattern { ; pattern } [ ; ] |] */
+ /* ∣ char-literal .. char-literal */
+ /* ∣ exception pattern */
+ let rec parsePattern = (~alias=true, ~or_=true, p) => {
+ let startPos = p.Parser.startPos
+ let attrs = parseAttributes(p)
+ let pat = switch p.Parser.token {
+ | (True | False) as token =>
+ let endPos = p.endPos
+ Parser.next(p)
+ let loc = mkLoc(startPos, endPos)
+ Ast_helper.Pat.construct(
+ ~loc,
+ Location.mkloc(Longident.Lident(Token.toString(token)), loc),
+ None,
+ )
+ | Int(_) | String(_) | Float(_) | Character(_) | Minus | Plus =>
+ let c = parseConstant(p)
+ switch p.token {
+ | DotDot =>
+ Parser.next(p)
+ let c2 = parseConstant(p)
+ Ast_helper.Pat.interval(~loc=mkLoc(startPos, p.prevEndPos), c, c2)
+ | _ => Ast_helper.Pat.constant(~loc=mkLoc(startPos, p.prevEndPos), c)
+ }
+ | Lparen =>
+ Parser.next(p)
+ switch p.token {
+ | Rparen =>
+ Parser.next(p)
+ let loc = mkLoc(startPos, p.prevEndPos)
+ let lid = Location.mkloc(Longident.Lident("()"), loc)
+ Ast_helper.Pat.construct(~loc, lid, None)
+ | _ =>
+ let pat = parseConstrainedPattern(p)
+ switch p.token {
+ | Comma =>
+ Parser.next(p)
+ parseTuplePattern(~attrs, ~first=pat, ~startPos, p)
+ | _ =>
+ Parser.expect(Rparen, p)
+ let loc = mkLoc(startPos, p.prevEndPos)
+ {...pat, ppat_loc: loc}
+ }
+ }
+ | Lbracket => parseArrayPattern(~attrs, p)
+ | Lbrace => parseRecordPattern(~attrs, p)
+ | Underscore =>
+ let endPos = p.endPos
+ let loc = mkLoc(startPos, endPos)
+ Parser.next(p)
+ Ast_helper.Pat.any(~loc, ~attrs, ())
+ | Lident(ident) =>
+ let endPos = p.endPos
+ let loc = mkLoc(startPos, endPos)
+ Parser.next(p)
+ Ast_helper.Pat.var(~loc, ~attrs, Location.mkloc(ident, loc))
+ | Uident(_) =>
+ let constr = parseModuleLongIdent(~lowercase=false, p)
+ switch p.Parser.token {
+ | Lparen => parseConstructorPatternArgs(p, constr, startPos, attrs)
+ | _ => Ast_helper.Pat.construct(~loc=constr.loc, ~attrs, constr, None)
+ }
+ | Hash =>
+ let (ident, loc) = parseHashIdent(~startPos, p)
+ switch p.Parser.token {
+ | Lparen => parseVariantPatternArgs(p, ident, startPos, attrs)
+ | _ => Ast_helper.Pat.variant(~loc, ~attrs, ident, None)
+ }
+ | HashHash =>
+ Parser.next(p)
+ let ident = parseValuePath(p)
+ let loc = mkLoc(startPos, ident.loc.loc_end)
+ Ast_helper.Pat.type_(~loc, ~attrs, ident)
+ | Exception =>
+ Parser.next(p)
+ let pat = parsePattern(~alias=false, ~or_=false, p)
+ let loc = mkLoc(startPos, p.prevEndPos)
+ Ast_helper.Pat.exception_(~loc, ~attrs, pat)
+ | Lazy =>
+ Parser.next(p)
+ let pat = parsePattern(~alias=false, ~or_=false, p)
+ let loc = mkLoc(startPos, p.prevEndPos)
+ Ast_helper.Pat.lazy_(~loc, ~attrs, pat)
+ | List =>
+ Parser.next(p)
+ switch p.token {
+ | Lbracket => parseListPattern(~startPos, ~attrs, p)
+ | _ =>
+ let loc = mkLoc(startPos, p.prevEndPos)
+ Ast_helper.Pat.var(~loc, ~attrs, Location.mkloc("list", loc))
+ }
+ | Module => parseModulePattern(~attrs, p)
+ | Percent =>
+ let extension = parseExtension(p)
+ let loc = mkLoc(startPos, p.prevEndPos)
+ Ast_helper.Pat.extension(~loc, ~attrs, extension)
+ | token =>
+ Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs))
+ switch skipTokensAndMaybeRetry(p, ~isStartOfGrammar=Grammar.isAtomicPatternStart) {
+ | None => Recover.defaultPattern()
+ | Some() => parsePattern(p)
+ }
+ }
+
+ let pat = if alias {
+ parseAliasPattern(~attrs, pat, p)
+ } else {
+ pat
+ }
+ if or_ {
+ parseOrPattern(pat, p)
+ } else {
+ pat
+ }
+ }
+
+ and skipTokensAndMaybeRetry = (p, ~isStartOfGrammar) =>
+ if Token.isKeyword(p.Parser.token) && p.Parser.prevEndPos.pos_lnum === p.startPos.pos_lnum {
+ Parser.next(p)
+ None
+ } else if Recover.shouldAbortListParse(p) {
+ if isStartOfGrammar(p.Parser.token) {
+ Parser.next(p)
+ Some()
+ } else {
+ None
+ }
+ } else {
+ Parser.next(p)
+ let rec loop = p =>
+ if !Recover.shouldAbortListParse(p) {
+ Parser.next(p)
+ loop(p)
+ }
+ loop(p)
+ if isStartOfGrammar(p.Parser.token) {
+ Some()
+ } else {
+ None
+ }
+ }
+
+ /* alias ::= pattern as lident */
+ and parseAliasPattern = (~attrs, pattern, p) =>
+ switch p.Parser.token {
+ | As =>
+ Parser.next(p)
+ let (name, loc) = parseLident(p)
+ let name = Location.mkloc(name, loc)
+ Ast_helper.Pat.alias(~loc={...pattern.ppat_loc, loc_end: p.prevEndPos}, ~attrs, pattern, name)
+ | _ => pattern
+ }
+
+ /* or ::= pattern | pattern
+ * precedence: Red | Blue | Green is interpreted as (Red | Blue) | Green */
+ and parseOrPattern = (pattern1, p) => {
+ let rec loop = pattern1 =>
+ switch p.Parser.token {
+ | Bar =>
+ Parser.next(p)
+ let pattern2 = parsePattern(~or_=false, p)
+ let loc = {
+ ...pattern1.Parsetree.ppat_loc,
+ loc_end: pattern2.ppat_loc.loc_end,
+ }
+ loop(Ast_helper.Pat.or_(~loc, pattern1, pattern2))
+ | _ => pattern1
+ }
+
+ loop(pattern1)
+ }
+
+ and parseNonSpreadPattern = (~msg, p) => {
+ let () = switch p.Parser.token {
+ | DotDotDot =>
+ Parser.err(p, Diagnostics.message(msg))
+ Parser.next(p)
+ | _ => ()
+ }
+
+ switch p.Parser.token {
+ | token when Grammar.isPatternStart(token) =>
+ let pat = parsePattern(p)
+ switch p.Parser.token {
+ | Colon =>
+ Parser.next(p)
+ let typ = parseTypExpr(p)
+ let loc = mkLoc(pat.ppat_loc.loc_start, typ.Parsetree.ptyp_loc.loc_end)
+ Some(Ast_helper.Pat.constraint_(~loc, pat, typ))
+ | _ => Some(pat)
+ }
+ | _ => None
+ }
+ }
+
+ and parseConstrainedPattern = p => {
+ let pat = parsePattern(p)
+ switch p.Parser.token {
+ | Colon =>
+ Parser.next(p)
+ let typ = parseTypExpr(p)
+ let loc = mkLoc(pat.ppat_loc.loc_start, typ.Parsetree.ptyp_loc.loc_end)
+ Ast_helper.Pat.constraint_(~loc, pat, typ)
+ | _ => pat
+ }
+ }
+
+ and parseConstrainedPatternRegion = p =>
+ switch p.Parser.token {
+ | token when Grammar.isPatternStart(token) => Some(parseConstrainedPattern(p))
+ | _ => None
+ }
+
+ /* field ::=
+ * | longident
+ * | longident : pattern
+ * | longident as lident
+ *
+ * row ::=
+ * | field ,
+ * | field , _
+ * | field , _,
+ */
+ and parseRecordPatternField = p => {
+ let startPos = p.Parser.startPos
+ let label = parseValuePath(p)
+ let pattern = switch p.Parser.token {
+ | Colon =>
+ Parser.next(p)
+ parsePattern(p)
+ | _ => Ast_helper.Pat.var(~loc=label.loc, Location.mkloc(Longident.last(label.txt), label.loc))
+ }
+
+ switch p.token {
+ | As =>
+ Parser.next(p)
+ let (name, loc) = parseLident(p)
+ let name = Location.mkloc(name, loc)
+ let aliasPattern = Ast_helper.Pat.alias(~loc=mkLoc(startPos, p.prevEndPos), pattern, name)
+
+ (Location.mkloc(label.txt, mkLoc(startPos, aliasPattern.ppat_loc.loc_end)), aliasPattern)
+ | _ => (label, pattern)
+ }
+ }
+
+ /* TODO: there are better representations than PatField|Underscore ? */
+ and parseRecordPatternItem = p =>
+ switch p.Parser.token {
+ | DotDotDot =>
+ Parser.next(p)
+ Some(true, PatField(parseRecordPatternField(p)))
+ | Uident(_) | Lident(_) => Some(false, PatField(parseRecordPatternField(p)))
+ | Underscore =>
+ Parser.next(p)
+ Some(false, PatUnderscore)
+ | _ => None
+ }
+
+ and parseRecordPattern = (~attrs, p) => {
+ let startPos = p.startPos
+ Parser.expect(Lbrace, p)
+ let rawFields = parseCommaDelimitedReversedList(
+ p,
+ ~grammar=PatternRecord,
+ ~closing=Rbrace,
+ ~f=parseRecordPatternItem,
+ )
+
+ Parser.expect(Rbrace, p)
+ let (fields, closedFlag) = {
+ let (rawFields, flag) = switch rawFields {
+ | list{(_hasSpread, PatUnderscore), ...rest} => (rest, Asttypes.Open)
+ | rawFields => (rawFields, Asttypes.Closed)
+ }
+
+ List.fold_left(((fields, flag), curr) => {
+ let (hasSpread, field) = curr
+ switch field {
+ | PatField(field) =>
+ if hasSpread {
+ let (_, pattern) = field
+ Parser.err(
+ ~startPos=pattern.Parsetree.ppat_loc.loc_start,
+ p,
+ Diagnostics.message(ErrorMessages.recordPatternSpread),
+ )
+ }
+ (list{field, ...fields}, flag)
+ | PatUnderscore => (fields, flag)
+ }
+ }, (list{}, flag), rawFields)
+ }
+
+ let loc = mkLoc(startPos, p.prevEndPos)
+ Ast_helper.Pat.record(~loc, ~attrs, fields, closedFlag)
+ }
+
+ and parseTuplePattern = (~attrs, ~first, ~startPos, p) => {
+ let patterns = parseCommaDelimitedRegion(
+ p,
+ ~grammar=Grammar.PatternList,
+ ~closing=Rparen,
+ ~f=parseConstrainedPatternRegion,
+ )
+
+ Parser.expect(Rparen, p)
+ let loc = mkLoc(startPos, p.prevEndPos)
+ Ast_helper.Pat.tuple(~loc, ~attrs, list{first, ...patterns})
+ }
+
+ and parsePatternRegion = p =>
+ switch p.Parser.token {
+ | DotDotDot =>
+ Parser.next(p)
+ Some(true, parseConstrainedPattern(p))
+ | token when Grammar.isPatternStart(token) => Some(false, parseConstrainedPattern(p))
+ | _ => None
+ }
+
+ and parseModulePattern = (~attrs, p) => {
+ let startPos = p.Parser.startPos
+ Parser.expect(Module, p)
+ Parser.expect(Lparen, p)
+ let uident = switch p.token {
+ | Uident(uident) =>
+ let loc = mkLoc(p.startPos, p.endPos)
+ Parser.next(p)
+ Location.mkloc(uident, loc)
+ | _ =>
+ /* TODO: error recovery */
+ Location.mknoloc("_")
+ }
+
+ switch p.token {
+ | Colon =>
+ let colonStart = p.Parser.startPos
+ Parser.next(p)
+ let packageTypAttrs = parseAttributes(p)
+ let packageType = parsePackageType(~startPos=colonStart, ~attrs=packageTypAttrs, p)
+ Parser.expect(Rparen, p)
+ let loc = mkLoc(startPos, p.prevEndPos)
+ let unpack = Ast_helper.Pat.unpack(~loc=uident.loc, uident)
+ Ast_helper.Pat.constraint_(~loc, ~attrs, unpack, packageType)
+ | _ =>
+ Parser.expect(Rparen, p)
+ let loc = mkLoc(startPos, p.prevEndPos)
+ Ast_helper.Pat.unpack(~loc, ~attrs, uident)
+ }
+ }
+
+ and parseListPattern = (~startPos, ~attrs, p) => {
+ Parser.expect(Lbracket, p)
+ let listPatterns = parseCommaDelimitedReversedList(
+ p,
+ ~grammar=Grammar.PatternOcamlList,
+ ~closing=Rbracket,
+ ~f=parsePatternRegion,
+ )
+
+ Parser.expect(Rbracket, p)
+ let loc = mkLoc(startPos, p.prevEndPos)
+ let filterSpread = ((hasSpread, pattern)) =>
+ if hasSpread {
+ Parser.err(
+ ~startPos=pattern.Parsetree.ppat_loc.loc_start,
+ p,
+ Diagnostics.message(ErrorMessages.listPatternSpread),
+ )
+ pattern
+ } else {
+ pattern
+ }
+
+ switch listPatterns {
+ | list{(true, pattern), ...patterns} =>
+ let patterns = patterns |> List.map(filterSpread) |> List.rev
+ let pat = makeListPattern(loc, patterns, Some(pattern))
+ {...pat, ppat_loc: loc, ppat_attributes: attrs}
+ | patterns =>
+ let patterns = patterns |> List.map(filterSpread) |> List.rev
+ let pat = makeListPattern(loc, patterns, None)
+ {...pat, ppat_loc: loc, ppat_attributes: attrs}
+ }
+ }
+
+ and parseArrayPattern = (~attrs, p) => {
+ let startPos = p.startPos
+ Parser.expect(Lbracket, p)
+ let patterns = parseCommaDelimitedRegion(
+ p,
+ ~grammar=Grammar.PatternList,
+ ~closing=Rbracket,
+ ~f=parseNonSpreadPattern(~msg=ErrorMessages.arrayPatternSpread),
+ )
+
+ Parser.expect(Rbracket, p)
+ let loc = mkLoc(startPos, p.prevEndPos)
+ Ast_helper.Pat.array(~loc, ~attrs, patterns)
+ }
+
+ and parseConstructorPatternArgs = (p, constr, startPos, attrs) => {
+ let lparen = p.startPos
+ Parser.expect(Lparen, p)
+ let args = parseCommaDelimitedRegion(
+ p,
+ ~grammar=Grammar.PatternList,
+ ~closing=Rparen,
+ ~f=parseConstrainedPatternRegion,
+ )
+
+ Parser.expect(Rparen, p)
+ let args = switch args {
+ | list{} =>
+ let loc = mkLoc(lparen, p.prevEndPos)
+ Some(Ast_helper.Pat.construct(~loc, Location.mkloc(Longident.Lident("()"), loc), None))
+ | list{{ppat_desc: Ppat_tuple(_)} as pat} as patterns =>
+ if p.mode == ParseForTypeChecker {
+ /* Some(1, 2) for type-checker */
+ Some(pat)
+ } else {
+ /* Some((1, 2)) for printer */
+ Some(Ast_helper.Pat.tuple(~loc=mkLoc(lparen, p.endPos), patterns))
+ }
+ | list{pattern} => Some(pattern)
+ | patterns => Some(Ast_helper.Pat.tuple(~loc=mkLoc(lparen, p.endPos), patterns))
+ }
+
+ Ast_helper.Pat.construct(~loc=mkLoc(startPos, p.prevEndPos), ~attrs, constr, args)
+ }
+
+ and parseVariantPatternArgs = (p, ident, startPos, attrs) => {
+ let lparen = p.startPos
+ Parser.expect(Lparen, p)
+ let patterns = parseCommaDelimitedRegion(
+ p,
+ ~grammar=Grammar.PatternList,
+ ~closing=Rparen,
+ ~f=parseConstrainedPatternRegion,
+ )
+ let args = switch patterns {
+ | list{{ppat_desc: Ppat_tuple(_)} as pat} as patterns =>
+ if p.mode == ParseForTypeChecker {
+ /* #ident(1, 2) for type-checker */
+ Some(pat)
+ } else {
+ /* #ident((1, 2)) for printer */
+ Some(Ast_helper.Pat.tuple(~loc=mkLoc(lparen, p.endPos), patterns))
+ }
+ | list{pattern} => Some(pattern)
+ | patterns => Some(Ast_helper.Pat.tuple(~loc=mkLoc(lparen, p.endPos), patterns))
+ }
+
+ Parser.expect(Rparen, p)
+ Ast_helper.Pat.variant(~loc=mkLoc(startPos, p.prevEndPos), ~attrs, ident, args)
+ }
+
+ and parseExpr = (~context=OrdinaryExpr, p) => {
+ let expr = parseOperandExpr(~context, p)
+ let expr = parseBinaryExpr(~context, ~a=expr, p, 1)
+ parseTernaryExpr(expr, p)
+ }
+
+ /* expr ? expr : expr */
+ and parseTernaryExpr = (leftOperand, p) =>
+ switch p.Parser.token {
+ | Question =>
+ Parser.leaveBreadcrumb(p, Grammar.Ternary)
+ Parser.next(p)
+ let trueBranch = parseExpr(~context=TernaryTrueBranchExpr, p)
+ Parser.expect(Colon, p)
+ let falseBranch = parseExpr(p)
+ Parser.eatBreadcrumb(p)
+ let loc = {
+ ...leftOperand.Parsetree.pexp_loc,
+ loc_start: leftOperand.pexp_loc.loc_start,
+ loc_end: falseBranch.Parsetree.pexp_loc.loc_end,
+ }
+ Ast_helper.Exp.ifthenelse(
+ ~attrs=list{ternaryAttr},
+ ~loc,
+ leftOperand,
+ trueBranch,
+ Some(falseBranch),
+ )
+ | _ => leftOperand
+ }
+
+ and parseEs6ArrowExpression = (~parameters=?, p) => {
+ let startPos = p.Parser.startPos
+ Parser.leaveBreadcrumb(p, Grammar.Es6ArrowExpr)
+ let parameters = switch parameters {
+ | Some(params) => params
+ | None => parseParameters(p)
+ }
+
+ let returnType = switch p.Parser.token {
+ | Colon =>
+ Parser.next(p)
+ Some(parseTypExpr(~es6Arrow=false, p))
+ | _ => None
+ }
+
+ Parser.expect(EqualGreater, p)
+ let body = {
+ let expr = parseExpr(p)
+ switch returnType {
+ | Some(typ) =>
+ Ast_helper.Exp.constraint_(
+ ~loc=mkLoc(expr.pexp_loc.loc_start, typ.Parsetree.ptyp_loc.loc_end),
+ expr,
+ typ,
+ )
+ | None => expr
+ }
+ }
+
+ Parser.eatBreadcrumb(p)
+ let endPos = p.prevEndPos
+ let arrowExpr = List.fold_right((parameter, expr) =>
+ switch parameter {
+ | TermParameter({uncurried, attrs, label: lbl, expr: defaultExpr, pat, pos: startPos}) =>
+ let attrs = if uncurried {
+ list{uncurryAttr, ...attrs}
+ } else {
+ attrs
+ }
+ Ast_helper.Exp.fun_(~loc=mkLoc(startPos, endPos), ~attrs, lbl, defaultExpr, pat, expr)
+ | TypeParameter({uncurried, attrs, locs: newtypes, pos: startPos}) =>
+ let attrs = if uncurried {
+ list{uncurryAttr, ...attrs}
+ } else {
+ attrs
+ }
+ makeNewtypes(~attrs, ~loc=mkLoc(startPos, endPos), newtypes, expr)
+ }
+ , parameters, body)
+
+ {...arrowExpr, pexp_loc: {...arrowExpr.pexp_loc, loc_start: startPos}}
+ }
+
+ /*
+ * uncurried_parameter ::=
+ * | . parameter
+ *
+ * parameter ::=
+ * | pattern
+ * | pattern : type
+ * | ~ labelName
+ * | ~ labelName as pattern
+ * | ~ labelName as pattern : type
+ * | ~ labelName = expr
+ * | ~ labelName as pattern = expr
+ * | ~ labelName as pattern : type = expr
+ * | ~ labelName = ?
+ * | ~ labelName as pattern = ?
+ * | ~ labelName as pattern : type = ?
+ *
+ * labelName ::= lident
+ */
+ and parseParameter = p =>
+ if (
+ p.Parser.token == Token.Typ ||
+ (p.token == Tilde ||
+ (p.token == Dot || Grammar.isPatternStart(p.token)))
+ ) {
+ let startPos = p.Parser.startPos
+ let uncurried = Parser.optional(p, Token.Dot)
+ /* two scenarios:
+ * attrs ~lbl ...
+ * attrs pattern
+ * Attributes before a labelled arg indicate that they're on the whole arrow expr
+ * Otherwise it's part of the pattern
+ * */
+ let attrs = parseAttributes(p)
+ if p.Parser.token == Typ {
+ Parser.next(p)
+ let lidents = parseLidentList(p)
+ Some(TypeParameter({uncurried: uncurried, attrs: attrs, locs: lidents, pos: startPos}))
+ } else {
+ let (attrs, lbl, pat) = switch p.Parser.token {
+ | Tilde =>
+ Parser.next(p)
+ let (lblName, loc) = parseLident(p)
+ let propLocAttr = (Location.mkloc("ns.namedArgLoc", loc), Parsetree.PStr(list{}))
+ switch p.Parser.token {
+ | Comma | Equal | Rparen =>
+ let loc = mkLoc(startPos, p.prevEndPos)
+ (
+ attrs,
+ Asttypes.Labelled(lblName),
+ Ast_helper.Pat.var(~attrs=list{propLocAttr}, ~loc, Location.mkloc(lblName, loc)),
+ )
+ | Colon =>
+ let lblEnd = p.prevEndPos
+ Parser.next(p)
+ let typ = parseTypExpr(p)
+ let loc = mkLoc(startPos, lblEnd)
+ let pat = {
+ let pat = Ast_helper.Pat.var(~loc, Location.mkloc(lblName, loc))
+ let loc = mkLoc(startPos, p.prevEndPos)
+ Ast_helper.Pat.constraint_(~attrs=list{propLocAttr}, ~loc, pat, typ)
+ }
+ (attrs, Asttypes.Labelled(lblName), pat)
+ | As =>
+ Parser.next(p)
+ let pat = {
+ let pat = parseConstrainedPattern(p)
+ {...pat, ppat_attributes: list{propLocAttr, ...pat.ppat_attributes}}
+ }
+
+ (attrs, Asttypes.Labelled(lblName), pat)
+ | t =>
+ Parser.err(p, Diagnostics.unexpected(t, p.breadcrumbs))
+ let loc = mkLoc(startPos, p.prevEndPos)
+ (
+ attrs,
+ Asttypes.Labelled(lblName),
+ Ast_helper.Pat.var(~loc, Location.mkloc(lblName, loc)),
+ )
+ }
+ | _ =>
+ let pattern = parseConstrainedPattern(p)
+ let attrs = List.concat(list{attrs, pattern.ppat_attributes})
+ (list{}, Asttypes.Nolabel, {...pattern, ppat_attributes: attrs})
+ }
+
+ switch p.Parser.token {
+ | Equal =>
+ Parser.next(p)
+ let lbl = switch lbl {
+ | Asttypes.Labelled(lblName) => Asttypes.Optional(lblName)
+ | Asttypes.Optional(_) as lbl => lbl
+ | Asttypes.Nolabel => Asttypes.Nolabel
+ }
+
+ switch p.Parser.token {
+ | Question =>
+ Parser.next(p)
+ Some(
+ TermParameter({
+ uncurried: uncurried,
+ attrs: attrs,
+ label: lbl,
+ expr: None,
+ pat: pat,
+ pos: startPos,
+ }),
+ )
+ | _ =>
+ let expr = parseConstrainedOrCoercedExpr(p)
+ Some(
+ TermParameter({
+ uncurried: uncurried,
+ attrs: attrs,
+ label: lbl,
+ expr: Some(expr),
+ pat: pat,
+ pos: startPos,
+ }),
+ )
+ }
+ | _ =>
+ Some(
+ TermParameter({
+ uncurried: uncurried,
+ attrs: attrs,
+ label: lbl,
+ expr: None,
+ pat: pat,
+ pos: startPos,
+ }),
+ )
+ }
+ }
+ } else {
+ None
+ }
+
+ and parseParameterList = p => {
+ let parameters = parseCommaDelimitedRegion(
+ ~grammar=Grammar.ParameterList,
+ ~f=parseParameter,
+ ~closing=Rparen,
+ p,
+ )
+
+ Parser.expect(Rparen, p)
+ parameters
+ }
+
+ /* parameters ::=
+ * | _
+ * | lident
+ * | ()
+ * | (.)
+ * | ( parameter {, parameter} [,] )
+ */
+ and parseParameters = p => {
+ let startPos = p.Parser.startPos
+ switch p.Parser.token {
+ | Lident(ident) =>
+ Parser.next(p)
+ let loc = mkLoc(startPos, p.Parser.prevEndPos)
+ list{
+ TermParameter({
+ uncurried: false,
+ attrs: list{},
+ label: Asttypes.Nolabel,
+ expr: None,
+ pat: Ast_helper.Pat.var(~loc, Location.mkloc(ident, loc)),
+ pos: startPos,
+ }),
+ }
+ | List =>
+ Parser.next(p)
+ let loc = mkLoc(startPos, p.Parser.prevEndPos)
+ list{
+ TermParameter({
+ uncurried: false,
+ attrs: list{},
+ label: Asttypes.Nolabel,
+ expr: None,
+ pat: Ast_helper.Pat.var(~loc, Location.mkloc("list", loc)),
+ pos: startPos,
+ }),
+ }
+ | Underscore =>
+ Parser.next(p)
+ let loc = mkLoc(startPos, p.Parser.prevEndPos)
+ list{
+ TermParameter({
+ uncurried: false,
+ attrs: list{},
+ label: Asttypes.Nolabel,
+ expr: None,
+ pat: Ast_helper.Pat.any(~loc, ()),
+ pos: startPos,
+ }),
+ }
+ | Lparen =>
+ Parser.next(p)
+ switch p.Parser.token {
+ | Rparen =>
+ Parser.next(p)
+ let loc = mkLoc(startPos, p.Parser.prevEndPos)
+ let unitPattern = Ast_helper.Pat.construct(
+ ~loc,
+ Location.mkloc(Longident.Lident("()"), loc),
+ None,
+ )
+
+ list{
+ TermParameter({
+ uncurried: false,
+ attrs: list{},
+ label: Asttypes.Nolabel,
+ expr: None,
+ pat: unitPattern,
+ pos: startPos,
+ }),
+ }
+ | Dot =>
+ Parser.next(p)
+ switch p.token {
+ | Rparen =>
+ Parser.next(p)
+ let loc = mkLoc(startPos, p.Parser.prevEndPos)
+ let unitPattern = Ast_helper.Pat.construct(
+ ~loc,
+ Location.mkloc(Longident.Lident("()"), loc),
+ None,
+ )
+
+ list{
+ TermParameter({
+ uncurried: true,
+ attrs: list{},
+ label: Asttypes.Nolabel,
+ expr: None,
+ pat: unitPattern,
+ pos: startPos,
+ }),
+ }
+ | _ =>
+ switch parseParameterList(p) {
+ | list{
+ TermParameter({attrs, label: lbl, expr: defaultExpr, pat: pattern, pos: startPos}),
+ ...rest,
+ } => list{
+ TermParameter({
+ uncurried: true,
+ attrs: attrs,
+ label: lbl,
+ expr: defaultExpr,
+ pat: pattern,
+ pos: startPos,
+ }),
+ ...rest,
+ }
+ | parameters => parameters
+ }
+ }
+ | _ => parseParameterList(p)
+ }
+ | token =>
+ Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs))
+ list{}
+ }
+ }
+
+ and parseCoercedExpr = (~expr: Parsetree.expression, p) => {
+ Parser.expect(ColonGreaterThan, p)
+ let typ = parseTypExpr(p)
+ let loc = mkLoc(expr.pexp_loc.loc_start, p.prevEndPos)
+ Ast_helper.Exp.coerce(~loc, expr, None, typ)
+ }
+
+ and parseConstrainedOrCoercedExpr = p => {
+ let expr = parseExpr(p)
+ switch p.Parser.token {
+ | ColonGreaterThan => parseCoercedExpr(~expr, p)
+ | Colon =>
+ Parser.next(p)
+ switch p.token {
+ | _ =>
+ let typ = parseTypExpr(p)
+ let loc = mkLoc(expr.pexp_loc.loc_start, typ.ptyp_loc.loc_end)
+ let expr = Ast_helper.Exp.constraint_(~loc, expr, typ)
+ switch p.token {
+ | ColonGreaterThan => parseCoercedExpr(~expr, p)
+ | _ => expr
+ }
+ }
+ | _ => expr
+ }
+ }
+
+ and parseConstrainedExprRegion = p =>
+ switch p.Parser.token {
+ | token when Grammar.isExprStart(token) =>
+ let expr = parseExpr(p)
+ switch p.Parser.token {
+ | Colon =>
+ Parser.next(p)
+ let typ = parseTypExpr(p)
+ let loc = mkLoc(expr.pexp_loc.loc_start, typ.ptyp_loc.loc_end)
+ Some(Ast_helper.Exp.constraint_(~loc, expr, typ))
+ | _ => Some(expr)
+ }
+ | _ => None
+ }
+
+ /* Atomic expressions represent unambiguous expressions.
+ * This means that regardless of the context, these expressions
+ * are always interpreted correctly. */
+ and parseAtomicExpr = p => {
+ Parser.leaveBreadcrumb(p, Grammar.ExprOperand)
+ let startPos = p.Parser.startPos
+ let expr = switch p.Parser.token {
+ | (True | False) as token =>
+ Parser.next(p)
+ let loc = mkLoc(startPos, p.prevEndPos)
+ Ast_helper.Exp.construct(
+ ~loc,
+ Location.mkloc(Longident.Lident(Token.toString(token)), loc),
+ None,
+ )
+ | Int(_) | String(_) | Float(_) | Character(_) =>
+ let c = parseConstant(p)
+ let loc = mkLoc(startPos, p.prevEndPos)
+ Ast_helper.Exp.constant(~loc, c)
+ | Backtick =>
+ let expr = parseTemplateExpr(p)
+ {...expr, pexp_loc: mkLoc(startPos, p.prevEndPos)}
+ | Uident(_) | Lident(_) => parseValueOrConstructor(p)
+ | Hash => parsePolyVariantExpr(p)
+ | Lparen =>
+ Parser.next(p)
+ switch p.Parser.token {
+ | Rparen =>
+ Parser.next(p)
+ let loc = mkLoc(startPos, p.prevEndPos)
+ Ast_helper.Exp.construct(~loc, Location.mkloc(Longident.Lident("()"), loc), None)
+ | _t =>
+ let expr = parseConstrainedOrCoercedExpr(p)
+ switch p.token {
+ | Comma =>
+ Parser.next(p)
+ parseTupleExpr(~startPos, ~first=expr, p)
+ | _ =>
+ Parser.expect(Rparen, p)
+ expr
+ /* {expr with pexp_loc = mkLoc startPos p.prevEndPos}
+ * What does this location mean here? It means that when there's
+ * a parenthesized we keep the location here for whitespace interleaving.
+ * Without the closing paren in the location there will always be an extra
+ * line. For now we don't include it, because it does weird things
+ * when interleaved with comments. */
+ }
+ }
+ | List =>
+ Parser.next(p)
+ switch p.token {
+ | Lbracket => parseListExpr(~startPos, p)
+ | _ =>
+ let loc = mkLoc(startPos, p.prevEndPos)
+ Ast_helper.Exp.ident(~loc, Location.mkloc(Longident.Lident("list"), loc))
+ }
+ | Module =>
+ Parser.next(p)
+ parseFirstClassModuleExpr(~startPos, p)
+ | Lbracket => parseArrayExp(p)
+ | Lbrace => parseBracedOrRecordExpr(p)
+ | LessThan => parseJsx(p)
+ | Percent =>
+ let extension = parseExtension(p)
+ let loc = mkLoc(startPos, p.prevEndPos)
+ Ast_helper.Exp.extension(~loc, extension)
+ | Underscore as token =>
+ /* This case is for error recovery. Not sure if it's the correct place */
+ Parser.err(p, Diagnostics.lident(token))
+ Parser.next(p)
+ Recover.defaultExpr()
+ | token =>
+ let errPos = p.prevEndPos
+ switch skipTokensAndMaybeRetry(p, ~isStartOfGrammar=Grammar.isAtomicExprStart) {
+ | None =>
+ Parser.err(~startPos=errPos, p, Diagnostics.unexpected(token, p.breadcrumbs))
+ Recover.defaultExpr()
+ | Some() => parseAtomicExpr(p)
+ }
+ }
+
+ Parser.eatBreadcrumb(p)
+ expr
+ }
+
+ /* module(module-expr)
+ * module(module-expr : package-type) */
+ and parseFirstClassModuleExpr = (~startPos, p) => {
+ Parser.expect(Lparen, p)
+
+ let modExpr = parseModuleExpr(p)
+ let modEndLoc = p.prevEndPos
+ switch p.Parser.token {
+ | Colon =>
+ let colonStart = p.Parser.startPos
+ Parser.next(p)
+ let attrs = parseAttributes(p)
+ let packageType = parsePackageType(~startPos=colonStart, ~attrs, p)
+ Parser.expect(Rparen, p)
+ let loc = mkLoc(startPos, modEndLoc)
+ let firstClassModule = Ast_helper.Exp.pack(~loc, modExpr)
+ let loc = mkLoc(startPos, p.prevEndPos)
+ Ast_helper.Exp.constraint_(~loc, firstClassModule, packageType)
+ | _ =>
+ Parser.expect(Rparen, p)
+ let loc = mkLoc(startPos, p.prevEndPos)
+ Ast_helper.Exp.pack(~loc, modExpr)
+ }
+ }
+
+ and parseBracketAccess = (p, expr, startPos) => {
+ Parser.leaveBreadcrumb(p, Grammar.ExprArrayAccess)
+ let lbracket = p.startPos
+ Parser.next(p)
+ let stringStart = p.startPos
+ switch p.Parser.token {
+ | String(s) =>
+ Parser.next(p)
+ let stringEnd = p.prevEndPos
+ Parser.expect(Rbracket, p)
+ let rbracket = p.prevEndPos
+ let e = {
+ let identLoc = mkLoc(stringStart, stringEnd)
+ let loc = mkLoc(lbracket, rbracket)
+ Ast_helper.Exp.apply(
+ ~loc,
+ Ast_helper.Exp.ident(~loc, Location.mkloc(Longident.Lident("##"), loc)),
+ list{
+ (Nolabel, expr),
+ (
+ Nolabel,
+ Ast_helper.Exp.ident(~loc=identLoc, Location.mkloc(Longident.Lident(s), identLoc)),
+ ),
+ },
+ )
+ }
+
+ let e = parsePrimaryExpr(~operand=e, p)
+ let equalStart = p.startPos
+ switch p.token {
+ | Equal =>
+ Parser.next(p)
+ let equalEnd = p.prevEndPos
+ let rhsExpr = parseExpr(p)
+ let loc = mkLoc(startPos, rhsExpr.pexp_loc.loc_end)
+ let operatorLoc = mkLoc(equalStart, equalEnd)
+ Ast_helper.Exp.apply(
+ ~loc,
+ Ast_helper.Exp.ident(
+ ~loc=operatorLoc,
+ Location.mkloc(Longident.Lident("#="), operatorLoc),
+ ),
+ list{(Nolabel, e), (Nolabel, rhsExpr)},
+ )
+ | _ => e
+ }
+ | _ =>
+ let accessExpr = parseConstrainedOrCoercedExpr(p)
+ Parser.expect(Rbracket, p)
+ let rbracket = p.prevEndPos
+ let arrayLoc = mkLoc(lbracket, rbracket)
+ switch p.token {
+ | Equal =>
+ Parser.leaveBreadcrumb(p, ExprArrayMutation)
+ Parser.next(p)
+ let rhsExpr = parseExpr(p)
+ let arraySet = Location.mkloc(Longident.Ldot(Lident("Array"), "set"), arrayLoc)
+
+ let endPos = p.prevEndPos
+ let arraySet = Ast_helper.Exp.apply(
+ ~loc=mkLoc(startPos, endPos),
+ Ast_helper.Exp.ident(~loc=arrayLoc, arraySet),
+ list{(Nolabel, expr), (Nolabel, accessExpr), (Nolabel, rhsExpr)},
+ )
+
+ Parser.eatBreadcrumb(p)
+ arraySet
+ | _ =>
+ let endPos = p.prevEndPos
+ let e = Ast_helper.Exp.apply(
+ ~loc=mkLoc(startPos, endPos),
+ Ast_helper.Exp.ident(
+ ~loc=arrayLoc,
+ Location.mkloc(Longident.Ldot(Lident("Array"), "get"), arrayLoc),
+ ),
+ list{(Nolabel, expr), (Nolabel, accessExpr)},
+ )
+
+ Parser.eatBreadcrumb(p)
+ parsePrimaryExpr(~operand=e, p)
+ }
+ }
+ }
+
+ /* * A primary expression represents
+ * - atomic-expr
+ * - john.age
+ * - array[0]
+ * - applyFunctionTo(arg1, arg2)
+ *
+ * The "operand" represents the expression that is operated on
+ */
+ and parsePrimaryExpr = (~operand, ~noCall=false, p) => {
+ let startPos = operand.pexp_loc.loc_start
+ let rec loop = (p, expr) =>
+ switch p.Parser.token {
+ | Dot =>
+ Parser.next(p)
+ let lident = parseValuePath(p)
+ switch p.Parser.token {
+ | Equal when noCall == false =>
+ Parser.leaveBreadcrumb(p, Grammar.ExprSetField)
+ Parser.next(p)
+ let targetExpr = parseExpr(p)
+ let loc = mkLoc(startPos, p.prevEndPos)
+ let setfield = Ast_helper.Exp.setfield(~loc, expr, lident, targetExpr)
+ Parser.eatBreadcrumb(p)
+ setfield
+ | _ =>
+ let endPos = p.prevEndPos
+ let loc = mkLoc(startPos, endPos)
+ loop(p, Ast_helper.Exp.field(~loc, expr, lident))
+ }
+ | Lbracket when noCall == false && p.prevEndPos.pos_lnum === p.startPos.pos_lnum =>
+ parseBracketAccess(p, expr, startPos)
+ | Lparen when noCall == false && p.prevEndPos.pos_lnum === p.startPos.pos_lnum =>
+ loop(p, parseCallExpr(p, expr))
+ | Backtick when noCall == false && p.prevEndPos.pos_lnum === p.startPos.pos_lnum =>
+ switch expr.pexp_desc {
+ | Pexp_ident({txt: Longident.Lident(ident)}) => parseTemplateExpr(~prefix=ident, p)
+ | _ =>
+ Parser.err(
+ ~startPos=expr.pexp_loc.loc_start,
+ ~endPos=expr.pexp_loc.loc_end,
+ p,
+ Diagnostics.message(
+ "Tagged template literals are currently restricted to identifiers like: json`null`.",
+ ),
+ )
+ parseTemplateExpr(p)
+ }
+ | _ => expr
+ }
+
+ loop(p, operand)
+ }
+
+ /* a unary expression is an expression with only one operand and
+ * unary operator. Examples:
+ * -1
+ * !condition
+ * -. 1.6
+ */
+ and parseUnaryExpr = p => {
+ let startPos = p.Parser.startPos
+ switch p.Parser.token {
+ | (Minus | MinusDot | Plus | PlusDot | Bang) as token =>
+ Parser.leaveBreadcrumb(p, Grammar.ExprUnary)
+ let tokenEnd = p.endPos
+ Parser.next(p)
+ let operand = parseUnaryExpr(p)
+ let unaryExpr = makeUnaryExpr(startPos, tokenEnd, token, operand)
+ Parser.eatBreadcrumb(p)
+ unaryExpr
+ | _ => parsePrimaryExpr(~operand=parseAtomicExpr(p), p)
+ }
+ }
+
+ /* Represents an "operand" in a binary expression.
+ * If you have `a + b`, `a` and `b` both represent
+ * the operands of the binary expression with operator `+` */
+ and parseOperandExpr = (~context, p) => {
+ let startPos = p.Parser.startPos
+ let attrs = parseAttributes(p)
+ let expr = switch p.Parser.token {
+ | Assert =>
+ Parser.next(p)
+ let expr = parseUnaryExpr(p)
+ let loc = mkLoc(startPos, p.prevEndPos)
+ Ast_helper.Exp.assert_(~loc, expr)
+ | Lazy =>
+ Parser.next(p)
+ let expr = parseUnaryExpr(p)
+ let loc = mkLoc(startPos, p.prevEndPos)
+ Ast_helper.Exp.lazy_(~loc, expr)
+ | Try => parseTryExpression(p)
+ | If => parseIfExpression(p)
+ | For => parseForExpression(p)
+ | While => parseWhileExpression(p)
+ | Switch => parseSwitchExpression(p)
+ | _ =>
+ if (
+ context !== WhenExpr && isEs6ArrowExpression(~inTernary=context == TernaryTrueBranchExpr, p)
+ ) {
+ parseEs6ArrowExpression(p)
+ } else {
+ parseUnaryExpr(p)
+ }
+ }
+
+ /* let endPos = p.Parser.prevEndPos in */
+ {
+ ...expr,
+ pexp_attributes: List.concat(list{expr.Parsetree.pexp_attributes, attrs}),
+ /* pexp_loc = mkLoc startPos endPos */
+ }
+ }
+
+ /* a binary expression is an expression that combines two expressions with an
+ * operator. Examples:
+ * a + b
+ * f(x) |> g(y)
+ */
+ /* Precedence-climbing parser for binary operator chains.
+ * ~a: an optional, already-parsed left operand (when absent it is parsed
+ * with parseOperandExpr). `prec` is the minimum operator precedence this
+ * call is allowed to consume; any operator binding less tightly ends the
+ * loop and is left for an enclosing call. */
+ and parseBinaryExpr = (~context=OrdinaryExpr, ~a=?, p, prec) => {
+ let a = switch a {
+ | Some(e) => e
+ | None => parseOperandExpr(~context, p)
+ }
+
+ /* Repeatedly consume `<op> <rhs>` while the operator's precedence is >= prec. */
+ let rec loop = a => {
+ let token = p.Parser.token
+ let tokenPrec = switch token {
+ /* Can the minus be interpreted as a binary operator? Or is it a unary?
+ * let w = {
+ * x
+ * -10
+ * }
+ * vs
+ * let w = {
+ * width
+ * - gap
+ * }
+ *
+ * First case is unary, second is a binary operator.
+ * See Scanner.isBinaryOp */
+ | Minus | MinusDot | LessThan
+ when !Scanner.isBinaryOp(p.scanner.src, p.startPos.pos_cnum, p.endPos.pos_cnum) &&
+ p.startPos.pos_lnum > p.prevEndPos.pos_lnum => -1
+ | token => Token.precedence(token)
+ }
+
+ /* -1 (from the case above) or any lower-precedence token stops the chain. */
+ if tokenPrec < prec {
+ a
+ } else {
+ Parser.leaveBreadcrumb(p, Grammar.ExprBinaryAfterOp(token))
+ let startPos = p.startPos
+ Parser.next(p)
+ let endPos = p.prevEndPos
+ /* Parse the right operand with a strictly higher minimum precedence:
+ * equal-precedence operators therefore associate to the left. */
+ let b = parseBinaryExpr(~context, p, tokenPrec + 1)
+ let loc = mkLoc(a.Parsetree.pexp_loc.loc_start, b.pexp_loc.loc_end)
+ let expr = Ast_helper.Exp.apply(
+ ~loc,
+ makeInfixOperator(p, token, startPos, endPos),
+ list{(Nolabel, a), (Nolabel, b)},
+ )
+
+ loop(expr)
+ }
+ }
+
+ loop(a)
+ }
+
+ /* If we even need this, determines if < might be the start of jsx. Not 100% complete */
+ /* and isStartOfJsx p = */
+ /* Parser.lookahead p (fun p -> */
+ /* match p.Parser.token with */
+ /* | LessThan -> */
+ /* Parser.next p; */
+ /* begin match p.token with */
+ /* | GreaterThan (* <> *) -> true */
+ /* | Lident _ | Uident _ | List -> */
+ /* ignore (parseJsxName p); */
+ /* begin match p.token with */
+ /* | GreaterThan (*
*) -> true */
+ /* | Question (* true */
+ /* | Lident _ | List -> */
+ /* Parser.next p; */
+ /* begin match p.token with */
+ /* | Equal (* true */
+ /* | _ -> false (* TODO *) */
+ /* end */
+ /* | Forwardslash (* */
+ /* Parser.next p; */
+ /* begin match p.token with */
+ /* | GreaterThan (* *) -> true */
+ /* | _ -> false */
+ /* end */
+ /* | _ -> */
+ /* false */
+ /* end */
+ /* | _ -> false */
+ /* end */
+ /* | _ -> false */
+ /* ) */
+
+ /* Parses a template literal (backtick string with ${...} interpolations).
+ * The result is desugared into a chain of string concatenations using a
+ * hidden `^` operator. ~prefix is used as the string delimiter recorded in
+ * Pconst_string (e.g. for tagged templates like j`...`).
+ * Note: Scanner.setTemplateMode is re-entered before each Rbrace so the
+ * scanner resumes template lexing after an interpolated expression. */
+ and parseTemplateExpr = (~prefix="", p) => {
+ let hiddenOperator = {
+ let op = Location.mknoloc(Longident.Lident("^"))
+ Ast_helper.Exp.ident(op)
+ }
+
+ /* acc is the concatenation built so far; loop consumes the remaining
+ * TemplatePart/TemplateTail tokens. */
+ let rec loop = (acc, p) => {
+ let startPos = p.Parser.startPos
+ switch p.Parser.token {
+ | TemplateTail(txt) =>
+ Parser.next(p)
+ let loc = mkLoc(startPos, p.prevEndPos)
+ /* Skip concatenating an empty trailing segment. */
+ if String.length(txt) > 0 {
+ let txt = if p.mode == ParseForTypeChecker {
+ parseTemplateStringLiteral(txt)
+ } else {
+ txt
+ }
+ let str = Ast_helper.Exp.constant(~loc, Pconst_string(txt, Some(prefix)))
+ Ast_helper.Exp.apply(~loc, hiddenOperator, list{(Nolabel, acc), (Nolabel, str)})
+ } else {
+ acc
+ }
+ | TemplatePart(txt) =>
+ Parser.next(p)
+ let loc = mkLoc(startPos, p.prevEndPos)
+ let expr = parseExprBlock(p)
+ let fullLoc = mkLoc(startPos, p.prevEndPos)
+ Scanner.setTemplateMode(p.scanner)
+ Parser.expect(Rbrace, p)
+ /* Escape sequences are only decoded when parsing for the type checker;
+ * the printer wants the raw text. */
+ let txt = if p.mode == ParseForTypeChecker {
+ parseTemplateStringLiteral(txt)
+ } else {
+ txt
+ }
+ let str = Ast_helper.Exp.constant(~loc, Pconst_string(txt, Some(prefix)))
+ let next = {
+ let a = if String.length(txt) > 0 {
+ Ast_helper.Exp.apply(~loc=fullLoc, hiddenOperator, list{(Nolabel, acc), (Nolabel, str)})
+ } else {
+ acc
+ }
+
+ Ast_helper.Exp.apply(~loc=fullLoc, hiddenOperator, list{(Nolabel, a), (Nolabel, expr)})
+ }
+
+ loop(next, p)
+ | token =>
+ Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs))
+ acc
+ }
+ }
+
+ Scanner.setTemplateMode(p.scanner)
+ Parser.expect(Backtick, p)
+ let startPos = p.Parser.startPos
+ switch p.Parser.token {
+ /* Whole template is a single literal segment with no interpolation. */
+ | TemplateTail(txt) =>
+ let loc = mkLoc(startPos, p.endPos)
+ Parser.next(p)
+ let txt = if p.mode == ParseForTypeChecker {
+ parseTemplateStringLiteral(txt)
+ } else {
+ txt
+ }
+ Ast_helper.Exp.constant(~loc, Pconst_string(txt, Some(prefix)))
+ /* Literal segment followed by ${expr}: seed the loop with the first
+ * concatenation (or just the expression when the segment is empty). */
+ | TemplatePart(txt) =>
+ let constantLoc = mkLoc(startPos, p.endPos)
+ Parser.next(p)
+ let expr = parseExprBlock(p)
+ let fullLoc = mkLoc(startPos, p.prevEndPos)
+ Scanner.setTemplateMode(p.scanner)
+ Parser.expect(Rbrace, p)
+ let txt = if p.mode == ParseForTypeChecker {
+ parseTemplateStringLiteral(txt)
+ } else {
+ txt
+ }
+ let str = Ast_helper.Exp.constant(~loc=constantLoc, Pconst_string(txt, Some(prefix)))
+ let next = if String.length(txt) > 0 {
+ Ast_helper.Exp.apply(~loc=fullLoc, hiddenOperator, list{(Nolabel, str), (Nolabel, expr)})
+ } else {
+ expr
+ }
+
+ loop(next, p)
+ | token =>
+ Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs))
+ Ast_helper.Exp.constant(Pconst_string("", None))
+ }
+ }
+
+ /* Overparse: let f = a : int => a + 1, is it (a : int) => or (a): int =>
+ * Also overparse constraints:
+ * let x = {
+ * let a = 1
+ * a + pi: int
+ * }
+ *
+ * We want to give a nice error message in these cases
+ * */
+ /* Fault-tolerant post-processing of an already-parsed expression: consumes a
+ * trailing coercion (:>), or a trailing type constraint (: typ) — possibly
+ * followed by => — that should have been parenthesized, emitting a helpful
+ * diagnostic while still returning a usable AST node. */
+ and overParseConstrainedOrCoercedOrArrowExpression = (p, expr) =>
+ switch p.Parser.token {
+ | ColonGreaterThan => parseCoercedExpr(~expr, p)
+ | Colon =>
+ Parser.next(p)
+ let typ = parseTypExpr(~es6Arrow=false, p)
+ switch p.Parser.token {
+ /* Ambiguous `expr : typ => body` — was the annotation meant for the
+ * parameter or for the return type? Build both readings and ask. */
+ | EqualGreater =>
+ Parser.next(p)
+ let body = parseExpr(p)
+ /* Best-effort conversion of the overparsed expression into a pattern
+ * so we can print the two candidate arrow functions. */
+ let pat = switch expr.pexp_desc {
+ | Pexp_ident(longident) =>
+ Ast_helper.Pat.var(
+ ~loc=expr.pexp_loc,
+ Location.mkloc(Longident.flatten(longident.txt) |> String.concat("."), longident.loc),
+ )
+ /* TODO: can we convert more expressions to patterns? */
+ | _ => Ast_helper.Pat.var(~loc=expr.pexp_loc, Location.mkloc("pattern", expr.pexp_loc))
+ }
+
+ /* Reading 1: the annotation is the return type. */
+ let arrow1 = Ast_helper.Exp.fun_(
+ ~loc=mkLoc(expr.pexp_loc.loc_start, body.pexp_loc.loc_end),
+ Asttypes.Nolabel,
+ None,
+ pat,
+ Ast_helper.Exp.constraint_(body, typ),
+ )
+
+ /* Reading 2: the annotation is the parameter type. */
+ let arrow2 = Ast_helper.Exp.fun_(
+ ~loc=mkLoc(expr.pexp_loc.loc_start, body.pexp_loc.loc_end),
+ Asttypes.Nolabel,
+ None,
+ Ast_helper.Pat.constraint_(pat, typ),
+ body,
+ )
+
+ let msg =
+ Doc.breakableGroup(
+ ~forceBreak=true,
+ Doc.concat(list{
+ Doc.text("Did you mean to annotate the parameter type or the return type?"),
+ Doc.indent(
+ Doc.concat(list{
+ Doc.line,
+ Doc.text("1) "),
+ Printer.printExpression(arrow1, CommentTable.empty),
+ Doc.line,
+ Doc.text("2) "),
+ Printer.printExpression(arrow2, CommentTable.empty),
+ }),
+ ),
+ }),
+ ) |> Doc.toString(~width=80)
+
+ Parser.err(
+ ~startPos=expr.pexp_loc.loc_start,
+ ~endPos=body.pexp_loc.loc_end,
+ p,
+ Diagnostics.message(msg),
+ )
+ /* Arbitrarily recover with reading 1 so parsing can continue. */
+ arrow1
+ /* Plain `expr : typ` — needed parens; suggest the parenthesized form. */
+ | _ =>
+ open Parsetree
+ let loc = mkLoc(expr.pexp_loc.loc_start, typ.ptyp_loc.loc_end)
+ let expr = Ast_helper.Exp.constraint_(~loc, expr, typ)
+ let () = Parser.err(
+ ~startPos=expr.pexp_loc.loc_start,
+ ~endPos=typ.ptyp_loc.loc_end,
+ p,
+ Diagnostics.message(
+ Doc.breakableGroup(
+ ~forceBreak=true,
+ Doc.concat(list{
+ Doc.text("Expressions with type constraints need to be wrapped in parens:"),
+ Doc.indent(
+ Doc.concat(list{
+ Doc.line,
+ Printer.addParens(Printer.printExpression(expr, CommentTable.empty)),
+ }),
+ ),
+ }),
+ ) |> Doc.toString(~width=80),
+ ),
+ )
+
+ expr
+ }
+ | _ => expr
+ }
+
+ /* Parses one let-binding body: `pattern (: typ)? = expr`.
+ * Handles the locally-abstract-type form `pattern: type t u. typ = expr`
+ * (wrapped via wrapTypeAnnotation) as well as a plain polymorphic type
+ * annotation. Returns the resulting Parsetree value binding. */
+ and parseLetBindingBody = (~startPos, ~attrs, p) => {
+ Parser.beginRegion(p)
+ Parser.leaveBreadcrumb(p, Grammar.LetBinding)
+ let (pat, exp) = {
+ let pat = parsePattern(p)
+ switch p.Parser.token {
+ | Colon =>
+ Parser.next(p)
+ switch p.token {
+ | Typ =>
+ /* locally abstract types */
+ Parser.next(p)
+ let newtypes = parseLidentList(p)
+ Parser.expect(Dot, p)
+ let typ = parseTypExpr(p)
+ Parser.expect(Equal, p)
+ let expr = parseExpr(p)
+ let loc = mkLoc(startPos, p.prevEndPos)
+ let (exp, poly) = wrapTypeAnnotation(~loc, newtypes, typ, expr)
+ let pat = Ast_helper.Pat.constraint_(~loc, pat, poly)
+ (pat, exp)
+ /* Ordinary (possibly polymorphic) type annotation on the pattern. */
+ | _ =>
+ let polyType = parsePolyTypeExpr(p)
+ let loc = {...pat.ppat_loc, loc_end: polyType.Parsetree.ptyp_loc.loc_end}
+ let pat = Ast_helper.Pat.constraint_(~loc, pat, polyType)
+ Parser.expect(Token.Equal, p)
+ let exp = parseExpr(p)
+ let exp = overParseConstrainedOrCoercedOrArrowExpression(p, exp)
+ (pat, exp)
+ }
+ /* No annotation: `pattern = expr`. */
+ | _ =>
+ Parser.expect(Token.Equal, p)
+ let exp = overParseConstrainedOrCoercedOrArrowExpression(p, parseExpr(p))
+ (pat, exp)
+ }
+ }
+
+ let loc = mkLoc(startPos, p.prevEndPos)
+ let vb = Ast_helper.Vb.mk(~loc, ~attrs, pat, exp)
+ Parser.eatBreadcrumb(p)
+ Parser.endRegion(p)
+ vb
+ }
+
+ /* TODO: find a better way? Is it possible?
+ * let a = 1
+ * @attr
+ * and b = 2
+ *
+ * The problem is that without semi we need a lookahead to determine
+ * if the attr is on the letbinding or the start of a new thing
+ *
+ * let a = 1
+ * @attr
+ * let b = 1
+ *
+ * Here @attr should attach to something "new": `let b = 1`
+ * The parser state is forked, which is quite expensive…
+ */
+ /* Speculatively parses attributes before a potential `and` binding.
+ * Returns the attributes only when `and` follows; otherwise every piece of
+ * scanner and parser state saved below is restored, so the attributes are
+ * re-parsed later as part of whatever comes next. Returns list{} when no
+ * attribute is present or the speculation is rolled back. */
+ and parseAttributesAndBinding = (p: Parser.t) => {
+ /* Snapshot of the full mutable scanner + parser state (manual fork). */
+ let err = p.scanner.err
+ let ch = p.scanner.ch
+ let offset = p.scanner.offset
+ let rdOffset = p.scanner.rdOffset
+ let lineOffset = p.scanner.lineOffset
+ let lnum = p.scanner.lnum
+ let mode = p.scanner.mode
+ let token = p.token
+ let startPos = p.startPos
+ let endPos = p.endPos
+ let prevEndPos = p.prevEndPos
+ let breadcrumbs = p.breadcrumbs
+ let errors = p.errors
+ let diagnostics = p.diagnostics
+ let comments = p.comments
+
+ switch p.Parser.token {
+ | At =>
+ let attrs = parseAttributes(p)
+ switch p.Parser.token {
+ /* `@attr and …`: attributes belong to the next binding. */
+ | And => attrs
+ /* Anything else: rewind everything; attributes belong elsewhere. */
+ | _ =>
+ p.scanner.err = err
+ p.scanner.ch = ch
+ p.scanner.offset = offset
+ p.scanner.rdOffset = rdOffset
+ p.scanner.lineOffset = lineOffset
+ p.scanner.lnum = lnum
+ p.scanner.mode = mode
+ p.token = token
+ p.startPos = startPos
+ p.endPos = endPos
+ p.prevEndPos = prevEndPos
+ p.breadcrumbs = breadcrumbs
+ p.errors = errors
+ p.diagnostics = diagnostics
+ p.comments = comments
+ list{}
+ }
+ | _ => list{}
+ }
+ }
+
+ /* definition ::= let [rec] let-binding { and let-binding } */
+ /* Parses a full let definition, returning the rec flag together with all
+ * bindings in source order. `export` after `and` is desugared into a
+ * genType attribute on that binding. */
+ and parseLetBindings = (~attrs, p) => {
+ let startPos = p.Parser.startPos
+ Parser.optional(p, Let) |> ignore
+ let recFlag = if Parser.optional(p, Token.Rec) {
+ Asttypes.Recursive
+ } else {
+ Asttypes.Nonrecursive
+ }
+
+ let first = parseLetBindingBody(~startPos, ~attrs, p)
+
+ /* Accumulate `and …` bindings (in reverse; List.rev at the end). */
+ let rec loop = (p, bindings) => {
+ let startPos = p.Parser.startPos
+ let attrs = parseAttributesAndBinding(p)
+ switch p.Parser.token {
+ | And =>
+ Parser.next(p)
+ let attrs = switch p.token {
+ | Export =>
+ let exportLoc = mkLoc(p.startPos, p.endPos)
+ Parser.next(p)
+ let genTypeAttr = (Location.mkloc("genType", exportLoc), Parsetree.PStr(list{}))
+ list{genTypeAttr, ...attrs}
+ | _ => attrs
+ }
+
+ ignore(Parser.optional(p, Let)) /* overparse for fault tolerance */
+ let letBinding = parseLetBindingBody(~startPos, ~attrs, p)
+ loop(p, list{letBinding, ...bindings})
+ | _ => List.rev(bindings)
+ }
+ }
+
+ (recFlag, loop(p, list{first}))
+ }
+
+ /*
+ * div -> div
+ * Foo -> Foo.createElement
+ * Foo.Bar -> Foo.Bar.createElement
+ */
+ /* Parses the element name of a jsx tag into an identifier expression.
+ * Lowercase names stay as-is; uppercase (module) paths get
+ * `.createElement` appended. On error, recovers with the identifier `_`. */
+ and parseJsxName = p => {
+ let longident = switch p.Parser.token {
+ | Lident(ident) =>
+ let identStart = p.startPos
+ let identEnd = p.endPos
+ Parser.next(p)
+ let loc = mkLoc(identStart, identEnd)
+ Location.mkloc(Longident.Lident(ident), loc)
+ | Uident(_) =>
+ let longident = parseModuleLongIdent(~lowercase=false, p)
+ Location.mkloc(Longident.Ldot(longident.txt, "createElement"), longident.loc)
+ | _ =>
+ /* NOTE(review): the example tags `<div />` / `<Navbar />` appeared to
+ * have been stripped from this message; restored here. */
+ let msg = "A jsx name should start with a lowercase or uppercase identifier, like: div in <div /> or Navbar in <Navbar />"
+
+ Parser.err(p, Diagnostics.message(msg))
+ Location.mknoloc(Longident.Lident("_"))
+ }
+
+ Ast_helper.Exp.ident(~loc=longident.loc, longident)
+ }
+
+ /* Parses a jsx element after the initial `<`: either self-closing
+ * (`<foo a=b />`) or with children (`<foo a=b> bar </foo>`). Builds the
+ * `name(~props..., ~children, ())` application. Diagnostics about a missing
+ * or mismatched closing tag are phrased with the expected `</name>` text. */
+ and parseJsxOpeningOrSelfClosingElement = (~startPos, p) => {
+ let jsxStartPos = p.Parser.startPos
+ let name = parseJsxName(p)
+ let jsxProps = parseJsxProps(p)
+ let children = switch p.Parser.token {
+ | Forwardslash =>
+ /* <foo a=b /> — self-closing */
+ let childrenStartPos = p.Parser.startPos
+ Parser.next(p)
+ let childrenEndPos = p.Parser.startPos
+ Parser.expect(GreaterThan, p)
+ let loc = mkLoc(childrenStartPos, childrenEndPos)
+ makeListExpression(loc, list{}, None) /* no children */
+ | GreaterThan =>
+ /* <foo a=b> bar </foo> — element with children */
+ let childrenStartPos = p.Parser.startPos
+ Scanner.setJsxMode(p.scanner)
+ Parser.next(p)
+ let (spread, children) = parseJsxChildren(p)
+ let childrenEndPos = p.Parser.startPos
+ let () = switch p.token {
+ | LessThanSlash => Parser.next(p)
+ | LessThan =>
+ Parser.next(p)
+ Parser.expect(Forwardslash, p)
+ /* A structure-item start means the closing tag was forgotten entirely;
+ * report that below instead of demanding `</`. */
+ | token when Grammar.isStructureItemStart(token) => ()
+ | _ => Parser.expect(LessThanSlash, p)
+ }
+
+ switch p.Parser.token {
+ | Lident(_) | Uident(_) when verifyJsxOpeningClosingName(p, name) =>
+ Parser.expect(GreaterThan, p)
+ let loc = mkLoc(childrenStartPos, childrenEndPos)
+ switch (spread, children) {
+ | (true, list{child, ..._}) => child
+ | _ => makeListExpression(loc, children, None)
+ }
+ | token =>
+ let () = if Grammar.isStructureItemStart(token) {
+ /* Fixed: the "</" prefix of the closing-tag text had been lost
+ * (was an empty string), producing e.g. "Missing foo>" instead of
+ * "Missing </foo>". */
+ let closing = "</" ++ (string_of_pexp_ident(name) ++ ">")
+ let msg = Diagnostics.message("Missing " ++ closing)
+ Parser.err(~startPos, ~endPos=p.prevEndPos, p, msg)
+ } else {
+ /* Same fix as above for the suggested closing tag. */
+ let opening = "</" ++ (string_of_pexp_ident(name) ++ ">")
+ let msg =
+ "Closing jsx name should be the same as the opening name. Did you mean " ++
+ (opening ++
+ " ?")
+ Parser.err(~startPos, ~endPos=p.prevEndPos, p, Diagnostics.message(msg))
+ Parser.expect(GreaterThan, p)
+ }
+
+ let loc = mkLoc(childrenStartPos, childrenEndPos)
+ switch (spread, children) {
+ | (true, list{child, ..._}) => child
+ | _ => makeListExpression(loc, children, None)
+ }
+ }
+ | token =>
+ Parser.err(p, Diagnostics.unexpected(token, p.breadcrumbs))
+ makeListExpression(Location.none, list{}, None)
+ }
+
+ let jsxEndPos = p.prevEndPos
+ let loc = mkLoc(jsxStartPos, jsxEndPos)
+ /* Desugar to `name(~props…, ~children, ())` — the trailing unit makes the
+ * application total despite the labelled arguments. */
+ Ast_helper.Exp.apply(
+ ~loc,
+ name,
+ List.concat(list{
+ jsxProps,
+ list{
+ (Asttypes.Labelled("children"), children),
+ (
+ Asttypes.Nolabel,
+ Ast_helper.Exp.construct(Location.mknoloc(Longident.Lident("()")), None),
+ ),
+ },
+ }),
+ )
+ }
+
+ /*
+ * jsx ::=
+ * | <> jsx-children </>
+ * | <element-name {jsx-props} />
+ * | <element-name {jsx-props}> jsx-children </element-name>
+ *
+ * jsx-children ::= primary-expr* => 0 or more
+ */
+ /* Entry point for jsx after spotting `<`; tags the resulting expression
+ * with the JSX attribute (jsxAttr). NOTE(review): the grammar comment above
+ * was garbled by tag-stripping in this rendering and has been reconstructed
+ * from the three cases handled below — confirm against upstream. */
+ and parseJsx = p => {
+ Parser.leaveBreadcrumb(p, Grammar.Jsx)
+ let startPos = p.Parser.startPos
+ Parser.expect(LessThan, p)
+ let jsxExpr = switch p.Parser.token {
+ | Lident(_) | Uident(_) => parseJsxOpeningOrSelfClosingElement(~startPos, p)
+ | GreaterThan =>
+ /* fragment: <> foo </> */
+ parseJsxFragment(p)
+ | _ => parseJsxName(p)
+ }
+
+ {...jsxExpr, pexp_attributes: list{jsxAttr}}
+ }
+
+ /*
+ * jsx-fragment ::=
+ * | <> </>
+ * | <> jsx-children </>
+ */
+ /* Parses a jsx fragment body; the leading `<` was already consumed by
+ * parseJsx. Children are collected into a list expression. */
+ and parseJsxFragment = p => {
+ let childrenStartPos = p.Parser.startPos
+ Scanner.setJsxMode(p.scanner)
+ Parser.expect(GreaterThan, p)
+ let (_spread, children) = parseJsxChildren(p)
+ let childrenEndPos = p.Parser.startPos
+ Parser.expect(LessThanSlash, p)
+ Parser.expect(GreaterThan, p)
+ let loc = mkLoc(childrenStartPos, childrenEndPos)
+ makeListExpression(loc, children, None)
+ }
+
+ /*
+ * jsx-prop ::=
+ * | lident
+ * | ?lident
+ * | lident = jsx_expr
+ * | lident = ?jsx_expr
+ */
+ /* Parses one jsx prop, returning Some((label, expr)) or None when the
+ * current token cannot start a prop. Punned props (`name` / `?name`)
+ * become an identifier expression of the same name; `?` makes the label
+ * Optional. Each prop carries a ns.namedArgLoc attribute recording the
+ * label's location. */
+ and parseJsxProp = p => {
+ Parser.leaveBreadcrumb(p, Grammar.JsxAttribute)
+ switch p.Parser.token {
+ | Question | Lident(_) =>
+ let optional = Parser.optional(p, Question)
+ let (name, loc) = parseLident(p)
+ let propLocAttr = (Location.mkloc("ns.namedArgLoc", loc), Parsetree.PStr(list{}))
+ /* optional punning: ?name */
+ if optional {
+ Some(
+ Asttypes.Optional(name),
+ Ast_helper.Exp.ident(
+ ~attrs=list{propLocAttr},
+ ~loc,
+ Location.mkloc(Longident.Lident(name), loc),
+ ),
+ )
+ } else {
+ switch p.Parser.token {
+ | Equal =>
+ Parser.next(p)
+ /* no punning */
+ let optional = Parser.optional(p, Question)
+ let attrExpr = {
+ let e = parsePrimaryExpr(~operand=parseAtomicExpr(p), p)
+ {...e, pexp_attributes: list{propLocAttr, ...e.pexp_attributes}}
+ }
+
+ let label = if optional {
+ Asttypes.Optional(name)
+ } else {
+ Asttypes.Labelled(name)
+ }
+
+ Some(label, attrExpr)
+ /* Punned prop without `=`: value is an identifier of the same name. */
+ | _ =>
+ let attrExpr = Ast_helper.Exp.ident(
+ ~loc,
+ ~attrs=list{propLocAttr},
+ Location.mknoloc(Longident.Lident(name)),
+ )
+ let label = if optional {
+ Asttypes.Optional(name)
+ } else {
+ Asttypes.Labelled(name)
+ }
+
+ Some(label, attrExpr)
+ }
+ }
+ | _ => None
+ }
+ }
+
+ /* Parses zero or more jsx props by repeatedly applying parseJsxProp. */
+ and parseJsxProps = p => parseRegion(~grammar=Grammar.JsxAttribute, ~f=parseJsxProp, p)
+
+ and parseJsxChildren = p => {
+ let rec loop = (p, children) =>
+ switch p.Parser.token {
+ | Token.Eof | LessThanSlash =>
+ Scanner.popMode(p.scanner, Jsx)
+ List.rev(children)
+ | LessThan =>
+ /* Imagine: